Merge "aaudio: add missing symbols and tests for capture privacy API"
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 1c44e65..713f0b7 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -22,6 +22,12 @@
namespace.default.search.paths = /apex/com.android.media.swcodec/${LIB}
namespace.default.asan.search.paths = /apex/com.android.media.swcodec/${LIB}
+# The lines below are required to be able to access libs in APEXes which are
+# actually symlinks to the files under /system/lib. The symlinks exist for
+# bundled APEXes to reduce space.
+namespace.default.permitted.paths = /system/${LIB}
+namespace.default.asan.permitted.paths = /system/${LIB}
+
namespace.default.links = platform
# TODO: replace the following when apex has a way to auto-generate this list
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index cdb7ac3..b183ccc 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -83,8 +83,9 @@
* @param operatingMode The kind of session to create; either NORMAL_MODE or
* CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
* @param sessionParams Session wide camera parameters
+ * @return a list of stream ids that can be used in offline mode via "switchToOffline"
*/
- void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
+ int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
/**
* Check whether a particular session configuration has camera device
@@ -189,5 +190,5 @@
* @return Offline session object.
*/
ICameraOfflineSession switchToOffline(in ICameraDeviceCallbacks callbacks,
- in Surface[] offlineOutputs);
+ in int[] offlineOutputIds);
}
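Editor's note: the two signature changes above work together, as endConfigure() now reports which of the configured streams can later be moved to offline mode, and switchToOffline() consumes those stream ids instead of Surface objects. The sketch below mirrors the updated call sites in this change (ACameraDevice.cpp, CameraBinderTests.cpp); the helper name configureAndGoOffline is made up, the header path is assumed from the generated AIDL interfaces, and the exact generated C++ form of switchToOffline (an out-parameter for the returned session) is an assumption.

#include <android/hardware/camera2/ICameraDeviceUser.h>

// Hypothetical client-side helper (sketch only, assumes `using namespace android;`):
// finish configuration, then move the streams reported as offline-capable into an
// offline session.
binder::Status configureAndGoOffline(
        const sp<hardware::camera2::ICameraDeviceUser>& device,
        const sp<hardware::camera2::ICameraDeviceCallbacks>& callbacks,
        const CameraMetadata& sessionParams,
        sp<hardware::camera2::ICameraOfflineSession>* offlineSession /*out*/) {
    std::vector<int> offlineStreamIds;
    binder::Status res = device->endConfigure(/*operatingMode*/ 0,  // 0 == NORMAL_MODE
                                              sessionParams, &offlineStreamIds);
    if (!res.isOk() || offlineStreamIds.empty()) {
        return res;  // nothing can be taken offline
    }
    // Only ids returned by endConfigure() are valid inputs here (out-param form assumed).
    return device->switchToOffline(callbacks, offlineStreamIds, offlineSession);
}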
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index d8220eb..56f209c 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -57,6 +57,9 @@
"libmediandk",
"libnativewindow",
],
+ header_libs: [
+ "jni_headers",
+ ],
cflags: [
"-fvisibility=hidden",
"-DEXPORT=__attribute__ ((visibility (\"default\")))",
@@ -121,7 +124,6 @@
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
],
-
static_libs: [
"android.hardware.camera.common@1.0-helper",
"libarect",
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 9a39ed8..99691ed 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -26,6 +26,68 @@
using namespace android;
+#ifndef __ANDROID_VNDK__
+namespace {
+
+constexpr const char* android_hardware_camera2_CameraMetadata_jniClassName =
+ "android/hardware/camera2/CameraMetadata";
+constexpr const char* android_hardware_camera2_CameraCharacteristics_jniClassName =
+ "android/hardware/camera2/CameraCharacteristics";
+constexpr const char* android_hardware_camera2_CaptureResult_jniClassName =
+ "android/hardware/camera2/CaptureResult";
+
+jclass android_hardware_camera2_CameraCharacteristics_clazz = nullptr;
+jclass android_hardware_camera2_CaptureResult_clazz = nullptr;
+jmethodID android_hardware_camera2_CameraMetadata_getNativeMetadataPtr = nullptr;
+
+// Initializes the global variables used by JNI. The initialization runs at most once.
+bool InitJni(JNIEnv* env) {
+ // From C++11 onward, initialization of a local static variable is guaranteed to run
+ // only once, even if this code is reached from multiple threads concurrently.
+ static bool ok = [env]() -> bool {
+ const jclass cameraMetadataClazz = env->FindClass(
+ android_hardware_camera2_CameraMetadata_jniClassName);
+ if (cameraMetadataClazz == nullptr) {
+ return false;
+ }
+ android_hardware_camera2_CameraMetadata_getNativeMetadataPtr =
+ env->GetMethodID(cameraMetadataClazz, "getNativeMetadataPtr", "()J");
+ if (android_hardware_camera2_CameraMetadata_getNativeMetadataPtr == nullptr) {
+ return false;
+ }
+
+ android_hardware_camera2_CameraCharacteristics_clazz = env->FindClass(
+ android_hardware_camera2_CameraCharacteristics_jniClassName);
+ if (android_hardware_camera2_CameraCharacteristics_clazz == nullptr) {
+ return false;
+ }
+
+ android_hardware_camera2_CaptureResult_clazz = env->FindClass(
+ android_hardware_camera2_CaptureResult_jniClassName);
+ if (android_hardware_camera2_CaptureResult_clazz == nullptr) {
+ return false;
+ }
+
+ return true;
+ }();
+ return ok;
+}
+
+// Given cameraMetadata, an instance of android.hardware.camera2.CameraMetadata, invokes
+// cameraMetadata.getNativeMetadataPtr() and returns it as a CameraMetadata*.
+CameraMetadata* CameraMetadata_getNativeMetadataPtr(JNIEnv* env, jobject cameraMetadata) {
+ if (cameraMetadata == nullptr) {
+ ALOGE("%s: Invalid Java CameraMetadata object.", __FUNCTION__);
+ return nullptr;
+ }
+ jlong ret = env->CallLongMethod(cameraMetadata,
+ android_hardware_camera2_CameraMetadata_getNativeMetadataPtr);
+ return reinterpret_cast<CameraMetadata *>(ret);
+}
+
+} // namespace
+#endif /* __ANDROID_VNDK__ */
+
EXPORT
camera_status_t ACameraMetadata_getConstEntry(
const ACameraMetadata* acm, uint32_t tag, ACameraMetadata_const_entry* entry) {
@@ -58,7 +120,7 @@
return nullptr;
}
ACameraMetadata* copy = new ACameraMetadata(*src);
- copy->incStrong((void*) ACameraMetadata_copy);
+ copy->incStrong(/*id=*/(void*) ACameraMetadata_copy);
return copy;
}
@@ -86,3 +148,34 @@
return staticMetadata->isLogicalMultiCamera(numPhysicalCameras, physicalCameraIds);
}
+
+#ifndef __ANDROID_VNDK__
+EXPORT
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata) {
+ ATRACE_CALL();
+
+ const bool ok = InitJni(env);
+ LOG_ALWAYS_FATAL_IF(!ok, "Failed to find CameraMetadata Java classes.");
+
+ if (cameraMetadata == nullptr) {
+ return nullptr;
+ }
+
+ ACameraMetadata::ACAMERA_METADATA_TYPE type;
+ if (env->IsInstanceOf(cameraMetadata,
+ android_hardware_camera2_CameraCharacteristics_clazz)) {
+ type = ACameraMetadata::ACM_CHARACTERISTICS;
+ } else if (env->IsInstanceOf(cameraMetadata,
+ android_hardware_camera2_CaptureResult_clazz)) {
+ type = ACameraMetadata::ACM_RESULT;
+ } else {
+ return nullptr;
+ }
+
+ CameraMetadata* src = CameraMetadata_getNativeMetadataPtr(env,
+ cameraMetadata);
+ ACameraMetadata* output = new ACameraMetadata(src, type);
+ output->incStrong(/*id=*/(void*) ACameraMetadata_fromCameraMetadata);
+ return output;
+}
+#endif /* __ANDROID_VNDK__ */
\ No newline at end of file
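Editor's note: the InitJni() helper added above relies on a common C++ call-once pattern, a function-local static initialized by an immediately invoked lambda. The standalone sketch below shows the idiom in isolation; the names are made up and, unlike the real InitJni(), it has no JNIEnv dependency.

#include <cstdio>

static bool expensiveSetup() {
    std::puts("setup runs once");
    return true;
}

// Initialization of a local static is thread-safe and runs only once (C++11
// "magic statics"); concurrent first callers wait for, then share, one result.
bool ensureInitialized() {
    static const bool ok = []() -> bool {
        return expensiveSetup();  // executed only on the first call
    }();
    return ok;                    // later calls just return the cached value
}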
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 46a8dae..0d7180a 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -710,7 +710,8 @@
if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
params.append(sessionParameters->settings->getInternalData());
}
- remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
+ std::vector<int> offlineStreamIds;
+ remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params, &offlineStreamIds);
if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
remoteRet.toString8().string());
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index e15e1a0..0ff78ab 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -28,7 +28,37 @@
* ACameraMetadata Implementation
*/
ACameraMetadata::ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type) :
- mData(buffer), mType(type) {
+ mData(new CameraMetadata(buffer)),
+ mOwnsData(true),
+ mType(type) {
+ init();
+}
+
+ACameraMetadata::ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type) :
+ mData(cameraMetadata),
+ mOwnsData(false),
+ mType(type) {
+ init();
+}
+
+ACameraMetadata::ACameraMetadata(const ACameraMetadata& other) :
+ mOwnsData(other.mOwnsData),
+ mType(other.mType) {
+ if (other.mOwnsData) {
+ mData = new CameraMetadata(*(other.mData));
+ } else {
+ mData = other.mData;
+ }
+}
+
+ACameraMetadata::~ACameraMetadata() {
+ if (mOwnsData) {
+ delete mData;
+ }
+}
+
+void
+ACameraMetadata::init() {
if (mType == ACM_CHARACTERISTICS) {
filterUnsupportedFeatures();
filterStreamConfigurations();
@@ -61,7 +91,7 @@
void
ACameraMetadata::filterUnsupportedFeatures() {
// Hide unsupported capabilities (reprocessing)
- camera_metadata_entry entry = mData.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ camera_metadata_entry entry = mData->find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
if (entry.count == 0 || entry.type != TYPE_BYTE) {
ALOGE("%s: malformed available capability key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -80,7 +110,7 @@
}
}
}
- mData.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
+ mData->update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
}
void
@@ -118,7 +148,7 @@
const int STREAM_WIDTH_OFFSET = 1;
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_DURATION_OFFSET = 3;
- camera_metadata_entry entry = mData.find(tag);
+ camera_metadata_entry entry = mData->find(tag);
if (entry.count == 0 || entry.count % 4 || entry.type != TYPE_INT64) {
ALOGE("%s: malformed duration key %d! count %zu, type %d",
__FUNCTION__, tag, entry.count, entry.type);
@@ -194,7 +224,7 @@
}
}
- mData.update(tag, filteredDurations);
+ mData->update(tag, filteredDurations);
}
void
@@ -204,7 +234,7 @@
const int STREAM_WIDTH_OFFSET = 1;
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_IS_INPUT_OFFSET = 3;
- camera_metadata_entry entry = mData.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ camera_metadata_entry entry = mData->find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
ALOGE("%s: malformed available stream configuration key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -234,10 +264,10 @@
}
if (filteredStreamConfigs.size() > 0) {
- mData.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
+ mData->update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, filteredStreamConfigs);
}
- entry = mData.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
if (entry.count > 0 && (entry.count % 4 || entry.type != TYPE_INT32)) {
ALOGE("%s: malformed available depth stream configuration key! count %zu, type %d",
__FUNCTION__, entry.count, entry.type);
@@ -270,11 +300,11 @@
}
if (filteredDepthStreamConfigs.size() > 0) {
- mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+ mData->update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
filteredDepthStreamConfigs);
}
- entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
Vector<int32_t> filteredHeicStreamConfigs;
filteredHeicStreamConfigs.setCapacity(entry.count);
@@ -297,9 +327,9 @@
filteredHeicStreamConfigs.push_back(height);
filteredHeicStreamConfigs.push_back(isInput);
}
- mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
+ mData->update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
- entry = mData.find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ entry = mData->find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
Vector<int32_t> filteredDynamicDepthStreamConfigs;
filteredDynamicDepthStreamConfigs.setCapacity(entry.count);
@@ -322,7 +352,7 @@
filteredDynamicDepthStreamConfigs.push_back(height);
filteredDynamicDepthStreamConfigs.push_back(isInput);
}
- mData.update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
+ mData->update(ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
filteredDynamicDepthStreamConfigs);
}
@@ -343,7 +373,7 @@
Mutex::Autolock _l(mLock);
- camera_metadata_ro_entry rawEntry = mData.find(tag);
+ camera_metadata_ro_entry rawEntry = static_cast<const CameraMetadata*>(mData)->find(tag);
if (rawEntry.count == 0) {
ALOGE("%s: cannot find metadata tag %d", __FUNCTION__, tag);
return ACAMERA_ERROR_METADATA_NOT_FOUND;
@@ -390,9 +420,9 @@
/*out*/const uint32_t** tags) const {
Mutex::Autolock _l(mLock);
if (mTags.size() == 0) {
- size_t entry_count = mData.entryCount();
+ size_t entry_count = mData->entryCount();
mTags.setCapacity(entry_count);
- const camera_metadata_t* rawMetadata = mData.getAndLock();
+ const camera_metadata_t* rawMetadata = mData->getAndLock();
for (size_t i = 0; i < entry_count; i++) {
camera_metadata_ro_entry_t entry;
int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
@@ -405,7 +435,7 @@
mTags.push_back(entry.tag);
}
}
- mData.unlock(rawMetadata);
+ mData->unlock(rawMetadata);
}
*numTags = mTags.size();
@@ -415,7 +445,7 @@
const CameraMetadata&
ACameraMetadata::getInternalData() const {
- return mData;
+ return (*mData);
}
bool
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 97f7f48..a57b2ff 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -36,8 +36,8 @@
using namespace android;
/**
- * ACameraMetadata opaque struct definition
- * Leave outside of android namespace because it's NDK struct
+ * ACameraMetadata is an opaque struct definition.
+ * It is intentionally left outside of the android namespace because it is an NDK struct.
*/
struct ACameraMetadata : public RefBase {
public:
@@ -47,11 +47,20 @@
ACM_RESULT, // Read only
} ACAMERA_METADATA_TYPE;
- // Takes ownership of pass-in buffer
- ACameraMetadata(camera_metadata_t *buffer, ACAMERA_METADATA_TYPE type);
- // Clone
- ACameraMetadata(const ACameraMetadata& other) :
- mData(other.mData), mType(other.mType) {};
+ // Constructs an ACameraMetadata that takes ownership of `buffer`.
+ ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYPE type);
+
+ // Constructs an ACameraMetadata that is a view of `cameraMetadata`.
+ // `cameraMetadata` will not be deleted by ~ACameraMetadata().
+ ACameraMetadata(CameraMetadata* cameraMetadata, ACAMERA_METADATA_TYPE type);
+
+ // Copy constructor.
+ //
+ // If `other` owns its CameraMetadata, then makes a deep copy.
+ // Otherwise, the new instance is also a view of the same data.
+ ACameraMetadata(const ACameraMetadata& other);
+
+ ~ACameraMetadata();
camera_status_t getConstEntry(uint32_t tag, ACameraMetadata_const_entry* entry) const;
@@ -70,6 +79,9 @@
private:
+ // Common code called by constructors.
+ void init();
+
// This function does not check whether the capability passed to it is valid.
// The caller must make sure that it is.
bool isNdkSupportedCapability(const int32_t capability);
@@ -95,11 +107,11 @@
status_t ret = OK;
if (count == 0 && data == nullptr) {
- ret = mData.erase(tag);
+ ret = mData->erase(tag);
} else {
// Here we have to use reinterpret_cast because the NDK data type is
// exact copy of internal data type but they do not inherit from each other
- ret = mData.update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
+ ret = mData->update(tag, reinterpret_cast<const INTERNAL_T*>(data), count);
}
if (ret == OK) {
@@ -110,10 +122,14 @@
}
}
- // guard access of public APIs: get/update/getTags
- mutable Mutex mLock;
- CameraMetadata mData;
- mutable Vector<uint32_t> mTags; // updated in getTags, cleared by update
+ // Guard access of public APIs: get/update/getTags.
+ mutable Mutex mLock;
+
+ CameraMetadata* mData;
+ // If true, this instance owns mData. Otherwise, mData is a view of an external instance.
+ bool mOwnsData;
+
+ mutable Vector<uint32_t> mTags; // Updated by `getTags()`, cleared by `update()`.
const ACAMERA_METADATA_TYPE mType;
static std::unordered_set<uint32_t> sSystemTags;
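Editor's note: to make the ownership rules above concrete, here is a small sketch that is not part of the patch (ownershipSketch and its parameters are illustrative), showing how the owning constructor, the view constructor, and the copy constructor are intended to behave.

// Assumes `using namespace android;`, as in the headers above.
void ownershipSketch(camera_metadata_t* rawBuffer, CameraMetadata* externallyOwned) {
    // Owning: wraps rawBuffer in a new CameraMetadata and deletes it in ~ACameraMetadata().
    sp<ACameraMetadata> owned =
            new ACameraMetadata(rawBuffer, ACameraMetadata::ACM_CHARACTERISTICS);

    // View: aliases externallyOwned; the destructor must not delete it, because the
    // external (e.g. Java-side) CameraMetadata still owns the native instance.
    sp<ACameraMetadata> view =
            new ACameraMetadata(externallyOwned, ACameraMetadata::ACM_RESULT);

    // Copies follow the source: copying an owner makes a deep copy of the data, while
    // copying a view produces another view of the same external CameraMetadata.
    sp<ACameraMetadata> ownedCopy = new ACameraMetadata(*owned);
    sp<ACameraMetadata> viewCopy = new ACameraMetadata(*view);
}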
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 9bbfb83..ee75610 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -39,6 +39,12 @@
#include <stdint.h>
#include <sys/cdefs.h>
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+#include "jni.h"
+#endif /* __ANDROID_API__ >= 30 */
+#endif /* __ANDROID_VNDK__ */
+
#include "NdkCameraError.h"
#include "NdkCameraMetadataTags.h"
@@ -255,8 +261,40 @@
#endif /* __ANDROID_API__ >= 29 */
+#ifndef __ANDROID_VNDK__
+#if __ANDROID_API__ >= 30
+
+/**
+ * Return a {@link ACameraMetadata} that references the same data as
+ * {@link cameraMetadata}, which is an instance of
+ * {@link android.hardware.camera2.CameraMetadata} (e.g., a
+ * {@link android.hardware.camera2.CameraCharacteristics} or
+ * {@link android.hardware.camera2.CaptureResult}).
+ *
+ * <p>The returned ACameraMetadata must be freed by the application with
+ * {@link ACameraMetadata_free} after the application is done using it.</p>
+ *
+ * <p>This function does not affect the lifetime of {@link cameraMetadata}. Attempting to use the
+ * returned ACameraMetadata object after {@link cameraMetadata} has been garbage collected is
+ * unsafe. To manage the lifetime beyond the current JNI function call, use
+ * {@code env->NewGlobalRef()} and {@code env->DeleteGlobalRef()}.</p>
+ *
+ * @param env the JNI environment.
+ * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * returned {@link ACameraMetadata} is a view.
+ *
+ * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
+ * instance of {@link android.hardware.camera2.CameraMetadata}.
+ *
+ */
+ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
+ __INTRODUCED_IN(30);
+
+#endif /* __ANDROID_API__ >= 30 */
+#endif /* __ANDROID_VNDK__ */
+
__END_DECLS
#endif /* _NDK_CAMERA_METADATA_H */
-/** @} */
+/** @} */
\ No newline at end of file
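Editor's note: for reference, here is a hedged usage sketch of the new entry point. The JNI function and Java class names are made up; only ACameraMetadata_fromCameraMetadata, ACameraMetadata_getConstEntry, ACameraMetadata_free and the ACAMERA_LENS_FACING tag come from the existing NDK API.

#include <camera/NdkCameraMetadata.h>
#include <jni.h>

// Hypothetical native method: convert a CameraCharacteristics passed in from Java
// and read a single entry from it.
extern "C" JNIEXPORT jint JNICALL
Java_com_example_CameraJni_getLensFacing(JNIEnv* env, jclass /*clazz*/, jobject chars) {
    ACameraMetadata* metadata = ACameraMetadata_fromCameraMetadata(env, chars);
    if (metadata == nullptr) {
        return -1;  // `chars` was null or not a CameraCharacteristics/CaptureResult
    }
    ACameraMetadata_const_entry entry = {};
    camera_status_t status =
            ACameraMetadata_getConstEntry(metadata, ACAMERA_LENS_FACING, &entry);
    jint facing = (status == ACAMERA_OK && entry.count > 0) ? entry.data.u8[0] : -1;
    // The wrapper must be freed; freeing it does not affect the Java object.
    ACameraMetadata_free(metadata);
    return facing;
}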
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 49783db..3ac3ded 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2509,6 +2509,11 @@
* <p><code>p' = Rp</code></p>
* <p>where <code>p</code> is in the device sensor coordinate system, and
* <code>p'</code> is in the camera-oriented coordinate system.</p>
+ * <p>If ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, the quaternion rotation cannot
+ * be accurately represented by the camera device, and will be represented by
+ * default values matching its default facing.</p>
+ *
+ * @see ACAMERA_LENS_POSE_REFERENCE
*/
ACAMERA_LENS_POSE_ROTATION = // float[4]
ACAMERA_LENS_START + 6,
@@ -2549,6 +2554,8 @@
* <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
* the center of the primary gyroscope on the device. The axis definitions are the same as
* with PRIMARY_CAMERA.</p>
+ * <p>When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
+ * represented by the camera device, and will be represented as <code>(0, 0, 0)</code>.</p>
*
* @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2691,8 +2698,10 @@
ACAMERA_LENS_RADIAL_DISTORTION = // Deprecated! DO NOT USE
ACAMERA_LENS_START + 11,
/**
- * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+ * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION, and the accuracy of
+ * ACAMERA_LENS_POSE_TRANSLATION and ACAMERA_LENS_POSE_ROTATION.</p>
*
+ * @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
*
* <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
@@ -7539,6 +7548,19 @@
*/
ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE = 1,
+ /**
+ * <p>The camera device cannot represent the values of ACAMERA_LENS_POSE_TRANSLATION
+ * and ACAMERA_LENS_POSE_ROTATION accurately enough. One such example is a camera device
+ * on the cover of a foldable phone: in order to measure the pose translation and rotation,
+ * some kind of hinge position sensor would be needed.</p>
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION must be all zeros, and
+ * ACAMERA_LENS_POSE_ROTATION must be values matching its default facing.</p>
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_UNDEFINED = 2,
+
} acamera_metadata_enum_android_lens_pose_reference_t;
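Editor's note: given the new UNDEFINED value, applications should check the pose reference before trusting lens pose data. A minimal sketch follows (getLensPose is an illustrative helper; the tags and NDK calls are existing API).

#include <camera/NdkCameraMetadata.h>

// Returns true and fills the outputs only when the pose is meaningful for this camera.
bool getLensPose(const ACameraMetadata* chars, float outTranslation[3], float outRotation[4]) {
    ACameraMetadata_const_entry ref = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_REFERENCE, &ref) != ACAMERA_OK ||
            ref.count == 0 || ref.data.u8[0] == ACAMERA_LENS_POSE_REFERENCE_UNDEFINED) {
        return false;  // pose cannot be accurately represented (or is not reported)
    }
    ACameraMetadata_const_entry t = {}, r = {};
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_TRANSLATION, &t) != ACAMERA_OK ||
            ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_ROTATION, &r) != ACAMERA_OK ||
            t.count < 3 || r.count < 4) {
        return false;
    }
    for (int i = 0; i < 3; i++) outTranslation[i] = t.data.f[i];
    for (int i = 0; i < 4; i++) outRotation[i] = r.data.f[i];
    return true;
}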
@@ -8380,12 +8402,16 @@
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
/**
- * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
- * but can not be compared to timestamps from other subsystems
- * (e.g. accelerometer, gyro etc.), or other instances of the same or different
- * camera devices in the same system. Timestamps between streams and results for
- * a single camera instance are comparable, and the timestamps for all buffers
- * and the result metadata generated by a single capture are identical.</p>
+ * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic, but can
+ * not be compared to timestamps from other subsystems (e.g. accelerometer, gyro etc.),
+ * or other instances of the same or different camera devices in the same system with
+ * accuracy. However, the timestamps are roughly in the same timebase as
+ * <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a>. The accuracy is sufficient for tasks
+ * like A/V synchronization for video recording, at least, and the timestamps can be
+ * directly used together with timestamps from the audio subsystem for that task.</p>
+ * <p>Timestamps between streams and results for a single camera instance are comparable,
+ * and the timestamps for all buffers and the result metadata generated by a single
+ * capture are identical.</p>
*
* @see ACAMERA_SENSOR_TIMESTAMP
*/
@@ -8395,6 +8421,14 @@
* <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
* <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,
* and they can be compared to other timestamps using that base.</p>
+ * <p>When buffers from a REALTIME device are passed directly to a video encoder from the
+ * camera, automatic compensation is done to account for differing timebases of the
+ * audio and camera subsystems. If the application is receiving buffers and then later
+ * sending them to a video encoder or other application where they are compared with
+ * audio subsystem timestamps or similar, this compensation is not present. In those
+ * cases, applications need to adjust the timestamps themselves. Since <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a> and <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a> only diverge while the device is asleep, an
+ * offset between the two sources can be measured once per active session and applied
+ * to timestamps for sufficient accuracy for A/V sync.</p>
*
* @see ACAMERA_SENSOR_TIMESTAMP
*/
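Editor's note: the REALTIME documentation added above suggests measuring a one-time offset between the boottime and uptime clocks and applying it to camera timestamps before comparing them with audio timestamps. A minimal sketch of that compensation follows; systemTime() and the SYSTEM_TIME_* clock ids are existing libutils helpers, while the two function names are made up.

#include <utils/Timers.h>

// elapsedRealtimeNanos (boottime) minus uptime (monotonic); the clocks only diverge
// while the device sleeps, so sampling once per active session is sufficient.
static int64_t queryClockOffsetNs() {
    return systemTime(SYSTEM_TIME_BOOTTIME) - systemTime(SYSTEM_TIME_MONOTONIC);
}

// Shift a REALTIME camera timestamp into the uptime timebase used by the audio path.
static int64_t cameraToUptimeNs(int64_t cameraTimestampNs, int64_t offsetNs) {
    return cameraTimestampNs - offsetNs;
}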
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index b6f1553..2b630db 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -31,6 +31,7 @@
ACameraMetadata_getAllTags;
ACameraMetadata_getConstEntry;
ACameraMetadata_isLogicalMultiCamera; # introduced=29
+ ACameraMetadata_fromCameraMetadata; # introduced=30
ACameraOutputTarget_create;
ACameraOutputTarget_free;
ACaptureRequest_addTarget;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 9a18b10..cd5bdd1 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -498,7 +498,9 @@
EXPECT_TRUE(res.isOk()) << res;
EXPECT_LE(0, streamId);
CameraMetadata sessionParams;
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+ std::vector<int> offlineStreamIds;
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+ &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(callbacks->hadError());
@@ -609,7 +611,8 @@
EXPECT_TRUE(res.isOk()) << res;
res = device->deleteStream(streamId);
EXPECT_TRUE(res.isOk()) << res;
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
+ &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index b534f8a..c66dea2 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -84,14 +84,13 @@
using android::sp;
using android::status_t;
-using android::DISPLAY_ORIENTATION_0;
-using android::DISPLAY_ORIENTATION_180;
-using android::DISPLAY_ORIENTATION_90;
using android::INVALID_OPERATION;
using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;
+namespace ui = android::ui;
+
static const uint32_t kMinBitRate = 100000; // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000; // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes
@@ -328,7 +327,7 @@
}
t.setDisplayProjection(dpy,
- gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
+ gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
layerStackRect, displayRect);
return NO_ERROR;
}
@@ -414,7 +413,7 @@
*/
static status_t runEncoder(const sp<MediaCodec>& encoder,
AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
- const sp<IBinder>& virtualDpy, uint8_t orientation) {
+ const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
@@ -484,7 +483,7 @@
if (err != NO_ERROR) {
ALOGW("getDisplayInfo(main) failed: %d", err);
} else if (orientation != displayInfo.orientation) {
- ALOGD("orientation changed, now %d", displayInfo.orientation);
+ ALOGD("orientation changed, now %s", toCString(displayInfo.orientation));
SurfaceComposerClient::Transaction t;
setDisplayProjection(t, virtualDpy, displayInfo);
t.apply();
@@ -691,9 +690,9 @@
}
if (gVerbose) {
- printf("Display is %dx%d @%.2ffps (orientation=%u), layerStack=%u\n",
+ printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
displayInfo.viewportW, displayInfo.viewportH, displayInfo.fps,
- displayInfo.orientation, displayInfo.layerStack);
+ toCString(displayInfo.orientation), displayInfo.layerStack);
fflush(stdout);
}
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index defc94f..7b447d3 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -23,6 +23,7 @@
LOCAL_MODULE_TAGS := optional
+LOCAL_SYSTEM_EXT_MODULE:= true
LOCAL_MODULE:= stagefright
include $(BUILD_EXECUTABLE)
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 6b1b475..255640f 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -165,4 +165,8 @@
"libmediadrmmetrics_full",
"libutils",
],
+
+ header_libs: [
+ "libstagefright_foundation_headers",
+ ],
}
diff --git a/include/media/audiohal b/include/media/audiohal
deleted file mode 120000
index f400582..0000000
--- a/include/media/audiohal
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libaudiohal/include/media/audiohal/
\ No newline at end of file
diff --git a/include/media/nbaio/AudioStreamOutSink.h b/include/media/nbaio/AudioStreamOutSink.h
deleted file mode 120000
index 43bfac5..0000000
--- a/include/media/nbaio/AudioStreamOutSink.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
\ No newline at end of file
diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h
deleted file mode 120000
index ff6a151..0000000
--- a/include/media/nbaio/NBAIO.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include_mono/media/nbaio/NBAIO.h
\ No newline at end of file
diff --git a/include/media/nbaio/SingleStateQueue.h b/include/media/nbaio/SingleStateQueue.h
deleted file mode 120000
index d3e0553..0000000
--- a/include/media/nbaio/SingleStateQueue.h
+++ /dev/null
@@ -1 +0,0 @@
-../../../media/libnbaio/include_mono/media/nbaio/SingleStateQueue.h
\ No newline at end of file
diff --git a/include/media/stagefright b/include/media/stagefright
deleted file mode 120000
index 5393f68..0000000
--- a/include/media/stagefright
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libstagefright/include/media/stagefright/
\ No newline at end of file
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
index fcf7cb1..79b77a0 100644
--- a/media/audioserver/Android.mk
+++ b/media/audioserver/Android.mk
@@ -22,6 +22,9 @@
libutils \
libvibrator
+LOCAL_HEADER_LIBRARIES := \
+ libaudiohal_headers \
+
# TODO oboeservice is the old folder name for aaudioservice. It will be changed.
LOCAL_C_INCLUDES := \
frameworks/av/services/audioflinger \
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index bb910ad..56813c4 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -501,7 +501,7 @@
status_t C2SoftAvcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN64(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -909,7 +909,7 @@
if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
}
if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
mWidth = s_decode_op.u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 389ea61..6db4387 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -497,7 +497,7 @@
status_t C2SoftHevcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN64(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -904,7 +904,7 @@
if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
}
if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
mWidth = s_decode_op.u4_pic_wd;
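Editor's note: both decoders now request a stride aligned to 128 pixels instead of 64. The ALIGN128() macro itself is defined in the decoder headers; the standalone helper below is only a sketch of the rounding such a macro performs, included to make the change concrete.

#include <cstdint>

// Round `width` up to the next multiple of 128 (equivalent to the usual
// ((x + 127) & ~127) style alignment expression).
constexpr uint32_t align128(uint32_t width) {
    return (width + 127u) & ~127u;
}

static_assert(align128(1) == 128, "rounds up");
static_assert(align128(1920) == 1920, "already a multiple of 128");
static_assert(align128(1921) == 2048, "rounds to the next multiple of 128");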
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
new file mode 100644
index 0000000..bb42580
--- /dev/null
+++ b/media/extractors/Android.bp
@@ -0,0 +1,46 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_defaults {
+ name: "extractor-defaults",
+
+ include_dirs: [
+ "frameworks/av/media/libstagefright/include",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libmediandk#29",
+ ],
+
+ relative_install_path: "extractors",
+
+ compile_multilib: "first",
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ "-fvisibility=hidden",
+ ],
+
+ version_script: "exports.lds",
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
\ No newline at end of file
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 53b394f..60d3ae1 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -1,40 +1,13 @@
cc_library {
+ name: "libaacextractor",
+ defaults: ["extractor-defaults"],
srcs: ["AACExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
"libstagefright_metadatautils",
"libutils",
],
- name: "libaacextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index cd76062..49c9567 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -1,38 +1,11 @@
cc_library {
+ name: "libamrextractor",
+ defaults: ["extractor-defaults"],
srcs: ["AMRExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
],
- name: "libamrextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index c669b34..3675611 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -1,16 +1,15 @@
cc_library {
+ name: "libflacextractor",
+ defaults: ["extractor-defaults"],
srcs: ["FLACExtractor.cpp"],
include_dirs: [
- "frameworks/av/media/libstagefright/include",
"external/flac/include",
],
shared_libs: [
"libbinder_ndk",
- "liblog",
- "libmediandk",
],
static_libs: [
@@ -21,24 +20,4 @@
"libutils",
],
- name: "libflacextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index 40c91e7..592ffa9 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -1,43 +1,18 @@
cc_library {
+ name: "libmidiextractor",
+ defaults: ["extractor-defaults"],
srcs: ["MidiExtractor.cpp"],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
header_libs: [
"libmedia_headers",
],
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libmedia_midiiowrapper",
"libsonivox",
"libstagefright_foundation"
],
- name: "libmidiextractor",
- relative_install_path: "extractors",
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
}
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 942c88a..7ad8cc1 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -1,4 +1,6 @@
cc_library {
+ name: "libmkvextractor",
+ defaults: ["extractor-defaults"],
srcs: ["MatroskaExtractor.cpp"],
@@ -6,12 +8,9 @@
"external/flac/include",
"external/libvpx/libwebm",
"frameworks/av/media/libstagefright/flac/dec",
- "frameworks/av/media/libstagefright/include",
],
shared_libs: [
- "liblog",
- "libmediandk",
"libstagefright_flacdec",
],
@@ -22,24 +21,4 @@
"libutils",
],
- name: "libmkvextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 6f02b0f..102ac81 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -1,44 +1,16 @@
cc_library {
-
+ name: "libmp3extractor",
+ defaults: ["extractor-defaults"],
srcs: [
"MP3Extractor.cpp",
"VBRISeeker.cpp",
"XINGSeeker.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libutils",
"libstagefright_id3",
"libstagefright_foundation",
],
- name: "libmp3extractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index d9f11fc..e48e1b7 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -1,5 +1,6 @@
-cc_defaults {
- name: "libmp4extractor_defaults",
+cc_library {
+ name: "libmp4extractor",
+ defaults: ["extractor-defaults"],
srcs: [
"AC4Parser.cpp",
@@ -9,50 +10,10 @@
"SampleTable.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/",
- ],
-
- shared_libs: [
- "liblog",
- "libmediandk"
- ],
-
static_libs: [
"libstagefright_esds",
"libstagefright_foundation",
"libstagefright_id3",
"libutils",
],
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
- relative_install_path: "extractors",
- compile_multilib: "first",
-}
-
-cc_library {
-
-
- name: "libmp4extractor",
- defaults: ["libmp4extractor_defaults"],
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
-}
-
-cc_library_static {
- name: "libmp4extractor_fuzzing",
-
- defaults: ["libmp4extractor_defaults"],
}
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 9e0bc96..c8079eb 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -31,8 +31,9 @@
#include "MPEG4Extractor.h"
#include "SampleTable.h"
#include "ItemTable.h"
-#include "include/ESDS.h"
+#include <ESDS.h>
+#include <ID3.h>
#include <media/stagefright/DataSourceBase.h>
#include <media/ExtractorUtils.h>
#include <media/stagefright/foundation/ABitReader.h>
@@ -52,7 +53,6 @@
#include <utils/String8.h>
#include <byteswap.h>
-#include "include/ID3.h"
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 8d9145d..ef8431e 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,4 +1,7 @@
cc_library {
+ name: "libmpeg2extractor",
+
+ defaults: ["extractor-defaults"],
srcs: [
"ExtractorBundle.cpp",
@@ -6,15 +9,8 @@
"MPEG2TSExtractor.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright",
- "frameworks/av/media/libstagefright/include",
- ],
-
shared_libs: [
"libcgrouprc#29",
- "liblog#10000",
- "libmediandk#29",
"libvndksupport#29",
],
@@ -46,27 +42,6 @@
"libutils",
],
- name: "libmpeg2extractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- // STOPSHIP: turn on cfi once b/139945549 is resolved.
- cfi: false,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
apex_available: [
"com.android.media",
"test_com.android.media",
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
index 002a855..d431b05 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -20,8 +20,8 @@
#include "MPEG2PSExtractor.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ESQueue.h"
+#include <AnotherPacketSource.h>
+#include <ESQueue.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index f4643c5..9e093eb 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -37,8 +37,7 @@
#include <media/stagefright/Utils.h>
#include <utils/String8.h>
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/ATSParser.h"
+#include <AnotherPacketSource.h>
#include <hidl/HybridInterface.h>
#include <android/hardware/cas/1.0/ICas.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.h b/media/extractors/mpeg2/MPEG2TSExtractor.h
index dcd1e7b..fd77b08 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.h
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.h
@@ -27,7 +27,7 @@
#include <utils/KeyedVector.h>
#include <utils/Vector.h>
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
namespace android {
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index e661b5d..7aed683 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -1,9 +1,11 @@
cc_library {
+ name: "liboggextractor",
+
+ defaults: ["extractor-defaults"],
srcs: ["OggExtractor.cpp"],
include_dirs: [
- "frameworks/av/media/libstagefright/include",
"external/tremolo",
],
@@ -11,11 +13,6 @@
"libaudio_system_headers",
],
- shared_libs: [
- "liblog",
- "libmediandk",
- ],
-
static_libs: [
"libstagefright_foundation",
"libstagefright_metadatautils",
@@ -23,24 +20,4 @@
"libvorbisidec",
],
- name: "liboggextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
new file mode 100644
index 0000000..059c308
--- /dev/null
+++ b/media/extractors/tests/Android.bp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "ExtractorUnitTest",
+ gtest: true,
+
+ srcs: ["ExtractorUnitTest.cpp"],
+
+ static_libs: [
+ "libaacextractor",
+ "libamrextractor",
+ "libmp3extractor",
+ "libwavextractor",
+ "liboggextractor",
+ "libflacextractor",
+ "libmidiextractor",
+ "libmkvextractor",
+ "libmpeg2extractor",
+ "libmp4extractor",
+ "libaudioutils",
+ "libdatasource",
+
+ "libstagefright",
+ "libstagefright_id3",
+ "libstagefright_flacdec",
+ "libstagefright_esds",
+ "libstagefright_mpeg2support",
+ "libstagefright_mpeg2extractor",
+ "libstagefright_foundation",
+ "libstagefright_metadatautils",
+
+ "libmedia_midiiowrapper",
+ "libsonivox",
+ "libvorbisidec",
+ "libwebm",
+ "libFLAC",
+ ],
+
+ shared_libs: [
+ "android.hardware.cas@1.0",
+ "android.hardware.cas.native@1.0",
+ "android.hidl.token@1.0-utils",
+ "android.hidl.allocator@1.0",
+ "libbinder",
+ "libbinder_ndk",
+ "libutils",
+ "liblog",
+ "libcutils",
+ "libmediandk",
+ "libmedia",
+ "libcrypto",
+ "libhidlmemory",
+ "libhidlbase",
+ ],
+
+ include_dirs: [
+ "frameworks/av/media/extractors/",
+ "frameworks/av/media/libstagefright/",
+ ],
+
+ compile_multilib: "first",
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ ldflags: [
+ "-Wl",
+ "-Bsymbolic",
+ // to ignore duplicate symbol: GETEXTRACTORDEF
+ "-z muldefs",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
new file mode 100644
index 0000000..518166e
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExtractorUnitTest"
+#include <utils/Log.h>
+
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataUtils.h>
+
+#include "aac/AACExtractor.h"
+#include "amr/AMRExtractor.h"
+#include "flac/FLACExtractor.h"
+#include "midi/MidiExtractor.h"
+#include "mkv/MatroskaExtractor.h"
+#include "mp3/MP3Extractor.h"
+#include "mp4/MPEG4Extractor.h"
+#include "mp4/SampleTable.h"
+#include "mpeg2/MPEG2PSExtractor.h"
+#include "mpeg2/MPEG2TSExtractor.h"
+#include "ogg/OggExtractor.h"
+#include "wav/WAVExtractor.h"
+
+#include "ExtractorUnitTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_DUMP_FILE "/data/local/tmp/extractorOutput"
+
+constexpr int32_t kMaxCount = 10;
+constexpr int32_t kOpusSeekPreRollUs = 80000; // 80 ms;
+
+static ExtractorUnitTestEnvironment *gEnv = nullptr;
+
+class ExtractorUnitTest : public ::testing::TestWithParam<pair<string, string>> {
+ public:
+ ExtractorUnitTest() : mInputFp(nullptr), mDataSource(nullptr), mExtractor(nullptr) {}
+
+ ~ExtractorUnitTest() {
+ if (mInputFp) {
+ fclose(mInputFp);
+ mInputFp = nullptr;
+ }
+ if (mDataSource) {
+ mDataSource.clear();
+ mDataSource = nullptr;
+ }
+ if (mExtractor) {
+ delete mExtractor;
+ mExtractor = nullptr;
+ }
+ }
+
+ virtual void SetUp() override {
+ mExtractorName = unknown_comp;
+ mDisableTest = false;
+
+ static const std::map<std::string, standardExtractors> mapExtractor = {
+ {"aac", AAC}, {"amr", AMR}, {"mp3", MP3}, {"ogg", OGG},
+ {"wav", WAV}, {"mkv", MKV}, {"flac", FLAC}, {"midi", MIDI},
+ {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}};
+ // Find the component type
+ string writerFormat = GetParam().first;
+ if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
+ mExtractorName = mapExtractor.at(writerFormat);
+ }
+ if (mExtractorName == standardExtractors::unknown_comp) {
+ cout << "[ WARN ] Test Skipped. Invalid extractor\n";
+ mDisableTest = true;
+ }
+ }
+
+ int32_t setDataSource(string inputFileName);
+
+ int32_t createExtractor();
+
+ enum standardExtractors {
+ AAC,
+ AMR,
+ FLAC,
+ MIDI,
+ MKV,
+ MP3,
+ MPEG4,
+ MPEG2PS,
+ MPEG2TS,
+ OGG,
+ WAV,
+ unknown_comp,
+ };
+
+ bool mDisableTest;
+ standardExtractors mExtractorName;
+
+ FILE *mInputFp;
+ sp<DataSource> mDataSource;
+ MediaExtractorPluginHelper *mExtractor;
+};
+
+int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
+ mInputFp = fopen(inputFileName.c_str(), "rb");
+ if (!mInputFp) {
+ ALOGE("Unable to open input file for reading");
+ return -1;
+ }
+ struct stat buf;
+ stat(inputFileName.c_str(), &buf);
+ int32_t fd = fileno(mInputFp);
+ mDataSource = new FileSource(dup(fd), 0, buf.st_size);
+ if (!mDataSource) return -1;
+ return 0;
+}
+
+int32_t ExtractorUnitTest::createExtractor() {
+ switch (mExtractorName) {
+ case AAC:
+ mExtractor = new AACExtractor(new DataSourceHelper(mDataSource->wrap()), 0);
+ break;
+ case AMR:
+ mExtractor = new AMRExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MP3:
+ mExtractor = new MP3Extractor(new DataSourceHelper(mDataSource->wrap()), nullptr);
+ break;
+ case OGG:
+ mExtractor = new OggExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case WAV:
+ mExtractor = new WAVExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MKV:
+ mExtractor = new MatroskaExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case FLAC:
+ mExtractor = new FLACExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG4:
+ mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG2TS:
+ mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MPEG2PS:
+ mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+ break;
+ case MIDI:
+ mExtractor = new MidiExtractor(mDataSource->wrap());
+ break;
+ default:
+ return -1;
+ }
+ if (!mExtractor) return -1;
+ return 0;
+}
+
+void getSeekablePoints(vector<int64_t> &seekablePoints, MediaTrackHelper *track) {
+ int32_t status = 0;
+ if (!seekablePoints.empty()) {
+ seekablePoints.clear();
+ }
+ int64_t timeStamp;
+ while (status != AMEDIA_ERROR_END_OF_STREAM) {
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ if (buffer) {
+ AMediaFormat *metaData = buffer->meta_data();
+ int32_t isSync = 0;
+ AMediaFormat_getInt32(metaData, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, &isSync);
+ if (isSync) {
+ AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+ seekablePoints.push_back(timeStamp);
+ }
+ buffer->release();
+ }
+ }
+}
+
+TEST_P(ExtractorUnitTest, CreateExtractorTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Checks if a valid extractor is created for a given input file");
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ ASSERT_EQ(setDataSource(inputFileName), 0)
+ << "SetDataSource failed for" << GetParam().first << "extractor";
+
+ ASSERT_EQ(createExtractor(), 0)
+ << "Extractor creation failed for" << GetParam().first << "extractor";
+
+ // A valid extractor instance should return success for the following calls
+ ASSERT_GT(mExtractor->countTracks(), 0);
+
+ AMediaFormat *format = AMediaFormat_new();
+ ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+ ASSERT_EQ(mExtractor->getMetaData(format), AMEDIA_OK);
+ AMediaFormat_delete(format);
+}
+
+TEST_P(ExtractorUnitTest, ExtractorTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+ FILE *outFp = fopen((OUTPUT_DUMP_FILE + to_string(idx)).c_str(), "wb");
+ if (!outFp) {
+ ALOGW("Unable to open output file for dumping extracted stream");
+ }
+
+ while (status != AMEDIA_ERROR_END_OF_STREAM) {
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ ALOGV("track->read Status = %d buffer %p", status, buffer);
+ if (buffer) {
+ ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+ buffer->data(), buffer->size(), buffer->range_length());
+ if (outFp) fwrite(buffer->data(), 1, buffer->range_length(), outFp);
+ buffer->release();
+ }
+ }
+ if (outFp) fclose(outFp);
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+}
+
+TEST_P(ExtractorUnitTest, MetaDataComparisonTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Validates Extractor's meta data for a given input file");
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ AMediaFormat *extractorFormat = AMediaFormat_new();
+ ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+ AMediaFormat *trackFormat = AMediaFormat_new();
+ ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+ status = mExtractor->getTrackMetaData(extractorFormat, idx, 1);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+ status = track->getFormat(trackFormat);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+ const char *extractorMime, *trackMime;
+ AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &extractorMime);
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &trackMime);
+ ASSERT_TRUE(!strcmp(extractorMime, trackMime))
+ << "Extractor's format doesn't match track format";
+
+ if (!strncmp(extractorMime, "audio/", 6)) {
+ int32_t exSampleRate, exChannelCount;
+ int32_t trackSampleRate, trackChannelCount;
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+ &exChannelCount));
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+ &exSampleRate));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+ &trackChannelCount));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE,
+ &trackSampleRate));
+ ASSERT_EQ(exChannelCount, trackChannelCount) << "ChannelCount not as expected";
+ ASSERT_EQ(exSampleRate, trackSampleRate) << "SampleRate not as expected";
+ } else {
+ int32_t exWidth, exHeight;
+ int32_t trackWidth, trackHeight;
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_WIDTH, &exWidth));
+ ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_HEIGHT, &exHeight));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_WIDTH, &trackWidth));
+ ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
+ ASSERT_EQ(exWidth, trackWidth) << "Width not as expected";
+ ASSERT_EQ(exHeight, trackHeight) << "Height not as expected";
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ AMediaFormat_delete(trackFormat);
+ AMediaFormat_delete(extractorFormat);
+}
+
+TEST_P(ExtractorUnitTest, MultipleStartStopTest) {
+ if (mDisableTest) return;
+
+ ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ // start/stop the tracks multiple times
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer);
+ if (buffer) {
+ ALOGV("buffer->data %p buffer->size() %zu buffer->range_length() %zu",
+ buffer->data(), buffer->size(), buffer->range_length());
+ buffer->release();
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ }
+}
+
+TEST_P(ExtractorUnitTest, SeekTest) {
+    // Both the FLAC and WAV extractors can return samples from any pts and mark the given
+    // sample as a sync frame, so this seek test is not applicable to FLAC and WAV extractors.
+ if (mDisableTest || mExtractorName == FLAC || mExtractorName == WAV) return;
+
+ ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
+ string inputFileName = gEnv->getRes() + GetParam().second;
+
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+ uint32_t seekFlag = mExtractor->flags();
+ if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
+ cout << "[ WARN ] Test Skipped. " << GetParam().first
+ << " Extractor doesn't support seek\n";
+ return;
+ }
+
+ vector<int64_t> seekablePoints;
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ MediaTrackHelper *track = mExtractor->getTrack(idx);
+ ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+ CMediaTrack *cTrack = wrap(track);
+ ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+ // Get all the seekable points of a given input
+ MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+ status = cTrack->start(track, bufferGroup->wrap());
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+ getSeekablePoints(seekablePoints, track);
+ ASSERT_GT(seekablePoints.size(), 0)
+ << "Failed to get seekable points for " << GetParam().first << " extractor";
+
+ AMediaFormat *trackFormat = AMediaFormat_new();
+ ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+ status = track->getFormat(trackFormat);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+ bool isOpus = false;
+ const char *mime;
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+ if (!strcmp(mime, "audio/opus")) isOpus = true;
+ AMediaFormat_delete(trackFormat);
+
+ int32_t seekIdx = 0;
+ size_t seekablePointsSize = seekablePoints.size();
+ for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+ mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+ for (int32_t seekCount = 0; seekCount < kMaxCount; seekCount++) {
+ seekIdx = rand() % seekablePointsSize + 1;
+ if (seekIdx >= seekablePointsSize) seekIdx = seekablePointsSize - 1;
+
+ int64_t seekToTimeStamp = seekablePoints[seekIdx];
+ if (seekablePointsSize > 1) {
+ int64_t prevTimeStamp = seekablePoints[seekIdx - 1];
+ seekToTimeStamp = seekToTimeStamp - ((seekToTimeStamp - prevTimeStamp) >> 3);
+ }
+
+ // Opus has a seekPreRollUs. TimeStamp returned by the
+ // extractor is calculated based on (seekPts - seekPreRollUs).
+ // So we add the preRoll value to the timeStamp we want to seek to.
+ if (isOpus) {
+ seekToTimeStamp += kOpusSeekPreRollUs;
+ }
+
+ MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+ mode | CMediaTrackReadOptions::SEEK, seekToTimeStamp);
+ ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+ MediaBufferHelper *buffer = nullptr;
+ status = track->read(&buffer, options);
+ if (status == AMEDIA_ERROR_END_OF_STREAM) {
+ delete options;
+ continue;
+ }
+ if (buffer) {
+ AMediaFormat *metaData = buffer->meta_data();
+ int64_t timeStamp;
+ AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+ buffer->release();
+
+ // CMediaTrackReadOptions::SEEK is 8. Using mask 0111b to get true modes
+ switch (mode & 0x7) {
+ case CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC:
+ if (seekablePointsSize == 1) {
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+ } else {
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx - 1]);
+ }
+ break;
+ case CMediaTrackReadOptions::SEEK_NEXT_SYNC:
+ case CMediaTrackReadOptions::SEEK_CLOSEST_SYNC:
+ case CMediaTrackReadOptions::SEEK_CLOSEST:
+ EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
+ break;
+ default:
+ break;
+ }
+ }
+ delete options;
+ }
+ }
+ status = cTrack->stop(track);
+ ASSERT_EQ(OK, status) << "Failed to stop the track";
+ delete bufferGroup;
+ delete track;
+ }
+ seekablePoints.clear();
+}
+
+// TODO: (b/145332185)
+// Add MIDI inputs
+INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorUnitTest,
+ ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
+ make_pair("amr", "testamr.amr"),
+ make_pair("amr", "amrwb.wav"),
+ make_pair("ogg", "john_cage.ogg"),
+ make_pair("wav", "monotestgsm.wav"),
+ make_pair("mpeg2ts", "segment000001.ts"),
+ make_pair("flac", "sinesweepflac.flac"),
+ make_pair("ogg", "testopus.opus"),
+ make_pair("mkv", "sinesweepvorbis.mkv"),
+ make_pair("mpeg4", "sinesweepoggmp4.mp4"),
+ make_pair("mp3", "sinesweepmp3lame.mp3"),
+ make_pair("mkv", "swirl_144x136_vp9.webm"),
+ make_pair("mkv", "swirl_144x136_vp8.webm"),
+ make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
+ make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+
+int main(int argc, char **argv) {
+ gEnv = new ExtractorUnitTestEnvironment();
+ ::testing::AddGlobalTestEnvironment(gEnv);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = gEnv->initFromOptions(argc, argv);
+ if (status == 0) {
+ status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ }
+ return status;
+}
diff --git a/media/extractors/tests/ExtractorUnitTestEnvironment.h b/media/extractors/tests/ExtractorUnitTestEnvironment.h
new file mode 100644
index 0000000..fce8fc2
--- /dev/null
+++ b/media/extractors/tests/ExtractorUnitTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+#define __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ExtractorUnitTestEnvironment : public ::testing::Environment {
+ public:
+ ExtractorUnitTestEnvironment() : res("/data/local/tmp/") {}
+
+ // Parses the command line arguments
+ int initFromOptions(int argc, char **argv);
+
+ void setRes(const char *_res) { res = _res; }
+
+ const string getRes() const { return res; }
+
+ private:
+ string res;
+};
+
+int ExtractorUnitTestEnvironment::initFromOptions(int argc, char **argv) {
+ static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+ while (true) {
+ int index = 0;
+ int c = getopt_long(argc, argv, "P:", options, &index);
+ if (c == -1) {
+ break;
+ }
+
+ switch (c) {
+ case 'P':
+ setRes(optarg);
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (optind < argc) {
+ fprintf(stderr,
+ "unrecognized option: %s\n\n"
+ "usage: %s <gtest options> <test options>\n\n"
+ "test options are:\n\n"
+ "-P, --path: Resource files directory location\n",
+ argv[optind ?: 1], argv[0]);
+ return 2;
+ }
+ return 0;
+}
+
+#endif // __EXTRACTOR_UNIT_TEST_ENVIRONMENT_H__
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
new file mode 100644
index 0000000..6e02d3e
--- /dev/null
+++ b/media/extractors/tests/README.md
@@ -0,0 +1,34 @@
+## Media Testing ##
+---
+#### Extractor :
+The Extractor Test Suite validates the extractors available on the device.
+
+Run the following steps to build the test suite:
+```
+m ExtractorUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push the binary from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push the binary from nativetest.
+```
+adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://drive.google.com/drive/folders/1Z9nCIRB6pGLvb5mPkF8BURa5Nc6cY9pY). Push these files to the device for testing.
+Download the extractor folder and push all the files in it to /data/local/tmp/ on the device.
+```
+adb push extractor /data/local/tmp/
+```
+
+usage: ExtractorUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ExtractorUnitTest -P /data/local/tmp/extractor/
+```
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 51e3c31..8ce5c3f 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -1,4 +1,7 @@
cc_library {
+ name: "libwavextractor",
+
+ defaults: ["extractor-defaults"],
srcs: ["WAVExtractor.cpp"],
@@ -8,8 +11,6 @@
shared_libs: [
"libbinder_ndk",
- "liblog",
- "libmediandk",
],
static_libs: [
@@ -17,25 +18,4 @@
"libfifo",
"libstagefright_foundation",
],
-
- name: "libwavextractor",
- relative_install_path: "extractors",
-
- compile_multilib: "first",
-
- cflags: [
- "-Werror",
- "-Wall",
- "-fvisibility=hidden",
- ],
- version_script: "exports.lds",
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-
}
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 5bebd61..f814cba 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -227,6 +227,8 @@
};
typedef int32_t aaudio_performance_mode_t;
+#define AAUDIO_SYSTEM_USAGE_OFFSET 1000
+
/**
* The USAGE attribute expresses "why" you are playing a sound, what is this sound used for.
* This information is used by certain platforms or routing policies
@@ -297,7 +299,31 @@
/**
* Use this for audio responses to user queries, audio instructions or help utterances.
*/
- AAUDIO_USAGE_ASSISTANT = 16
+ AAUDIO_USAGE_ASSISTANT = 16,
+
+ /**
+ * Use this in case of playing sounds in an emergency.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_EMERGENCY = AAUDIO_SYSTEM_USAGE_OFFSET,
+
+ /**
+ * Use this for safety sounds and alerts, for example backup camera obstacle detection.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_SAFETY = AAUDIO_SYSTEM_USAGE_OFFSET + 1,
+
+ /**
+ * Use this for vehicle status alerts and information, for example the check engine light.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS = AAUDIO_SYSTEM_USAGE_OFFSET + 2,
+
+ /**
+ * Use this for traffic announcements, etc.
+ * Privileged MODIFY_AUDIO_ROUTING permission required.
+ */
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT = AAUDIO_SYSTEM_USAGE_OFFSET + 3,
};
typedef int32_t aaudio_usage_t;
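The new system usages are requested through the regular AAudio stream builder; only the usage constant differs from an ordinary stream. A minimal sketch, not part of the change itself, assuming the caller holds the privileged MODIFY_AUDIO_ROUTING permission noted in the doc comments above:
```
// Hedged sketch: open an output stream with one of the new system usages.
// Assumes a privileged caller; a normal app is expected to be rejected.
#include <aaudio/AAudio.h>
#include <stdio.h>

aaudio_result_t openSafetyStream(AAudioStream **outStream) {
    AAudioStreamBuilder *builder = nullptr;
    aaudio_result_t result = AAudio_createStreamBuilder(&builder);
    if (result != AAUDIO_OK) return result;

    AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_OUTPUT);
    // Numerically AAUDIO_SYSTEM_USAGE_OFFSET + 1, as defined above.
    AAudioStreamBuilder_setUsage(builder, AAUDIO_SYSTEM_USAGE_SAFETY);

    result = AAudioStreamBuilder_openStream(builder, outStream);
    if (result != AAUDIO_OK) {
        fprintf(stderr, "openStream failed: %s\n", AAudio_convertResultToText(result));
    }
    AAudioStreamBuilder_delete(builder);
    return result;
}
```
Keeping the system usages at or above AAUDIO_SYSTEM_USAGE_OFFSET leaves the public usage range untouched while still mapping one-to-one onto the AUDIO_USAGE_* values asserted in AAudioUtilities.cpp below.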
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index baada22..8753aa9 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -44,14 +44,6 @@
sanitize: {
integer_overflow: true,
misc_undefined: ["bounds"],
- diag: {
- integer_overflow: true,
- misc_undefined: ["bounds"],
- no_recover: [
- "bounds",
- "integer",
- ],
- },
},
stubs: {
@@ -137,14 +129,5 @@
sanitize: {
integer_overflow: true,
misc_undefined: ["bounds"],
- diag: {
- integer_overflow: true,
- misc_undefined: ["bounds"],
- no_recover: [
- "bounds",
- "integer",
- ],
- },
},
-
}
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 58058f5..5f45261 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -133,6 +133,10 @@
case AAUDIO_USAGE_ASSISTANCE_SONIFICATION:
case AAUDIO_USAGE_GAME:
case AAUDIO_USAGE_ASSISTANT:
+ case AAUDIO_SYSTEM_USAGE_EMERGENCY:
+ case AAUDIO_SYSTEM_USAGE_SAFETY:
+ case AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS:
+ case AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT:
break; // valid
default:
ALOGD("usage not valid = %d", mUsage);
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index ef89697..9007b10 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -183,6 +183,10 @@
STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_SONIFICATION == AUDIO_USAGE_ASSISTANCE_SONIFICATION);
STATIC_ASSERT(AAUDIO_USAGE_GAME == AUDIO_USAGE_GAME);
STATIC_ASSERT(AAUDIO_USAGE_ASSISTANT == AUDIO_USAGE_ASSISTANT);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_EMERGENCY == AUDIO_USAGE_EMERGENCY);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_SAFETY == AUDIO_USAGE_SAFETY);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS == AUDIO_USAGE_VEHICLE_STATUS);
+ STATIC_ASSERT(AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT == AUDIO_USAGE_ANNOUNCEMENT);
if (usage == AAUDIO_UNSPECIFIED) {
usage = AAUDIO_USAGE_MEDIA;
}
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index 3bab25c..d540866 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -131,7 +131,11 @@
AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
AAUDIO_USAGE_GAME,
- AAUDIO_USAGE_ASSISTANT
+ AAUDIO_USAGE_ASSISTANT,
+ AAUDIO_SYSTEM_USAGE_EMERGENCY,
+ AAUDIO_SYSTEM_USAGE_SAFETY,
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT
};
static const aaudio_content_type_t sContentypes[] = {
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index 4b065c9..935d88b 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -441,7 +441,7 @@
bufferCapacity, bufferCapacity % framesPerBurst);
actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 0);
- EXPECT_GT(actualSize, 0);
+ EXPECT_GE(actualSize, 0); // 0 is legal in R
EXPECT_LE(actualSize, bufferCapacity);
actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, 2 * framesPerBurst);
@@ -469,7 +469,7 @@
EXPECT_LE(actualSize, bufferCapacity);
actualSize = AAudioStream_setBufferSizeInFrames(aaudioStream, INT32_MIN);
- EXPECT_GT(actualSize, 0);
+ EXPECT_GE(actualSize, 0); // 0 is legal in R
EXPECT_LE(actualSize, bufferCapacity);
AAudioStream_close(aaudioStream);
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index d1812e6..3638d2c 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -90,7 +90,12 @@
],
export_shared_lib_headers: ["libbinder"],
- local_include_dirs: ["include/media", "aidl"],
+ include_dirs: [
+ "frameworks/av/media/libnbaio/include_mono/",
+ ],
+ local_include_dirs: [
+ "include/media", "aidl"
+ ],
header_libs: [
"libaudioclient_headers",
"libbase_headers",
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 7d5b690..a1b141b 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -138,6 +138,7 @@
mIEffectClient = new EffectClient(this);
mClientPid = IPCThreadState::self()->getCallingPid();
+ mClientUid = IPCThreadState::self()->getCallingUid();
iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
@@ -179,7 +180,7 @@
mStatus, mEnabled, mClientPid);
if (!audio_is_global_session(mSessionId)) {
- AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
}
return mStatus;
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index a438c77..981cfee 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -186,7 +186,7 @@
IPCThreadState::self()->flushCommands();
ALOGV("%s(%d): releasing session id %d",
__func__, mPortId, mSessionId);
- AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/);
+ AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
}
@@ -361,7 +361,7 @@
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId, -1);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
mSequence = 1;
mObservedSequence = mSequence;
mInOverrun = false;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 941cf54..6914d5a 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -36,10 +36,11 @@
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockErrorCallbacks;
Mutex AudioSystem::gLockAPS;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
record_config_callback AudioSystem::gRecordConfigCallback = NULL;
@@ -63,9 +64,7 @@
if (gAudioFlingerClient == NULL) {
gAudioFlingerClient = new AudioFlingerClient();
} else {
- if (gAudioErrorCallback) {
- gAudioErrorCallback(NO_ERROR);
- }
+ reportError(NO_ERROR);
}
binder->linkToDeath(gAudioFlingerClient);
gAudioFlinger = interface_cast<IAudioFlinger>(binder);
@@ -434,11 +433,11 @@
return af->newAudioUniqueId(use);
}
-void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid)
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->acquireAudioSessionId(audioSession, pid);
+ af->acquireAudioSessionId(audioSession, pid, uid);
}
}
@@ -500,19 +499,16 @@
void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
{
- audio_error_callback cb = NULL;
{
Mutex::Autolock _l(AudioSystem::gLock);
AudioSystem::gAudioFlinger.clear();
- cb = gAudioErrorCallback;
}
// clear output handles and stream to output map caches
clearIoCache();
- if (cb) {
- cb(DEAD_OBJECT);
- }
+ reportError(DEAD_OBJECT);
+
ALOGW("AudioFlinger server died!");
}
@@ -717,10 +713,23 @@
return NO_ERROR;
}
-/* static */ void AudioSystem::setErrorCallback(audio_error_callback cb)
+/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb)
{
- Mutex::Autolock _l(gLock);
- gAudioErrorCallback = cb;
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ gAudioErrorCallbacks.insert(cb);
+ return reinterpret_cast<uintptr_t>(cb);
+}
+
+/* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
+}
+
+/* static */ void AudioSystem::reportError(status_t err) {
+ Mutex::Autolock _l(gLockErrorCallbacks);
+ for (auto callback : gAudioErrorCallbacks) {
+ callback(err);
+ }
}
/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb)
@@ -1017,6 +1026,16 @@
return aps->getDevicesForStream(stream);
}
+status_t AudioSystem::getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices) {
+ if (devices == nullptr) {
+ return BAD_VALUE;
+ }
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->getDevicesForAttributes(aa, devices);
+}
+
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
@@ -1123,6 +1142,12 @@
}
}
+status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == nullptr) return PERMISSION_DENIED;
+ return aps->setSupportedSystemUsages(systemUsages);
+}
+
status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == nullptr) return PERMISSION_DENIED;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3151feb..8cfee50 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -599,7 +599,7 @@
mReleased = 0;
mStartNs = 0;
mStartFromZeroUs = 0;
- AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
mSequence = 1;
mObservedSequence = mSequence;
mInUnderrun = false;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index cbc98bd..513da2b 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -571,12 +571,13 @@
return id;
}
- virtual void acquireAudioSessionId(audio_session_t audioSession, int pid)
+ void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
- data.writeInt32(pid);
+ data.writeInt32((int32_t)pid);
+ data.writeInt32((int32_t)uid);
remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
}
@@ -1326,8 +1327,9 @@
case ACQUIRE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_session_t audioSession = (audio_session_t) data.readInt32();
- int pid = data.readInt32();
- acquireAudioSessionId(audioSession, pid);
+ const pid_t pid = (pid_t)data.readInt32();
+ const uid_t uid = (uid_t)data.readInt32();
+ acquireAudioSessionId(audioSession, pid, uid);
return NO_ERROR;
} break;
case RELEASE_AUDIO_SESSION_ID: {
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index b9e6e33..ce8b33c 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -102,6 +102,7 @@
GET_STRATEGY_FOR_ATTRIBUTES,
LIST_AUDIO_VOLUME_GROUPS,
GET_VOLUME_GROUP_FOR_ATTRIBUTES,
+ SET_SUPPORTED_SYSTEM_USAGES,
SET_ALLOWED_CAPTURE_POLICY,
MOVE_EFFECTS_TO_IO,
SET_RTT_ENABLED,
@@ -109,6 +110,7 @@
SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+ GET_DEVICES_FOR_ATTRIBUTES,
};
#define MAX_ITEMS_PER_LIST 1024
@@ -331,6 +333,7 @@
ALOGE("getInputForAttr NULL portId - shouldn't happen");
return BAD_VALUE;
}
+
data.write(attr, sizeof(audio_attributes_t));
data.writeInt32(*input);
data.writeInt32(riid);
@@ -621,6 +624,20 @@
return status;
}
+ status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(systemUsages.size());
+ for (auto systemUsage : systemUsages) {
+ data.writeInt32(systemUsage);
+ }
+ status_t status = remote()->transact(SET_SUPPORTED_SYSTEM_USAGES, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast <status_t> (reply.readInt32());
+ }
+
status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) override {
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -1348,6 +1365,41 @@
}
return static_cast<status_t>(reply.readInt32());
}
+
+ virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices) const
+ {
+ if (devices == nullptr) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ status_t status = aa.writeToParcel(&data);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = remote()->transact(GET_DEVICES_FOR_ATTRIBUTES, data, &reply);
+ if (status != NO_ERROR) {
+ // transaction failed, return error
+ return status;
+ }
+ status = static_cast<status_t>(reply.readInt32());
+ if (status != NO_ERROR) {
+ // APM method call failed, return error
+ return status;
+ }
+
+ const size_t numberOfDevices = (size_t)reply.readInt32();
+ for (size_t i = 0; i < numberOfDevices; i++) {
+ AudioDeviceTypeAddr device;
+ if (device.readFromParcel((Parcel*)&reply) == NO_ERROR) {
+ devices->push_back(device);
+ } else {
+ return FAILED_TRANSACTION;
+ }
+ }
+ return NO_ERROR;
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -1413,8 +1465,10 @@
case SET_RTT_ENABLED:
case IS_CALL_SCREEN_MODE_SUPPORTED:
case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+ case SET_SUPPORTED_SYSTEM_USAGES:
case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
- case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+ case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+ case GET_DEVICES_FOR_ATTRIBUTES: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
@@ -2405,16 +2459,46 @@
if (status != NO_ERROR) {
return status;
}
+
volume_group_t group;
status = getVolumeGroupFromAudioAttributes(attributes, group);
- reply->writeInt32(status);
if (status != NO_ERROR) {
return NO_ERROR;
}
+
+ reply->writeInt32(status);
reply->writeUint32(static_cast<int>(group));
return NO_ERROR;
}
+ case SET_SUPPORTED_SYSTEM_USAGES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ std::vector<audio_usage_t> systemUsages;
+
+ int32_t size;
+ status_t status = data.readInt32(&size);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ if (size > MAX_ITEMS_PER_LIST) {
+ size = MAX_ITEMS_PER_LIST;
+ }
+
+ for (int32_t i = 0; i < size; i++) {
+ int32_t systemUsageInt;
+ status = data.readInt32(&systemUsageInt);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
+ audio_usage_t systemUsage = static_cast<audio_usage_t>(systemUsageInt);
+ systemUsages.push_back(systemUsage);
+ }
+ status = setSupportedSystemUsages(systemUsages);
+ reply->writeInt32(static_cast <int32_t>(status));
+ return NO_ERROR;
+ }
+
case SET_ALLOWED_CAPTURE_POLICY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
uid_t uid = data.readInt32();
@@ -2473,6 +2557,37 @@
return NO_ERROR;
}
+ case GET_DEVICES_FOR_ATTRIBUTES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioAttributes attributes;
+ status_t status = attributes.readFromParcel(&data);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ AudioDeviceTypeAddrVector devices;
+ status = getDevicesForAttributes(attributes.getAttributes(), &devices);
+ // reply data formatted as:
+ // - (int32) method call result from APM
+ // - (int32) number of devices (n) if method call returned NO_ERROR
+ // - n AudioDeviceTypeAddr if method call returned NO_ERROR
+ reply->writeInt32(status);
+ if (status != NO_ERROR) {
+ return NO_ERROR;
+ }
+ status = reply->writeInt32(devices.size());
+ if (status != NO_ERROR) {
+ return status;
+ }
+ for (const auto& device : devices) {
+ status = device.writeToParcel(reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ }
+
+ return NO_ERROR;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index f17d737..eec9dfc 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -639,7 +639,8 @@
sp<EffectClient> mIEffectClient; // IEffectClient implementation
sp<IMemory> mCblkMemory; // shared memory for deferred parameter setting
effect_param_cblk_t* mCblk; // control block for deferred parameter setting
- pid_t mClientPid;
+ pid_t mClientPid = (pid_t)-1;
+ uid_t mClientUid = (uid_t)-1;
};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index c4b528e..691755f 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -27,6 +27,7 @@
#include <media/IAudioFlingerClient.h>
#include <media/IAudioPolicyServiceClient.h>
#include <media/MicrophoneInfo.h>
+#include <set>
#include <system/audio.h>
#include <system/audio_effect.h>
#include <system/audio_policy.h>
@@ -107,7 +108,16 @@
static status_t setParameters(const String8& keyValuePairs);
static String8 getParameters(const String8& keys);
- static void setErrorCallback(audio_error_callback cb);
+ // Registers an error callback. When this callback is invoked, it means all
+ // state implied by this interface has been reset.
+ // Returns a token that can be used for un-registering.
+ // Might block while callbacks are being invoked.
+ static uintptr_t addErrorCallback(audio_error_callback cb);
+
+ // Un-registers a callback previously added with addErrorCallback.
+ // Might block while callbacks are being invoked.
+ static void removeErrorCallback(uintptr_t cb);
+
static void setDynPolicyCallback(dynamic_policy_callback cb);
static void setRecordConfigCallback(record_config_callback);
@@ -172,7 +182,7 @@
// or an unspecified existing unique ID.
static audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
- static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+ static void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid);
static void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
// Get the HW synchronization source used for an audio session.
@@ -279,6 +289,8 @@
static uint32_t getStrategyForStream(audio_stream_type_t stream);
static audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+ static status_t getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices);
static audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
static status_t registerEffect(const effect_descriptor_t *desc,
@@ -302,6 +314,8 @@
static status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory);
+ static status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
+
static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags);
// Check if hw offload is possible for given format, stream type, sample rate,
@@ -560,15 +574,19 @@
static const sp<AudioFlingerClient> getAudioFlingerClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+ // Invokes all registered error callbacks with the given error code.
+ static void reportError(status_t err);
+
static sp<AudioFlingerClient> gAudioFlingerClient;
static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
friend class AudioFlingerClient;
friend class AudioPolicyServiceClient;
- static Mutex gLock; // protects gAudioFlinger and gAudioErrorCallback,
+ static Mutex gLock; // protects gAudioFlinger
+ static Mutex gLockErrorCallbacks; // protects gAudioErrorCallbacks
static Mutex gLockAPS; // protects gAudioPolicyService and gAudioPolicyServiceClient
static sp<IAudioFlinger> gAudioFlinger;
- static audio_error_callback gAudioErrorCallback;
+ static std::set<audio_error_callback> gAudioErrorCallbacks;
static dynamic_policy_callback gDynPolicyCallback;
static record_config_callback gRecordConfigCallback;
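With the single gAudioErrorCallback replaced by a set, clients now register through addErrorCallback() and unregister with the returned token. A minimal sketch, not part of the change itself, of the intended pattern for framework-internal code, assuming the existing audio_error_callback typedef, void (*)(status_t):
```
// Hedged sketch of token-based error-callback registration.
// Framework-internal only; AudioSystem is not exposed through the NDK.
#include <cstdint>
#include <media/AudioSystem.h>

namespace {

void onAudioServerStateChange(android::status_t err) {
    if (err == android::DEAD_OBJECT) {
        // audioserver died: cached routing/session state must be rebuilt.
    } else if (err == android::NO_ERROR) {
        // audioserver (re)connected.
    }
}

}  // namespace

class AudioServerObserver {
  public:
    AudioServerObserver()
        : mToken(android::AudioSystem::addErrorCallback(onAudioServerStateChange)) {}
    ~AudioServerObserver() {
        // May block while reportError() is invoking callbacks on another thread.
        android::AudioSystem::removeErrorCallback(mToken);
    }

  private:
    const uintptr_t mToken;
};
```
Because reportError() walks the set while holding gLockErrorCallbacks, both registration calls can block until in-flight callbacks return, which is what the header comments warn about.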
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 8703f55..8bc1d83 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -446,7 +446,7 @@
virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) = 0;
- virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
+ virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) = 0;
virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) = 0;
virtual status_t queryNumberEffects(uint32_t *numEffects) const = 0;
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 6623061..402fdcf 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -108,6 +108,8 @@
virtual uint32_t getStrategyForStream(audio_stream_type_t stream) = 0;
virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
+ virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices) const = 0;
virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
virtual status_t registerEffect(const effect_descriptor_t *desc,
audio_io_handle_t io,
@@ -138,6 +140,7 @@
audio_unique_id_t* id) = 0;
virtual status_t removeSourceDefaultEffect(audio_unique_id_t id) = 0;
virtual status_t removeStreamDefaultEffect(audio_unique_id_t id) = 0;
+ virtual status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) = 0;
virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
// Check if offload is possible for given format, stream type, sample rate,
// bit rate, duration, video and streaming or offload property is enabled
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 9b5d58c..1a6a5a2 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -32,6 +32,7 @@
],
header_libs: [
+ "libaudiohal_headers",
"libbase_headers",
"libmedia_headers"
],
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 60d2f85..1fadc94 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -60,7 +60,7 @@
mVideoWidth = mVideoHeight = 0;
mLockThreadId = 0;
mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
- AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::acquireAudioSessionId(mAudioSessionId, (pid_t)-1, (uid_t)-1); // always in client.
mSendLevel = 0;
mRetransmitEndpointValid = false;
}
@@ -72,7 +72,7 @@
delete mAudioAttributesParcel;
mAudioAttributesParcel = NULL;
}
- AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
disconnect();
IPCThreadState::self()->flushCommands();
}
@@ -709,8 +709,8 @@
return BAD_VALUE;
}
if (sessionId != mAudioSessionId) {
- AudioSystem::acquireAudioSessionId(sessionId, -1);
- AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
+ AudioSystem::acquireAudioSessionId(sessionId, (pid_t)-1, (uid_t)-1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, (pid_t)-1);
mAudioSessionId = sessionId;
}
return NO_ERROR;
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index fc037a6..6f14081 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -360,6 +360,10 @@
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
TERMINATOR
};
diff --git a/media/libmediametrics/include/MediaMetricsItem.h b/media/libmediametrics/include/MediaMetricsItem.h
index 536c0e1..6141b36 100644
--- a/media/libmediametrics/include/MediaMetricsItem.h
+++ b/media/libmediametrics/include/MediaMetricsItem.h
@@ -35,6 +35,49 @@
namespace android {
+/*
+ * MediaMetrics Keys and Properties for Audio.
+ *
+ * C/C++ friendly constants that ensure
+ * 1) Compilation error on misspelling
+ * 2) Consistent behavior and documentation.
+ *
+ * TODO: Move to separate header file.
+ */
+
+// Taxonomy of audio keys
+
+// Key Prefixes are used for MediaMetrics Item Keys and end with a ".".
+// They must be appended with another value to make a key.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO "audio."
+
+// The AudioRecord key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
+
+// The AudioThread key appends the "threadId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD AMEDIAMETRICS_KEY_PREFIX_AUDIO "thread."
+
+// The AudioTrack key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK AMEDIAMETRICS_KEY_PREFIX_AUDIO "track."
+
+// Keys are strings used for MediaMetrics Item Keys
+#define AMEDIAMETRICS_KEY_AUDIO_FLINGER AMEDIAMETRICS_KEY_PREFIX_AUDIO "flinger"
+#define AMEDIAMETRICS_KEY_AUDIO_POLICY AMEDIAMETRICS_KEY_PREFIX_AUDIO "policy"
+
+// Props are properties allowed for MediaMetrics Items.
+#define AMEDIAMETRICS_PROP_EVENT "event" // string value (often func name)
+#define AMEDIAMETRICS_PROP_LATENCYMS "latencyMs" // double value
+#define AMEDIAMETRICS_PROP_OUTPUTDEVICES "outputDevices" // string value
+#define AMEDIAMETRICS_PROP_STARTUPMS "startupMs" // double value
+#define AMEDIAMETRICS_PROP_THREADID "threadId" // int32 value io handle
+
+// Values are strings accepted for a given property.
+
+// An event is a general description, which often is a function name.
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR "ctor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR "dtor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN "underrun" // from Thread
+
class IMediaMetricsService;
class Parcel;
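The key prefixes are intended to be concatenated with an instance id, while the bare keys name singleton items. A short sketch, not part of the change itself, of that composition (the id values and include path are illustrative):
```
// Hedged sketch: composing MediaMetrics item keys from the prefixes above.
#include <cstdint>
#include <string>

#include <media/MediaMetricsItem.h>  // assumed export path for the AMEDIAMETRICS_* macros

std::string audioTrackKey(int32_t trackId) {
    // e.g. "audio.track.42"
    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) + std::to_string(trackId);
}

std::string audioThreadKey(int32_t ioHandle) {
    // e.g. "audio.thread.10"
    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) + std::to_string(ioHandle);
}
```
Using macros rather than ad-hoc string literals gives the compile-time misspelling check called out in the comment block above.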
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 8f0e278..9d0c534 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -6,10 +6,11 @@
"aidl/android/media/IMediaTranscodingService.aidl",
"aidl/android/media/ITranscodingServiceClient.aidl",
"aidl/android/media/TranscodingErrorCode.aidl",
+ "aidl/android/media/TranscodingJobPriority.aidl",
"aidl/android/media/TranscodingType.aidl",
"aidl/android/media/TranscodingVideoCodecType.aidl",
- "aidl/android/media/TranscodingJob.aidl",
- "aidl/android/media/TranscodingRequest.aidl",
- "aidl/android/media/TranscodingResult.aidl",
+ "aidl/android/media/TranscodingJobParcel.aidl",
+ "aidl/android/media/TranscodingRequestParcel.aidl",
+ "aidl/android/media/TranscodingResultParcel.aidl",
],
}
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index cae8ff0..798300a 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,8 +16,8 @@
package android.media;
-import android.media.TranscodingJob;
-import android.media.TranscodingRequest;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingRequestParcel;
import android.media.ITranscodingServiceClient;
/**
@@ -27,6 +27,25 @@
*/
interface IMediaTranscodingService {
/**
+ * All MediaTranscoding service and device Binder calls may return a
+ * ServiceSpecificException with the following error codes
+ */
+ const int ERROR_PERMISSION_DENIED = 1;
+ const int ERROR_ALREADY_EXISTS = 2;
+ const int ERROR_ILLEGAL_ARGUMENT = 3;
+ const int ERROR_DISCONNECTED = 4;
+ const int ERROR_TIMED_OUT = 5;
+ const int ERROR_DISABLED = 6;
+ const int ERROR_INVALID_OPERATION = 7;
+
+ /**
+ * Default UID/PID values for non-privileged callers of
+ * registerClient().
+ */
+ const int USE_CALLING_UID = -1;
+ const int USE_CALLING_PID = -1;
+
+ /**
* Register the client with the MediaTranscodingService.
*
* Client must call this function to register itself with the service in order to perform
@@ -34,10 +53,16 @@
* Client should save this Id and use it for all the transaction with the service.
*
* @param client interface for the MediaTranscodingService to call the client.
+ * @param opPackageName op package name of the client.
+ * @param clientUid user id of the client.
+ * @param clientPid process id of the client.
* @return a unique positive Id assigned to the client by the service, -1 means failed to
* register.
*/
- int registerClient(in ITranscodingServiceClient client);
+ int registerClient(in ITranscodingServiceClient client,
+ in String opPackageName,
+ in int clientUid,
+ in int clientPid);
/**
* Unregister the client with the MediaTranscodingService.
@@ -58,8 +83,8 @@
* @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
*/
int submitRequest(in int clientId,
- in TranscodingRequest request,
- out TranscodingJob job);
+ in TranscodingRequestParcel request,
+ out TranscodingJobParcel job);
/**
* Cancels a transcoding job.
@@ -77,5 +102,5 @@
* @param job(output variable) the TranscodingJob associated with the jobId.
* @return true if succeeds, false otherwise.
*/
- boolean getJobWithId(in int jobId, out TranscodingJob job);
+ boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
}
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
index 29b6f35..e23c833 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
@@ -17,8 +17,8 @@
package android.media;
import android.media.TranscodingErrorCode;
-import android.media.TranscodingJob;
-import android.media.TranscodingResult;
+import android.media.TranscodingJobParcel;
+import android.media.TranscodingResultParcel;
/**
* ITranscodingServiceClient interface for the MediaTranscodingervice to communicate with the
@@ -39,7 +39,7 @@
* @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
* @param result contains the transcoded file stats and other transcoding metrics if requested.
*/
- oneway void onTranscodingFinished(in int jobId, in TranscodingResult result);
+ oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
/**
* Called when the transcoding associated with the jobId failed.
@@ -60,7 +60,9 @@
* @param oldAwaitNumber previous number of jobs ahead of current job.
* @param newAwaitNumber updated number of jobs ahead of current job.
*/
- oneway void onAwaitNumberOfJobsChanged(in int jobId, in int oldAwaitNumber, in int newAwaitNumber);
+ oneway void onAwaitNumberOfJobsChanged(in int jobId,
+ in int oldAwaitNumber,
+ in int newAwaitNumber);
/**
* Called when there is an update on the progress of the TranscodingJob.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
index 42ad8ad..d912c38 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJob.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
@@ -16,7 +16,7 @@
package android.media;
-import android.media.TranscodingRequest;
+import android.media.TranscodingRequestParcel;
/**
* TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
@@ -26,7 +26,7 @@
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJob {
+parcelable TranscodingJobParcel {
/**
* A unique positive Id generated by the MediaTranscodingService.
*/
@@ -35,7 +35,7 @@
/**
* The request associated with the TranscodingJob.
*/
- TranscodingRequest request;
+ TranscodingRequestParcel request;
/**
* Current number of jobs ahead of this job. The service schedules the job based on the priority
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
new file mode 100644
index 0000000..1a5d81a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding job.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingJobPriority {
+ // TODO(hkuang): define what each priority level actually mean.
+ kUnspecified = 0,
+ kLow = 1,
+ /**
+ * 2 ~ 20 is reserved for future use.
+ */
+ kNormal = 21,
+ /**
+ * 22 ~ 30 is reserved for future use.
+ */
+ kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
similarity index 91%
rename from media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 6b51ee3..7b7986d 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequest.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,6 +16,7 @@
package android.media;
+import android.media.TranscodingJobPriority;
import android.media.TranscodingType;
/**
@@ -24,7 +25,7 @@
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingRequest {
+parcelable TranscodingRequestParcel {
/**
* Name of file to be transcoded.
*/
@@ -48,8 +49,7 @@
/**
* Priority of this transcoding. Service will schedule the transcoding based on the priority.
*/
- // TODO(hkuang): Define the priority level.
- int priority;
+ TranscodingJobPriority priority;
/**
* Whether to receive update on progress and change of awaitNumJobs.
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
similarity index 93%
rename from media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
rename to media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index a41e025..65c49e7 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResult.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,15 +16,13 @@
package android.media;
-import android.media.TranscodingJob;
-
/**
* Result of the transcoding.
*
* {@hide}
*/
//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingResult {
+parcelable TranscodingResultParcel {
/**
* The jobId associated with the TranscodingResult.
*/
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ef9d253..960120f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1310,7 +1310,7 @@
OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
status_t err = configureOutputBuffersFromNativeWindow(
&bufferCount, &bufferSize, &minUndequeuedBuffers,
- false /* preregister */);
+ mFlags & kFlagPreregisterMetadataBuffers /* preregister */);
if (err != OK)
return err;
mNumUndequeuedBuffers = minUndequeuedBuffers;
@@ -1896,6 +1896,19 @@
setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
err = OK; // ignore error for now
}
+
+ OMX_INDEXTYPE index;
+ if (mOMXNode->getExtensionIndex(
+ "OMX.google.android.index.preregisterMetadataBuffers", &index) == OK) {
+ OMX_CONFIG_BOOLEANTYPE param;
+            InitOMXParams(&param);
+            param.bEnabled = OMX_FALSE;
+            if (mOMXNode->getParameter(index, &param, sizeof(param)) == OK) {
+ if (param.bEnabled == OMX_TRUE) {
+ mFlags |= kFlagPreregisterMetadataBuffers;
+ }
+ }
+ }
}
if (haveNativeWindow) {
sp<ANativeWindow> nativeWindow =
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index ba57497..6b08b08 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -15,6 +15,11 @@
"libnativewindow",
],
+ header_libs: [
+ "libstagefright_headers",
+ "libstagefright_foundation_headers",
+ ],
+
static_libs: ["libyuv_static"],
cflags: ["-Werror"],
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 7ebe71f..d65a663 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libstagefright_flacdec",
vendor_available: true,
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 5485f6d..7398690 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -2,6 +2,7 @@
name: "libstagefright_foundation_headers",
export_include_dirs: ["include"],
vendor_available: true,
+ host_supported: true,
}
cc_defaults {
diff --git a/media/libstagefright/include/media/stagefright/AACWriter.h b/media/libstagefright/include/media/stagefright/AACWriter.h
index 7c63ddd..2671138 100644
--- a/media/libstagefright/include/media/stagefright/AACWriter.h
+++ b/media/libstagefright/include/media/stagefright/AACWriter.h
@@ -17,7 +17,7 @@
#ifndef AAC_WRITER_H_
#define AAC_WRITER_H_
-#include "foundation/ABase.h"
+#include "media/stagefright/foundation/ABase.h"
#include <media/stagefright/MediaWriter.h>
#include <utils/threads.h>
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 7754de4..d56ec4f 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -158,6 +158,7 @@
kFlagIsSecure = 1,
kFlagPushBlankBuffersToNativeWindowOnShutdown = 2,
kFlagIsGrallocUsageProtected = 4,
+ kFlagPreregisterMetadataBuffers = 8,
};
enum {
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index 6f48c5d..5418f10 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -105,7 +105,8 @@
ERROR_CAS_CARD_MUTE = CAS_ERROR_BASE - 15,
ERROR_CAS_CARD_INVALID = CAS_ERROR_BASE - 16,
ERROR_CAS_BLACKOUT = CAS_ERROR_BASE - 17,
- ERROR_CAS_LAST_USED_ERRORCODE = CAS_ERROR_BASE - 17,
+ ERROR_CAS_REBOOTING = CAS_ERROR_BASE - 18,
+ ERROR_CAS_LAST_USED_ERRORCODE = CAS_ERROR_BASE - 18,
ERROR_CAS_VENDOR_MAX = CAS_ERROR_BASE - 500,
ERROR_CAS_VENDOR_MIN = CAS_ERROR_BASE - 999,
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 98df9b5..7c75f74 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -22,7 +22,7 @@
#include <utils/Vector.h>
#include <utils/threads.h>
-#include "foundation/ABase.h"
+#include "media/stagefright/foundation/ABase.h"
namespace android {
diff --git a/media/libstagefright/include/media/stagefright/foundation b/media/libstagefright/include/media/stagefright/foundation
deleted file mode 120000
index b9fd3b3..0000000
--- a/media/libstagefright/include/media/stagefright/foundation
+++ /dev/null
@@ -1 +0,0 @@
-../../../foundation/include/media/stagefright/foundation/
\ No newline at end of file
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index a507b91..42afea3 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -29,7 +29,6 @@
shared_libs: [
"libcrypto",
- "libmedia",
"libhidlmemory",
"android.hardware.cas.native@1.0",
"android.hidl.memory@1.0",
@@ -37,9 +36,13 @@
],
header_libs: [
+ "libmedia_headers",
+ "libaudioclient_headers",
"media_ndk_headers",
],
+ export_include_dirs: ["."],
+
whole_static_libs: [
"libstagefright_metadatautils",
],
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index 52db6f0..ae07917 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,12 +19,13 @@
adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
-The resource file for the tests is taken from Codec2 VTS resource folder. Push these files into device for testing.
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/writerTestRes.zip).
+Download and extract the folder. Push all the files in this folder to /data/local/tmp/ on the device.
```
-adb push $ANDROID_BUILD_TOP/frameworks/av/media/codec2/hidl/1.0/vts/functional/res /sdcard/
+adb push writerTestRes /data/local/tmp/
```
usage: writerTest -P \<path_to_res_folder\>
```
-adb shell /data/local/tmp/writerTest -P /sdcard/res/
+adb shell /data/local/tmp/writerTest -P /data/local/tmp/
```
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index d68438c..409f141 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -29,9 +29,9 @@
#include <media/stagefright/AACWriter.h>
#include <media/stagefright/AMRWriter.h>
-#include <media/stagefright/OggWriter.h>
-#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OggWriter.h>
#include <webm/WebmWriter.h>
#include "WriterTestEnvironment.h"
@@ -89,19 +89,36 @@
class WriterTest : public ::testing::TestWithParam<pair<string, int32_t>> {
public:
+ WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+
+ ~WriterTest() {
+ if (mWriter) {
+ mWriter.clear();
+ mWriter = nullptr;
+ }
+ if (mFileMeta) {
+ mFileMeta.clear();
+ mFileMeta = nullptr;
+ }
+ if (mCurrentTrack) {
+ mCurrentTrack.clear();
+ mCurrentTrack = nullptr;
+ }
+ }
+
virtual void SetUp() override {
mNumCsds = 0;
mInputFrameId = 0;
mWriterName = unknown_comp;
mDisableTest = false;
- std::map<std::string, standardWriters> mapWriter = {
+ static const std::map<std::string, standardWriters> mapWriter = {
{"ogg", OGG}, {"aac", AAC}, {"aac_adts", AAC_ADTS}, {"webm", WEBM},
{"mpeg4", MPEG4}, {"amrnb", AMR_NB}, {"amrwb", AMR_WB}, {"mpeg2Ts", MPEG2TS}};
// Find the component type
string writerFormat = GetParam().first;
if (mapWriter.find(writerFormat) != mapWriter.end()) {
- mWriterName = mapWriter[writerFormat];
+ mWriterName = mapWriter.at(writerFormat);
}
if (mWriterName == standardWriters::unknown_comp) {
cout << "[ WARN ] Test Skipped. No specific writer mentioned\n";
@@ -110,10 +127,8 @@
}
virtual void TearDown() override {
- mWriter.clear();
- mFileMeta.clear();
mBufferInfo.clear();
- if (mInputStream) mInputStream.close();
+ if (mInputStream.is_open()) mInputStream.close();
}
void getInputBufferInfo(string inputFileName, string inputInfo);
@@ -149,7 +164,7 @@
void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
std::ifstream eleInfo;
eleInfo.open(inputInfo.c_str());
- CHECK_EQ(eleInfo.is_open(), true);
+ ASSERT_EQ(eleInfo.is_open(), true);
int32_t bytesCount = 0;
uint32_t flags = 0;
int64_t timestamp = 0;
@@ -162,7 +177,7 @@
}
eleInfo.close();
mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
- CHECK_EQ(mInputStream.is_open(), true);
+ ASSERT_EQ(mInputStream.is_open(), true);
}
int32_t WriterTest::createWriter(int32_t fd) {
@@ -258,14 +273,11 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
// Creating writer within a test scope. Destructor should be called when the test ends
- int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << GetParam().first << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, createWriter(fd))
+ << "Failed to create writer for output format:" << GetParam().first;
}
TEST_P(WriterTest, WriterTest) {
@@ -276,38 +288,32 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << writerFormat << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
string inputFile = gEnv->getRes();
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
int32_t inputFileIdx = GetParam().second;
getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- if (!inputFile.compare(gEnv->getRes())) {
- ALOGV("No input file specified");
- return;
- }
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
getInputBufferInfo(inputFile, inputInfo);
status = addWriterSource(isAudio, param);
- if (status) {
- cout << "Failed to add source for " << writerFormat << "Writer \n";
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
mCurrentTrack->stop();
- if (status) {
- cout << writerFormat << " writer failed \n";
- mWriter->stop();
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->stop());
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
close(fd);
}
@@ -319,59 +325,125 @@
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (fd < 0) return;
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
int32_t status = createWriter(fd);
- if (status) {
- cout << "Failed to create writer for output format:" << writerFormat << "\n";
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
string inputFile = gEnv->getRes();
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
int32_t inputFileIdx = GetParam().second;
getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- if (!inputFile.compare(gEnv->getRes())) {
- ALOGV("No input file specified");
- return;
- }
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
getInputBufferInfo(inputFile, inputInfo);
status = addWriterSource(isAudio, param);
- if (status) {
- cout << "Failed to add source for " << writerFormat << "Writer \n";
- ASSERT_TRUE(false);
- }
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
mBufferInfo.size() / 4);
- if (status) {
- cout << writerFormat << " writer failed \n";
- mCurrentTrack->stop();
- mWriter->stop();
- ASSERT_TRUE(false);
- }
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
bool isPaused = false;
if ((mWriterName != standardWriters::MPEG2TS) && (mWriterName != standardWriters::MPEG4)) {
- CHECK_EQ((status_t)OK, mWriter->pause());
+ status = mWriter->pause();
+ ASSERT_EQ((status_t)OK, status);
isPaused = true;
}
// In the pause state, writers shouldn't write anything. Verify that the writers behave accordingly.
int32_t numFramesPaused = mBufferInfo.size() / 4;
- status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
mInputFrameId, numFramesPaused, isPaused);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
if (isPaused) {
- CHECK_EQ((status_t)OK, mWriter->start(mFileMeta.get()));
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status);
}
- status |= sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
mInputFrameId, mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
mCurrentTrack->stop();
- if (status) {
- cout << writerFormat << " writer failed \n";
- mWriter->stop();
- ASSERT_TRUE(false);
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+ close(fd);
+}
+
+TEST_P(WriterTest, MultiStartStopPauseTest) {
+ // TODO: (b/144821804)
+ // Enable the test for MPEG2TS writer
+ if (mDisableTest || mWriterName == standardWriters::MPEG2TS) return;
+ ALOGV("Test writers for multiple start, stop and pause calls");
+
+ string outputFile = OUTPUT_FILE_NAME;
+ int32_t fd =
+ open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+ string writerFormat = GetParam().first;
+ int32_t status = createWriter(fd);
+ ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
+
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ int32_t inputFileIdx = GetParam().second;
+ getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ getInputBufferInfo(inputFile, inputInfo);
+ status = addWriterSource(isAudio, param);
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+ // first start should succeed.
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+ // Multiple start() calls may or may not succeed.
+ // Writers are expected not to crash on multiple start() calls.
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->start(mFileMeta.get());
}
- CHECK_EQ((status_t)OK, mWriter->stop());
+
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
+ mBufferInfo.size() / 4);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->pause();
+ mWriter->start(mFileMeta.get());
+ }
+
+ mWriter->pause();
+ int32_t numFramesPaused = mBufferInfo.size() / 4;
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ mInputFrameId, numFramesPaused, true);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->start(mFileMeta.get());
+ }
+
+ status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
+ mInputFrameId, mBufferInfo.size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ mCurrentTrack->stop();
+
+ // first stop should succeed.
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+ // Multiple stop() calls may or may not succeed.
+ // Writers are expected not to crash on multiple stop() calls.
+ for (int32_t count = 0; count < kMaxCount; count++) {
+ mWriter->stop();
+ }
close(fd);
}
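
The conversions above swap CHECK_EQ (which aborts the whole test binary on failure) for gtest ASSERT_* macros with diagnostic messages, so one failing writer no longer kills the remaining parameterized runs. A minimal sketch of the pattern, with a hypothetical doWork() helper standing in for the writer calls:
```
#include <gtest/gtest.h>

// Hypothetical helper standing in for a writer API call; returns 0 on success.
static int32_t doWork(bool succeed) { return succeed ? 0 : -1; }

TEST(AssertSketch, ReportsFailureWithoutAborting) {
    int32_t status = doWork(true);
    // ASSERT_EQ records a failure and returns from this test function only;
    // a CHECK_EQ here would abort the entire binary instead.
    ASSERT_EQ(0, status) << "doWork failed with status " << status;
}
```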
diff --git a/media/libstagefright/tests/writer/WriterTestEnvironment.h b/media/libstagefright/tests/writer/WriterTestEnvironment.h
index 34c2baa..99e686f 100644
--- a/media/libstagefright/tests/writer/WriterTestEnvironment.h
+++ b/media/libstagefright/tests/writer/WriterTestEnvironment.h
@@ -25,7 +25,7 @@
class WriterTestEnvironment : public ::testing::Environment {
public:
- WriterTestEnvironment() : res("/sdcard/media/") {}
+ WriterTestEnvironment() : res("/data/local/tmp/") {}
// Parses the command line arguments
int initFromOptions(int argc, char **argv);
diff --git a/media/libstagefright/tests/writer/WriterUtility.cpp b/media/libstagefright/tests/writer/WriterUtility.cpp
index 2ba90a0..f24ccb6 100644
--- a/media/libstagefright/tests/writer/WriterUtility.cpp
+++ b/media/libstagefright/tests/writer/WriterUtility.cpp
@@ -26,7 +26,7 @@
int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
int32_t range, bool isPaused) {
while (1) {
- if (inputFrameId == (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
+ if (inputFrameId >= (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
int32_t size = bufferInfo[inputFrameId].size;
char *data = (char *)malloc(size);
if (!data) {
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index d402798..cdd6246 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -33,6 +33,7 @@
#define CODEC_CONFIG_FLAG 32
constexpr uint32_t kMaxCSDStrlen = 16;
+constexpr uint32_t kMaxCount = 20;
struct BufferInfo {
int32_t size;
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index f83d530..7b285a6 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -179,6 +179,10 @@
"libcutils",
"android.hardware.graphics.bufferqueue@1.0",
],
+ header_libs: [
+ "libstagefright_foundation_headers",
+ ],
+
cflags: [
"-D__ANDROID_VNDK__",
],
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
index 89d6ce2..1179d6c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
@@ -15,6 +15,7 @@
-->
<configuration description="Runs Media Benchmark Tests">
<target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+ <option name="cleanup-apks" value="false" />
<option name="test-file-name" value="MediaBenchmarkTest.apk" />
</target_preparer>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
index 07d414d..c41f198 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
@@ -57,7 +57,7 @@
private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
private static final String mStatsFile =
- mContext.getFilesDir() + "/Decoder." + System.currentTimeMillis() + ".csv";
+ mContext.getExternalFilesDir(null) + "/Decoder." + System.currentTimeMillis() + ".csv";
private static final String TAG = "DecoderTest";
private static final long PER_TEST_TIMEOUT_MS = 60000;
private static final boolean DEBUG = false;
@@ -114,6 +114,7 @@
Stats mStats = new Stats();
boolean status = mStats.writeStatsHeader(mStatsFile);
assertTrue("Unable to open stats file for writing!", status);
+ Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
}
@Test(timeout = PER_TEST_TIMEOUT_MS)
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 00e5e21..831467a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -57,7 +57,7 @@
private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
private static final String mStatsFile =
- mContext.getFilesDir() + "/Encoder." + System.currentTimeMillis() + ".csv";
+ mContext.getExternalFilesDir(null) + "/Encoder." + System.currentTimeMillis() + ".csv";
private static final String TAG = "EncoderTest";
private static final long PER_TEST_TIMEOUT_MS = 120000;
private static final boolean DEBUG = false;
@@ -94,6 +94,7 @@
Stats mStats = new Stats();
boolean status = mStats.writeStatsHeader(mStatsFile);
assertTrue("Unable to open stats file for writing!", status);
+ Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
}
@Test(timeout = PER_TEST_TIMEOUT_MS)
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
index a33ecfe..6b7aad1 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
@@ -50,8 +50,8 @@
private static Context mContext =
InstrumentationRegistry.getInstrumentation().getTargetContext();
private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
- private static final String mStatsFile =
- mContext.getFilesDir() + "/Extractor." + System.currentTimeMillis() + ".csv";
+ private static final String mStatsFile = mContext.getExternalFilesDir(null) + "/Extractor."
+ + System.currentTimeMillis() + ".csv";
private static final String TAG = "ExtractorTest";
private String mInputFileName;
private int mTrackId;
@@ -84,6 +84,7 @@
Stats mStats = new Stats();
boolean status = mStats.writeStatsHeader(mStatsFile);
assertTrue("Unable to open stats file for writing!", status);
+ Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
}
@Test
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index b69c57b..2efdba2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -58,7 +58,7 @@
InstrumentationRegistry.getInstrumentation().getTargetContext();
private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
private static final String mStatsFile =
- mContext.getFilesDir() + "/Muxer." + System.currentTimeMillis() + ".csv";
+ mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
private static final String TAG = "MuxerTest";
private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
{
@@ -106,6 +106,7 @@
Stats mStats = new Stats();
boolean status = mStats.writeStatsHeader(mStatsFile);
assertTrue("Unable to open stats file for writing!", status);
+ Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
}
@Test
diff --git a/media/tests/benchmark/src/native/decoder/Decoder.cpp b/media/tests/benchmark/src/native/decoder/Decoder.cpp
index b797cc9..2171589 100644
--- a/media/tests/benchmark/src/native/decoder/Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/Decoder.cpp
@@ -225,12 +225,12 @@
}
void Decoder::deInitCodec() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
if (!mCodec) return;
+ int64_t sTime = mStats->getCurTime();
AMediaCodec_stop(mCodec);
AMediaCodec_delete(mCodec);
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index f119cf3..2db612c 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -154,11 +154,12 @@
}
void Encoder::deInitCodec() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
+ if (!mCodec) return;
+ int64_t sTime = mStats->getCurTime();
AMediaCodec_stop(mCodec);
AMediaCodec_delete(mCodec);
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/src/native/muxer/Muxer.cpp b/media/tests/benchmark/src/native/muxer/Muxer.cpp
index f8627cb..3e150ca 100644
--- a/media/tests/benchmark/src/native/muxer/Muxer.cpp
+++ b/media/tests/benchmark/src/native/muxer/Muxer.cpp
@@ -49,12 +49,12 @@
}
void Muxer::deInitMuxer() {
- int64_t sTime = mStats->getCurTime();
if (mFormat) {
AMediaFormat_delete(mFormat);
mFormat = nullptr;
}
if (!mMuxer) return;
+ int64_t sTime = mStats->getCurTime();
AMediaMuxer_stop(mMuxer);
AMediaMuxer_delete(mMuxer);
int64_t eTime = mStats->getCurTime();
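
The three deInit changes above take the start timestamp only after the null check, so the stats never time an early-return path where nothing is actually stopped or deleted. A small sketch of that pattern, using a hypothetical Resource/nowUs() pair rather than the NDK types:
```
#include <chrono>
#include <cstdint>

struct Resource { void stop() {} };  // hypothetical stand-in for AMediaCodec/AMediaMuxer

static int64_t nowUs() {
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

// Only measure the release time when there is actually something to release.
int64_t timedRelease(Resource* res) {
    if (res == nullptr) return 0;  // early return: no timing, no skewed stats
    int64_t sTime = nowUs();       // start the clock after the null check
    res->stop();
    return nowUs() - sTime;
}
```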
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index a33ed97..343ec05 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -416,6 +416,8 @@
} else if (strchr(pkg.c_str(), '.') == nullptr) {
// not of form 'com.whatever...'; assume internal
// so we don't need to look it up in package manager.
+ } else if (strncmp(pkg.c_str(), "android.", 8) == 0) {
+ // android.* packages are assumed fine
} else if (package_mgr.get() != nullptr) {
String16 pkgName16(pkg.c_str());
binder::Status status = package_mgr->getInstallerForPackage(pkgName16, &installer);
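
The new branch above adds android.* package names to the fast paths that skip the package-manager lookup, next to the existing dot-less "assume internal" case. A standalone sketch of that decision order (hypothetical helper, not the actual ServiceUtilities code):
```
#include <cstring>
#include <string>

// Hypothetical predicate mirroring the fast paths above: names without a '.'
// and names starting with "android." skip the package-manager lookup.
bool needsPackageManagerLookup(const std::string& pkg) {
    if (strchr(pkg.c_str(), '.') == nullptr) return false;      // internal name
    if (strncmp(pkg.c_str(), "android.", 8) == 0) return false; // platform package
    return true;  // anything else is resolved via the package manager
}
```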
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index c58360d..f06d026 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -3,6 +3,8 @@
cc_library_shared {
name: "libaudioflinger",
+ tidy: false, // b/146435095, segmentation fault with Effects.cpp
+
srcs: [
"AudioFlinger.cpp",
"AudioHwDevice.cpp",
@@ -63,6 +65,7 @@
],
header_libs: [
+ "libaudiohal_headers",
"libmedia_headers",
],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index c856106..d6f0824 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -348,6 +348,9 @@
thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
*handle = portId;
*sessionId = actualSessionId;
+ config->sample_rate = thread->sampleRate();
+ config->channel_mask = thread->channelMask();
+ config->format = thread->format();
} else {
if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
AudioSystem::releaseOutput(portId);
@@ -461,10 +464,12 @@
}
result.append("Global session refs:\n");
- result.append(" session cnt pid\n");
+ result.append(" session cnt pid uid name\n");
for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
AudioSessionRef *r = mAudioSessionRefs[i];
- result.appendFormat(" %7d %4d %7d\n", r->mSessionid, r->mCnt, r->mPid);
+ const mediautils::UidInfo::Info info = mUidInfo.getInfo(r->mUid);
+ result.appendFormat(" %7d %4d %7d %6u %s\n", r->mSessionid, r->mCnt, r->mPid,
+ r->mUid, info.package.c_str());
}
write(fd, result.string(), result.size());
}
@@ -2892,14 +2897,18 @@
return nextUniqueId(use);
}
-void AudioFlinger::acquireAudioSessionId(audio_session_t audioSession, pid_t pid)
+void AudioFlinger::acquireAudioSessionId(
+ audio_session_t audioSession, pid_t pid, uid_t uid)
{
Mutex::Autolock _l(mLock);
pid_t caller = IPCThreadState::self()->getCallingPid();
ALOGV("acquiring %d from %d, for %d", audioSession, caller, pid);
const uid_t callerUid = IPCThreadState::self()->getCallingUid();
- if (pid != -1 && isAudioServerUid(callerUid)) { // check must match releaseAudioSessionId()
- caller = pid;
+ if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+ caller = pid; // check must match releaseAudioSessionId()
+ }
+ if (uid == (uid_t)-1 || !isAudioServerOrMediaServerUid(callerUid)) {
+ uid = callerUid;
}
{
@@ -2923,7 +2932,7 @@
return;
}
}
- mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller));
+ mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller, uid));
ALOGV(" added new entry for %d", audioSession);
}
@@ -2935,8 +2944,8 @@
pid_t caller = IPCThreadState::self()->getCallingPid();
ALOGV("releasing %d from %d for %d", audioSession, caller, pid);
const uid_t callerUid = IPCThreadState::self()->getCallingUid();
- if (pid != -1 && isAudioServerUid(callerUid)) { // check must match acquireAudioSessionId()
- caller = pid;
+ if (pid != (pid_t)-1 && isAudioServerOrMediaServerUid(callerUid)) {
+ caller = pid; // check must match acquireAudioSessionId()
}
size_t num = mAudioSessionRefs.size();
for (size_t i = 0; i < num; i++) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5c975e1..d4e0ae2 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -214,7 +214,7 @@
// This is the binder API. For the internal API see nextUniqueId().
virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
- virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
+ void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override;
virtual void releaseAudioSessionId(audio_session_t audioSession, pid_t pid);
@@ -343,7 +343,10 @@
virtual ~SyncEvent() {}
- void trigger() { Mutex::Autolock _l(mLock); if (mCallback) mCallback(this); }
+ void trigger() {
+ Mutex::Autolock _l(mLock);
+ if (mCallback) mCallback(wp<SyncEvent>(this));
+ }
bool isCancelled() const { Mutex::Autolock _l(mLock); return (mCallback == NULL); }
void cancel() { Mutex::Autolock _l(mLock); mCallback = NULL; }
AudioSystem::sync_event_t type() const { return mType; }
@@ -808,10 +811,11 @@
// for mAudioSessionRefs only
struct AudioSessionRef {
- AudioSessionRef(audio_session_t sessionid, pid_t pid) :
- mSessionid(sessionid), mPid(pid), mCnt(1) {}
+ AudioSessionRef(audio_session_t sessionid, pid_t pid, uid_t uid) :
+ mSessionid(sessionid), mPid(pid), mUid(uid), mCnt(1) {}
const audio_session_t mSessionid;
const pid_t mPid;
+ const uid_t mUid;
int mCnt;
};
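
With the uid now stored in AudioSessionRef, acquireAudioSessionId()/releaseAudioSessionId() only honor caller-supplied identities when the binder caller is audioserver or mediaserver; otherwise the real calling pid/uid are used. A minimal sketch of that substitution rule, with a hypothetical isTrustedService() helper in place of isAudioServerOrMediaServerUid():
```
#include <sys/types.h>

struct Identity { pid_t pid; uid_t uid; };

// Hypothetical stand-in for isAudioServerOrMediaServerUid().
static bool isTrustedService(uid_t uid) { return uid == 1041 /* audioserver, assumed */; }

// Accept an identity supplied on behalf of a client only from trusted services;
// otherwise fall back to the actual binder caller, mirroring the checks above.
Identity resolveSessionOwner(pid_t suppliedPid, uid_t suppliedUid, Identity caller) {
    Identity owner = caller;
    if (suppliedPid != (pid_t)-1 && isTrustedService(caller.uid)) owner.pid = suppliedPid;
    if (suppliedUid != (uid_t)-1 && isTrustedService(caller.uid)) owner.uid = suppliedUid;
    return owner;
}
```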
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 70c56e3..2f3724f 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -737,7 +737,7 @@
}
if (!mSupportsFloat) { // convert input to int16_t as effect doesn't support float.
if (!auxType) {
- if (mInConversionBuffer.get() == nullptr) {
+ if (mInConversionBuffer == nullptr) {
ALOGW("%s: mInConversionBuffer is null, bypassing", __func__);
goto data_bypass;
}
@@ -748,7 +748,7 @@
inBuffer = mInConversionBuffer;
}
if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
- if (mOutConversionBuffer.get() == nullptr) {
+ if (mOutConversionBuffer == nullptr) {
ALOGW("%s: mOutConversionBuffer is null, bypassing", __func__);
goto data_bypass;
}
@@ -921,9 +921,8 @@
mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
ALOGV("configure() %p chain %p buffer %p framecount %zu",
- this, mCallback->chain().promote() != nullptr ? mCallback->chain().promote().get() :
- nullptr,
- mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+ this, mCallback->chain().promote().get(),
+ mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
status_t cmdStatus;
size = sizeof(int);
@@ -1290,7 +1289,7 @@
const uint32_t inChannelCount =
audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
const bool formatMismatch = !mSupportsFloat || mInChannelCountRequested != inChannelCount;
- if (!auxType && formatMismatch && mInBuffer.get() != nullptr) {
+ if (!auxType && formatMismatch && mInBuffer != nullptr) {
// we need to translate - create hidl shared buffer and intercept
const size_t inFrameCount = mConfig.inputCfg.buffer.frameCount;
// Use FCC_2 in case mInChannelCountRequested is mono and the effect is stereo.
@@ -1300,13 +1299,13 @@
ALOGV("%s: setInBuffer updating for inChannels:%d inFrameCount:%zu total size:%zu",
__func__, inChannels, inFrameCount, size);
- if (size > 0 && (mInConversionBuffer.get() == nullptr
+ if (size > 0 && (mInConversionBuffer == nullptr
|| size > mInConversionBuffer->getSize())) {
mInConversionBuffer.clear();
ALOGV("%s: allocating mInConversionBuffer %zu", __func__, size);
(void)mCallback->allocateHalBuffer(size, &mInConversionBuffer);
}
- if (mInConversionBuffer.get() != nullptr) {
+ if (mInConversionBuffer != nullptr) {
mInConversionBuffer->setFrameCount(inFrameCount);
mEffectInterface->setInBuffer(mInConversionBuffer);
} else if (size > 0) {
@@ -1335,7 +1334,7 @@
const uint32_t outChannelCount =
audio_channel_count_from_out_mask(mConfig.outputCfg.channels);
const bool formatMismatch = !mSupportsFloat || mOutChannelCountRequested != outChannelCount;
- if (formatMismatch && mOutBuffer.get() != nullptr) {
+ if (formatMismatch && mOutBuffer != nullptr) {
const size_t outFrameCount = mConfig.outputCfg.buffer.frameCount;
// Use FCC_2 in case mOutChannelCountRequested is mono and the effect is stereo.
const uint32_t outChannels = std::max((uint32_t)FCC_2, mOutChannelCountRequested);
@@ -1344,13 +1343,13 @@
ALOGV("%s: setOutBuffer updating for outChannels:%d outFrameCount:%zu total size:%zu",
__func__, outChannels, outFrameCount, size);
- if (size > 0 && (mOutConversionBuffer.get() == nullptr
+ if (size > 0 && (mOutConversionBuffer == nullptr
|| size > mOutConversionBuffer->getSize())) {
mOutConversionBuffer.clear();
ALOGV("%s: allocating mOutConversionBuffer %zu", __func__, size);
(void)mCallback->allocateHalBuffer(size, &mOutConversionBuffer);
}
- if (mOutConversionBuffer.get() != nullptr) {
+ if (mOutConversionBuffer != nullptr) {
mOutConversionBuffer->setFrameCount(outFrameCount);
mEffectInterface->setOutBuffer(mOutConversionBuffer);
} else if (size > 0) {
@@ -1521,7 +1520,7 @@
static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
std::stringstream ss;
- if (buffer.get() == nullptr) {
+ if (buffer == nullptr) {
return "nullptr"; // make different than below
} else if (buffer->externalData() != nullptr) {
ss << (isInput ? buffer->externalData() : buffer->audioBuffer()->raw)
@@ -1937,19 +1936,20 @@
#undef LOG_TAG
#define LOG_TAG "AudioFlinger::EffectChain"
-AudioFlinger::EffectChain::EffectChain(ThreadBase *thread,
- audio_session_t sessionId)
+AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& thread,
+ audio_session_t sessionId)
: mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
- mEffectCallback(new EffectCallback(this, thread, thread->mAudioFlinger.get()))
+ mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
{
mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
- if (thread == nullptr) {
+ sp<ThreadBase> p = thread.promote();
+ if (p == nullptr) {
return;
}
- mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
- thread->frameCount();
+ mMaxTailBuffers = ((kProcessTailDurationMs * p->sampleRate()) / 1000) /
+ p->frameCount();
}
AudioFlinger::EffectChain::~EffectChain()
@@ -2638,7 +2638,7 @@
void AudioFlinger::EffectChain::setThread(const sp<ThreadBase>& thread)
{
Mutex::Autolock _l(mLock);
- mEffectCallback->setThread(thread.get());
+ mEffectCallback->setThread(thread);
}
void AudioFlinger::EffectChain::checkOutputFlagCompatibility(audio_output_flags_t *flags) const
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 40bb226..1dfa1ae 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -402,7 +402,6 @@
class EffectChain : public RefBase {
public:
EffectChain(const wp<ThreadBase>& wThread, audio_session_t sessionId);
- EffectChain(ThreadBase *thread, audio_session_t sessionId);
virtual ~EffectChain();
// special key used for an entry in mSuspendedEffects keyed vector
@@ -513,8 +512,11 @@
class EffectCallback : public EffectCallbackInterface {
public:
- EffectCallback(EffectChain *chain, ThreadBase *thread, AudioFlinger *audioFlinger)
- : mChain(chain), mThread(thread), mAudioFlinger(audioFlinger) {}
+ EffectCallback(const wp<EffectChain>& chain,
+ const wp<ThreadBase>& thread)
+ : mChain(chain) {
+ setThread(thread);
+ }
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
int32_t sessionId, int32_t deviceId, sp<EffectHalInterface> *effect) override;
@@ -550,7 +552,12 @@
wp<EffectChain> chain() const override { return mChain; }
wp<ThreadBase> thread() { return mThread; }
- void setThread(ThreadBase *thread) { mThread = thread; };
+
+ void setThread(const wp<ThreadBase>& thread) {
+ mThread = thread;
+ sp<ThreadBase> p = thread.promote();
+ mAudioFlinger = p ? p->mAudioFlinger : nullptr;
+ }
private:
wp<EffectChain> mChain;
@@ -623,7 +630,8 @@
effect_descriptor_t *desc, int id)
: EffectBase(callback, desc, id, AUDIO_SESSION_DEVICE, false),
mDevice(device), mManagerCallback(callback),
- mMyCallback(new ProxyCallback(this, callback)) {}
+ mMyCallback(new ProxyCallback(wp<DeviceEffectProxy>(this),
+ callback)) {}
status_t setEnabled(bool enabled, bool fromHandle) override;
sp<DeviceEffectProxy> asDeviceEffectProxy() override { return this; }
@@ -649,7 +657,7 @@
class ProxyCallback : public EffectCallbackInterface {
public:
- ProxyCallback(DeviceEffectProxy *proxy,
+ ProxyCallback(const wp<DeviceEffectProxy>& proxy,
const sp<DeviceEffectManagerCallback>& callback)
: mProxy(proxy), mManagerCallback(callback) {}
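
EffectCallback and ProxyCallback now keep wp<> back-references instead of raw pointers, so every use has to promote and tolerate an owner that is already gone, exactly as setThread() above does before touching mAudioFlinger. A generic sketch of the promote-then-check pattern (hypothetical Owner/Callback types, not the AudioFlinger classes):
```
#include <utils/RefBase.h>
using android::sp;
using android::wp;
using android::RefBase;

struct Owner : public RefBase {
    int sampleRate() const { return 48000; }
};

struct Callback : public RefBase {
    explicit Callback(const wp<Owner>& owner) : mOwner(owner) {}
    // Promote the weak reference on every use; the owner may already be destroyed.
    int ownerSampleRateOrDefault() const {
        sp<Owner> owner = mOwner.promote();
        return owner != nullptr ? owner->sampleRate() : 0;
    }
  private:
    wp<Owner> mOwner;  // never a raw pointer, so no dangling access
};
```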
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 5501856..b58fd8b 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -497,10 +497,12 @@
}
sp<RecordThread::PatchRecord> tempRecordTrack;
+ const bool usePassthruPatchRecord =
+ (inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT);
const size_t playbackFrameCount = mPlayback.thread()->frameCount();
const size_t recordFrameCount = mRecord.thread()->frameCount();
size_t frameCount = 0;
- if ((inputFlags & AUDIO_INPUT_FLAG_DIRECT) && (outputFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
+ if (usePassthruPatchRecord) {
// PassthruPatchRecord producesBufferOnDemand, so use
// maximum of playback and record thread framecounts
frameCount = std::max(playbackFrameCount, recordFrameCount);
@@ -557,8 +559,14 @@
}
// tie playback and record tracks together
- mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack);
- mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack);
+ // In the case of PassthruPatchRecord no I/O activity happens on the RecordThread;
+ // everything is driven from the PlaybackThread. Thus the AudioBufferProvider methods
+ // of PassthruPatchRecord can only be called if the corresponding PatchTrack
+ // is alive. There is no need to hold a reference, and there is no need
+ // to clear it. In fact, since playback stopping is asynchronous, there is
+ // no proper time when clearing could be done.
+ mRecord.setTrackAndPeer(tempRecordTrack, tempPatchTrack, !usePassthruPatchRecord);
+ mPlayback.setTrackAndPeer(tempPatchTrack, tempRecordTrack, true /*holdReference*/);
// start capture and playback
mRecord.track()->start(AudioSystem::SYNC_EVENT_NONE, AUDIO_SESSION_NONE);
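
The comment above is the rationale for the new holdReference flag: for the passthrough patch the record endpoint must not keep a strong reference to its peer, since the peer's lifetime is bounded by the playback side and there is no safe point at which to clear it. A toy sketch of the strong-versus-weak peer choice (hypothetical Peer/Endpoint types, not the PatchPanel classes):
```
#include <utils/RefBase.h>
using android::sp;
using android::wp;
using android::RefBase;

struct Peer : public RefBase {};

struct Endpoint {
    // Mirrors setTrackAndPeer(..., holdReference): keep a strong reference only
    // when this endpoint owns the peer's lifetime and will clear it later.
    void setPeer(const sp<Peer>& peer, bool holdReference) {
        mWeakPeer = peer;
        mClearPeer = holdReference;
        if (holdReference) {
            mStrongPeer = peer;
        }
    }
    void clearPeer() { if (mClearPeer) mStrongPeer.clear(); }
  private:
    wp<Peer> mWeakPeer;
    sp<Peer> mStrongPeer;
    bool mClearPeer = false;
};
```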
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 4e0d243..89d4eb1 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -128,18 +128,20 @@
mCloseThread = closeThread;
}
template <typename T>
- void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer) {
+ void setTrackAndPeer(const sp<TrackType>& track, const sp<T> &peer, bool holdReference) {
mTrack = track;
mThread->addPatchTrack(mTrack);
- mTrack->setPeerProxy(peer, true /* holdReference */);
+ mTrack->setPeerProxy(peer, holdReference);
+ mClearPeerProxy = holdReference;
}
- void clearTrackPeer() { if (mTrack) mTrack->clearPeerProxy(); }
+ void clearTrackPeer() { if (mClearPeerProxy && mTrack) mTrack->clearPeerProxy(); }
void stopTrack() { if (mTrack) mTrack->stop(); }
void swap(Endpoint &other) noexcept {
using std::swap;
swap(mThread, other.mThread);
swap(mCloseThread, other.mCloseThread);
+ swap(mClearPeerProxy, other.mClearPeerProxy);
swap(mHandle, other.mHandle);
swap(mTrack, other.mTrack);
}
@@ -151,6 +153,7 @@
private:
sp<ThreadType> mThread;
bool mCloseThread = true;
+ bool mClearPeerProxy = true;
audio_patch_handle_t mHandle = AUDIO_PATCH_HANDLE_NONE;
sp<TrackType> mTrack;
};
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c5cdc25..dd0cd9b 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -192,6 +192,10 @@
// return the enabled output devices for the given stream type
virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
+ // retrieves the list of enabled output devices for the given audio attributes
+ virtual status_t getDevicesForAttributes(const audio_attributes_t &attr,
+ AudioDeviceTypeAddrVector *devices) = 0;
+
// Audio effect management
virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
virtual status_t registerEffect(const effect_descriptor_t *desc,
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 20c0a24..b1103ab 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -197,7 +197,9 @@
ALOGV("%s: Mix %zu ignored as secondaryOutput because not opened yet", __func__, i);
} else {
ALOGV("%s: Add a secondary desc %zu", __func__, i);
- secondaryDescs->push_back(policyDesc);
+ if (secondaryDescs != nullptr) {
+ secondaryDescs->push_back(policyDesc);
+ }
}
}
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 4376802..1b2f7c7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -200,6 +200,7 @@
{
static constexpr const char *speakerDrcEnabled = "speaker_drc_enabled";
static constexpr const char *callScreenModeSupported= "call_screen_mode_supported";
+ static constexpr const char *engineLibrarySuffix = "engine_library";
};
static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
@@ -692,6 +693,11 @@
convertTo<std::string, bool>(attr, value)) {
config->setCallScreenModeSupported(value);
}
+ std::string engineLibrarySuffix = getXmlAttribute(cur, Attributes::engineLibrarySuffix);
+ if (!engineLibrarySuffix.empty()) {
+ config->setEngineLibraryNameSuffix(engineLibrarySuffix);
+ }
+ return NO_ERROR;
}
}
return NO_ERROR;
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 4b5e788..f4610bb 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -92,6 +92,10 @@
srcs: ["audio_policy_configuration_generic.xml"],
}
filegroup {
+ name: "audio_policy_configuration_generic_configurable",
+ srcs: ["audio_policy_configuration_generic_configurable.xml"],
+}
+filegroup {
name: "usb_audio_policy_configuration",
srcs: ["usb_audio_policy_configuration.xml"],
}
diff --git a/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
new file mode 100644
index 0000000..fbe4f7f
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_generic_configurable.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
+
+ <!-- Global configuration Declaration -->
+ <globalConfiguration speaker_drc_enabled="false" engine_library="configurable"/>
+
+ <modules>
+ <!-- Primary Audio HAL -->
+ <xi:include href="primary_audio_policy_configuration.xml"/>
+
+ <!-- Remote Submix Audio HAL -->
+ <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+ </modules>
+ <!-- End of Modules section -->
+
+ <!-- Volume section:
+ IMPORTANT NOTE: Volume tables have been moved to the engine configuration.
+ They are kept here for legacy support.
+ The engine will fall back on these files if none are provided by the engine configuration.
+ -->
+
+ <xi:include href="audio_policy_volumes.xml"/>
+ <xi:include href="default_volume_tables.xml"/>
+
+ <!-- End of Volume section -->
+
+ <!-- Surround Sound configuration -->
+
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+ <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 885b5fa..ff840f9 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,4 @@
-cc_library_static {
+cc_library {
name: "libaudiopolicyengine_config",
export_include_dirs: ["include"],
include_dirs: [
@@ -14,17 +14,14 @@
],
shared_libs: [
"libmedia_helper",
- "libandroidicu",
"libxml2",
"libutils",
"liblog",
"libcutils",
],
- static_libs: [
- "libaudiopolicycomponents",
- ],
header_libs: [
"libaudio_system_headers",
- "libaudiopolicycommon",
+ "libmedia_headers",
+ "libaudioclient_headers",
],
}
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index d47fbd2..7f8cdd9 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -18,7 +18,6 @@
//#define LOG_NDEBUG 0
#include "EngineConfig.h"
-#include <policy.h>
#include <cutils/properties.h>
#include <media/TypeConverter.h>
#include <media/convert.h>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 752ba92..7e5c5e3 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -80,8 +80,9 @@
status_t Engine::initCheck()
{
- if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start() != NO_ERROR) {
- ALOGE("%s: could not start Policy PFW", __FUNCTION__);
+ std::string error;
+ if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
+ ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
return NO_INIT;
}
return EngineBase::initCheck();
@@ -162,21 +163,21 @@
return mPolicyParameterMgr->getForceUse(usage);
}
-status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
audio_policy_dev_state_t state)
{
- mPolicyParameterMgr->setDeviceConnectionState(devDesc, state);
-
- if (audio_is_output_device(devDesc->type())) {
+ mPolicyParameterMgr->setDeviceConnectionState(
+ device->type(), device->address().c_str(), state);
+ if (audio_is_output_device(device->type())) {
// FIXME: Use DeviceTypeSet when the interface is ready
return mPolicyParameterMgr->setAvailableOutputDevices(
deviceTypesToBitMask(getApmObserver()->getAvailableOutputDevices().types()));
- } else if (audio_is_input_device(devDesc->type())) {
+ } else if (audio_is_input_device(device->type())) {
// FIXME: Use DeviceTypeSet when the interface is ready
return mPolicyParameterMgr->setAvailableInputDevices(
deviceTypesToBitMask(getApmObserver()->getAvailableInputDevices().types()));
}
- return EngineBase::setDeviceConnectionState(devDesc, state);
+ return EngineBase::setDeviceConnectionState(device, state);
}
status_t Engine::loadAudioPolicyEngineConfig()
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 6f59487..301ecc0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -11,7 +11,6 @@
"libbase_headers",
"libaudiopolicycommon",
],
- static_libs: ["libaudiopolicycomponents"],
shared_libs: [
"liblog",
"libutils",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 465a6f9..1b3b9a0 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -92,7 +92,8 @@
template <>
struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionTypeInterface> {};
-ParameterManagerWrapper::ParameterManagerWrapper()
+ParameterManagerWrapper::ParameterManagerWrapper(bool enableSchemaVerification,
+ const std::string &schemaUri)
: mPfwConnectorLogger(new ParameterMgrPlatformConnectorLogger)
{
// Connector
@@ -104,6 +105,15 @@
// Logger
mPfwConnector->setLogger(mPfwConnectorLogger);
+
+ // Schema validation
+ std::string error;
+ bool ret = mPfwConnector->setValidateSchemasOnStart(enableSchemaVerification, error);
+ ALOGE_IF(!ret, "Failed to activate schema validation: %s", error.c_str());
+ if (enableSchemaVerification && ret && !schemaUri.empty()) {
+ ALOGE("Schema verification activated with schema URI: %s", schemaUri.c_str());
+ mPfwConnector->setSchemaUri(schemaUri);
+ }
}
status_t ParameterManagerWrapper::addCriterion(const std::string &name, bool isInclusive,
@@ -145,11 +155,10 @@
delete mPfwConnector;
}
-status_t ParameterManagerWrapper::start()
+status_t ParameterManagerWrapper::start(std::string &error)
{
ALOGD("%s: in", __FUNCTION__);
/// Start PFW
- std::string error;
if (!mPfwConnector->start(error)) {
ALOGE("%s: Policy PFW start error: %s", __FUNCTION__, error.c_str());
return NO_INIT;
@@ -253,13 +262,13 @@
return interface->getLiteralValue(valueToCheck, literalValue);
}
-status_t ParameterManagerWrapper::setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
- audio_policy_dev_state_t state)
+status_t ParameterManagerWrapper::setDeviceConnectionState(
+ audio_devices_t type, const std::string address, audio_policy_dev_state_t state)
{
- std::string criterionName = audio_is_output_device(devDesc->type()) ?
+ std::string criterionName = audio_is_output_device(type) ?
gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
- ALOGV("%s: device with address %s %s", __FUNCTION__, devDesc->address().c_str(),
+ ALOGV("%s: device with address %s %s", __FUNCTION__, address.c_str(),
state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE? "disconnected" : "connected");
ISelectionCriterionInterface *criterion =
getElement<ISelectionCriterionInterface>(criterionName, mPolicyCriteria);
@@ -271,8 +280,8 @@
auto criterionType = criterion->getCriterionType();
int deviceAddressId;
- if (not criterionType->getNumericalValue(devDesc->address().c_str(), deviceAddressId)) {
- ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, devDesc->address().c_str());
+ if (not criterionType->getNumericalValue(address.c_str(), deviceAddressId)) {
+ ALOGW("%s: unknown device address reported (%s)", __FUNCTION__, address.c_str());
return BAD_TYPE;
}
int currentValueMask = criterion->getCriterionState();
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index 8443008..62b129a 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,9 +16,6 @@
#pragma once
-#include <PolicyAudioPort.h>
-#include <HwModule.h>
-#include <DeviceDescriptor.h>
#include <system/audio.h>
#include <system/audio_policy.h>
#include <utils/Errors.h>
@@ -47,16 +44,18 @@
using Criteria = std::map<std::string, ISelectionCriterionInterface *>;
public:
- ParameterManagerWrapper();
+ ParameterManagerWrapper(bool enableSchemaVerification = false,
+ const std::string &schemaUri = {});
~ParameterManagerWrapper();
/**
* Starts the platform state service.
* It starts the parameter framework policy instance.
+ * @param[out] error contains a human readable error message if start failed
*
- * @return NO_ERROR if success, error code otherwise.
+ * @return NO_ERROR on success, error code otherwise; in that case error is set to a human readable string.
*/
- status_t start();
+ status_t start(std::string &error);
/**
* The following API wrap policy action to criteria
@@ -117,7 +116,15 @@
*/
status_t setAvailableOutputDevices(audio_devices_t outputDevices);
- status_t setDeviceConnectionState(const sp<DeviceDescriptor> devDesc,
+ /**
+ * @brief setDeviceConnectionState propagates a state event for the given device
+ * @param type bit mask of the device type whose state has changed
+ * @param address address of the device whose state has changed
+ * @param state new state of the given device
+ * @return NO_ERROR if the new state was correctly propagated to the Engine Parameter-Framework, error
+ * code otherwise.
+ */
+ status_t setDeviceConnectionState(audio_devices_t type, const std::string address,
audio_policy_dev_state_t state);
/**
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index d1b85c8..82c4e47 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -971,19 +971,21 @@
if (usePrimaryOutputFromPolicyMixes) {
*output = policyDesc->mIoHandle;
sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
- sp<DeviceDescriptor> deviceDesc =
- mAvailableOutputDevices.getDevice(mix->mDeviceType,
- mix->mDeviceAddress,
- AUDIO_FORMAT_DEFAULT);
- *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
+ if (mix != nullptr) {
+ sp<DeviceDescriptor> deviceDesc =
+ mAvailableOutputDevices.getDevice(mix->mDeviceType,
+ mix->mDeviceAddress,
+ AUDIO_FORMAT_DEFAULT);
+ *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
- ALOGV("getOutputForAttr() returns output %d", *output);
- if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
- *outputType = API_OUT_MIX_PLAYBACK;
- } else {
- *outputType = API_OUTPUT_LEGACY;
+ ALOGV("getOutputForAttr() returns output %d", *output);
+ if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+ *outputType = API_OUT_MIX_PLAYBACK;
+ } else {
+ *outputType = API_OUTPUT_LEGACY;
+ }
+ return NO_ERROR;
}
- return NO_ERROR;
}
// Virtual sources must always be dynamically or explicitly routed
if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
@@ -1646,7 +1648,7 @@
DeviceVector devices;
sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
const char *address = NULL;
- if (policyMix != NULL) {
+ if (policyMix != nullptr) {
audio_devices_t newDeviceType;
address = policyMix->mDeviceAddress.string();
if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
@@ -1828,7 +1830,7 @@
sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
if (isSingleDeviceType(
outputDesc->devices().types(), &audio_is_remote_submix_device) &&
- policyMix != NULL &&
+ policyMix != nullptr &&
policyMix->mMixType == MIX_TYPE_RECORDERS) {
setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2275,7 +2277,7 @@
if (status == NO_ERROR && inputDesc->activeCount() == 1) {
sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
// if input maps to a dynamic policy with an activity listener, notify of state change
- if ((policyMix != NULL)
+ if ((policyMix != nullptr)
&& ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
MIX_STATE_MIXING);
@@ -2292,7 +2294,7 @@
// For remote submix (a virtual device), we open only one input per capture request.
if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
String8 address = String8("");
- if (policyMix == NULL) {
+ if (policyMix == nullptr) {
address = String8("0");
} else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
address = policyMix->mDeviceAddress;
@@ -2339,7 +2341,7 @@
} else {
sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
// if input maps to a dynamic policy with an activity listener, notify of state change
- if ((policyMix != NULL)
+ if ((policyMix != nullptr)
&& ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
MIX_STATE_IDLE);
@@ -2349,7 +2351,7 @@
// used by a policy mix of type MIX_TYPE_RECORDERS
if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
String8 address = String8("");
- if (policyMix == NULL) {
+ if (policyMix == nullptr) {
address = String8("0");
} else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
address = policyMix->mDeviceAddress;
@@ -5437,6 +5439,35 @@
return deviceTypesToBitMask(devices.types());
}
+status_t AudioPolicyManager::getDevicesForAttributes(
+ const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices) {
+ if (devices == nullptr) {
+ return BAD_VALUE;
+ }
+ // check dynamic policies but only for primary descriptors (secondary not used for audible
+ // audio routing, only used for duplication for playback capture)
+ sp<SwAudioOutputDescriptor> policyDesc;
+ status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
+ AUDIO_OUTPUT_FLAG_NONE, policyDesc, nullptr);
+ if (status != OK) {
+ return status;
+ }
+ if (policyDesc != nullptr) {
+ sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
+ if (mix != nullptr) {
+ AudioDeviceTypeAddr device(mix->mDeviceType, mix->mDeviceAddress.c_str());
+ devices->push_back(device);
+ return NO_ERROR;
+ }
+ }
+
+ DeviceVector curDevices = mEngine->getOutputDevicesForAttributes(attr, nullptr, false);
+ for (const auto& device : curDevices) {
+ devices->push_back(device->getDeviceTypeAddr());
+ }
+ return NO_ERROR;
+}
+
void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t stream) {
switch(stream) {
case AUDIO_STREAM_MUSIC:
@@ -6110,6 +6141,10 @@
case AUDIO_USAGE_VIRTUAL_SOURCE:
case AUDIO_USAGE_ASSISTANT:
case AUDIO_USAGE_CALL_ASSISTANT:
+ case AUDIO_USAGE_EMERGENCY:
+ case AUDIO_USAGE_SAFETY:
+ case AUDIO_USAGE_VEHICLE_STATUS:
+ case AUDIO_USAGE_ANNOUNCEMENT:
break;
default:
return false;
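
getDevicesForAttributes() first consults the dynamic policy mixes (primary descriptors only) and otherwise falls back to the engine's output devices for the attributes. A hedged caller-side sketch; the include path and the AudioDeviceTypeAddr mType/mAddress members are assumptions, not taken from this patch:
```
#define LOG_TAG "DevicesForAttrSketch"
#include <utils/Log.h>
#include <utils/Errors.h>
#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>
#include "AudioPolicyInterface.h"  // include path assumed relative to services/audiopolicy

// Ask the policy manager where a given usage would currently be routed,
// without opening an output. mType/mAddress are assumed to be the public
// members of AudioDeviceTypeAddr from libaudiofoundation.
void logDevicesForMedia(android::AudioPolicyInterface* apm) {
    audio_attributes_t attr = {};
    attr.usage = AUDIO_USAGE_MEDIA;
    android::AudioDeviceTypeAddrVector devices;
    if (apm->getDevicesForAttributes(attr, &devices) != android::NO_ERROR) return;
    for (const auto& device : devices) {
        ALOGI("AUDIO_USAGE_MEDIA -> type 0x%x address \"%s\"",
              static_cast<unsigned>(device.mType), device.mAddress.c_str());
    }
}
```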
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index d6c1016..7e0e16f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -193,6 +193,10 @@
// return the enabled output devices for the given stream type
virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+ virtual status_t getDevicesForAttributes(
+ const audio_attributes_t &attributes,
+ AudioDeviceTypeAddrVector *devices);
+
virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc = NULL);
virtual status_t registerEffect(const effect_descriptor_t *desc,
audio_io_handle_t io,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index d245231..c865063 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -25,6 +25,44 @@
namespace android {
+const std::vector<audio_usage_t>& SYSTEM_USAGES = {
+ AUDIO_USAGE_CALL_ASSISTANT,
+ AUDIO_USAGE_EMERGENCY,
+ AUDIO_USAGE_SAFETY,
+ AUDIO_USAGE_VEHICLE_STATUS,
+ AUDIO_USAGE_ANNOUNCEMENT
+};
+
+bool isSystemUsage(audio_usage_t usage) {
+ return std::find(std::begin(SYSTEM_USAGES), std::end(SYSTEM_USAGES), usage)
+ != std::end(SYSTEM_USAGES);
+}
+
+bool AudioPolicyService::isSupportedSystemUsage(audio_usage_t usage) {
+ return std::find(std::begin(mSupportedSystemUsages), std::end(mSupportedSystemUsages), usage)
+ != std::end(mSupportedSystemUsages);
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
+ return validateUsage(usage, IPCThreadState::self()->getCallingPid(),
+ IPCThreadState::self()->getCallingUid());
+}
+
+status_t AudioPolicyService::validateUsage(audio_usage_t usage, pid_t pid, uid_t uid) {
+ if (isSystemUsage(usage)) {
+ if (isSupportedSystemUsage(usage)) {
+ if (!modifyAudioRoutingAllowed(pid, uid)) {
+ ALOGE("permission denied: modify audio routing not allowed for uid %d", uid);
+ return PERMISSION_DENIED;
+ }
+ } else {
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+
// ----------------------------------------------------------------------------
@@ -181,6 +219,12 @@
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+
+ status_t result = validateUsage(attr->usage, pid, uid);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
ALOGV("%s()", __func__);
Mutex::Autolock _l(mLock);
@@ -199,7 +243,7 @@
}
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
- status_t result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
+ result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
config,
&flags, selectedDeviceId, portId,
secondaryOutputs,
@@ -363,6 +407,11 @@
return NO_INIT;
}
+ status_t result = validateUsage(attr->usage, pid, uid);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
audio_source_t inputSource = attr->source;
if (inputSource == AUDIO_SOURCE_DEFAULT) {
inputSource = AUDIO_SOURCE_MIC;
@@ -806,6 +855,17 @@
return mAudioPolicyManager->getDevicesForStream(stream);
}
+status_t AudioPolicyService::getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices) const
+{
+ if (mAudioPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ AutoCallerClear acc;
+ return mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), devices);
+}
+
audio_io_handle_t AudioPolicyService::getOutputForEffect(const effect_descriptor_t *desc)
{
// FIXME change return type to status_t, and return NO_INIT here
@@ -991,6 +1051,22 @@
return audioPolicyEffects->removeStreamDefaultEffect(id);
}
+status_t AudioPolicyService::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+
+ bool areAllSystemUsages = std::all_of(begin(systemUsages), end(systemUsages),
+ [](audio_usage_t usage) { return isSystemUsage(usage); });
+ if (!areAllSystemUsages) {
+ return BAD_VALUE;
+ }
+
+ mSupportedSystemUsages = systemUsages;
+ return NO_ERROR;
+}
+
status_t AudioPolicyService::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
@@ -1022,6 +1098,12 @@
ALOGV("mAudioPolicyManager == NULL");
return false;
}
+
+ status_t result = validateUsage(attributes.usage);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
Mutex::Autolock _l(mLock);
return mAudioPolicyManager->isDirectOutputSupported(config, attributes);
}
@@ -1190,6 +1272,12 @@
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+
+ status_t result = validateUsage(attributes->usage);
+ if (result != NO_ERROR) {
+ return result;
+ }
+
// startAudioSource should be created as the calling uid
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
AutoCallerClear acc;
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index c83512f..e5c36ea 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -74,6 +74,8 @@
mAudioPolicyClient = new AudioPolicyClient(this);
mAudioPolicyManager = createAudioPolicyManager(mAudioPolicyClient);
+
+ mSupportedSystemUsages = std::vector<audio_usage_t> {};
}
// load audio processing modules
sp<AudioPolicyEffects>audioPolicyEffects = new AudioPolicyEffects();
@@ -392,6 +394,14 @@
snprintf(buffer, SIZE, "Command Thread: %p\n", mAudioCommandThread.get());
result.append(buffer);
+ snprintf(buffer, SIZE, "Supported System Usages:\n");
+ result.append(buffer);
+ for (std::vector<audio_usage_t>::iterator it = mSupportedSystemUsages.begin();
+ it != mSupportedSystemUsages.end(); ++it) {
+ snprintf(buffer, SIZE, "\t%d\n", *it);
+ result.append(buffer);
+ }
+
write(fd, result.string(), result.size());
return NO_ERROR;
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 135b3ac..d9fec9a 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -127,6 +127,8 @@
virtual uint32_t getStrategyForStream(audio_stream_type_t stream);
virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+ virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+ AudioDeviceTypeAddrVector *devices) const;
virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
virtual status_t registerEffect(const effect_descriptor_t *desc,
@@ -185,6 +187,7 @@
audio_io_handle_t output,
int delayMs = 0);
virtual status_t setVoiceVolume(float volume, int delayMs = 0);
+ status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) override;
virtual bool isOffloadSupported(const audio_offload_info_t &config);
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
@@ -342,6 +345,10 @@
app_state_t apmStatFromAmState(int amState);
+ bool isSupportedSystemUsage(audio_usage_t usage);
+ status_t validateUsage(audio_usage_t usage);
+ status_t validateUsage(audio_usage_t usage, pid_t pid, uid_t uid);
+
void updateUidStates();
void updateUidStates_l();
@@ -861,6 +868,7 @@
struct audio_policy *mpAudioPolicy;
AudioPolicyInterface *mAudioPolicyManager;
AudioPolicyClient *mAudioPolicyClient;
+ std::vector<audio_usage_t> mSupportedSystemUsages;
DefaultKeyedVector< int64_t, sp<NotificationClient> > mNotificationClients;
Mutex mNotificationClientsLock; // protects mNotificationClients
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index c63feb2..496a21b 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -41,11 +41,13 @@
"api1/client2/CaptureSequencer.cpp",
"api1/client2/ZslProcessor.cpp",
"api2/CameraDeviceClient.cpp",
+ "api2/CameraOfflineSessionClient.cpp",
"api2/CompositeStream.cpp",
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
"device1/CameraHardwareInterface.cpp",
+ "device3/BufferUtils.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3OfflineSession.cpp",
"device3/Camera3Stream.cpp",
@@ -60,6 +62,8 @@
"device3/CoordinateMapper.cpp",
"device3/DistortionMapper.cpp",
"device3/ZoomRatioMapper.cpp",
+ "device3/Camera3OutputStreamInterface.cpp",
+ "device3/Camera3OutputUtils.cpp",
"gui/RingBufferConsumer.cpp",
"utils/CameraThreadState.cpp",
"hidl/AidlCameraDeviceCallbacks.cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 18ed3a5..2fe7179 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -128,6 +128,7 @@
static constexpr int32_t kVendorClientScore = 200;
// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
static constexpr int32_t kVendorClientState = 1;
+const String8 CameraService::kOfflineDevice("offline-");
Mutex CameraService::sProxyMutex;
sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
@@ -394,7 +395,7 @@
// to this device until the status changes
updateStatus(StatusInternal::NOT_PRESENT, id);
- sp<BasicClient> clientToDisconnect;
+ sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
{
// Don't do this in updateStatus to avoid deadlock over mServiceLock
Mutex::Autolock lock(mServiceLock);
@@ -402,23 +403,14 @@
// Remove cached shim parameters
state->setShimParams(CameraParameters());
- // Remove the client from the list of active clients, if there is one
- clientToDisconnect = removeClientLocked(id);
+ // Remove online as well as offline client from the list of active clients,
+ // if they are present
+ clientToDisconnectOnline = removeClientLocked(id);
+ clientToDisconnectOffline = removeClientLocked(kOfflineDevice + id);
}
- // Disconnect client
- if (clientToDisconnect.get() != nullptr) {
- ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
- __FUNCTION__, id.string());
- // Notify the client of disconnection
- clientToDisconnect->notifyError(
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
- CaptureResultExtras{});
- // Ensure not in binder RPC so client disconnect PID checks work correctly
- LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
- "onDeviceStatusChanged must be called from the camera service process!");
- clientToDisconnect->disconnect();
- }
+ disconnectClient(id, clientToDisconnectOnline);
+ disconnectClient(kOfflineDevice + id, clientToDisconnectOffline);
removeStates(id);
} else {
@@ -431,6 +423,21 @@
}
+void CameraService::disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect) {
+ if (clientToDisconnect.get() != nullptr) {
+ ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
+ __FUNCTION__, id.string());
+ // Notify the client of disconnection
+ clientToDisconnect->notifyError(
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+ CaptureResultExtras{});
+ // Ensure not in binder RPC so client disconnect PID checks work correctly
+ LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
+ "onDeviceStatusChanged must be called from the camera service process!");
+ clientToDisconnect->disconnect();
+ }
+}
+
void CameraService::onTorchStatusChanged(const String8& cameraId,
TorchModeStatus newStatus) {
Mutex::Autolock al(mTorchStatusMutex);
@@ -761,6 +768,7 @@
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
+ case CAMERA_DEVICE_API_VERSION_3_6:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -1695,6 +1703,77 @@
return ret;
}
+status_t CameraService::addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient) {
+ if (offlineClient.get() == nullptr) {
+ return BAD_VALUE;
+ }
+
+ {
+ // Acquire mServiceLock and prevent other clients from connecting
+ std::unique_ptr<AutoConditionLock> lock =
+ AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
+
+ if (lock == nullptr) {
+ ALOGE("%s: (PID %d) rejected (too many other clients connecting)."
+ , __FUNCTION__, offlineClient->getClientPid());
+ return TIMED_OUT;
+ }
+
+ auto onlineClientDesc = mActiveClientManager.get(cameraId);
+ if (onlineClientDesc.get() == nullptr) {
+ ALOGE("%s: No active online client using camera id: %s", __FUNCTION__,
+ cameraId.c_str());
+ return BAD_VALUE;
+ }
+
+ // Offline clients do not evict or conflict with other online devices. Resource sharing
+ // conflicts are handled by the camera provider which will either succeed or fail before
+ // reaching this method.
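+ // The offline descriptor reuses the online client's priority score, owner id and state so
+ // that the client manager ranks both entries consistently.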
+ const auto& onlinePriority = onlineClientDesc->getPriority();
+ auto offlineClientDesc = CameraClientManager::makeClientDescriptor(
+ kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
+ /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
+ onlineClientDesc->getOwnerId(), onlinePriority.getState());
+
+ // Allow only one offline device per camera
+ auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
+ if (!incompatibleClients.empty()) {
+ ALOGE("%s: Incompatible offline clients present!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto err = offlineClient->initialize(mCameraProviderManager, mMonitorTags);
+ if (err != OK) {
+ ALOGE("%s: Could not initialize offline client.", __FUNCTION__);
+ return err;
+ }
+
+ auto evicted = mActiveClientManager.addAndEvict(offlineClientDesc);
+ if (evicted.size() > 0) {
+ for (auto& i : evicted) {
+ ALOGE("%s: Invalid state: Offline client for camera %s was not removed ",
+ __FUNCTION__, i->getKey().string());
+ }
+
+ LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, offline clients not evicted "
+ "properly", __FUNCTION__);
+
+ return BAD_VALUE;
+ }
+
+ logConnectedOffline(offlineClientDesc->getKey(),
+ static_cast<int>(offlineClientDesc->getOwnerId()),
+ String8(offlineClient->getPackageName()));
+
+ sp<IBinder> remoteCallback = offlineClient->getRemote();
+ if (remoteCallback != nullptr) {
+ remoteCallback->linkToDeath(this);
+ }
+ } // lock is destroyed, allow further connect calls
+
+ return OK;
+}
+
Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
Mutex::Autolock lock(mServiceLock);
@@ -2093,6 +2172,7 @@
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
+ case CAMERA_DEVICE_API_VERSION_3_6:
ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
__FUNCTION__, id.string());
*isSupported = true;
@@ -2298,6 +2378,13 @@
clientPackage, clientPid));
}
+void CameraService::logDisconnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage) {
+ // Log the client disconnected from the offline device
+ logEvent(String8::format("DISCONNECT offline device %s client for package %s (PID %d)",
+ cameraId, clientPackage, clientPid));
+}
+
void CameraService::logConnected(const char* cameraId, int clientPid,
const char* clientPackage) {
// Log the clients evicted
@@ -2305,6 +2392,13 @@
clientPackage, clientPid));
}
+void CameraService::logConnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage) {
+ // Log the client connected to the offline device
+ logEvent(String8::format("CONNECT offline device %s client for package %s (PID %d)", cameraId,
+ clientPackage, clientPid));
+}
+
void CameraService::logRejected(const char* cameraId, int clientPid,
const char* clientPackage, const char* reason) {
// Log the client rejected
@@ -2742,6 +2836,7 @@
if (mAppOpsManager == nullptr) {
return;
}
+ // TODO : add offline camera session case
if (op != AppOpsManager::OP_CAMERA) {
ALOGW("Unexpected app ops notification received: %d", op);
return;
@@ -2776,20 +2871,6 @@
// ----------------------------------------------------------------------------
-sp<CameraService> CameraService::OfflineClient::sCameraService;
-
-status_t CameraService::OfflineClient::startCameraOps() {
- // TODO
- return OK;
-}
-
-status_t CameraService::OfflineClient::finishCameraOps() {
- // TODO
- return OK;
-}
-
-// ----------------------------------------------------------------------------
-
void CameraService::Client::notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras) {
(void) resultExtras;
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index dae9c09..726cb0f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -68,6 +68,7 @@
{
friend class BinderService<CameraService>;
friend class CameraClient;
+ friend class CameraOfflineSessionClient;
public:
class Client;
class BasicClient;
@@ -185,6 +186,9 @@
// Monitored UIDs availability notification
void notifyMonitoredUids();
+ // Register an offline client for a given active camera id
+ status_t addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient);
+
/////////////////////////////////////////////////////////////////////
// Client functionality
@@ -310,10 +314,9 @@
sp<IBinder> mRemoteBinder; // immutable after constructor
// permissions management
- status_t startCameraOps();
- status_t finishCameraOps();
+ virtual status_t startCameraOps();
+ virtual status_t finishCameraOps();
- private:
std::unique_ptr<AppOpsManager> mAppOpsManager = nullptr;
class OpsCallback : public BnAppOpsCallback {
@@ -402,87 +405,6 @@
int mCameraId; // All API1 clients use integer camera IDs
}; // class Client
-
- // Client for offline session. Note that offline session client does not affect camera service's
- // client arbitration logic. It is camera HAL's decision to decide whether a normal camera
- // client is conflicting with existing offline client(s).
- // The other distinctive difference between offline clients and normal clients is that normal
- // clients are created through ICameraService binder calls, while the offline session client
- // is created through ICameraDeviceUser::switchToOffline call.
- class OfflineClient : public virtual RefBase {
-
- virtual status_t dump(int fd, const Vector<String16>& args) = 0;
-
- // Block the client form using the camera
- virtual void block() = 0;
-
- // Return the package name for this client
- virtual String16 getPackageName() const = 0;
-
- // Notify client about a fatal error
- // TODO: maybe let impl notify within block?
- virtual void notifyError(int32_t errorCode,
- const CaptureResultExtras& resultExtras) = 0;
-
- // Get the UID of the application client using this
- virtual uid_t getClientUid() const = 0;
-
- // Get the PID of the application client using this
- virtual int getClientPid() const = 0;
-
- protected:
- OfflineClient(const sp<CameraService>& cameraService,
- const String16& clientPackageName,
- const String8& cameraIdStr,
- int clientPid,
- uid_t clientUid,
- int servicePid): mCameraIdStr(cameraIdStr),
- mClientPackageName(clientPackageName), mClientPid(clientPid),
- mClientUid(clientUid), mServicePid(servicePid) {
- if (sCameraService == nullptr) {
- sCameraService = cameraService;
- }
- }
-
- virtual ~OfflineClient() { /*TODO*/ }
-
- // these are initialized in the constructor.
- static sp<CameraService> sCameraService;
- const String8 mCameraIdStr;
- String16 mClientPackageName;
- pid_t mClientPid;
- const uid_t mClientUid;
- const pid_t mServicePid;
- bool mDisconnected;
-
- // - The app-side Binder interface to receive callbacks from us
- sp<IBinder> mRemoteBinder; // immutable after constructor
-
- // permissions management
- status_t startCameraOps();
- status_t finishCameraOps();
-
- private:
- std::unique_ptr<AppOpsManager> mAppOpsManager = nullptr;
-
- class OpsCallback : public BnAppOpsCallback {
- public:
- explicit OpsCallback(wp<OfflineClient> client) : mClient(client) {}
- virtual void opChanged(int32_t /*op*/, const String16& /*packageName*/) {
- //TODO
- }
-
- private:
- wp<OfflineClient> mClient;
-
- }; // class OpsCallback
-
- sp<OpsCallback> mOpsCallback;
-
- // IAppOpsCallback interface, indirected through opListener
- // virtual void opChanged(int32_t op, const String16& packageName);
- }; // class OfflineClient
-
/**
* A listener class that implements the LISTENER interface for use with a ClientManager, and
* implements the following methods:
@@ -872,6 +794,17 @@
void logDisconnected(const char* cameraId, int clientPid, const char* clientPackage);
/**
+ * Add an event log message that a client has been disconnected from offline device.
+ */
+ void logDisconnectedOffline(const char* cameraId, int clientPid, const char* clientPackage);
+
+ /**
+ * Add an event log message that an offline client has been connected.
+ */
+ void logConnectedOffline(const char* cameraId, int clientPid,
+ const char* clientPackage);
+
+ /**
* Add an event log message that a client has been connected.
*/
void logConnected(const char* cameraId, int clientPid, const char* clientPackage);
@@ -1095,6 +1028,12 @@
void broadcastTorchModeStatus(const String8& cameraId,
hardware::camera::common::V1_0::TorchModeStatus status);
+ void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
+
+ // Regular online and offline devices must not be in conflict at camera service layer.
+ // Use separate keys for offline devices.
+ static const String8 kOfflineDevice;
+
// TODO: right now each BasicClient holds one AppOpsManager instance.
// We can refactor the code so all of clients share this instance
AppOpsManager mAppOps;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 28421ba..9deb662 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -470,7 +470,8 @@
}
binder::Status CameraDeviceClient::endConfigure(int operatingMode,
- const hardware::camera2::impl::CameraMetadataNative& sessionParams) {
+ const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ std::vector<int>* offlineStreamIds /*out*/) {
ATRACE_CALL();
ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
__FUNCTION__, mInputStream.configured ? 1 : 0,
@@ -479,6 +480,12 @@
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+ if (offlineStreamIds == nullptr) {
+ String8 msg = String8::format("Invalid offline stream ids");
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+
Mutex::Autolock icl(mBinderSerializationLock);
if (!mDevice.get()) {
@@ -502,15 +509,41 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
} else {
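+ // Ask the device which of the configured streams the HAL reported as offline-capable;
+ // composite stream support is derived from their internal streams below.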
+ offlineStreamIds->clear();
+ mDevice->getOfflineStreamIds(offlineStreamIds);
+
for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
err = mCompositeStreamMap.valueAt(i)->configureStream();
- if (err != OK ) {
+ if (err != OK) {
String8 msg = String8::format("Camera %s: Error configuring composite "
"streams: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
break;
}
+
+ // Composite streams can only support offline mode in case all individual internal
+ // streams are also supported.
+ std::vector<int> internalStreams;
+ mCompositeStreamMap.valueAt(i)->insertCompositeStreamIds(&internalStreams);
+ offlineStreamIds->erase(std::remove_if(offlineStreamIds->begin(), offlineStreamIds->end(),
+ [&internalStreams] (int streamId) {
+ auto it = std::find(internalStreams.begin(), internalStreams.end(),
+ streamId);
+ if (it != internalStreams.end()) {
+ internalStreams.erase(it);
+ return true;
+ }
+
+ return false;
+ }), offlineStreamIds->end());
+ if (internalStreams.empty()) {
+ offlineStreamIds->push_back(mCompositeStreamMap.valueAt(i)->getStreamId());
+ }
+ }
+
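+ // Remember which stream ids may later be moved to an offline session in switchToOffline.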
+ for (const auto& offlineStreamId : *offlineStreamIds) {
+ mStreamInfoMap[offlineStreamId].supportsOffline = true;
}
}
@@ -1903,7 +1936,7 @@
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
- const std::vector<view::Surface>& offlineOutputs,
+ const std::vector<int>& offlineOutputIds,
/*out*/
sp<hardware::camera2::ICameraOfflineSession>* session) {
ATRACE_CALL();
@@ -1917,8 +1950,8 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- if (offlineOutputs.empty()) {
- String8 msg = String8::format("Offline outputs must not be empty");
+ if (offlineOutputIds.empty()) {
+ String8 msg = String8::format("Offline surfaces must not be empty");
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -1929,24 +1962,47 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- std::vector<int32_t> offlineStreamIds(offlineOutputs.size());
- for (auto& surface : offlineOutputs) {
- sp<IBinder> binder = IInterface::asBinder(surface.graphicBufferProducer);
- ssize_t index = mStreamMap.indexOfKey(binder);
+ std::vector<int32_t> offlineStreamIds;
+ offlineStreamIds.reserve(offlineOutputIds.size());
+ KeyedVector<sp<IBinder>, sp<CompositeStream>> offlineCompositeStreamMap;
+ for (const auto& streamId : offlineOutputIds) {
+ ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Offline output is invalid");
+ String8 msg = String8::format("Offline surface with id: %d is not registered",
+ streamId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- // TODO: Also check whether the offline output is supported by Hal for offline mode.
- sp<Surface> s = new Surface(surface.graphicBufferProducer);
- bool isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s);
- isCompositeStream |= camera3::HeicCompositeStream::isHeicCompositeStream(s);
- if (isCompositeStream) {
- // TODO: Add composite specific handling
- } else {
- offlineStreamIds.push_back(mStreamMap.valueAt(index).streamId());
+ if (!mStreamInfoMap[streamId].supportsOffline) {
+ String8 msg = String8::format("Offline surface with id: %d doesn't support "
+ "offline mode", streamId);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+
+ bool isCompositeStream = false;
+ for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
+ sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
+ isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
+ camera3::HeicCompositeStream::isHeicCompositeStream(s);
+ if (isCompositeStream) {
+ auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
+ if (compositeIdx == NAME_NOT_FOUND) {
+ ALOGE("%s: Unknown composite stream", __FUNCTION__);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Unknown composite stream");
+ }
+
+ mCompositeStreamMap.valueAt(compositeIdx)->insertCompositeStreamIds(
+ &offlineStreamIds);
+ offlineCompositeStreamMap.add(mCompositeStreamMap.keyAt(compositeIdx),
+ mCompositeStreamMap.valueAt(compositeIdx));
+ break;
+ }
+ }
+
+ if (!isCompositeStream) {
+ offlineStreamIds.push_back(streamId);
}
}
@@ -1958,16 +2014,36 @@
mCameraIdStr.string(), strerror(ret), ret);
}
- sp<CameraOfflineSessionClient> offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, cameraCb, mClientPackageName, mCameraIdStr, mClientPid, mClientUid,
- mServicePid);
- ret = offlineClient->initialize();
+ sp<CameraOfflineSessionClient> offlineClient;
+ if (offlineSession.get() != nullptr) {
+ offlineClient = new CameraOfflineSessionClient(sCameraService,
+ offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
+ mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+ ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
+ }
+
if (ret == OK) {
- // TODO: We need to update mStreamMap, mConfiguredOutputs
+ // A successful offline session switch must reset the current camera client
+ // and release any resources occupied by previously configured streams.
+ mStreamMap.clear();
+ mConfiguredOutputs.clear();
+ mDeferredStreams.clear();
+ mStreamInfoMap.clear();
+ mCompositeStreamMap.clear();
+ mInputStream = {false, 0, 0, 0, 0};
} else {
- return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %s: Failed to initilize offline session: %s (%d)",
- mCameraIdStr.string(), strerror(ret), ret);
+ switch(ret) {
+ case BAD_VALUE:
+ return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Illegal argument to HAL module for camera \"%s\"", mCameraIdStr.c_str());
+ case TIMED_OUT:
+ return STATUS_ERROR_FMT(CameraService::ERROR_CAMERA_IN_USE,
+ "Camera \"%s\" is already open", mCameraIdStr.c_str());
+ default:
+ return STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+ "Failed to initialize camera \"%s\": %s (%d)", mCameraIdStr.c_str(),
+ strerror(-ret), ret);
+ }
}
*session = offlineClient;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 0a8f377..a3a3189 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -92,7 +92,9 @@
virtual binder::Status beginConfigure() override;
virtual binder::Status endConfigure(int operatingMode,
- const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
+ const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ /*out*/
+ std::vector<int>* offlineStreamIds) override;
// Verify specific session configuration.
virtual binder::Status isSessionConfigurationSupported(
@@ -160,7 +162,7 @@
virtual binder::Status switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
- const std::vector<view::Surface>& offlineOutputs,
+ const std::vector<int>& offlineOutputIds,
/*out*/
sp<hardware::camera2::ICameraOfflineSession>* session) override;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
new file mode 100644
index 0000000..af7b9e1
--- /dev/null
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraOfflineClient"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "CameraOfflineSessionClient.h"
+#include "utils/CameraThreadState.h"
+#include <utils/Trace.h>
+
+namespace android {
+
+using binder::Status;
+
+status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const String8&) {
+ ATRACE_CALL();
+
+ // Verify ops permissions
+ auto res = startCameraOps();
+ if (res != OK) {
+ return res;
+ }
+
+ if (mOfflineSession.get() == nullptr) {
+ ALOGE("%s: Camera %s: No valid offline session",
+ __FUNCTION__, mCameraIdStr.string());
+ return NO_INIT;
+ }
+
+ wp<NotificationListener> weakThis(this);
+ res = mOfflineSession->initialize(weakThis);
+ if (res != OK) {
+ ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
+ __FUNCTION__, mCameraIdStr.string(), strerror(-res), res);
+ return res;
+ }
+
+ return OK;
+}
+
+status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
+ return BasicClient::dump(fd, args);
+}
+
+status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& /*args*/) {
+ String8 result;
+
+ result = " Offline session dump:\n";
+ write(fd, result.string(), result.size());
+
+ if (mOfflineSession.get() == nullptr) {
+ result = " *** Offline session is detached\n";
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+ }
+
+ auto res = mOfflineSession->dump(fd);
+ if (res != OK) {
+ result = String8::format(" Error dumping offline session: %s (%d)",
+ strerror(-res), res);
+ write(fd, result.string(), result.size());
+ }
+
+ return OK;
+}
+
+binder::Status CameraOfflineSessionClient::disconnect() {
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ binder::Status res = Status::ok();
+ if (mDisconnected) {
+ return res;
+ }
+ // Allow both client and the media server to disconnect at all times
+ int callingPid = CameraThreadState::getCallingPid();
+ if (callingPid != mClientPid &&
+ callingPid != mServicePid) {
+ return res;
+ }
+
+ mDisconnected = true;
+
+ sCameraService->removeByClient(this);
+ sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, String8(mClientPackageName));
+
+ sp<IBinder> remote = getRemote();
+ if (remote != nullptr) {
+ remote->unlinkToDeath(sCameraService);
+ }
+
+ finishCameraOps();
+ ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
+ mCameraIdStr.string(), mClientPid);
+
+ // client shouldn't be able to call into us anymore
+ mClientPid = 0;
+
+ if (mOfflineSession.get() != nullptr) {
+ auto ret = mOfflineSession->disconnect();
+ if (ret != OK) {
+ ALOGE("%s: Failed disconnecting from offline session %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+ mOfflineSession = nullptr;
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+ if (ret != OK) {
+ ALOGE("%s: Failed removing composite stream %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+ }
+ mCompositeStreamMap.clear();
+
+ return res;
+}
+
+void CameraOfflineSessionClient::notifyError(int32_t errorCode,
+ const CaptureResultExtras& resultExtras) {
+ // Thread safe. Don't bother locking.
+ // Composites can have multiple internal streams. Error notifications coming from such internal
+ // streams may need to remain within camera service.
+ bool skipClientNotification = false;
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
+ }
+
+ if ((mRemoteCallback.get() != nullptr) && (!skipClientNotification)) {
+ mRemoteCallback->onDeviceError(errorCode, resultExtras);
+ }
+}
+
+status_t CameraOfflineSessionClient::startCameraOps() {
+ ATRACE_CALL();
+ ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
+ __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+
+ if (mAppOpsManager != nullptr) {
+ // Notify app ops that the camera is not available
+ mOpsCallback = new OpsCallback(this);
+ int32_t res;
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+ mClientPackageName, mOpsCallback);
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
+ mClientUid, mClientPackageName, /*startIfModeDefault*/ false);
+
+ if (res == AppOpsManager::MODE_ERRORED) {
+ ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
+ return PERMISSION_DENIED;
+ }
+
+ if (res == AppOpsManager::MODE_IGNORED) {
+ ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
+ // Return the same error as for device policy manager rejection
+ return -EACCES;
+ }
+ }
+
+ mOpsActive = true;
+
+ // Start monitoring the client UID state for the duration of the offline session
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+
+ return OK;
+}
+
+status_t CameraOfflineSessionClient::finishCameraOps() {
+ ATRACE_CALL();
+
+ // Check if startCameraOps succeeded, and if so, finish the camera op
+ if (mOpsActive) {
+ // Notify app ops that the camera is available again
+ if (mAppOpsManager != nullptr) {
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
+ mClientPackageName);
+ mOpsActive = false;
+ }
+ }
+ // Always stop watching, even if no camera op is active
+ if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ }
+ mOpsCallback.clear();
+
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+
+ return OK;
+}
+
+void CameraOfflineSessionClient::onResultAvailable(const CaptureResult& result) {
+ ATRACE_CALL();
+ ALOGV("%s", __FUNCTION__);
+
+ if (mRemoteCallback.get() != NULL) {
+ mRemoteCallback->onResultReceived(result.mMetadata, result.mResultExtras,
+ result.mPhysicalMetadatas);
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
+ }
+}
+
+void CameraOfflineSessionClient::notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp) {
+
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onCaptureStarted(resultExtras, timestamp);
+ }
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
+ }
+}
+
+void CameraOfflineSessionClient::notifyIdle() {
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onDeviceIdle();
+ }
+}
+
+void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Autofocus state now %d, last trigger %d",
+ __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Autoexposure state now %d, last trigger %d",
+ __FUNCTION__, newState, triggerId);
+}
+
+void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
+
+ ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
+ triggerId);
+}
+
+void CameraOfflineSessionClient::notifyPrepared(int /*streamId*/) {
+ ALOGE("%s: Unexpected stream prepare notification in offline mode!", __FUNCTION__);
+ notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+}
+
+void CameraOfflineSessionClient::notifyRequestQueueEmpty() {
+ if (mRemoteCallback.get() != nullptr) {
+ mRemoteCallback->onRequestQueueEmpty();
+ }
+}
+
+void CameraOfflineSessionClient::notifyRepeatingRequestError(long /*lastFrameNumber*/) {
+ ALOGE("%s: Unexpected repeating request error in offline mode!", __FUNCTION__);
+ notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+}
+
+// ----------------------------------------------------------------------------
+} // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index cb83e29..b0f000d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -19,65 +19,88 @@
#include <android/hardware/camera2/BnCameraOfflineSession.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include "common/FrameProcessorBase.h"
+#include "common/CameraDeviceBase.h"
#include "CameraService.h"
+#include "CompositeStream.h"
namespace android {
using android::hardware::camera2::ICameraDeviceCallbacks;
+using camera3::CompositeStream;
+// Client for an offline session. Note that the offline session client does not affect the camera
+// service's client arbitration logic. It is up to the camera HAL to decide whether a normal
+// camera client conflicts with existing offline client(s).
+// The other key difference between offline clients and normal clients is that normal clients are
+// created through ICameraService binder calls, while the offline session client is created
+// through the ICameraDeviceUser::switchToOffline call.
class CameraOfflineSessionClient :
- public CameraService::OfflineClient,
- public hardware::camera2::BnCameraOfflineSession
- // public camera2::FrameProcessorBase::FilteredListener?
+ public CameraService::BasicClient,
+ public hardware::camera2::BnCameraOfflineSession,
+ public camera2::FrameProcessorBase::FilteredListener,
+ public NotificationListener
{
public:
CameraOfflineSessionClient(
const sp<CameraService>& cameraService,
sp<CameraOfflineSessionBase> session,
+ const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- const String8& cameraIdStr,
+ const std::unique_ptr<String16>& clientFeatureId,
+ const String8& cameraIdStr, int cameraFacing,
int clientPid, uid_t clientUid, int servicePid) :
- CameraService::OfflineClient(cameraService, clientPackageName,
- cameraIdStr, clientPid, clientUid, servicePid),
- mRemoteCallback(remoteCallback), mOfflineSession(session) {}
+ CameraService::BasicClient(
+ cameraService,
+ IInterface::asBinder(remoteCallback),
+ clientPackageName, clientFeatureId,
+ cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+ mRemoteCallback(remoteCallback), mOfflineSession(session),
+ mCompositeStreamMap(offlineCompositeStreamMap) {}
- ~CameraOfflineSessionClient() {}
+ virtual ~CameraOfflineSessionClient() {}
- virtual binder::Status disconnect() override { return binder::Status::ok(); }
-
- virtual status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override {
- return OK;
+ sp<IBinder> asBinderWrapper() override {
+ return IInterface::asBinder(this);
}
- // Block the client form using the camera
- virtual void block() override {};
+ binder::Status disconnect() override;
- // Return the package name for this client
- virtual String16 getPackageName() const override { String16 ret; return ret; };
+ status_t dump(int /*fd*/, const Vector<String16>& /*args*/) override;
- // Notify client about a fatal error
- // TODO: maybe let impl notify within block?
- virtual void notifyError(int32_t /*errorCode*/,
- const CaptureResultExtras& /*resultExtras*/) override {}
+ status_t dumpClient(int /*fd*/, const Vector<String16>& /*args*/) override;
- // Get the UID of the application client using this
- virtual uid_t getClientUid() const override { return 0; }
+ status_t initialize(sp<CameraProviderManager> /*manager*/,
+ const String8& /*monitorTags*/) override;
- // Get the PID of the application client using this
- virtual int getClientPid() const override { return 0; }
+ // permissions management
+ status_t startCameraOps() override;
+ status_t finishCameraOps() override;
- status_t initialize() {
- // TODO: Talk to camera service to add the offline session client book keeping
- return OK;
- }
+ // FilteredResultListener API
+ void onResultAvailable(const CaptureResult& result) override;
+
+ // NotificationListener API
+ void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
+ void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+ void notifyIdle() override;
+ void notifyAutoFocus(uint8_t newState, int triggerId) override;
+ void notifyAutoExposure(uint8_t newState, int triggerId) override;
+ void notifyAutoWhitebalance(uint8_t newState, int triggerId) override;
+ void notifyPrepared(int streamId) override;
+ void notifyRequestQueueEmpty() override;
+ void notifyRepeatingRequestError(long lastFrameNumber) override;
+
private:
- sp<CameraOfflineSessionBase> mSession;
+ mutable Mutex mBinderSerializationLock;
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
- // This class is responsible to convert HAL callbacks to AIDL callbacks
sp<CameraOfflineSessionBase> mOfflineSession;
+
+ // Offline composite stream map, output surface -> composite stream
+ KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
};
} // namespace android
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index a401a82..de894f3 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -64,6 +64,10 @@
virtual status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) = 0;
+ // Attach the internal composite stream ids.
+ virtual status_t insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) = 0;
+
// Return composite stream id.
virtual int getStreamId() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index ac6f8d6..acad8c6 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -683,6 +683,18 @@
return NO_ERROR;
}
+status_t DepthCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mDepthStreamId);
+ compositeStreamIds->push_back(mBlobStreamId);
+
+ return OK;
+}
+
void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
// Processing can continue even in case of result errors.
// At the moment depth composite stream processing relies mainly on static camera
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 60c6b1e..1bf714d 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -56,6 +56,7 @@
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
int getStreamId() override { return mBlobStreamId; }
// CpuConsumer listener implementation
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 9cdad70..d25e467 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -504,6 +504,18 @@
return NO_ERROR;
}
+status_t HeicCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mAppSegmentStreamId);
+ compositeStreamIds->push_back(mMainImageStreamId);
+
+ return OK;
+}
+
void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
Mutex::Autolock l(mMutex);
if (mErrorState) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index e88471d..8fc521e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -55,6 +55,8 @@
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+
void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
int getStreamId() override { return mMainImageStreamId; }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 8792f9a..0a41776 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -111,7 +111,7 @@
return res;
}
- wp<CameraDeviceBase::NotificationListener> weakThis(this);
+ wp<NotificationListener> weakThis(this);
res = mDevice->setNotifyCallback(weakThis);
return OK;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 12cba0b..042f5aa 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -29,7 +29,7 @@
template <typename TClientBase>
class Camera2ClientBase :
public TClientBase,
- public CameraDeviceBase::NotificationListener
+ public NotificationListener
{
public:
typedef typename TClientBase::TCamCallbacks TCamCallbacks;
@@ -61,7 +61,7 @@
virtual status_t dumpClient(int fd, const Vector<String16>& args);
/**
- * CameraDeviceBase::NotificationListener implementation
+ * NotificationListener implementation
*/
virtual void notifyError(int32_t errorCode,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.cpp b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
index 6c4e87f..0efe4bc 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.cpp
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
@@ -24,7 +24,4 @@
CameraDeviceBase::~CameraDeviceBase() {
}
-CameraDeviceBase::NotificationListener::~NotificationListener() {
-}
-
} // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 1026fdf..2f01198 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -234,6 +234,12 @@
virtual status_t configureStreams(const CameraMetadata& sessionParams,
int operatingMode = 0) = 0;
+ /**
+ * Retrieve a list of all stream ids that the HAL advertised as capable of
+ * supporting offline processing mode after the last stream configuration.
+ */
+ virtual void getOfflineStreamIds(std::vector<int> *offlineStreamIds) = 0;
+
// get the buffer producer of the input stream
virtual status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) = 0;
@@ -259,35 +265,6 @@
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0;
/**
- * Abstract class for HAL notification listeners
- */
- class NotificationListener : public virtual RefBase {
- public:
- // The set of notifications is a merge of the notifications required for
- // API1 and API2.
-
- // Required for API 1 and 2
- virtual void notifyError(int32_t errorCode,
- const CaptureResultExtras &resultExtras) = 0;
-
- // Required only for API2
- virtual void notifyIdle() = 0;
- virtual void notifyShutter(const CaptureResultExtras &resultExtras,
- nsecs_t timestamp) = 0;
- virtual void notifyPrepared(int streamId) = 0;
- virtual void notifyRequestQueueEmpty() = 0;
-
- // Required only for API1
- virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
- virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
- virtual void notifyAutoWhitebalance(uint8_t newState,
- int triggerId) = 0;
- virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
- protected:
- virtual ~NotificationListener();
- };
-
- /**
* Connect HAL notifications to a listener. Overwrites previous
* listener. Set to NULL to stop receiving notifications.
*/
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 3862521..05ea7fb 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -25,10 +25,42 @@
namespace android {
+/**
+ * Abstract class for HAL notification listeners
+ */
+class NotificationListener : public virtual RefBase {
+ public:
+ // The set of notifications is a merge of the notifications required for
+ // API1 and API2.
+
+ // Required for API 1 and 2
+ virtual void notifyError(int32_t errorCode,
+ const CaptureResultExtras &resultExtras) = 0;
+
+ // Required only for API2
+ virtual void notifyIdle() = 0;
+ virtual void notifyShutter(const CaptureResultExtras &resultExtras,
+ nsecs_t timestamp) = 0;
+ virtual void notifyPrepared(int streamId) = 0;
+ virtual void notifyRequestQueueEmpty() = 0;
+
+ // Required only for API1
+ virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0;
+ virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
+ virtual void notifyAutoWhitebalance(uint8_t newState,
+ int triggerId) = 0;
+ virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
+ protected:
+ virtual ~NotificationListener() {}
+};
+
class CameraOfflineSessionBase : public virtual RefBase {
public:
virtual ~CameraOfflineSessionBase();
+ virtual status_t initialize(
+ wp<NotificationListener> listener) = 0;
+
// The session's original camera ID
virtual const String8& getId() const = 0;
@@ -36,8 +68,6 @@
virtual status_t dump(int fd) = 0;
- virtual status_t abort() = 0;
-
/**
* Capture result passing
*/
diff --git a/services/camera/libcameraservice/device3/BufferUtils.cpp b/services/camera/libcameraservice/device3/BufferUtils.cpp
new file mode 100644
index 0000000..cc29390
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.cpp
@@ -0,0 +1,277 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-BufUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "device3/BufferUtils.h"
+
+namespace android {
+namespace camera3 {
+
+camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
+ using hardware::camera::device::V3_2::BufferStatus;
+
+ switch (status) {
+ case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
+ case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ return CAMERA3_BUFFER_STATUS_ERROR;
+}
+
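+// The take* helpers below move buffer bookkeeping from another BufferRecords instance into this
+// one (for example when handing streams over to an offline session); the destination maps are
+// expected to be empty, and a non-empty destination is logged as an error.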
+void BufferRecords::takeInflightBufferMap(BufferRecords& other) {
+ std::lock_guard<std::mutex> oLock(other.mInflightLock);
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ if (mInflightBufferMap.size() > 0) {
+ ALOGE("%s: inflight map is set in non-empty state!", __FUNCTION__);
+ }
+ mInflightBufferMap = std::move(other.mInflightBufferMap);
+ other.mInflightBufferMap.clear();
+}
+
+void BufferRecords::takeRequestedBufferMap(BufferRecords& other) {
+ std::lock_guard<std::mutex> oLock(other.mRequestedBuffersLock);
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ if (mRequestedBufferMap.size() > 0) {
+ ALOGE("%s: requested buffer map is set in non-empty state!", __FUNCTION__);
+ }
+ mRequestedBufferMap = std::move(other.mRequestedBufferMap);
+ other.mRequestedBufferMap.clear();
+}
+
+void BufferRecords::takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams) {
+ std::lock_guard<std::mutex> oLock(other.mBufferIdMapLock);
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ if (mBufferIdMaps.size() > 0) {
+ ALOGE("%s: buffer ID map is set in non-empty state!", __FUNCTION__);
+ }
+ for (auto streamId : streams) {
+ mBufferIdMaps.insert({streamId, std::move(other.mBufferIdMaps.at(streamId))});
+ }
+ other.mBufferIdMaps.clear();
+}
+
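+// Returns {true, newId} the first time a buffer is seen for this stream (a new cache entry is
+// created), or {false, existingId} when the buffer is already cached.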
+std::pair<bool, uint64_t> BufferRecords::getBufferId(
+ const buffer_handle_t& buf, int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+
+ BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+ auto it = bIdMap.find(buf);
+ if (it == bIdMap.end()) {
+ bIdMap[buf] = mNextBufferId++;
+ ALOGV("stream %d now have %zu buffer caches, buf %p",
+ streamId, bIdMap.size(), buf);
+ return std::make_pair(true, mNextBufferId - 1);
+ } else {
+ return std::make_pair(false, it->second);
+ }
+}
+
+void BufferRecords::tryCreateBufferCache(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ if (mBufferIdMaps.count(streamId) == 0) {
+ mBufferIdMaps.emplace(streamId, BufferIdMap{});
+ }
+}
+
+void BufferRecords::removeInactiveBufferCaches(const std::set<int32_t>& activeStreams) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ for (auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+ int streamId = it->first;
+ bool active = activeStreams.count(streamId) > 0;
+ if (!active) {
+ it = mBufferIdMaps.erase(it);
+ } else {
+ ++it;
+ }
+ }
+}
+
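+// Drops a single cached buffer for the given stream and returns its buffer id, or
+// BUFFER_ID_NO_BUFFER when the stream or buffer is not tracked.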
+uint64_t BufferRecords::removeOneBufferCache(int streamId, const native_handle_t* handle) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ uint64_t bufferId = BUFFER_ID_NO_BUFFER;
+ auto mapIt = mBufferIdMaps.find(streamId);
+ if (mapIt == mBufferIdMaps.end()) {
+ // streamId might be from a deleted stream here
+ ALOGI("%s: stream %d has been removed",
+ __FUNCTION__, streamId);
+ return BUFFER_ID_NO_BUFFER;
+ }
+ BufferIdMap& bIdMap = mapIt->second;
+ auto it = bIdMap.find(handle);
+ if (it == bIdMap.end()) {
+ ALOGW("%s: cannot find buffer %p in stream %d",
+ __FUNCTION__, handle, streamId);
+ return BUFFER_ID_NO_BUFFER;
+ } else {
+ bufferId = it->second;
+ bIdMap.erase(it);
+ ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
+ __FUNCTION__, streamId, bIdMap.size(), handle);
+ }
+ return bufferId;
+}
+
+std::vector<uint64_t> BufferRecords::clearBufferCaches(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ std::vector<uint64_t> ret;
+ auto mapIt = mBufferIdMaps.find(streamId);
+ if (mapIt == mBufferIdMaps.end()) {
+ ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
+ return ret;
+ }
+ BufferIdMap& bIdMap = mapIt->second;
+ ret.reserve(bIdMap.size());
+ for (const auto& it : bIdMap) {
+ ret.push_back(it.second);
+ }
+ bIdMap.clear();
+ return ret;
+}
+
+bool BufferRecords::isStreamCached(int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ return mBufferIdMaps.find(streamId) != mBufferIdMaps.end();
+}
+
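+// Checks that the service-side buffer cache of a stream matches the buffer ids reported by the
+// HAL; the comparison is order-insensitive and any mismatch is logged and reported as false.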
+bool BufferRecords::verifyBufferIds(
+ int32_t streamId, std::vector<uint64_t>& bufIds) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ camera3::BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+ if (bIdMap.size() != bufIds.size()) {
+ ALOGE("%s: stream ID %d buffer cache number mismatch: %zu/%zu (service/HAL)",
+ __FUNCTION__, streamId, bIdMap.size(), bufIds.size());
+ return false;
+ }
+ std::vector<uint64_t> internalBufIds;
+ internalBufIds.reserve(bIdMap.size());
+ for (const auto& pair : bIdMap) {
+ internalBufIds.push_back(pair.second);
+ }
+ std::sort(bufIds.begin(), bufIds.end());
+ std::sort(internalBufIds.begin(), internalBufIds.end());
+ for (size_t i = 0; i < bufIds.size(); i++) {
+ if (bufIds[i] != internalBufIds[i]) {
+ ALOGE("%s: buffer cache mismatch! Service %" PRIu64 ", HAL %" PRIu64,
+ __FUNCTION__, internalBufIds[i], bufIds[i]);
+ return false;
+ }
+ }
+ return true;
+}
+
+void BufferRecords::getInflightBufferKeys(
+ std::vector<std::pair<int32_t, int32_t>>* out) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ out->clear();
+ out->reserve(mInflightBufferMap.size());
+ for (auto& pair : mInflightBufferMap) {
+ uint64_t key = pair.first;
+ int32_t streamId = key & 0xFFFFFFFF;
+ int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
+ out->push_back(std::make_pair(frameNumber, streamId));
+ }
+ return;
+}
+
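+// In-flight buffers are keyed by a 64-bit value that combines the frame number (upper 32 bits)
+// with the stream id (lower 32 bits).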
+status_t BufferRecords::pushInflightBuffer(
+ int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ mInflightBufferMap[key] = buffer;
+ return OK;
+}
+
+status_t BufferRecords::popInflightBuffer(
+ int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ auto it = mInflightBufferMap.find(key);
+ if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
+ if (buffer != nullptr) {
+ *buffer = it->second;
+ }
+ mInflightBufferMap.erase(it);
+ return OK;
+}
+
+void BufferRecords::popInflightBuffers(
+ const std::vector<std::pair<int32_t, int32_t>>& buffers) {
+ for (const auto& pair : buffers) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ popInflightBuffer(frameNumber, streamId, nullptr);
+ }
+}
+
+status_t BufferRecords::pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ auto pair = mRequestedBufferMap.insert({bufferId, {streamId, buf}});
+ if (!pair.second) {
+ ALOGE("%s: bufId %" PRIu64 " is already inflight!",
+ __FUNCTION__, bufferId);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+// Find and pop a buffer_handle_t based on bufferId
+status_t BufferRecords::popInflightRequestBuffer(
+ uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId) {
+ if (buffer == nullptr) {
+ ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
+ return BAD_VALUE;
+ }
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ auto it = mRequestedBufferMap.find(bufferId);
+ if (it == mRequestedBufferMap.end()) {
+ ALOGE("%s: bufId %" PRIu64 " is not inflight!",
+ __FUNCTION__, bufferId);
+ return BAD_VALUE;
+ }
+ *buffer = it->second.second;
+ if (streamId != nullptr) {
+ *streamId = it->second.first;
+ }
+ mRequestedBufferMap.erase(it);
+ return OK;
+}
+
+void BufferRecords::getInflightRequestBufferKeys(
+ std::vector<uint64_t>* out) {
+ std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
+ out->clear();
+ out->reserve(mRequestedBufferMap.size());
+ for (auto& pair : mRequestedBufferMap) {
+ out->push_back(pair.first);
+ }
+ return;
+}
+
+
+} // namespace camera3
+} // namespace android
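
The BufferRecords implementation above keys its inflight-buffer map on a single 64-bit value that packs the frame number into the high 32 bits and the stream id into the low 32 bits. A minimal standalone sketch of that packing and unpacking, mirroring pushInflightBuffer() and getInflightBufferKeys() (illustrative only; these helper names are not part of the patch):

    #include <cstdint>
    #include <utility>

    // key = (frameNumber << 32) | streamId, as used by BufferRecords above.
    // Assumes non-negative ids, matching how stream ids and frame numbers
    // are assigned in practice.
    inline uint64_t makeInflightKey(int32_t frameNumber, int32_t streamId) {
        return static_cast<uint64_t>(frameNumber) << 32 |
                static_cast<uint64_t>(streamId);
    }

    // Returns (frameNumber, streamId), the inverse of makeInflightKey().
    inline std::pair<int32_t, int32_t> splitInflightKey(uint64_t key) {
        int32_t streamId = static_cast<int32_t>(key & 0xFFFFFFFF);
        int32_t frameNumber = static_cast<int32_t>((key >> 32) & 0xFFFFFFFF);
        return {frameNumber, streamId};
    }
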
diff --git a/services/camera/libcameraservice/device3/BufferUtils.h b/services/camera/libcameraservice/device3/BufferUtils.h
new file mode 100644
index 0000000..452a908
--- /dev/null
+++ b/services/camera/libcameraservice/device3/BufferUtils.h
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_BUFFER_UTILS_H
+
+#include <unordered_map>
+#include <mutex>
+#include <set>
+
+#include <cutils/native_handle.h>
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+
+// TODO: remove legacy camera3.h references
+#include "hardware/camera3.h"
+
+#include <device3/Camera3OutputInterface.h>
+
+namespace android {
+
+namespace camera3 {
+
+ struct BufferHasher {
+ size_t operator()(const buffer_handle_t& buf) const {
+ if (buf == nullptr)
+ return 0;
+
+ size_t result = 1;
+ result = 31 * result + buf->numFds;
+ for (int i = 0; i < buf->numFds; i++) {
+ result = 31 * result + buf->data[i];
+ }
+ return result;
+ }
+ };
+
+ struct BufferComparator {
+ bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
+ if (buf1->numFds == buf2->numFds) {
+ for (int i = 0; i < buf1->numFds; i++) {
+ if (buf1->data[i] != buf2->data[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+ };
+
+ // Per-stream map from buffer native handle to bufId
+ typedef std::unordered_map<const buffer_handle_t, uint64_t,
+ BufferHasher, BufferComparator> BufferIdMap;
+
+ // streamId -> BufferIdMap
+ typedef std::unordered_map<int, BufferIdMap> BufferIdMaps;
+
+ // Map of inflight buffers sent along in capture requests.
+ // Key is composed of (frameNumber << 32 | streamId)
+ typedef std::unordered_map<uint64_t, buffer_handle_t*> InflightBufferMap;
+
+ // Map of inflight buffers handed out by the requestStreamBuffers API
+ typedef std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> RequestedBufferMap;
+
+ // A class that tracks all buffer bookkeeping, such as inflight buffers
+ // and buffer ID caches
+ class BufferRecords : public BufferRecordsInterface {
+
+ public:
+ BufferRecords() {}
+
+ BufferRecords(BufferRecords&& other) :
+ mBufferIdMaps(other.mBufferIdMaps),
+ mNextBufferId(other.mNextBufferId),
+ mInflightBufferMap(other.mInflightBufferMap),
+ mRequestedBufferMap(other.mRequestedBufferMap) {}
+
+ virtual ~BufferRecords() {}
+
+ // Helper methods for moving buffer records between BufferRecords instances
+ void takeInflightBufferMap(BufferRecords& other);
+ void takeRequestedBufferMap(BufferRecords& other);
+ void takeBufferCaches(BufferRecords& other, const std::vector<int32_t>& streams);
+
+ // Method to extract a buffer's unique ID.
+ // Returns a pair of (newlySeenBuffer?, bufferId)
+ virtual std::pair<bool, uint64_t> getBufferId(
+ const buffer_handle_t& buf, int streamId) override;
+
+ void tryCreateBufferCache(int streamId);
+
+ void removeInactiveBufferCaches(const std::set<int32_t>& activeStreams);
+
+ // Return the removed buffer ID if the input cache is found;
+ // otherwise return BUFFER_ID_NO_BUFFER
+ uint64_t removeOneBufferCache(int streamId, const native_handle_t* handle);
+
+ // Clear all caches for the input stream, but do not remove the stream.
+ // The removed buffers' IDs are returned
+ std::vector<uint64_t> clearBufferCaches(int streamId);
+
+ bool isStreamCached(int streamId);
+
+ // Return true if the input caches match what we have; otherwise false
+ bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
+
+ // Get a vector of (frameNumber, streamId) pairs of currently inflight
+ // buffers
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
+
+ status_t pushInflightBuffer(int32_t frameNumber, int32_t streamId,
+ buffer_handle_t *buffer);
+
+ // Find a buffer_handle_t based on frame number and stream ID
+ virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) override;
+
+ // Pop inflight buffers based on pairs of (frameNumber,streamId)
+ void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+
+ // Get a vector of bufferIds of currently inflight request buffers
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out);
+
+ // Register a bufferId -> (streamId, buffer_handle_t) entry in the inflight request buffer map
+ virtual status_t pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
+
+ // Find a buffer_handle_t based on bufferId
+ virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId = nullptr) override;
+
+ private:
+ std::mutex mBufferIdMapLock;
+ BufferIdMaps mBufferIdMaps;
+ uint64_t mNextBufferId = 1; // 0 means no buffer
+
+ std::mutex mInflightLock;
+ InflightBufferMap mInflightBufferMap;
+
+ std::mutex mRequestedBuffersLock;
+ RequestedBufferMap mRequestedBufferMap;
+ }; // class BufferRecords
+
+ static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+
+ camera3_buffer_status_t mapHidlBufferStatus(
+ hardware::camera::device::V3_2::BufferStatus status);
+} // namespace camera3
+
+} // namespace android
+
+#endif
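
As a rough usage sketch of the interface declared above — not actual Camera3Device code — the HAL interface layer is expected to drive BufferRecords roughly as follows for one output buffer of one capture request (error handling elided; the function name is illustrative):

    #include <device3/BufferUtils.h>
    #include <utils/Errors.h>

    using namespace android;
    using namespace android::camera3;

    // Illustrative only: drive BufferRecords for a single output buffer.
    void exampleBufferLifecycle(BufferRecords& records, buffer_handle_t* handle,
            int32_t frameNumber, int32_t streamId) {
        // Create the per-stream buffer-ID cache once the stream is configured.
        records.tryCreateBufferCache(streamId);

        // Look up (or assign) the stable 64-bit ID for this gralloc handle.
        std::pair<bool, uint64_t> idPair = records.getBufferId(*handle, streamId);
        bool isNewBuffer = idPair.first;
        (void) isNewBuffer;

        // Track the buffer while the request is inflight at the HAL...
        records.pushInflightBuffer(frameNumber, streamId, handle);

        // ...and drop the tracking entry when the capture result returns it.
        buffer_handle_t* returned = nullptr;
        if (records.popInflightBuffer(frameNumber, streamId, &returned) == OK) {
            // 'returned' points at the same handle that was pushed above.
        }
    }
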
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 92ba5e4..23e26ce 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -58,10 +58,10 @@
#include "device3/Camera3InputStream.h"
#include "device3/Camera3DummyStream.h"
#include "device3/Camera3SharedOutputStream.h"
-#include "device3/Camera3OfflineSession.h"
#include "CameraService.h"
#include "utils/CameraThreadState.h"
+#include <algorithm>
#include <tuple>
using namespace android::camera3;
@@ -574,14 +574,6 @@
return OK;
}
-camera3_buffer_status_t Camera3Device::mapHidlBufferStatus(BufferStatus status) {
- switch (status) {
- case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
- case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
- }
- return CAMERA3_BUFFER_STATUS_ERROR;
-}
-
int Camera3Device::mapToFrameworkFormat(
hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
return static_cast<uint32_t>(pixelFormat);
@@ -1001,230 +993,18 @@
hardware::Return<void> Camera3Device::requestStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
requestStreamBuffers_cb _hidl_cb) {
- using hardware::camera::device::V3_5::BufferRequestStatus;
- using hardware::camera::device::V3_5::StreamBufferRet;
- using hardware::camera::device::V3_5::StreamBufferRequestError;
-
- std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
-
- hardware::hidl_vec<StreamBufferRet> bufRets;
- if (!mUseHalBufManager) {
- ALOGE("%s: Camera %s does not support HAL buffer management",
- __FUNCTION__, mId.string());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- SortedVector<int32_t> streamIds;
- ssize_t sz = streamIds.setCapacity(bufReqs.size());
- if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
- ALOGE("%s: failed to allocate memory for %zu buffer requests",
- __FUNCTION__, bufReqs.size());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- if (bufReqs.size() > mOutputStreams.size()) {
- ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
- __FUNCTION__, bufReqs.size(), mOutputStreams.size());
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
-
- // Check for repeated streamId
- for (const auto& bufReq : bufReqs) {
- if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
- ALOGE("%s: Stream %d appear multiple times in buffer requests",
- __FUNCTION__, bufReq.streamId);
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
- return hardware::Void();
- }
- streamIds.add(bufReq.streamId);
- }
-
- if (!mRequestBufferSM.startRequestBuffer()) {
- ALOGE("%s: request buffer disallowed while camera service is configuring",
- __FUNCTION__);
- _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
- return hardware::Void();
- }
-
- bufRets.resize(bufReqs.size());
-
- bool allReqsSucceeds = true;
- bool oneReqSucceeds = false;
- for (size_t i = 0; i < bufReqs.size(); i++) {
- const auto& bufReq = bufReqs[i];
- auto& bufRet = bufRets[i];
- int32_t streamId = bufReq.streamId;
- sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.get(streamId);
- if (outputStream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
- hardware::hidl_vec<StreamBufferRet> emptyBufRets;
- _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
- mRequestBufferSM.endRequestBuffer();
- return hardware::Void();
- }
-
- if (outputStream->isAbandoned()) {
- bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
- allReqsSucceeds = false;
- continue;
- }
-
- bufRet.streamId = streamId;
- size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
- uint32_t numBuffersRequested = bufReq.numBuffersRequested;
- size_t totalHandout = handOutBufferCount + numBuffersRequested;
- uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
- if (totalHandout > maxBuffers) {
- // Not able to allocate enough buffer. Exit early for this stream
- ALOGE("%s: request too much buffers for stream %d: at HAL: %zu + requesting: %d"
- " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
- numBuffersRequested, maxBuffers);
- bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
- allReqsSucceeds = false;
- continue;
- }
-
- hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
- bool currentReqSucceeds = true;
- std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
- size_t numAllocatedBuffers = 0;
- size_t numPushedInflightBuffers = 0;
- for (size_t b = 0; b < numBuffersRequested; b++) {
- camera3_stream_buffer_t& sb = streamBuffers[b];
- // Since this method can run concurrently with request thread
- // We need to update the wait duration everytime we call getbuffer
- nsecs_t waitDuration = kBaseGetBufferWait + getExpectedInFlightDuration();
- status_t res = outputStream->getBuffer(&sb, waitDuration);
- if (res != OK) {
- if (res == NO_INIT || res == DEAD_OBJECT) {
- ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
- } else {
- ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- if (res == TIMED_OUT || res == NO_MEMORY) {
- bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
- } else {
- bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
- }
- }
- currentReqSucceeds = false;
- break;
- }
- numAllocatedBuffers++;
-
- buffer_handle_t *buffer = sb.buffer;
- auto pair = mInterface->getBufferId(*buffer, streamId);
- bool isNewBuffer = pair.first;
- uint64_t bufferId = pair.second;
- StreamBuffer& hBuf = tmpRetBuffers[b];
-
- hBuf.streamId = streamId;
- hBuf.bufferId = bufferId;
- hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
- hBuf.status = BufferStatus::OK;
- hBuf.releaseFence = nullptr;
-
- native_handle_t *acquireFence = nullptr;
- if (sb.acquire_fence != -1) {
- acquireFence = native_handle_create(1,0);
- acquireFence->data[0] = sb.acquire_fence;
- }
- hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
- hBuf.releaseFence = nullptr;
-
- res = mInterface->pushInflightRequestBuffer(bufferId, buffer, streamId);
- if (res != OK) {
- ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
- currentReqSucceeds = false;
- break;
- }
- numPushedInflightBuffers++;
- }
- if (currentReqSucceeds) {
- bufRet.val.buffers(std::move(tmpRetBuffers));
- oneReqSucceeds = true;
- } else {
- allReqsSucceeds = false;
- for (size_t b = 0; b < numPushedInflightBuffers; b++) {
- StreamBuffer& hBuf = tmpRetBuffers[b];
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightRequestBuffer(hBuf.bufferId, &buffer);
- if (res != OK) {
- SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
- __FUNCTION__, streamId, strerror(-res), res);
- }
- }
- for (size_t b = 0; b < numAllocatedBuffers; b++) {
- camera3_stream_buffer_t& sb = streamBuffers[b];
- sb.acquire_fence = -1;
- sb.status = CAMERA3_BUFFER_STATUS_ERROR;
- }
- returnOutputBuffers(streamBuffers.data(), numAllocatedBuffers, 0);
- }
- }
-
- _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
- oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
- BufferRequestStatus::FAILED_UNKNOWN,
- bufRets);
- mRequestBufferSM.endRequestBuffer();
+ RequestBufferStates states {
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ *this, *mInterface, *this};
+ camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
}
hardware::Return<void> Camera3Device::returnStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
- if (!mUseHalBufManager) {
- ALOGE("%s: Camera %s does not support HAL buffer managerment",
- __FUNCTION__, mId.string());
- return hardware::Void();
- }
-
- for (const auto& buf : buffers) {
- if (buf.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
- ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
- continue;
- }
-
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightRequestBuffer(buf.bufferId, &buffer);
-
- if (res != OK) {
- ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
- __FUNCTION__, buf.bufferId, buf.streamId);
- continue;
- }
-
- camera3_stream_buffer_t streamBuffer;
- streamBuffer.buffer = buffer;
- streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- streamBuffer.acquire_fence = -1;
- streamBuffer.release_fence = -1;
-
- if (buf.releaseFence == nullptr) {
- streamBuffer.release_fence = -1;
- } else if (buf.releaseFence->numFds == 1) {
- streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
- } else {
- ALOGE("%s: Invalid release fence, fd count is %d, not 1",
- __FUNCTION__, buf.releaseFence->numFds);
- continue;
- }
-
- sp<Camera3StreamInterface> stream = mOutputStreams.get(buf.streamId);
- if (stream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
- continue;
- }
- streamBuffer.stream = stream->asHalStream();
- returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
- }
+ ReturnBufferStates states {
+ mId, mUseHalBufManager, mOutputStreams, *mInterface};
+ camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
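
The two rewritten callbacks above show the refactoring pattern used throughout this change: the member state a HIDL callback needs is gathered into a small struct of references (RequestBufferStates, ReturnBufferStates, and, below, CaptureOutputStates) and passed to a free function in the camera3 namespace, presumably so the same logic can also be driven from outside Camera3Device (for example by an offline session). A minimal sketch of the pattern with hypothetical names (not the actual AOSP declarations):

    #include <mutex>
    #include <string>
    #include <vector>

    // References into the owning object; nothing is copied or owned here.
    struct WidgetStates {
        const std::string& id;
        std::mutex& lock;
        std::vector<int>& items;
    };

    // Free function holding the logic that used to live in a member function.
    void processWidgets(WidgetStates& states) {
        std::lock_guard<std::mutex> l(states.lock);
        states.items.push_back(42);  // operate on the owner's state
    }

    // The member function then shrinks to an aggregation plus a call:
    //     WidgetStates states { mId, mLock, mItems };
    //     processWidgets(states);
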
@@ -1241,6 +1021,12 @@
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
@@ -1253,8 +1039,22 @@
return hardware::Void();
}
}
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
+
for (const auto& result : results) {
- processOneCaptureResultLocked(result.v3_2, result.physicalCameraMetadata);
+ processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
@@ -1277,6 +1077,12 @@
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
@@ -1289,186 +1095,28 @@
return hardware::Void();
}
}
+
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
+
for (const auto& result : results) {
- processOneCaptureResultLocked(result, noPhysMetadata);
+ processOneCaptureResultLocked(states, result, noPhysMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
}
-status_t Camera3Device::readOneCameraMetadataLocked(
- uint64_t fmqResultSize, hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
- const hardware::camera::device::V3_2::CameraMetadata& result) {
- if (fmqResultSize > 0) {
- resultMetadata.resize(fmqResultSize);
- if (mResultMetadataQueue == nullptr) {
- return NO_MEMORY; // logged in initialize()
- }
- if (!mResultMetadataQueue->read(resultMetadata.data(), fmqResultSize)) {
- ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
- __FUNCTION__, fmqResultSize);
- return INVALID_OPERATION;
- }
- } else {
- resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
- result.size());
- }
-
- if (resultMetadata.size() != 0) {
- status_t res;
- const camera_metadata_t* metadata =
- reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
- size_t expected_metadata_size = resultMetadata.size();
- if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
- ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return INVALID_OPERATION;
- }
- }
-
- return OK;
-}
-
-void Camera3Device::processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& result,
- const hardware::hidl_vec<
- hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
- camera3_capture_result r;
- status_t res;
- r.frame_number = result.frameNumber;
-
- // Read and validate the result metadata.
- hardware::camera::device::V3_2::CameraMetadata resultMetadata;
- res = readOneCameraMetadataLocked(result.fmqResultSize, resultMetadata, result.result);
- if (res != OK) {
- ALOGE("%s: Frame %d: Failed to read capture result metadata",
- __FUNCTION__, result.frameNumber);
- return;
- }
- r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-
- // Read and validate physical camera metadata
- size_t physResultCount = physicalCameraMetadata.size();
- std::vector<const char*> physCamIds(physResultCount);
- std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
- std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
- physResultMetadata.resize(physResultCount);
- for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
- res = readOneCameraMetadataLocked(physicalCameraMetadata[i].fmqMetadataSize,
- physResultMetadata[i], physicalCameraMetadata[i].metadata);
- if (res != OK) {
- ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
- __FUNCTION__, result.frameNumber,
- physicalCameraMetadata[i].physicalCameraId.c_str());
- return;
- }
- physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
- phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
- physResultMetadata[i].data());
- }
- r.num_physcam_metadata = physResultCount;
- r.physcam_ids = physCamIds.data();
- r.physcam_metadata = phyCamMetadatas.data();
-
- std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
- std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
- for (size_t i = 0; i < result.outputBuffers.size(); i++) {
- auto& bDst = outputBuffers[i];
- const StreamBuffer &bSrc = result.outputBuffers[i];
-
- sp<Camera3StreamInterface> stream = mOutputStreams.get(bSrc.streamId);
- if (stream == nullptr) {
- ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- return;
- }
- bDst.stream = stream->asHalStream();
-
- bool noBufferReturned = false;
- buffer_handle_t *buffer = nullptr;
- if (mUseHalBufManager) {
- // This is suspicious most of the time but can be correct during flush where HAL
- // has to return capture result before a buffer is requested
- if (bSrc.bufferId == HalInterface::BUFFER_ID_NO_BUFFER) {
- if (bSrc.status == BufferStatus::OK) {
- ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- // Still proceeds so other buffers can be returned
- }
- noBufferReturned = true;
- }
- if (noBufferReturned) {
- res = OK;
- } else {
- res = mInterface->popInflightRequestBuffer(bSrc.bufferId, &buffer);
- }
- } else {
- res = mInterface->popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
- }
-
- if (res != OK) {
- ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
- __FUNCTION__, result.frameNumber, i, bSrc.streamId);
- return;
- }
-
- bDst.buffer = buffer;
- bDst.status = mapHidlBufferStatus(bSrc.status);
- bDst.acquire_fence = -1;
- if (bSrc.releaseFence == nullptr) {
- bDst.release_fence = -1;
- } else if (bSrc.releaseFence->numFds == 1) {
- if (noBufferReturned) {
- ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
- }
- bDst.release_fence = dup(bSrc.releaseFence->data[0]);
- } else {
- ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
- __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
- return;
- }
- }
- r.num_output_buffers = outputBuffers.size();
- r.output_buffers = outputBuffers.data();
-
- camera3_stream_buffer_t inputBuffer;
- if (result.inputBuffer.streamId == -1) {
- r.input_buffer = nullptr;
- } else {
- if (mInputStream->getId() != result.inputBuffer.streamId) {
- ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
- result.frameNumber, result.inputBuffer.streamId);
- return;
- }
- inputBuffer.stream = mInputStream->asHalStream();
- buffer_handle_t *buffer;
- res = mInterface->popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
- &buffer);
- if (res != OK) {
- ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
- __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
- return;
- }
- inputBuffer.buffer = buffer;
- inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
- inputBuffer.acquire_fence = -1;
- if (result.inputBuffer.releaseFence == nullptr) {
- inputBuffer.release_fence = -1;
- } else if (result.inputBuffer.releaseFence->numFds == 1) {
- inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
- } else {
- ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
- __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
- return;
- }
- r.input_buffer = &inputBuffer;
- }
-
- r.partial_result = result.partialResult;
-
- processCaptureResult(&r);
-}
-
hardware::Return<void> Camera3Device::notify(
const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
// Ideally we should grab mLock, but that can lead to deadlock, and
@@ -1481,55 +1129,31 @@
ALOGW("%s: received notify message in error state.", __FUNCTION__);
}
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> l(mOutputLock);
+ listener = mListener.promote();
+ }
+
+ CaptureOutputStates states {
+ mId,
+ mInFlightLock, mInFlightMap,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ };
for (const auto& msg : msgs) {
- notify(msg);
+ camera3::notify(states, msg);
}
return hardware::Void();
}
-void Camera3Device::notify(
- const hardware::camera::device::V3_2::NotifyMsg& msg) {
-
- camera3_notify_msg m;
- switch (msg.type) {
- case MsgType::ERROR:
- m.type = CAMERA3_MSG_ERROR;
- m.message.error.frame_number = msg.msg.error.frameNumber;
- if (msg.msg.error.errorStreamId >= 0) {
- sp<Camera3StreamInterface> stream = mOutputStreams.get(msg.msg.error.errorStreamId);
- if (stream == nullptr) {
- ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
- m.message.error.frame_number, msg.msg.error.errorStreamId);
- return;
- }
- m.message.error.error_stream = stream->asHalStream();
- } else {
- m.message.error.error_stream = nullptr;
- }
- switch (msg.msg.error.errorCode) {
- case ErrorCode::ERROR_DEVICE:
- m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
- break;
- case ErrorCode::ERROR_REQUEST:
- m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
- break;
- case ErrorCode::ERROR_RESULT:
- m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
- break;
- case ErrorCode::ERROR_BUFFER:
- m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
- break;
- }
- break;
- case MsgType::SHUTTER:
- m.type = CAMERA3_MSG_SHUTTER;
- m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
- m.message.shutter.timestamp = msg.msg.shutter.timestamp;
- break;
- }
- notify(&m);
-}
-
status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber) {
@@ -1683,56 +1307,6 @@
return OK;
}
-status_t Camera3Device::StreamSet::add(
- int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
- if (stream == nullptr) {
- ALOGE("%s: cannot add null stream", __FUNCTION__);
- return BAD_VALUE;
- }
- std::lock_guard<std::mutex> lock(mLock);
- return mData.add(streamId, stream);
-}
-
-ssize_t Camera3Device::StreamSet::remove(int streamId) {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.removeItem(streamId);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::get(int streamId) {
- std::lock_guard<std::mutex> lock(mLock);
- ssize_t idx = mData.indexOfKey(streamId);
- if (idx == NAME_NOT_FOUND) {
- return nullptr;
- }
- return mData.editValueAt(idx);
-}
-
-sp<camera3::Camera3OutputStreamInterface>
-Camera3Device::StreamSet::operator[] (size_t index) {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.editValueAt(index);
-}
-
-size_t Camera3Device::StreamSet::size() const {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.size();
-}
-
-void Camera3Device::StreamSet::clear() {
- std::lock_guard<std::mutex> lock(mLock);
- return mData.clear();
-}
-
-std::vector<int> Camera3Device::StreamSet::getStreamIds() {
- std::lock_guard<std::mutex> lock(mLock);
- std::vector<int> streamIds(mData.size());
- for (size_t i = 0; i < mData.size(); i++) {
- streamIds[i] = mData.keyAt(i);
- }
- return streamIds;
-}
-
status_t Camera3Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
@@ -2296,7 +1870,7 @@
status_t Camera3Device::setNotifyCallback(wp<NotificationListener> listener) {
ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
if (listener != NULL && mListener != NULL) {
ALOGW("%s: Replacing old callback listener", __FUNCTION__);
@@ -2314,17 +1888,12 @@
status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
ATRACE_CALL();
- status_t res;
- Mutex::Autolock l(mOutputLock);
+ std::unique_lock<std::mutex> l(mOutputLock);
while (mResultQueue.empty()) {
- res = mResultSignal.waitRelative(mOutputLock, timeout);
- if (res == TIMED_OUT) {
- return res;
- } else if (res != OK) {
- ALOGW("%s: Camera %s: No frame in %" PRId64 " ns: %s (%d)",
- __FUNCTION__, mId.string(), timeout, strerror(-res), res);
- return res;
+ auto st = mResultSignal.wait_for(l, std::chrono::nanoseconds(timeout));
+ if (st == std::cv_status::timeout) {
+ return TIMED_OUT;
}
}
return OK;
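
waitForNextFrame() above switches from the Android Mutex/Condition pair to std::mutex with std::condition_variable::wait_for. A standalone sketch of the same wait-in-a-loop pattern (names are illustrative, not from the patch); looping on the predicate absorbs spurious wakeups, at the cost of restarting the timeout on each iteration:

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <mutex>

    std::mutex gLock;
    std::condition_variable gSignal;
    std::deque<int> gQueue;

    // Returns false on timeout, true once an item is available.
    bool waitForItem(int64_t timeoutNs) {
        std::unique_lock<std::mutex> l(gLock);
        while (gQueue.empty()) {  // re-check the predicate after every wakeup
            auto st = gSignal.wait_for(l, std::chrono::nanoseconds(timeoutNs));
            if (st == std::cv_status::timeout) {
                return false;
            }
            // A spurious wakeup returns no_timeout; the loop re-checks the queue.
        }
        return true;
    }
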
@@ -2332,7 +1901,7 @@
status_t Camera3Device::getNextResult(CaptureResult *frame) {
ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
if (mResultQueue.empty()) {
return NOT_ENOUGH_DATA;
@@ -2528,7 +2097,7 @@
sp<NotificationListener> listener;
{
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
if (idle && listener != NULL) {
@@ -2727,17 +2296,6 @@
return newRequest;
}
-bool Camera3Device::isOpaqueInputSizeSupported(uint32_t width, uint32_t height) {
- for (uint32_t i = 0; i < mSupportedOpaqueInputSizes.size(); i++) {
- Size size = mSupportedOpaqueInputSizes[i];
- if (size.width == width && size.height == height) {
- return true;
- }
- }
-
- return false;
-}
-
void Camera3Device::cancelStreamsConfigurationLocked() {
int res = OK;
if (mInputStream != NULL && mInputStream->isConfiguring()) {
@@ -3175,7 +2733,7 @@
bool isZslCapture, const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces) {
ATRACE_CALL();
- Mutex::Autolock l(mInFlightLock);
+ std::lock_guard<std::mutex> l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
@@ -3196,79 +2754,7 @@
return OK;
}
-void Camera3Device::returnOutputBuffers(
- const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
- nsecs_t timestamp, bool timestampIncreasing,
- const SurfaceMap& outputSurfaces,
- const CaptureResultExtras &inResultExtras) {
-
- for (size_t i = 0; i < numBuffers; i++)
- {
- if (outputBuffers[i].buffer == nullptr) {
- if (!mUseHalBufManager) {
- // With HAL buffer management API, HAL sometimes will have to return buffers that
- // has not got a output buffer handle filled yet. This is though illegal if HAL
- // buffer management API is not being used.
- ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
- }
- continue;
- }
-
- Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
- int streamId = stream->getId();
- const auto& it = outputSurfaces.find(streamId);
- status_t res = OK;
- if (it != outputSurfaces.end()) {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, it->second,
- inResultExtras.frameNumber);
- } else {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber);
- }
-
- // Note: stream may be deallocated at this point, if this buffer was
- // the last reference to it.
- if (res == NO_INIT || res == DEAD_OBJECT) {
- ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
- } else if (res != OK) {
- ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
- }
-
- // Long processing consumers can cause returnBuffer timeout for shared stream
- // If that happens, cancel the buffer and send a buffer error to client
- if (it != outputSurfaces.end() && res == TIMED_OUT &&
- outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
- // cancel the buffer
- camera3_stream_buffer_t sb = outputBuffers[i];
- sb.status = CAMERA3_BUFFER_STATUS_ERROR;
- stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber);
-
- // notify client buffer error
- sp<NotificationListener> listener;
- {
- Mutex::Autolock l(mOutputLock);
- listener = mListener.promote();
- }
-
- if (listener != nullptr) {
- CaptureResultExtras extras = inResultExtras;
- extras.errorStreamId = streamId;
- listener->notifyError(
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
- extras);
- }
- }
- }
-}
-
-void Camera3Device::removeInFlightMapEntryLocked(int idx) {
- ATRACE_CALL();
- nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
- mInFlightMap.removeItemsAt(idx, 1);
-
+void Camera3Device::onInflightEntryRemovedLocked(nsecs_t duration) {
// Indicate idle inFlightMap to the status tracker
if (mInFlightMap.size() == 0) {
mRequestBufferSM.onInflightMapEmpty();
@@ -3282,50 +2768,7 @@
mExpectedInflightDuration -= duration;
}
-void Camera3Device::removeInFlightRequestIfReadyLocked(int idx) {
-
- const InFlightRequest &request = mInFlightMap.valueAt(idx);
- const uint32_t frameNumber = mInFlightMap.keyAt(idx);
-
- nsecs_t sensorTimestamp = request.sensorTimestamp;
- nsecs_t shutterTimestamp = request.shutterTimestamp;
-
- // Check if it's okay to remove the request from InFlightMap:
- // In the case of a successful request:
- // all input and output buffers, all result metadata, shutter callback
- // arrived.
- // In the case of a unsuccessful request:
- // all input and output buffers arrived.
- if (request.numBuffersLeft == 0 &&
- (request.skipResultMetadata ||
- (request.haveResultMetadata && shutterTimestamp != 0))) {
- if (request.stillCapture) {
- ATRACE_ASYNC_END("still capture", frameNumber);
- }
-
- ATRACE_ASYNC_END("frame capture", frameNumber);
-
- // Sanity check - if sensor timestamp matches shutter timestamp in the
- // case of request having callback.
- if (request.hasCallback && request.requestStatus == OK &&
- sensorTimestamp != shutterTimestamp) {
- SET_ERR("sensor timestamp (%" PRId64
- ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
- sensorTimestamp, frameNumber, shutterTimestamp);
- }
-
- // for an unsuccessful request, it may have pending output buffers to
- // return.
- assert(request.requestStatus != OK ||
- request.pendingOutputBuffers.size() == 0);
- returnOutputBuffers(request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
- request.outputSurfaces, request.resultExtras);
-
- removeInFlightMapEntryLocked(idx);
- ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
- }
-
+void Camera3Device::checkInflightMapLengthLocked() {
// Sanity check - if we have too many in-flight frames with long total inflight duration,
// something has likely gone wrong. This might still be legit only if application send in
// a long burst of long exposure requests.
@@ -3342,734 +2785,32 @@
}
}
+void Camera3Device::onInflightMapFlushedLocked() {
+ mExpectedInflightDuration = 0;
+}
+
+void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+ ATRACE_CALL();
+ nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
+ mInFlightMap.removeItemsAt(idx, 1);
+
+ onInflightEntryRemovedLocked(duration);
+}
+
+
void Camera3Device::flushInflightRequests() {
ATRACE_CALL();
- { // First return buffers cached in mInFlightMap
- Mutex::Autolock l(mInFlightLock);
- for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
- const InFlightRequest &request = mInFlightMap.valueAt(idx);
- returnOutputBuffers(request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0,
- /*timestampIncreasing*/true, request.outputSurfaces,
- request.resultExtras);
- }
- mInFlightMap.clear();
- mExpectedInflightDuration = 0;
- }
-
- // Then return all inflight buffers not returned by HAL
- std::vector<std::pair<int32_t, int32_t>> inflightKeys;
- mInterface->getInflightBufferKeys(&inflightKeys);
-
- // Inflight buffers for HAL buffer manager
- std::vector<uint64_t> inflightRequestBufferKeys;
- mInterface->getInflightRequestBufferKeys(&inflightRequestBufferKeys);
-
- // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
- // frameNumber will be -1 for buffers from HAL buffer manager
- std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
- inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
-
- for (auto& pair : inflightKeys) {
- int32_t frameNumber = pair.first;
- int32_t streamId = pair.second;
- buffer_handle_t* buffer;
- status_t res = mInterface->popInflightBuffer(frameNumber, streamId, &buffer);
- if (res != OK) {
- ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
- __FUNCTION__, frameNumber, streamId);
- continue;
- }
- inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
- }
-
- for (auto& bufferId : inflightRequestBufferKeys) {
- int32_t streamId = -1;
- buffer_handle_t* buffer = nullptr;
- status_t res = mInterface->popInflightRequestBuffer(bufferId, &buffer, &streamId);
- if (res != OK) {
- ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
- continue;
- }
- inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
- }
-
- int32_t inputStreamId = (mInputStream != nullptr) ? mInputStream->getId() : -1;
- for (auto& tuple : inflightBuffers) {
- status_t res = OK;
- int32_t streamId = std::get<0>(tuple);
- int32_t frameNumber = std::get<1>(tuple);
- buffer_handle_t* buffer = std::get<2>(tuple);
-
- camera3_stream_buffer_t streamBuffer;
- streamBuffer.buffer = buffer;
- streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- streamBuffer.acquire_fence = -1;
- streamBuffer.release_fence = -1;
-
- // First check if the buffer belongs to deleted stream
- bool streamDeleted = false;
- for (auto& stream : mDeletedStreams) {
- if (streamId == stream->getId()) {
- streamDeleted = true;
- // Return buffer to deleted stream
- camera3_stream* halStream = stream->asHalStream();
- streamBuffer.stream = halStream;
- switch (halStream->stream_type) {
- case CAMERA3_STREAM_OUTPUT:
- res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
- /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
- if (res != OK) {
- ALOGE("%s: Can't return output buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- break;
- case CAMERA3_STREAM_INPUT:
- res = stream->returnInputBuffer(streamBuffer);
- if (res != OK) {
- ALOGE("%s: Can't return input buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- break;
- default: // Bi-direcitonal stream is deprecated
- ALOGE("%s: stream %d has unknown stream type %d",
- __FUNCTION__, streamId, halStream->stream_type);
- break;
- }
- break;
- }
- }
- if (streamDeleted) {
- continue;
- }
-
- // Then check against configured streams
- if (streamId == inputStreamId) {
- streamBuffer.stream = mInputStream->asHalStream();
- res = mInputStream->returnInputBuffer(streamBuffer);
- if (res != OK) {
- ALOGE("%s: Can't return input buffer for frame %d to"
- " stream %d: %s (%d)", __FUNCTION__,
- frameNumber, streamId, strerror(-res), res);
- }
- } else {
- sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
- if (stream == nullptr) {
- ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
- continue;
- }
- streamBuffer.stream = stream->asHalStream();
- returnOutputBuffers(&streamBuffer, /*size*/1, /*timestamp*/ 0);
- }
- }
-}
-
-void Camera3Device::insertResultLocked(CaptureResult *result,
- uint32_t frameNumber) {
- if (result == nullptr) return;
-
- camera_metadata_t *meta = const_cast<camera_metadata_t *>(
- result->mMetadata.getAndLock());
- set_camera_metadata_vendor_id(meta, mVendorTagId);
- result->mMetadata.unlock(meta);
-
- if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
- (int32_t*)&frameNumber, 1) != OK) {
- SET_ERR("Failed to set frame number %d in metadata", frameNumber);
- return;
- }
-
- if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
- SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
- return;
- }
-
- // Update vendor tag id for physical metadata
- for (auto& physicalMetadata : result->mPhysicalMetadatas) {
- camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
- physicalMetadata.mPhysicalCameraMetadata.getAndLock());
- set_camera_metadata_vendor_id(pmeta, mVendorTagId);
- physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
- }
-
- // Valid result, insert into queue
- List<CaptureResult>::iterator queuedResult =
- mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
- ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32, __FUNCTION__,
- queuedResult->mResultExtras.requestId,
- queuedResult->mResultExtras.frameNumber,
- queuedResult->mResultExtras.burstId);
-
- mResultSignal.signal();
-}
-
-
-void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
- ATRACE_CALL();
- Mutex::Autolock l(mOutputLock);
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = partialResult;
-
- // Fix up result metadata for monochrome camera.
- status_t res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
-
- insertResultLocked(&captureResult, frameNumber);
-}
-
-
-void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
- CaptureResultExtras &resultExtras,
- CameraMetadata &collectedPartialResult,
- uint32_t frameNumber,
- bool reprocess, bool zslStillCapture, const std::set<std::string>& cameraIdsWithZoom,
- const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
- ATRACE_CALL();
- if (pendingMetadata.isEmpty())
- return;
-
- Mutex::Autolock l(mOutputLock);
-
- // TODO: need to track errors for tighter bounds on expected frame number
- if (reprocess) {
- if (frameNumber < mNextReprocessResultFrameNumber) {
- SET_ERR("Out-of-order reprocess capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextReprocessResultFrameNumber);
- return;
- }
- mNextReprocessResultFrameNumber = frameNumber + 1;
- } else if (zslStillCapture) {
- if (frameNumber < mNextZslStillResultFrameNumber) {
- SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextZslStillResultFrameNumber);
- return;
- }
- mNextZslStillResultFrameNumber = frameNumber + 1;
- } else {
- if (frameNumber < mNextResultFrameNumber) {
- SET_ERR("Out-of-order capture result metadata submitted! "
- "(got frame number %d, expecting %d)",
- frameNumber, mNextResultFrameNumber);
- return;
- }
- mNextResultFrameNumber = frameNumber + 1;
- }
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = pendingMetadata;
- captureResult.mPhysicalMetadatas = physicalMetadatas;
-
- // Append any previous partials to form a complete result
- if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
- captureResult.mMetadata.append(collectedPartialResult);
- }
-
- captureResult.mMetadata.sort();
-
- // Check that there's a timestamp in the result metadata
- camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- if (timestamp.count == 0) {
- SET_ERR("No timestamp provided by HAL for frame %d!",
- frameNumber);
- return;
- }
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- camera_metadata_entry timestamp =
- physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
- if (timestamp.count == 0) {
- SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
- String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
- return;
- }
- }
-
- // Fix up some result metadata to account for HAL-level distortion correction
- status_t res =
- mDistortionMappers[mId.c_str()].correctCaptureResult(&captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
-
- // Fix up result metadata to account for zoom ratio availabilities between
- // HAL and app.
- bool zoomRatioIs1 = cameraIdsWithZoom.find(mId.c_str()) == cameraIdsWithZoom.end();
- res = mZoomRatioMappers[mId.c_str()].updateCaptureResult(
- &captureResult.mMetadata, zoomRatioIs1);
- if (res != OK) {
- SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
-
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- String8 cameraId8(physicalMetadata.mPhysicalCameraId);
- if (mDistortionMappers.find(cameraId8.c_str()) != mDistortionMappers.end()) {
- res = mDistortionMappers[cameraId8.c_str()].correctCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata);
- if (res != OK) {
- SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
- frameNumber, strerror(-res), res);
- return;
- }
- }
-
- zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
- res = mZoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
- if (res != OK) {
- SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
- "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
- return;
- }
- }
-
- // Fix up result metadata for monochrome camera.
- res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
- for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
- String8 cameraId8(physicalMetadata.mPhysicalCameraId);
- res = fixupMonochromeTags(mPhysicalDeviceInfoMap.at(cameraId8.c_str()),
- physicalMetadata.mPhysicalCameraMetadata);
- if (res != OK) {
- SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
- return;
- }
- }
-
- std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
- for (auto& m : physicalMetadatas) {
- monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
- CameraMetadata(m.mPhysicalCameraMetadata));
- }
- mTagMonitor.monitorMetadata(TagMonitor::RESULT,
- frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
- monitoredPhysicalMetadata);
-
- insertResultLocked(&captureResult, frameNumber);
-}
-
-/**
- * Camera HAL device callback methods
- */
-
-void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
- ATRACE_CALL();
-
- status_t res;
-
- uint32_t frameNumber = result->frame_number;
- if (result->result == NULL && result->num_output_buffers == 0 &&
- result->input_buffer == NULL) {
- SET_ERR("No result data provided by HAL for frame %d",
- frameNumber);
- return;
- }
-
- if (!mUsePartialResult &&
- result->result != NULL &&
- result->partial_result != 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
- " if partial result is not supported",
- frameNumber, result->partial_result);
- return;
- }
-
- bool isPartialResult = false;
- CameraMetadata collectedPartialResult;
- bool hasInputBufferInRequest = false;
-
- // Get shutter timestamp and resultExtras from list of in-flight requests,
- // where it was added by the shutter notification for this frame. If the
- // shutter timestamp isn't received yet, append the output buffers to the
- // in-flight request and they will be returned when the shutter timestamp
- // arrives. Update the in-flight status and remove the in-flight entry if
- // all result data and shutter timestamp have been received.
- nsecs_t shutterTimestamp = 0;
-
- {
- Mutex::Autolock l(mInFlightLock);
- ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
- if (idx == NAME_NOT_FOUND) {
- SET_ERR("Unknown frame number for capture result: %d",
- frameNumber);
- return;
- }
- InFlightRequest &request = mInFlightMap.editValueAt(idx);
- ALOGVV("%s: got InFlightRequest requestId = %" PRId32
- ", frameNumber = %" PRId64 ", burstId = %" PRId32
- ", partialResultCount = %d, hasCallback = %d",
- __FUNCTION__, request.resultExtras.requestId,
- request.resultExtras.frameNumber, request.resultExtras.burstId,
- result->partial_result, request.hasCallback);
- // Always update the partial count to the latest one if it's not 0
- // (buffers only). When framework aggregates adjacent partial results
- // into one, the latest partial count will be used.
- if (result->partial_result != 0)
- request.resultExtras.partialResultCount = result->partial_result;
-
- // Check if this result carries only partial metadata
- if (mUsePartialResult && result->result != NULL) {
- if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
- SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
- " the range of [1, %d] when metadata is included in the result",
- frameNumber, result->partial_result, mNumPartialResults);
- return;
- }
- isPartialResult = (result->partial_result < mNumPartialResults);
- if (isPartialResult && result->num_physcam_metadata) {
- SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
- " physical camera result", frameNumber);
- return;
- }
- if (isPartialResult) {
- request.collectedPartialResult.append(result->result);
- }
-
- if (isPartialResult && request.hasCallback) {
- // Send partial capture result
- sendPartialCaptureResult(result->result, request.resultExtras,
- frameNumber);
- }
- }
-
- shutterTimestamp = request.shutterTimestamp;
- hasInputBufferInRequest = request.hasInputBuffer;
-
- // Did we get the (final) result metadata for this capture?
- if (result->result != NULL && !isPartialResult) {
- if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
- SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
- request.physicalCameraIds.size(), result->num_physcam_metadata);
- return;
- }
- if (request.haveResultMetadata) {
- SET_ERR("Called multiple times with metadata for frame %d",
- frameNumber);
- return;
- }
- for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
- String8 physicalId(result->physcam_ids[i]);
- std::set<String8>::iterator cameraIdIter =
- request.physicalCameraIds.find(physicalId);
- if (cameraIdIter != request.physicalCameraIds.end()) {
- request.physicalCameraIds.erase(cameraIdIter);
- } else {
- SET_ERR("Total result for frame %d has already returned for camera %s",
- frameNumber, physicalId.c_str());
- return;
- }
- }
- if (mUsePartialResult &&
- !request.collectedPartialResult.isEmpty()) {
- collectedPartialResult.acquire(
- request.collectedPartialResult);
- }
- request.haveResultMetadata = true;
- }
-
- uint32_t numBuffersReturned = result->num_output_buffers;
- if (result->input_buffer != NULL) {
- if (hasInputBufferInRequest) {
- numBuffersReturned += 1;
- } else {
- ALOGW("%s: Input buffer should be NULL if there is no input"
- " buffer sent in the request",
- __FUNCTION__);
- }
- }
- request.numBuffersLeft -= numBuffersReturned;
- if (request.numBuffersLeft < 0) {
- SET_ERR("Too many buffers returned for frame %d",
- frameNumber);
- return;
- }
-
- camera_metadata_ro_entry_t entry;
- res = find_camera_metadata_ro_entry(result->result,
- ANDROID_SENSOR_TIMESTAMP, &entry);
- if (res == OK && entry.count == 1) {
- request.sensorTimestamp = entry.data.i64[0];
- }
-
- // If shutter event isn't received yet, append the output buffers to
- // the in-flight request. Otherwise, return the output buffers to
- // streams.
- if (shutterTimestamp == 0) {
- request.pendingOutputBuffers.appendArray(result->output_buffers,
- result->num_output_buffers);
- } else {
- bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
- returnOutputBuffers(result->output_buffers,
- result->num_output_buffers, shutterTimestamp, timestampIncreasing,
- request.outputSurfaces, request.resultExtras);
- }
-
- if (result->result != NULL && !isPartialResult) {
- for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
- CameraMetadata physicalMetadata;
- physicalMetadata.append(result->physcam_metadata[i]);
- request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
- physicalMetadata});
- }
- if (shutterTimestamp == 0) {
- request.pendingMetadata = result->result;
- request.collectedPartialResult = collectedPartialResult;
- } else if (request.hasCallback) {
- CameraMetadata metadata;
- metadata = result->result;
- sendCaptureResult(metadata, request.resultExtras,
- collectedPartialResult, frameNumber,
- hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.cameraIdsWithZoom, request.physicalMetadatas);
- }
- }
-
- removeInFlightRequestIfReadyLocked(idx);
- } // scope for mInFlightLock
-
- if (result->input_buffer != NULL) {
- if (hasInputBufferInRequest) {
- Camera3Stream *stream =
- Camera3Stream::cast(result->input_buffer->stream);
- res = stream->returnInputBuffer(*(result->input_buffer));
- // Note: stream may be deallocated at this point, if this buffer was the
- // last reference to it.
- if (res != OK) {
- ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
- " its stream:%s (%d)", __FUNCTION__,
- frameNumber, strerror(-res), res);
- }
- } else {
- ALOGW("%s: Input buffer should be NULL if there is no input"
- " buffer sent in the request, skipping input buffer return.",
- __FUNCTION__);
- }
- }
-}
-
-void Camera3Device::notify(const camera3_notify_msg *msg) {
- ATRACE_CALL();
sp<NotificationListener> listener;
{
- Mutex::Autolock l(mOutputLock);
+ std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
- if (msg == NULL) {
- SET_ERR("HAL sent NULL notify message!");
- return;
- }
+ FlushInflightReqStates states {
+ mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
+ listener, *this, *mInterface, *this};
- switch (msg->type) {
- case CAMERA3_MSG_ERROR: {
- notifyError(msg->message.error, listener);
- break;
- }
- case CAMERA3_MSG_SHUTTER: {
- notifyShutter(msg->message.shutter, listener);
- break;
- }
- default:
- SET_ERR("Unknown notify message from HAL: %d",
- msg->type);
- }
-}
-
-void Camera3Device::notifyError(const camera3_error_msg_t &msg,
- sp<NotificationListener> listener) {
- ATRACE_CALL();
- // Map camera HAL error codes to ICameraDeviceCallback error codes
- // Index into this with the HAL error code
- static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
- // 0 = Unused error code
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
- // 1 = CAMERA3_MSG_ERROR_DEVICE
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
- // 2 = CAMERA3_MSG_ERROR_REQUEST
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- // 3 = CAMERA3_MSG_ERROR_RESULT
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
- // 4 = CAMERA3_MSG_ERROR_BUFFER
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
- };
-
- int32_t errorCode =
- ((msg.error_code >= 0) &&
- (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
- halErrorMap[msg.error_code] :
- hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
-
- int streamId = 0;
- String16 physicalCameraId;
- if (msg.error_stream != NULL) {
- Camera3Stream *stream =
- Camera3Stream::cast(msg.error_stream);
- streamId = stream->getId();
- physicalCameraId = String16(stream->physicalCameraId());
- }
- ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
- mId.string(), __FUNCTION__, msg.frame_number,
- streamId, msg.error_code);
-
- CaptureResultExtras resultExtras;
- switch (errorCode) {
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
- // SET_ERR calls notifyError
- SET_ERR("Camera HAL reported serious device error");
- break;
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
- {
- Mutex::Autolock l(mInFlightLock);
- ssize_t idx = mInFlightMap.indexOfKey(msg.frame_number);
- if (idx >= 0) {
- InFlightRequest &r = mInFlightMap.editValueAt(idx);
- r.requestStatus = msg.error_code;
- resultExtras = r.resultExtras;
- bool logicalDeviceResultError = false;
- if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
- errorCode) {
- if (physicalCameraId.size() > 0) {
- String8 cameraId(physicalCameraId);
- auto iter = r.physicalCameraIds.find(cameraId);
- if (iter == r.physicalCameraIds.end()) {
- ALOGE("%s: Reported result failure for physical camera device: %s "
- " which is not part of the respective request!",
- __FUNCTION__, cameraId.string());
- break;
- }
- r.physicalCameraIds.erase(iter);
- resultExtras.errorPhysicalCameraId = physicalCameraId;
- } else {
- logicalDeviceResultError = true;
- }
- }
-
- if (logicalDeviceResultError
- || hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
- errorCode) {
- r.skipResultMetadata = true;
- }
- if (logicalDeviceResultError) {
- // In case of missing result check whether the buffers
- // returned. If they returned, then remove inflight
- // request.
- // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
- // otherwise we are depending on HAL to send the buffers back after
- // calling notifyError. Not sure if that's in the spec.
- removeInFlightRequestIfReadyLocked(idx);
- }
- } else {
- resultExtras.frameNumber = msg.frame_number;
- ALOGE("Camera %s: %s: cannot find in-flight request on "
- "frame %" PRId64 " error", mId.string(), __FUNCTION__,
- resultExtras.frameNumber);
- }
- }
- resultExtras.errorStreamId = streamId;
- if (listener != NULL) {
- listener->notifyError(errorCode, resultExtras);
- } else {
- ALOGE("Camera %s: %s: no listener available", mId.string(), __FUNCTION__);
- }
- break;
- default:
- // SET_ERR calls notifyError
- SET_ERR("Unknown error message from HAL: %d", msg.error_code);
- break;
- }
-}
-
-void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
- sp<NotificationListener> listener) {
- ATRACE_CALL();
- ssize_t idx;
-
- // Set timestamp for the request in the in-flight tracking
- // and get the request ID to send upstream
- {
- Mutex::Autolock l(mInFlightLock);
- idx = mInFlightMap.indexOfKey(msg.frame_number);
- if (idx >= 0) {
- InFlightRequest &r = mInFlightMap.editValueAt(idx);
-
- // Verify ordering of shutter notifications
- {
- Mutex::Autolock l(mOutputLock);
- // TODO: need to track errors for tighter bounds on expected frame number.
- if (r.hasInputBuffer) {
- if (msg.frame_number < mNextReprocessShutterFrameNumber) {
- SET_ERR("Reprocess shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextReprocessShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextReprocessShutterFrameNumber = msg.frame_number + 1;
- } else if (r.zslCapture && r.stillCapture) {
- if (msg.frame_number < mNextZslStillShutterFrameNumber) {
- SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextZslStillShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextZslStillShutterFrameNumber = msg.frame_number + 1;
- } else {
- if (msg.frame_number < mNextShutterFrameNumber) {
- SET_ERR("Shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextShutterFrameNumber = msg.frame_number + 1;
- }
- }
-
- r.shutterTimestamp = msg.timestamp;
- if (r.hasCallback) {
- ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
- mId.string(), __FUNCTION__,
- msg.frame_number, r.resultExtras.requestId, msg.timestamp);
- // Call listener, if any
- if (listener != NULL) {
- listener->notifyShutter(r.resultExtras, msg.timestamp);
- }
- // send pending result and buffers
- sendCaptureResult(r.pendingMetadata, r.resultExtras,
- r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.cameraIdsWithZoom, r.physicalMetadatas);
- }
- bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
- returnOutputBuffers(r.pendingOutputBuffers.array(),
- r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
- r.outputSurfaces, r.resultExtras);
- r.pendingOutputBuffers.clear();
-
- removeInFlightRequestIfReadyLocked(idx);
- }
- }
- if (idx < 0) {
- SET_ERR("Shutter notification for non-existent frame number %d",
- msg.frame_number);
- }
+ camera3::flushInflightRequests(states);
}
CameraMetadata Camera3Device::getLatestRequestLocked() {
@@ -4084,7 +2825,6 @@
return retVal;
}
-
void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
@@ -4319,20 +3059,10 @@
activeStreams.insert(streamId);
// Create Buffer ID map if necessary
- if (mBufferIdMaps.count(streamId) == 0) {
- mBufferIdMaps.emplace(streamId, BufferIdMap{});
- }
+ mBufferRecords.tryCreateBufferCache(streamId);
}
// remove BufferIdMap for deleted streams
- for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
- int streamId = it->first;
- bool active = activeStreams.count(streamId) > 0;
- if (!active) {
- it = mBufferIdMaps.erase(it);
- } else {
- ++it;
- }
- }
+ mBufferRecords.removeInactiveBufferCaches(activeStreams);
StreamConfigurationMode operationMode;
res = mapToStreamConfigurationMode(
@@ -4350,6 +3080,7 @@
// Invoke configureStreams
device::V3_3::HalStreamConfiguration finalConfiguration;
device::V3_4::HalStreamConfiguration finalConfiguration3_4;
+ device::V3_6::HalStreamConfiguration finalConfiguration3_6;
common::V1_0::Status status;
auto configStream34Cb = [&status, &finalConfiguration3_4]
@@ -4358,6 +3089,12 @@
status = s;
};
+ auto configStream36Cb = [&status, &finalConfiguration3_6]
+ (common::V1_0::Status s, const device::V3_6::HalStreamConfiguration& halConfiguration) {
+ finalConfiguration3_6 = halConfiguration;
+ status = s;
+ };
+
auto postprocConfigStream34 = [&finalConfiguration, &finalConfiguration3_4]
(hardware::Return<void>& err) -> status_t {
if (!err.isOk()) {
@@ -4371,8 +3108,32 @@
return OK;
};
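+ // Like postprocConfigStream34 above, this flattens the nested v3.6 HalStream (v3_4.v3_3)
+ // back into the common v3.3 finalConfiguration consumed by the rest of this function.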
+ auto postprocConfigStream36 = [&finalConfiguration, &finalConfiguration3_6]
+ (hardware::Return<void>& err) -> status_t {
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ return DEAD_OBJECT;
+ }
+ finalConfiguration.streams.resize(finalConfiguration3_6.streams.size());
+ for (size_t i = 0; i < finalConfiguration3_6.streams.size(); i++) {
+ finalConfiguration.streams[i] = finalConfiguration3_6.streams[i].v3_4.v3_3;
+ }
+ return OK;
+ };
+
// See which version of HAL we have
- if (mHidlSession_3_5 != nullptr) {
+ if (mHidlSession_3_6 != nullptr) {
+ ALOGV("%s: v3.6 device found", __FUNCTION__);
+ device::V3_5::StreamConfiguration requestedConfiguration3_5;
+ requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
+ requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
+ auto err = mHidlSession_3_6->configureStreams_3_6(
+ requestedConfiguration3_5, configStream36Cb);
+ res = postprocConfigStream36(err);
+ if (res != OK) {
+ return res;
+ }
+ } else if (mHidlSession_3_5 != nullptr) {
ALOGV("%s: v3.5 device found", __FUNCTION__);
device::V3_5::StreamConfiguration requestedConfiguration3_5;
requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -4454,11 +3215,16 @@
return INVALID_OPERATION;
}
device::V3_3::HalStream &src = finalConfiguration.streams[realIdx];
+ device::V3_6::HalStream &src_36 = finalConfiguration3_6.streams[realIdx];
Camera3Stream* dstStream = Camera3Stream::cast(dst);
int overrideFormat = mapToFrameworkFormat(src.v3_2.overrideFormat);
android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
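+ // Per-stream offline processing support (HalStream.supportOffline) is only reported by
+ // HAL v3.6 sessions, so only query it in that case.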
+ if (mHidlSession_3_6 != nullptr) {
+ dstStream->setOfflineProcessingSupport(src_36.supportOffline);
+ }
+
if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
dstStream->setFormatOverride(false);
dstStream->setDataSpaceOverride(false);
@@ -4522,7 +3288,6 @@
captureRequest->fmqSettingsSize = 0;
{
- std::lock_guard<std::mutex> lock(mInflightLock);
if (request->input_buffer != nullptr) {
int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
buffer_handle_t buf = *(request->input_buffer->buffer);
@@ -4542,7 +3307,7 @@
captureRequest->inputBuffer.acquireFence = acquireFence;
captureRequest->inputBuffer.releaseFence = nullptr;
- pushInflightBufferLocked(captureRequest->frameNumber, streamId,
+ mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
request->input_buffer->buffer);
inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
} else {
@@ -4583,7 +3348,8 @@
// Output buffers are empty when using HAL buffer manager
if (!mUseHalBufManager) {
- pushInflightBufferLocked(captureRequest->frameNumber, streamId, src->buffer);
+ mBufferRecords.pushInflightBuffer(
+ captureRequest->frameNumber, streamId, src->buffer);
inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
}
}
@@ -4640,7 +3406,7 @@
/*out*/&handlesCreated, /*out*/&inflightBuffers);
}
if (res != OK) {
- popInflightBuffers(inflightBuffers);
+ mBufferRecords.popInflightBuffers(inflightBuffers);
cleanupNativeHandles(&handlesCreated);
return res;
}
@@ -4648,10 +3414,10 @@
std::vector<device::V3_2::BufferCache> cachesToRemove;
{
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
for (auto& pair : mFreedBuffers) {
// The stream might have been removed since onBufferFreed
- if (mBufferIdMaps.find(pair.first) != mBufferIdMaps.end()) {
+ if (mBufferRecords.isStreamCached(pair.first)) {
cachesToRemove.push_back({pair.first, pair.second});
}
}
@@ -4758,7 +3524,7 @@
cleanupNativeHandles(&handlesCreated);
}
} else {
- popInflightBuffers(inflightBuffers);
+ mBufferRecords.popInflightBuffers(inflightBuffers);
cleanupNativeHandles(&handlesCreated);
}
return res;
@@ -4820,20 +3586,20 @@
status_t Camera3Device::HalInterface::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords) {
ATRACE_NAME("CameraHal::switchToOffline");
if (!valid() || mHidlSession_3_6 == nullptr) {
ALOGE("%s called on invalid camera!", __FUNCTION__);
return INVALID_OPERATION;
}
- if (offlineSessionInfo == nullptr || offlineSession == nullptr) {
- ALOGE("%s: offlineSessionInfo and offlineSession must not be null!", __FUNCTION__);
+ if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
+ ALOGE("%s: output arguments must not be null!", __FUNCTION__);
return INVALID_OPERATION;
}
common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
-
auto resultCallback =
[&status, &offlineSessionInfo, &offlineSession] (auto s, auto info, auto session) {
status = s;
@@ -4846,75 +3612,65 @@
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
return DEAD_OBJECT;
}
- return CameraProviderManager::mapToStatusT(status);
+
+ status_t ret = CameraProviderManager::mapToStatusT(status);
+ if (ret != OK) {
+ return ret;
+ }
+
+ // TODO: assert no ongoing requestBuffer/returnBuffer call here
+ // TODO: update RequestBufferStateMachine to block requestBuffer/returnBuffer once HAL
+ // returns from switchToOffline.
+
+ // Validate buffer caches
+ std::vector<int32_t> streams;
+ streams.reserve(offlineSessionInfo->offlineStreams.size());
+ for (auto offlineStream : offlineSessionInfo->offlineStreams) {
+ int32_t id = offlineStream.id;
+ streams.push_back(id);
+ // Verify buffer caches
+ std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
+ offlineStream.circulatingBufferIds.end());
+ if (!verifyBufferIds(id, bufIds)) {
+ ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Move buffer records
+ bufferRecords->takeBufferCaches(mBufferRecords, streams);
+ bufferRecords->takeInflightBufferMap(mBufferRecords);
+ bufferRecords->takeRequestedBufferMap(mBufferRecords);
+ return ret;
}
void Camera3Device::HalInterface::getInflightBufferKeys(
std::vector<std::pair<int32_t, int32_t>>* out) {
- std::lock_guard<std::mutex> lock(mInflightLock);
- out->clear();
- out->reserve(mInflightBufferMap.size());
- for (auto& pair : mInflightBufferMap) {
- uint64_t key = pair.first;
- int32_t streamId = key & 0xFFFFFFFF;
- int32_t frameNumber = (key >> 32) & 0xFFFFFFFF;
- out->push_back(std::make_pair(frameNumber, streamId));
- }
+ mBufferRecords.getInflightBufferKeys(out);
return;
}
void Camera3Device::HalInterface::getInflightRequestBufferKeys(
std::vector<uint64_t>* out) {
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- out->clear();
- out->reserve(mRequestedBuffers.size());
- for (auto& pair : mRequestedBuffers) {
- out->push_back(pair.first);
- }
+ mBufferRecords.getInflightRequestBufferKeys(out);
return;
}
-status_t Camera3Device::HalInterface::pushInflightBufferLocked(
- int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer) {
- uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
- mInflightBufferMap[key] = buffer;
- return OK;
+bool Camera3Device::HalInterface::verifyBufferIds(
+ int32_t streamId, std::vector<uint64_t>& bufIds) {
+ return mBufferRecords.verifyBufferIds(streamId, bufIds);
}
status_t Camera3Device::HalInterface::popInflightBuffer(
int32_t frameNumber, int32_t streamId,
/*out*/ buffer_handle_t **buffer) {
- std::lock_guard<std::mutex> lock(mInflightLock);
-
- uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
- auto it = mInflightBufferMap.find(key);
- if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
- if (buffer != nullptr) {
- *buffer = it->second;
- }
- mInflightBufferMap.erase(it);
- return OK;
-}
-
-void Camera3Device::HalInterface::popInflightBuffers(
- const std::vector<std::pair<int32_t, int32_t>>& buffers) {
- for (const auto& pair : buffers) {
- int32_t frameNumber = pair.first;
- int32_t streamId = pair.second;
- popInflightBuffer(frameNumber, streamId, nullptr);
- }
+ return mBufferRecords.popInflightBuffer(frameNumber, streamId, buffer);
}
status_t Camera3Device::HalInterface::pushInflightRequestBuffer(
uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) {
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- auto pair = mRequestedBuffers.insert({bufferId, {streamId, buf}});
- if (!pair.second) {
- ALOGE("%s: bufId %" PRIu64 " is already inflight!",
- __FUNCTION__, bufferId);
- return BAD_VALUE;
- }
- return OK;
+ return mBufferRecords.pushInflightRequestBuffer(bufferId, buf, streamId);
}
// Find and pop a buffer_handle_t based on bufferId
@@ -4922,81 +3678,29 @@
uint64_t bufferId,
/*out*/ buffer_handle_t** buffer,
/*optional out*/ int32_t* streamId) {
- if (buffer == nullptr) {
- ALOGE("%s: buffer (%p) must not be null", __FUNCTION__, buffer);
- return BAD_VALUE;
- }
- std::lock_guard<std::mutex> lock(mRequestedBuffersLock);
- auto it = mRequestedBuffers.find(bufferId);
- if (it == mRequestedBuffers.end()) {
- ALOGE("%s: bufId %" PRIu64 " is not inflight!",
- __FUNCTION__, bufferId);
- return BAD_VALUE;
- }
- *buffer = it->second.second;
- if (streamId != nullptr) {
- *streamId = it->second.first;
- }
- mRequestedBuffers.erase(it);
- return OK;
+ return mBufferRecords.popInflightRequestBuffer(bufferId, buffer, streamId);
}
std::pair<bool, uint64_t> Camera3Device::HalInterface::getBufferId(
const buffer_handle_t& buf, int streamId) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
- BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
- auto it = bIdMap.find(buf);
- if (it == bIdMap.end()) {
- bIdMap[buf] = mNextBufferId++;
- ALOGV("stream %d now have %zu buffer caches, buf %p",
- streamId, bIdMap.size(), buf);
- return std::make_pair(true, mNextBufferId - 1);
- } else {
- return std::make_pair(false, it->second);
- }
+ return mBufferRecords.getBufferId(buf, streamId);
}
void Camera3Device::HalInterface::onBufferFreed(
int streamId, const native_handle_t* handle) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
- uint64_t bufferId = BUFFER_ID_NO_BUFFER;
- auto mapIt = mBufferIdMaps.find(streamId);
- if (mapIt == mBufferIdMaps.end()) {
- // streamId might be from a deleted stream here
- ALOGI("%s: stream %d has been removed",
- __FUNCTION__, streamId);
- return;
+ uint32_t bufferId = mBufferRecords.removeOneBufferCache(streamId, handle);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+ if (bufferId != BUFFER_ID_NO_BUFFER) {
+ mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
- BufferIdMap& bIdMap = mapIt->second;
- auto it = bIdMap.find(handle);
- if (it == bIdMap.end()) {
- ALOGW("%s: cannot find buffer %p in stream %d",
- __FUNCTION__, handle, streamId);
- return;
- } else {
- bufferId = it->second;
- bIdMap.erase(it);
- ALOGV("%s: stream %d now have %zu buffer caches after removing buf %p",
- __FUNCTION__, streamId, bIdMap.size(), handle);
- }
- mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
void Camera3Device::HalInterface::onStreamReConfigured(int streamId) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
- auto mapIt = mBufferIdMaps.find(streamId);
- if (mapIt == mBufferIdMaps.end()) {
- ALOGE("%s: streamId %d not found!", __FUNCTION__, streamId);
- return;
- }
-
- BufferIdMap& bIdMap = mapIt->second;
- for (const auto& it : bIdMap) {
- uint64_t bufferId = it.second;
+ std::vector<uint64_t> bufIds = mBufferRecords.clearBufferCaches(streamId);
+ std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+ for (auto bufferId : bufIds) {
mFreedBuffers.push_back(std::make_pair(streamId, bufferId));
}
- bIdMap.clear();
}
/**
@@ -6030,17 +4734,22 @@
status_t Camera3Device::RequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession) {
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords) {
Mutex::Autolock l(mRequestLock);
clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
- // Theoretically we should also check for mRepeatingRequests.empty(), but the API interface
- // is serialized by mInterfaceLock so skip that check.
+ // Wait until request thread is fully stopped
+ // TBD: check if request thread is being paused by other APIs (shouldn't be)
+
+ // We could also check for mRepeatingRequests.empty(), but the API interface
+ // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
+ // new requests during the call; hence skip that check.
bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
while (!queueEmpty) {
status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
if (res == TIMED_OUT) {
- ALOGE("%s: request thread failed to submit a request within timeout!", __FUNCTION__);
+ ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
return res;
} else if (res != OK) {
ALOGE("%s: request thread failed to submit a request: %s (%d)!",
@@ -6049,13 +4758,14 @@
}
queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
}
+
return mInterface->switchToOffline(
- streamsToKeep, offlineSessionInfo, offlineSession);
+ streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
}
nsecs_t Camera3Device::getExpectedInFlightDuration() {
ATRACE_CALL();
- Mutex::Autolock al(mInFlightLock);
+ std::lock_guard<std::mutex> l(mInFlightLock);
return mExpectedInflightDuration > kMinInflightDuration ?
mExpectedInflightDuration : kMinInflightDuration;
}
@@ -6141,7 +4851,7 @@
{
sp<Camera3Device> parent = mParent.promote();
if (parent != NULL) {
- Mutex::Autolock l(parent->mInFlightLock);
+ std::lock_guard<std::mutex> l(parent->mInFlightLock);
ssize_t idx = parent->mInFlightMap.indexOfKey(captureRequest->mResultExtras.frameNumber);
if (idx >= 0) {
ALOGV("%s: Remove inflight request from queue: frameNumber %" PRId64,
@@ -6830,6 +5540,7 @@
void Camera3Device::RequestBufferStateMachine::onStreamsConfigured() {
std::lock_guard<std::mutex> lock(mLock);
+ mSwitchedToOffline = false;
mStatus = RB_STATUS_READY;
return;
}
@@ -6872,6 +5583,20 @@
return;
}
+bool Camera3Device::RequestBufferStateMachine::onSwitchToOfflineSuccess() {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mRequestBufferOngoing) {
+ ALOGE("%s: HAL must not be requesting buffer after HAL returns switchToOffline!",
+ __FUNCTION__);
+ return false;
+ }
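+ // Ownership of in-flight requests and buffers is being handed off to the offline
+ // session, so the state machine can be treated as stopped from here on.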
+ mSwitchedToOffline = true;
+ mInflightMapEmpty = true;
+ mRequestThreadPaused = true;
+ mStatus = RB_STATUS_STOPPED;
+ return true;
+}
+
void Camera3Device::RequestBufferStateMachine::notifyTrackerLocked(bool active) {
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
@@ -6891,75 +5616,40 @@
return false;
}
-status_t Camera3Device::fixupMonochromeTags(const CameraMetadata& deviceInfo,
- CameraMetadata& resultMetadata) {
- status_t res = OK;
- if (!mNeedFixupMonochromeTags) {
- return res;
- }
+bool Camera3Device::startRequestBuffer() {
+ return mRequestBufferSM.startRequestBuffer();
+}
- // Remove tags that are not applicable to monochrome camera.
- int32_t tagsToRemove[] = {
- ANDROID_SENSOR_GREEN_SPLIT,
- ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
- ANDROID_COLOR_CORRECTION_MODE,
- ANDROID_COLOR_CORRECTION_TRANSFORM,
- ANDROID_COLOR_CORRECTION_GAINS,
- };
- for (auto tag : tagsToRemove) {
- res = resultMetadata.erase(tag);
- if (res != OK) {
- ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
- return res;
- }
- }
+void Camera3Device::endRequestBuffer() {
+ mRequestBufferSM.endRequestBuffer();
+}
- // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
- camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
- for (size_t i = 1; i < blEntry.count; i++) {
- blEntry.data.f[i] = blEntry.data.f[0];
- }
+nsecs_t Camera3Device::getWaitDuration() {
+ return kBaseGetBufferWait + getExpectedInFlightDuration();
+}
- // ANDROID_SENSOR_NOISE_PROFILE
- camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
- if (npEntry.count > 0 && npEntry.count % 2 == 0) {
- double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
- res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
- if (res != OK) {
- ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
- }
+void Camera3Device::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+ mInterface->getInflightBufferKeys(out);
+}
- // ANDROID_STATISTICS_LENS_SHADING_MAP
- camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
- camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
- if (lsSizeEntry.count == 2 && lsEntry.count > 0
- && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
- for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
- lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
- lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
- lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
- }
- }
+void Camera3Device::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+ mInterface->getInflightRequestBufferKeys(out);
+}
- // ANDROID_TONEMAP_CURVE_BLUE
- // ANDROID_TONEMAP_CURVE_GREEN
- // ANDROID_TONEMAP_CURVE_RED
- camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
- camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
- camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
- if (tcbEntry.count > 0
- && tcbEntry.count == tcgEntry.count
- && tcbEntry.count == tcrEntry.count) {
- for (size_t i = 0; i < tcbEntry.count; i++) {
- tcbEntry.data.f[i] = tcrEntry.data.f[i];
- tcgEntry.data.f[i] = tcrEntry.data.f[i];
- }
+std::vector<sp<Camera3StreamInterface>> Camera3Device::getAllStreams() {
+ std::vector<sp<Camera3StreamInterface>> ret;
+ bool hasInputStream = mInputStream != nullptr;
+ ret.reserve(mOutputStreams.size() + mDeletedStreams.size() + ((hasInputStream) ? 1 : 0));
+ if (hasInputStream) {
+ ret.push_back(mInputStream);
}
-
- return res;
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ ret.push_back(mOutputStreams[i]);
+ }
+ for (size_t i = 0; i < mDeletedStreams.size(); i++) {
+ ret.push_back(mDeletedStreams[i]);
+ }
+ return ret;
}
status_t Camera3Device::switchToOffline(
@@ -6972,24 +5662,231 @@
}
Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
- // 1. Stop repeating request, wait until request thread submitted all requests, then
- // call HAL switchToOffline
+ bool hasInputStream = mInputStream != nullptr;
+ int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
+ bool inputStreamSupportsOffline = hasInputStream ?
+ mInputStream->getOfflineProcessingSupport() : false;
+ auto outputStreamIds = mOutputStreams.getStreamIds();
+ auto streamIds = outputStreamIds;
+ if (hasInputStream) {
+ streamIds.push_back(mInputStream->getId());
+ }
+
+ // Check that all streams in streamsToKeep support offline mode
+ for (auto id : streamsToKeep) {
+ if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+ ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
+ return BAD_VALUE;
+ } else if (id == inputStreamId) {
+ if (!inputStreamSupportsOffline) {
+ ALOGE("%s: input stream %d cannot be switched to offline",
+ __FUNCTION__, id);
+ return BAD_VALUE;
+ }
+ } else {
+ sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
+ if (!stream->getOfflineProcessingSupport()) {
+ ALOGE("%s: output stream %d cannot be switched to offline",
+ __FUNCTION__, id);
+ return BAD_VALUE;
+ }
+ }
+ }
+
+ // TODO: block surface sharing and surface group streams until we can support them
+
+ // Stop repeating request, wait until all remaining requests are submitted, then call into
+ // HAL switchToOffline
hardware::camera::device::V3_6::CameraOfflineSessionInfo offlineSessionInfo;
sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession;
+ camera3::BufferRecords bufferRecords;
+ status_t ret = mRequestThread->switchToOffline(
+ streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
- mRequestThread->switchToOffline(streamsToKeep, &offlineSessionInfo, &offlineSession);
+ if (ret != OK) {
+ SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
+ return ret;
+ }
+ bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
+ if (!succ) {
+ SET_ERR("HAL must not be calling requestStreamBuffers call");
+ // TODO: block ALL callbacks from HAL till app configured new streams?
+ return UNKNOWN_ERROR;
+ }
- // 2. Verify offlineSessionInfo
- // 3. create Camera3OfflineSession and transfer object ownership
+ // Verify offlineSessionInfo
+ std::vector<int32_t> offlineStreamIds;
+ offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ // verify stream IDs
+ int32_t id = offlineStream.id;
+ if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+ SET_ERR("stream ID %d not found!", id);
+ return UNKNOWN_ERROR;
+ }
+
+ // When not using HAL buf manager, only allow streams requested by app to be preserved
+ if (!mUseHalBufManager) {
+ if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
+ SET_ERR("stream ID %d must not be switched to offline!", id);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ offlineStreamIds.push_back(id);
+ sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
+ static_cast<sp<Camera3StreamInterface>>(mInputStream) :
+ static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
+ // Verify number of outstanding buffers
+ if (stream->getOutstandingBuffersCount() != offlineStream.numOutstandingBuffers) {
+ SET_ERR("Offline stream %d # of remaining buffer mismatch: (%zu,%d) (service/HAL)",
+ id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Verify all streams to be deleted don't have any outstanding buffers
+ if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+ inputStreamId) == offlineStreamIds.end()) {
+ if (mInputStream->hasOutstandingBuffers()) {
+ SET_ERR("Input stream %d still has %zu outstanding buffer!",
+ inputStreamId, mInputStream->getOutstandingBuffersCount());
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ for (const auto& outStreamId : outputStreamIds) {
+ if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+ outStreamId) == offlineStreamIds.end()) {
+ auto outStream = mOutputStreams.get(outStreamId);
+ if (outStream->hasOutstandingBuffers()) {
+ SET_ERR("Output stream %d still has %zu outstanding buffer!",
+ outStreamId, outStream->getOutstandingBuffersCount());
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+
+ InFlightRequestMap offlineReqs;
+ // Verify inflight requests and their pending buffers
+ {
+ std::lock_guard<std::mutex> l(mInFlightLock);
+ for (auto offlineReq : offlineSessionInfo.offlineRequests) {
+ int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
+ if (idx == NAME_NOT_FOUND) {
+ SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
+ return UNKNOWN_ERROR;
+ }
+
+ const auto& inflightReq = mInFlightMap.valueAt(idx);
+ // TODO: check specific stream IDs
+ size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
+ if (numBuffersLeft != offlineReq.pendingStreams.size()) {
+ SET_ERR("Offline request # of remaining buffer mismatch: (%d,%d) (service/HAL)",
+ inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
+ return UNKNOWN_ERROR;
+ }
+ offlineReqs.add(offlineReq.frameNumber, inflightReq);
+ }
+ }
+
+ // Create Camera3OfflineSession and transfer object ownership
// (streams, inflight requests, buffer caches)
- *session = new Camera3OfflineSession(mId);
+ camera3::StreamSet offlineStreamSet;
+ sp<camera3::Camera3Stream> inputStream;
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ int32_t id = offlineStream.id;
+ if (mInputStream != nullptr && id == mInputStream->getId()) {
+ inputStream = mInputStream;
+ } else {
+ offlineStreamSet.add(id, mOutputStreams.get(id));
+ }
+ }
- // 4. Delete unneeded streams
- // 5. Verify Camera3Device is in unconfigured state
+ // TODO: check if we need to lock before copying states
+ // though technically no other thread should be talking to Camera3Device at this point
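+ // Snapshot the per-frame counters, device info, and mapper state so the offline session
+ // can keep processing results consistently with what this device has emitted so far.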
+ Camera3OfflineStates offlineStates(
+ mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
+ mUsePartialResult, mNumPartialResults, mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+ mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mDistortionMappers, mZoomRatioMappers);
+
+ *session = new Camera3OfflineSession(mId, inputStream, offlineStreamSet,
+ std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+
+ // Delete all streams that have been transferred to the offline session
+ Mutex::Autolock l(mLock);
+ for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+ int32_t id = offlineStream.id;
+ if (mInputStream != nullptr && id == mInputStream->getId()) {
+ mInputStream.clear();
+ } else {
+ mOutputStreams.remove(id);
+ }
+ }
+
+ // disconnect all other streams and switch to UNCONFIGURED state
+ if (mInputStream != nullptr) {
+ ret = mInputStream->disconnect();
+ if (ret != OK) {
+ SET_ERR_L("disconnect input stream failed!");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ for (auto streamId : mOutputStreams.getStreamIds()) {
+ sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
+ ret = stream->disconnect();
+ if (ret != OK) {
+ SET_ERR_L("disconnect output stream %d failed!", streamId);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ mInputStream.clear();
+ mOutputStreams.clear();
+ mNeedConfig = true;
+ internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+ mOperatingMode = NO_MODE;
+ mIsConstrainedHighSpeedConfiguration = false;
+
return OK;
+ // To be done by CameraDeviceClient/Camera3OfflineSession:
+ // register the offline client with the camera service
+ // Set up result passing threads, etc.
+ // Initialize the offline session so the HAL can start sending callbacks to it (result FMQ)
+ // TODO: check how many onIdle callbacks will be sent
+ // Java side must make sure the CameraCaptureSession is properly closed
+}
+
+void Camera3Device::getOfflineStreamIds(std::vector<int> *offlineStreamIds) {
+ ATRACE_CALL();
+
+ if (offlineStreamIds == nullptr) {
+ return;
+ }
+
+ Mutex::Autolock il(mInterfaceLock);
+
+ auto streamIds = mOutputStreams.getStreamIds();
+ bool hasInputStream = mInputStream != nullptr;
+ if (hasInputStream && mInputStream->getOfflineProcessingSupport()) {
+ offlineStreamIds->push_back(mInputStream->getId());
+ }
+
+ for (const auto & streamId : streamIds) {
+ sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+ // Streams that use the camera buffer manager are currently not supported in
+ // offline mode
+ if (stream->getOfflineProcessingSupport() &&
+ (stream->getStreamSetId() == CAMERA3_STREAM_SET_ID_INVALID)) {
+ offlineStreamIds->push_back(streamId);
+ }
+ }
}
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5faabd1..7279baf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -43,10 +43,14 @@
#include <camera/CaptureResult.h>
#include "common/CameraDeviceBase.h"
+#include "device3/BufferUtils.h"
#include "device3/StatusTracker.h"
#include "device3/Camera3BufferManager.h"
#include "device3/DistortionMapper.h"
#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputInterface.h"
+#include "device3/Camera3OfflineSession.h"
#include "utils/TagMonitor.h"
#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
@@ -69,7 +73,11 @@
*/
class Camera3Device :
public CameraDeviceBase,
- virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
+ virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+ public camera3::SetErrorInterface,
+ public camera3::InflightRequestUpdateInterface,
+ public camera3::RequestBufferInterface,
+ public camera3::FlushBufferInterface {
public:
explicit Camera3Device(const String8& id);
@@ -142,6 +150,8 @@
status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) override;
+ void getOfflineStreamIds(std::vector<int> *offlineStreamIds) override;
+
status_t createDefaultRequest(int templateId, CameraMetadata *request) override;
// Transitions to the idle state on success
@@ -201,6 +211,16 @@
status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
/*out*/ sp<CameraOfflineSessionBase>* session) override;
+ // RequestBufferInterface
+ bool startRequestBuffer() override;
+ void endRequestBuffer() override;
+ nsecs_t getWaitDuration() override;
+
+ // FlushBufferInterface
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+ std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
/**
* Helper functions to map between framework and HIDL values
*/
@@ -213,8 +233,6 @@
// Returns a negative error code if the passed-in operation mode is not valid.
static status_t mapToStreamConfigurationMode(camera3_stream_configuration_mode_t operationMode,
/*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
- static camera3_buffer_status_t mapHidlBufferStatus(
- hardware::camera::device::V3_2::BufferStatus status);
static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
static android_dataspace mapToFrameworkDataspace(
hardware::camera::device::V3_2::DataspaceFlags);
@@ -229,7 +247,6 @@
// internal typedefs
using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
- using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
static const size_t kDumpLockAttempts = 10;
static const size_t kDumpSleepDuration = 100000; // 0.10 sec
@@ -283,7 +300,8 @@
* Adapter for legacy HAL / HIDL HAL interface calls; calls either into legacy HALv3 or the
* HIDL HALv3 interfaces.
*/
- class HalInterface : public camera3::Camera3StreamBufferFreedListener {
+ class HalInterface : public camera3::Camera3StreamBufferFreedListener,
+ public camera3::BufferRecordsInterface {
public:
HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue,
@@ -321,27 +339,31 @@
bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
CameraMetadata& newSessionParams);
+ // Upon successful return, HalInterface will return buffer maps needed for offline
+ // processing, and clear all its internal buffer maps.
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords);
- // method to extract buffer's unique ID
- // return pair of (newlySeenBuffer?, bufferId)
- std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId);
+ /////////////////////////////////////////////////////////////////////
+ // Implements BufferRecordsInterface
- // Find a buffer_handle_t based on frame number and stream ID
+ std::pair<bool, uint64_t> getBufferId(
+ const buffer_handle_t& buf, int streamId) override;
+
status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
- /*out*/ buffer_handle_t **buffer);
+ /*out*/ buffer_handle_t **buffer) override;
- // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
status_t pushInflightRequestBuffer(
- uint64_t bufferId, buffer_handle_t* buf, int32_t streamId);
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) override;
- // Find a buffer_handle_t based on bufferId
status_t popInflightRequestBuffer(uint64_t bufferId,
/*out*/ buffer_handle_t** buffer,
- /*optional out*/ int32_t* streamId = nullptr);
+ /*optional out*/ int32_t* streamId = nullptr) override;
+
+ /////////////////////////////////////////////////////////////////////
// Get a vector of (frameNumber, streamId) pair of currently inflight
// buffers
@@ -352,7 +374,6 @@
void onStreamReConfigured(int streamId);
- static const uint64_t BUFFER_ID_NO_BUFFER = 0;
private:
// Always valid
sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
@@ -367,8 +388,6 @@
std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
- std::mutex mInflightLock;
-
// The output HIDL request still depends on input camera3_capture_request_t
// Do not free input camera3_capture_request_t before output HIDL request
status_t wrapAsHidlRequest(camera3_capture_request_t* in,
@@ -382,55 +401,20 @@
// Pop inflight buffers based on pairs of (frameNumber,streamId)
void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
- // Cache of buffer handles keyed off (frameNumber << 32 | streamId)
- std::unordered_map<uint64_t, buffer_handle_t*> mInflightBufferMap;
+ // Return true if the input caches match what we have; otherwise false
+ bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
// Delete and optionally close native handles and clear the input vector afterward
static void cleanupNativeHandles(
std::vector<native_handle_t*> *handles, bool closeFd = false);
- struct BufferHasher {
- size_t operator()(const buffer_handle_t& buf) const {
- if (buf == nullptr)
- return 0;
-
- size_t result = 1;
- result = 31 * result + buf->numFds;
- for (int i = 0; i < buf->numFds; i++) {
- result = 31 * result + buf->data[i];
- }
- return result;
- }
- };
-
- struct BufferComparator {
- bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
- if (buf1->numFds == buf2->numFds) {
- for (int i = 0; i < buf1->numFds; i++) {
- if (buf1->data[i] != buf2->data[i]) {
- return false;
- }
- }
- return true;
- }
- return false;
- }
- };
-
- std::mutex mBufferIdMapLock; // protecting mBufferIdMaps and mNextBufferId
- typedef std::unordered_map<const buffer_handle_t, uint64_t,
- BufferHasher, BufferComparator> BufferIdMap;
- // stream ID -> per stream buffer ID map
- std::unordered_map<int, BufferIdMap> mBufferIdMaps;
- uint64_t mNextBufferId = 1; // 0 means no buffer
-
virtual void onBufferFreed(int streamId, const native_handle_t* handle) override;
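+ // mFreedBuffersLock protects mFreedBuffers below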
+ std::mutex mFreedBuffersLock;
std::vector<std::pair<int, uint64_t>> mFreedBuffers;
- // Buffers given to HAL through requestStreamBuffer API
- std::mutex mRequestedBuffersLock;
- std::unordered_map<uint64_t, std::pair<int32_t, buffer_handle_t*>> mRequestedBuffers;
+ // Keep track of buffer cache and inflight buffer records
+ camera3::BufferRecords mBufferRecords;
uint32_t mNextStreamConfigCounter = 1;
@@ -473,24 +457,7 @@
// Tracking cause of fatal errors when in STATUS_ERROR
String8 mErrorCause;
- // Synchronized mapping of stream IDs to stream instances
- class StreamSet {
- public:
- status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
- ssize_t remove(int streamId);
- sp<camera3::Camera3OutputStreamInterface> get(int streamId);
- // get by (underlying) vector index
- sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
- size_t size() const;
- std::vector<int> getStreamIds();
- void clear();
-
- private:
- mutable std::mutex mLock;
- KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
- };
-
- StreamSet mOutputStreams;
+ camera3::StreamSet mOutputStreams;
sp<camera3::Camera3Stream> mInputStream;
int mNextStreamId;
bool mNeedConfig;
@@ -579,15 +546,6 @@
const hardware::hidl_vec<
hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
- // Handle one capture result. Assume that mProcessCaptureResultLock is held.
- void processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& result,
- const hardware::hidl_vec<
- hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
- status_t readOneCameraMetadataLocked(uint64_t fmqResultSize,
- hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
- const hardware::camera::device::V3_2::CameraMetadata& result);
-
// Handle one notify message
void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
@@ -710,11 +668,20 @@
* error message to indicate why. Only the first call's message will be
* used. The message is also sent to the log.
*/
- void setErrorState(const char *fmt, ...);
+ void setErrorState(const char *fmt, ...) override;
+ void setErrorStateLocked(const char *fmt, ...) override;
void setErrorStateV(const char *fmt, va_list args);
- void setErrorStateLocked(const char *fmt, ...);
void setErrorStateLockedV(const char *fmt, va_list args);
+ /////////////////////////////////////////////////////////////////////
+ // Implements InflightRequestUpdateInterface
+
+ void onInflightEntryRemovedLocked(nsecs_t duration) override;
+ void checkInflightMapLengthLocked() override;
+ void onInflightMapFlushedLocked() override;
+
+ /////////////////////////////////////////////////////////////////////
+
/**
* Debugging trylock/spin method
* Try to acquire a lock a few times with sleeps between before giving up.
@@ -722,12 +689,6 @@
bool tryLockSpinRightRound(Mutex& lock);
/**
- * Helper function to determine if an input size for implementation defined
- * format is supported.
- */
- bool isOpaqueInputSizeSupported(uint32_t width, uint32_t height);
-
- /**
* Helper function to get the largest Jpeg resolution (in area)
* Return Size(0, 0) if static metatdata is invalid
*/
@@ -860,7 +821,8 @@
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
- /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession);
+ /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+ /*out*/camera3::BufferRecords* bufferRecords);
protected:
@@ -1021,119 +983,12 @@
/**
* In-flight queue for tracking completion of capture requests.
*/
+ std::mutex mInFlightLock;
+ camera3::InFlightRequestMap mInFlightMap;
+ nsecs_t mExpectedInflightDuration = 0;
+ // End of mInFlightLock protection scope
- struct InFlightRequest {
- // Set by notify() SHUTTER call.
- nsecs_t shutterTimestamp;
- // Set by process_capture_result().
- nsecs_t sensorTimestamp;
- int requestStatus;
- // Set by process_capture_result call with valid metadata
- bool haveResultMetadata;
- // Decremented by calls to process_capture_result with valid output
- // and input buffers
- int numBuffersLeft;
- CaptureResultExtras resultExtras;
- // If this request has any input buffer
- bool hasInputBuffer;
-
- // The last metadata that framework receives from HAL and
- // not yet send out because the shutter event hasn't arrived.
- // It's added by process_capture_result and sent when framework
- // receives the shutter event.
- CameraMetadata pendingMetadata;
-
- // The metadata of the partial results that framework receives from HAL so far
- // and has sent out.
- CameraMetadata collectedPartialResult;
-
- // Buffers are added by process_capture_result when output buffers
- // return from HAL but framework has not yet received the shutter
- // event. They will be returned to the streams when framework receives
- // the shutter event.
- Vector<camera3_stream_buffer_t> pendingOutputBuffers;
-
- // Whether this inflight request's shutter and result callback are to be
- // called. The policy is that if the request is the last one in the constrained
- // high speed recording request list, this flag will be true. If the request list
- // is not for constrained high speed recording, this flag will also be true.
- bool hasCallback;
-
- // Maximum expected frame duration for this request.
- // For manual captures, equal to the max of requested exposure time and frame duration
- // For auto-exposure modes, equal to 1/(lower end of target FPS range)
- nsecs_t maxExpectedDuration;
-
- // Whether the result metadata for this request is to be skipped. The
- // result metadata should be skipped in the case of
- // REQUEST/RESULT error.
- bool skipResultMetadata;
-
- // The physical camera ids being requested.
- std::set<String8> physicalCameraIds;
-
- // Map of physicalCameraId <-> Metadata
- std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
-
- // Indicates a still capture request.
- bool stillCapture;
-
- // Indicates a ZSL capture request
- bool zslCapture;
-
- // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
- std::set<std::string> cameraIdsWithZoom;
-
- // What shared surfaces an output should go to
- SurfaceMap outputSurfaces;
-
- // Default constructor needed by KeyedVector
- InFlightRequest() :
- shutterTimestamp(0),
- sensorTimestamp(0),
- requestStatus(OK),
- haveResultMetadata(false),
- numBuffersLeft(0),
- hasInputBuffer(false),
- hasCallback(true),
- maxExpectedDuration(kDefaultExpectedDuration),
- skipResultMetadata(false),
- stillCapture(false),
- zslCapture(false) {
- }
-
- InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- bool hasAppCallback, nsecs_t maxDuration,
- const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
- bool isZslCapture, const std::set<std::string>& idsWithZoom,
- const SurfaceMap& outSurfaces = SurfaceMap{}) :
- shutterTimestamp(0),
- sensorTimestamp(0),
- requestStatus(OK),
- haveResultMetadata(false),
- numBuffersLeft(numBuffers),
- resultExtras(extras),
- hasInputBuffer(hasInput),
- hasCallback(hasAppCallback),
- maxExpectedDuration(maxDuration),
- skipResultMetadata(false),
- physicalCameraIds(physicalCameraIdSet),
- stillCapture(isStillCapture),
- zslCapture(isZslCapture),
- cameraIdsWithZoom(idsWithZoom),
- outputSurfaces(outSurfaces) {
- }
- };
-
- // Map from frame number to the in-flight request state
- typedef KeyedVector<uint32_t, InFlightRequest> InFlightMap;
-
-
- Mutex mInFlightLock; // Protects mInFlightMap and
- // mExpectedInflightDuration
- InFlightMap mInFlightMap;
- nsecs_t mExpectedInflightDuration = 0;
- int mInFlightStatusId;
+ int mInFlightStatusId; // const after initialize
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
@@ -1211,7 +1066,7 @@
*/
// Lock for output side of device
- Mutex mOutputLock;
+ std::mutex mOutputLock;
/**** Scope for mOutputLock ****/
// the minimal frame number of the next non-reprocess result
@@ -1226,61 +1081,18 @@
uint32_t mNextReprocessShutterFrameNumber;
// the minimal frame number of the next ZSL still capture shutter
uint32_t mNextZslStillShutterFrameNumber;
- List<CaptureResult> mResultQueue;
- Condition mResultSignal;
- wp<NotificationListener> mListener;
+ List<CaptureResult> mResultQueue;
+ std::condition_variable mResultSignal;
+ wp<NotificationListener> mListener;
/**** End scope for mOutputLock ****/
- /**
- * Callback functions from HAL device
- */
- void processCaptureResult(const camera3_capture_result *result);
-
- void notify(const camera3_notify_msg *msg);
-
- // Specific notify handlers
- void notifyError(const camera3_error_msg_t &msg,
- sp<NotificationListener> listener);
- void notifyShutter(const camera3_shutter_msg_t &msg,
- sp<NotificationListener> listener);
-
- // helper function to return the output buffers to the streams.
- void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
- size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
- // The following arguments are only meant for surface sharing use case
- const SurfaceMap& outputSurfaces = SurfaceMap{},
- // Used to send buffer error callback when failing to return buffer
- const CaptureResultExtras &resultExtras = CaptureResultExtras{});
-
- // Send a partial capture result.
- void sendPartialCaptureResult(const camera_metadata_t * partialResult,
- const CaptureResultExtras &resultExtras, uint32_t frameNumber);
-
- // Send a total capture result given the pending metadata and result extras,
- // partial results, and the frame number to the result queue.
- void sendCaptureResult(CameraMetadata &pendingMetadata,
- CaptureResultExtras &resultExtras,
- CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess, bool zslStillCapture,
- const std::set<std::string>& cameraIdsWithZoom,
- const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
-
- bool isLastFullResult(const InFlightRequest& inFlightRequest);
-
- // Insert the result to the result queue after updating frame number and overriding AE
- // trigger cancel.
- // mOutputLock must be held when calling this function.
- void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
-
/**** Scope for mInFlightLock ****/
// Remove the in-flight map entry of the given index from mInFlightMap.
// It must only be called with mInFlightLock held.
void removeInFlightMapEntryLocked(int idx);
- // Remove the in-flight request of the given index from mInFlightMap
- // if it's no longer needed. It must only be called with mInFlightLock held.
- void removeInFlightRequestIfReadyLocked(int idx);
+
// Remove all in-flight requests and return all buffers.
// This is used after HAL interface is closed to cleanup any request/buffers
// not returned by HAL.
@@ -1378,6 +1190,10 @@
void onSubmittingRequest();
void onRequestThreadPaused();
+ // Events triggered by successful switchToOffline call
+ // Returns true if there is no ongoing requestBuffer call.
+ bool onSwitchToOfflineSuccess();
+
private:
void notifyTrackerLocked(bool active);
@@ -1391,6 +1207,7 @@
bool mRequestThreadPaused = true;
bool mInflightMapEmpty = true;
bool mRequestBufferOngoing = false;
+ bool mSwitchedToOffline = false;
wp<camera3::StatusTracker> mStatusTracker;
int mRequestBufferStatusId;
@@ -1398,7 +1215,6 @@
// Fix up result metadata for monochrome camera.
bool mNeedFixupMonochromeTags;
- status_t fixupMonochromeTags(const CameraMetadata& deviceInfo, CameraMetadata& resultMetadata);
// Whether HAL supports offline processing capability.
bool mSupportOfflineProcessing = false;
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index dc5cbb3..0f05632 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -29,90 +29,435 @@
#include <utils/Trace.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
#include "device3/Camera3OfflineSession.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
#include "device3/Camera3SharedOutputStream.h"
+#include "utils/CameraTraces.h"
using namespace android::camera3;
using namespace android::hardware::camera;
namespace android {
-Camera3OfflineSession::Camera3OfflineSession(const String8 &id):
- mId(id)
-{
+Camera3OfflineSession::Camera3OfflineSession(const String8 &id,
+ const sp<camera3::Camera3Stream>& inputStream,
+ const camera3::StreamSet& offlineStreamSet,
+ camera3::BufferRecords&& bufferRecords,
+ const camera3::InFlightRequestMap& offlineReqs,
+ const Camera3OfflineStates& offlineStates,
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession) :
+ mId(id),
+ mInputStream(inputStream),
+ mOutputStreams(offlineStreamSet),
+ mBufferRecords(std::move(bufferRecords)),
+ mOfflineReqs(offlineReqs),
+ mSession(offlineSession),
+ mTagMonitor(offlineStates.mTagMonitor),
+ mVendorTagId(offlineStates.mVendorTagId),
+ mUseHalBufManager(offlineStates.mUseHalBufManager),
+ mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
+ mUsePartialResult(offlineStates.mUsePartialResult),
+ mNumPartialResults(offlineStates.mNumPartialResults),
+ mNextResultFrameNumber(offlineStates.mNextResultFrameNumber),
+ mNextReprocessResultFrameNumber(offlineStates.mNextReprocessResultFrameNumber),
+ mNextZslStillResultFrameNumber(offlineStates.mNextZslStillResultFrameNumber),
+ mNextShutterFrameNumber(offlineStates.mNextShutterFrameNumber),
+ mNextReprocessShutterFrameNumber(offlineStates.mNextReprocessShutterFrameNumber),
+ mNextZslStillShutterFrameNumber(offlineStates.mNextZslStillShutterFrameNumber),
+ mDeviceInfo(offlineStates.mDeviceInfo),
+ mPhysicalDeviceInfoMap(offlineStates.mPhysicalDeviceInfoMap),
+ mDistortionMappers(offlineStates.mDistortionMappers),
+ mZoomRatioMappers(offlineStates.mZoomRatioMappers),
+ mStatus(STATUS_UNINITIALIZED) {
ATRACE_CALL();
ALOGV("%s: Created offline session for camera %s", __FUNCTION__, mId.string());
}
-Camera3OfflineSession::~Camera3OfflineSession()
-{
+Camera3OfflineSession::~Camera3OfflineSession() {
ATRACE_CALL();
ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.string());
+ disconnectImpl();
}
const String8& Camera3OfflineSession::getId() const {
return mId;
}
-status_t Camera3OfflineSession::initialize(
- sp<hardware::camera::device::V3_6::ICameraOfflineSession> /*hidlSession*/) {
+status_t Camera3OfflineSession::initialize(wp<NotificationListener> listener) {
ATRACE_CALL();
+
+ if (mSession == nullptr) {
+ ALOGE("%s: HIDL session is null!", __FUNCTION__);
+ return DEAD_OBJECT;
+ }
+
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+
+ mListener = listener;
+
+ // setup result FMQ
+ std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
+ auto resultQueueRet = mSession->getCaptureResultMetadataQueue(
+ [&resQueue](const auto& descriptor) {
+ resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+ if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+ ALOGE("HAL returns empty result metadata fmq, not use it");
+ resQueue = nullptr;
+ // Don't use resQueue from this point on.
+ }
+ });
+ if (!resultQueueRet.isOk()) {
+ ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+ resultQueueRet.description().c_str());
+ return DEAD_OBJECT;
+ }
+ mStatus = STATUS_ACTIVE;
+ }
+
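+ // Register this object as the HAL callback receiver so the processCaptureResult
+ // and notify implementations below are invoked for this offline session.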
+ mSession->setCallback(this);
+
return OK;
}
status_t Camera3OfflineSession::dump(int /*fd*/) {
ATRACE_CALL();
- return OK;
-}
-
-status_t Camera3OfflineSession::abort() {
- ATRACE_CALL();
+ std::lock_guard<std::mutex> il(mInterfaceLock);
return OK;
}
status_t Camera3OfflineSession::disconnect() {
ATRACE_CALL();
+ return disconnectImpl();
+}
+
+status_t Camera3OfflineSession::disconnectImpl() {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> il(mInterfaceLock);
+
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus == STATUS_CLOSED) {
+ return OK; // don't close twice
+ } else if (mStatus == STATUS_ERROR) {
+ ALOGE("%s: offline session %s shutting down in error state",
+ __FUNCTION__, mId.string());
+ }
+ listener = mListener.promote();
+ }
+
+ ALOGV("%s: E", __FUNCTION__);
+
+ {
+ std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+ mAllowRequestBuffer = false;
+ }
+
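+ // Keep weak references to the streams so any leaked strong references can be
+ // reported after the session drops its own references below.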
+ std::vector<wp<Camera3StreamInterface>> streams;
+ streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ streams.push_back(mOutputStreams[i]);
+ }
+ if (mInputStream != nullptr) {
+ streams.push_back(mInputStream);
+ }
+
+ mSession->close();
+
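+ // With the HAL session closed, flush the remaining in-flight requests and
+ // return their buffers.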
+ FlushInflightReqStates states {
+ mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
+ listener, *this, mBufferRecords, *this};
+
+ camera3::flushInflightRequests(states);
+
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ mSession.clear();
+ mOutputStreams.clear();
+ mInputStream.clear();
+ mStatus = STATUS_CLOSED;
+ }
+
+ for (auto& weakStream : streams) {
+ sp<Camera3StreamInterface> stream = weakStream.promote();
+ if (stream != nullptr) {
+ ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+ __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
+ }
+ }
+
+ ALOGV("%s: X", __FUNCTION__);
return OK;
}
-status_t Camera3OfflineSession::waitForNextFrame(nsecs_t /*timeout*/) {
+status_t Camera3OfflineSession::waitForNextFrame(nsecs_t timeout) {
ATRACE_CALL();
+ std::unique_lock<std::mutex> lk(mOutputLock);
+
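+ // Re-check the queue after every wakeup; the full timeout is re-armed on each
+ // wait, including spurious wakeups.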
+ while (mResultQueue.empty()) {
+ auto st = mResultSignal.wait_for(lk, std::chrono::nanoseconds(timeout));
+ if (st == std::cv_status::timeout) {
+ return TIMED_OUT;
+ }
+ }
return OK;
}
-status_t Camera3OfflineSession::getNextResult(CaptureResult* /*frame*/) {
+status_t Camera3OfflineSession::getNextResult(CaptureResult* frame) {
ATRACE_CALL();
+ std::lock_guard<std::mutex> l(mOutputLock);
+
+ if (mResultQueue.empty()) {
+ return NOT_ENOUGH_DATA;
+ }
+
+ if (frame == nullptr) {
+ ALOGE("%s: argument cannot be NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ CaptureResult &result = *(mResultQueue.begin());
+ frame->mResultExtras = result.mResultExtras;
+ frame->mMetadata.acquire(result.mMetadata);
+ frame->mPhysicalMetadatas = std::move(result.mPhysicalMetadatas);
+ mResultQueue.erase(mResultQueue.begin());
+
return OK;
}
hardware::Return<void> Camera3OfflineSession::processCaptureResult_3_4(
const hardware::hidl_vec<
- hardware::camera::device::V3_4::CaptureResult>& /*results*/) {
+ hardware::camera::device::V3_4::CaptureResult>& results) {
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
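+ // Bundle this session's state into CaptureOutputStates so the result handling
+ // path shared with Camera3Device (Camera3OutputUtils) can be reused.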
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
+
+ std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+ for (const auto& result : results) {
+ processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::processCaptureResult(
const hardware::hidl_vec<
- hardware::camera::device::V3_2::CaptureResult>& /*results*/) {
+ hardware::camera::device::V3_2::CaptureResult>& results) {
+ // TODO: change impl to call into processCaptureResult_3_4 instead?
+ // Might need to figure out how to reduce copies, though.
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
+ hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
+
+ std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+ for (const auto& result : results) {
+ processOneCaptureResultLocked(states, result, noPhysMetadata);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::notify(
- const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& /*msgs*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+ sp<NotificationListener> listener;
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ listener = mListener.promote();
+ }
+
+ CaptureOutputStates states {
+ mId,
+ mOfflineReqsLock, mOfflineReqs,
+ mOutputLock, mResultQueue, mResultSignal,
+ mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mNextResultFrameNumber,
+ mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+ mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+ mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+ mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mTagMonitor,
+ mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ };
+ for (const auto& msg : msgs) {
+ camera3::notify(states, msg);
+ }
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::requestStreamBuffers(
- const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& /*bufReqs*/,
- requestStreamBuffers_cb /*_hidl_cb*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ requestStreamBuffers_cb _hidl_cb) {
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ }
+
+ RequestBufferStates states {
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ *this, mBufferRecords, *this};
+ camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
}
hardware::Return<void> Camera3OfflineSession::returnStreamBuffers(
- const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& /*buffers*/) {
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+ {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStatus != STATUS_ACTIVE) {
+ ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+ return hardware::Void();
+ }
+ }
+
+ ReturnBufferStates states {
+ mId, mUseHalBufManager, mOutputStreams, mBufferRecords};
+ camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
+void Camera3OfflineSession::setErrorState(const char *fmt, ...) {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> lock(mLock);
+ va_list args;
+ va_start(args, fmt);
+
+ setErrorStateLockedV(fmt, args);
+
+ va_end(args);
+
+ //FIXME: automatically disconnect here?
+}
+
+void Camera3OfflineSession::setErrorStateLocked(const char *fmt, ...) {
+ va_list args;
+ va_start(args, fmt);
+
+ setErrorStateLockedV(fmt, args);
+
+ va_end(args);
+}
+
+void Camera3OfflineSession::setErrorStateLockedV(const char *fmt, va_list args) {
+ // Print out all error messages to log
+ String8 errorCause = String8::formatV(fmt, args);
+ ALOGE("Camera %s: %s", mId.string(), errorCause.string());
+
+ // But only do error state transition steps for the first error
+ if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
+
+ mErrorCause = errorCause;
+
+ mStatus = STATUS_ERROR;
+
+ // Notify upstream about a device error
+ sp<NotificationListener> listener = mListener.promote();
+ if (listener != NULL) {
+ listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ CaptureResultExtras());
+ }
+
+ // Save the stack trace. It can be viewed later by dumping.
+ CameraTraces::saveTrace();
+}
+
+void Camera3OfflineSession::onInflightEntryRemovedLocked(nsecs_t /*duration*/) {
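+ // Once the last offline request has been removed, stop serving further
+ // requestStreamBuffers calls from the HAL.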
+ if (mOfflineReqs.size() == 0) {
+ std::lock_guard<std::mutex> lock(mRequestBufferInterfaceLock);
+ mAllowRequestBuffer = false;
+ }
+}
+
+void Camera3OfflineSession::checkInflightMapLengthLocked() {
+ // Intentional empty impl.
+}
+
+void Camera3OfflineSession::onInflightMapFlushedLocked() {
+ // Intentional empty impl.
+}
+
+bool Camera3OfflineSession::startRequestBuffer() {
+ return mAllowRequestBuffer;
+}
+
+void Camera3OfflineSession::endRequestBuffer() {
+ // Intentional empty impl.
+}
+
+nsecs_t Camera3OfflineSession::getWaitDuration() {
+ const nsecs_t kBaseGetBufferWait = 3000000000; // 3 sec.
+ return kBaseGetBufferWait;
+}
+
+void Camera3OfflineSession::getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) {
+ mBufferRecords.getInflightBufferKeys(out);
+}
+
+void Camera3OfflineSession::getInflightRequestBufferKeys(std::vector<uint64_t>* out) {
+ mBufferRecords.getInflightRequestBufferKeys(out);
+}
+
+std::vector<sp<Camera3StreamInterface>> Camera3OfflineSession::getAllStreams() {
+ std::vector<sp<Camera3StreamInterface>> ret;
+ bool hasInputStream = mInputStream != nullptr;
+ ret.reserve(mOutputStreams.size() + ((hasInputStream) ? 1 : 0));
+ if (hasInputStream) {
+ ret.push_back(mInputStream);
+ }
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ ret.push_back(mOutputStreams[i]);
+ }
+ return ret;
+}
+
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 30e8c4f..b6b8a7d 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -17,16 +17,23 @@
#ifndef ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
#define ANDROID_SERVERS_CAMERA3OFFLINESESSION_H
+#include <memory>
+#include <mutex>
+
#include <utils/String8.h>
#include <utils/String16.h>
#include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
+
#include <fmq/MessageQueue.h>
#include "common/CameraOfflineSessionBase.h"
#include "device3/Camera3BufferManager.h"
#include "device3/DistortionMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3OutputUtils.h"
+#include "device3/ZoomRatioMapper.h"
#include "utils/TagMonitor.h"
#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
@@ -41,26 +48,90 @@
} // namespace camera3
+
+// An immutable struct containing general states that will be copied from Camera3Device to
+// Camera3OfflineSession
+struct Camera3OfflineStates {
+ Camera3OfflineStates(
+ const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
+ const bool useHalBufManager, const bool needFixupMonochromeTags,
+ const bool usePartialResult, const uint32_t numPartialResults,
+ const uint32_t nextResultFN, const uint32_t nextReprocResultFN,
+ const uint32_t nextZslResultFN, const uint32_t nextShutterFN,
+ const uint32_t nextReprocShutterFN, const uint32_t nextZslShutterFN,
+ const CameraMetadata& deviceInfo,
+ const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap,
+ const std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers,
+ const std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers) :
+ mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
+ mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
+ mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
+ mNextResultFrameNumber(nextResultFN),
+ mNextReprocessResultFrameNumber(nextReprocResultFN),
+ mNextZslStillResultFrameNumber(nextZslResultFN),
+ mNextShutterFrameNumber(nextShutterFN),
+ mNextReprocessShutterFrameNumber(nextReprocShutterFN),
+ mNextZslStillShutterFrameNumber(nextZslShutterFN),
+ mDeviceInfo(deviceInfo),
+ mPhysicalDeviceInfoMap(physicalDeviceInfoMap),
+ mDistortionMappers(distortionMappers),
+ mZoomRatioMappers(zoomRatioMappers) {}
+
+ const TagMonitor& mTagMonitor;
+ const metadata_vendor_id_t mVendorTagId;
+
+ const bool mUseHalBufManager;
+ const bool mNeedFixupMonochromeTags;
+
+ const bool mUsePartialResult;
+ const uint32_t mNumPartialResults;
+
+ // the minimal frame number of the next non-reprocess result
+ const uint32_t mNextResultFrameNumber;
+ // the minimal frame number of the next reprocess result
+ const uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ const uint32_t mNextZslStillResultFrameNumber;
+ // the minimal frame number of the next non-reprocess shutter
+ const uint32_t mNextShutterFrameNumber;
+ // the minimal frame number of the next reprocess shutter
+ const uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ const uint32_t mNextZslStillShutterFrameNumber;
+
+ const CameraMetadata& mDeviceInfo;
+
+ const std::unordered_map<std::string, CameraMetadata>& mPhysicalDeviceInfoMap;
+
+ const std::unordered_map<std::string, camera3::DistortionMapper>& mDistortionMappers;
+
+ const std::unordered_map<std::string, camera3::ZoomRatioMapper>& mZoomRatioMappers;
+};
+
/**
* Camera3OfflineSession for offline session defined in HIDL ICameraOfflineSession@3.6 or higher
*/
class Camera3OfflineSession :
public CameraOfflineSessionBase,
- virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
-
+ virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+ public camera3::SetErrorInterface,
+ public camera3::InflightRequestUpdateInterface,
+ public camera3::RequestBufferInterface,
+ public camera3::FlushBufferInterface {
public:
- // initialize by Camera3Device. Camera3Device must send all info in separate argument.
- // monitored tags
- // mUseHalBufManager
- // mUsePartialResult
- // mNumPartialResults
- explicit Camera3OfflineSession(const String8& id);
+ // Initialized by Camera3Device.
+ explicit Camera3OfflineSession(const String8& id,
+ const sp<camera3::Camera3Stream>& inputStream,
+ const camera3::StreamSet& offlineStreamSet,
+ camera3::BufferRecords&& bufferRecords,
+ const camera3::InFlightRequestMap& offlineReqs,
+ const Camera3OfflineStates& offlineStates,
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession);
virtual ~Camera3OfflineSession();
- status_t initialize(
- sp<hardware::camera::device::V3_6::ICameraOfflineSession> hidlSession);
+ virtual status_t initialize(wp<NotificationListener> listener) override;
/**
* CameraOfflineSessionBase interface
@@ -71,8 +142,6 @@
status_t dump(int fd) override;
- status_t abort() override;
-
// methods for capture result passing
status_t waitForNextFrame(nsecs_t timeout) override;
status_t getNextResult(CaptureResult *frame) override;
@@ -115,10 +184,99 @@
*/
private:
-
// Camera device ID
const String8 mId;
+ sp<camera3::Camera3Stream> mInputStream;
+ camera3::StreamSet mOutputStreams;
+ camera3::BufferRecords mBufferRecords;
+ std::mutex mOfflineReqsLock;
+ camera3::InFlightRequestMap mOfflineReqs;
+
+ sp<hardware::camera::device::V3_6::ICameraOfflineSession> mSession;
+
+ TagMonitor mTagMonitor;
+ const metadata_vendor_id_t mVendorTagId;
+
+ const bool mUseHalBufManager;
+ const bool mNeedFixupMonochromeTags;
+
+ const bool mUsePartialResult;
+ const uint32_t mNumPartialResults;
+
+ std::mutex mOutputLock;
+ List<CaptureResult> mResultQueue;
+ std::condition_variable mResultSignal;
+ // the minimal frame number of the next non-reprocess result
+ uint32_t mNextResultFrameNumber;
+ // the minimal frame number of the next reprocess result
+ uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next ZSL still capture result
+ uint32_t mNextZslStillResultFrameNumber;
+ // the minimal frame number of the next non-reprocess shutter
+ uint32_t mNextShutterFrameNumber;
+ // the minimal frame number of the next reprocess shutter
+ uint32_t mNextReprocessShutterFrameNumber;
+ // the minimal frame number of the next ZSL still capture shutter
+ uint32_t mNextZslStillShutterFrameNumber;
+ // End of mOutputLock scope
+
+ const CameraMetadata mDeviceInfo;
+ std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
+
+ std::unordered_map<std::string, camera3::DistortionMapper> mDistortionMappers;
+
+ std::unordered_map<std::string, camera3::ZoomRatioMapper> mZoomRatioMappers;
+
+ mutable std::mutex mLock;
+
+ enum Status {
+ STATUS_UNINITIALIZED = 0,
+ STATUS_ACTIVE,
+ STATUS_ERROR,
+ STATUS_CLOSED
+ } mStatus;
+
+ wp<NotificationListener> mListener;
+ // End of mLock protected scope
+
+ std::mutex mProcessCaptureResultLock;
+ // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+ std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+ // Tracking cause of fatal errors when in STATUS_ERROR
+ String8 mErrorCause;
+
+ // Lock to ensure requestStreamBuffers() callbacks are serialized
+ std::mutex mRequestBufferInterfaceLock;
+ // allow request buffer until all requests are processed or disconnectImpl is called
+ bool mAllowRequestBuffer = true;
+
+ // For client methods such as disconnect/dump
+ std::mutex mInterfaceLock;
+
+ // SetErrorInterface
+ void setErrorState(const char *fmt, ...) override;
+ void setErrorStateLocked(const char *fmt, ...) override;
+
+ // InflightRequestUpdateInterface
+ void onInflightEntryRemovedLocked(nsecs_t duration) override;
+ void checkInflightMapLengthLocked() override;
+ void onInflightMapFlushedLocked() override;
+
+ // RequestBufferInterface
+ bool startRequestBuffer() override;
+ void endRequestBuffer() override;
+ nsecs_t getWaitDuration() override;
+
+ // FlushBufferInterface
+ void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) override;
+ void getInflightRequestBufferKeys(std::vector<uint64_t>* out) override;
+ std::vector<sp<camera3::Camera3StreamInterface>> getAllStreams() override;
+
+ void setErrorStateLockedV(const char *fmt, va_list args);
+
+ status_t disconnectImpl();
}; // class Camera3OfflineSession
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputInterface.h b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
new file mode 100644
index 0000000..8817833
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputInterface.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_INTERFACE_H
+
+#include <memory>
+
+#include <cutils/native_handle.h>
+
+#include <utils/Timers.h>
+
+#include "device3/Camera3StreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+ /**
+ * Interfaces used by result/notification path shared between Camera3Device and
+ * Camera3OfflineSession
+ */
+ class SetErrorInterface {
+ public:
+ // Switch device into error state and send an ERROR_DEVICE notification
+ virtual void setErrorState(const char *fmt, ...) = 0;
+ // Same as setErrorState except this method assumes the caller holds the main object lock
+ virtual void setErrorStateLocked(const char *fmt, ...) = 0;
+
+ virtual ~SetErrorInterface() {}
+ };
+
+ // Interface used by callback path to update buffer records
+ class BufferRecordsInterface {
+ public:
+ // Method to extract a buffer's unique ID.
+ // Returns a pair of (newlySeenBuffer?, bufferId).
+ virtual std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId) = 0;
+
+ // Find a buffer_handle_t based on frame number and stream ID
+ virtual status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) = 0;
+
+ // Register a bufId (streamId, buffer_handle_t) to inflight request buffer
+ virtual status_t pushInflightRequestBuffer(
+ uint64_t bufferId, buffer_handle_t* buf, int32_t streamId) = 0;
+
+ // Find a buffer_handle_t based on bufferId
+ virtual status_t popInflightRequestBuffer(uint64_t bufferId,
+ /*out*/ buffer_handle_t** buffer,
+ /*optional out*/ int32_t* streamId = nullptr) = 0;
+
+ virtual ~BufferRecordsInterface() {}
+ };
+
+ class InflightRequestUpdateInterface {
+ public:
+ // Caller must hold the lock protecting the InflightRequestMap
+ // duration: the maxExpectedDuration of the removed entry
+ virtual void onInflightEntryRemovedLocked(nsecs_t duration) = 0;
+
+ virtual void checkInflightMapLengthLocked() = 0;
+
+ virtual void onInflightMapFlushedLocked() = 0;
+
+ virtual ~InflightRequestUpdateInterface() {}
+ };
+
+ class RequestBufferInterface {
+ public:
+ // Returns whether the state machine currently allows requestBuffers calls.
+ // If this returns true, the caller must call endRequestBuffer() later to signal the
+ // end of a request buffer transaction.
+ virtual bool startRequestBuffer() = 0;
+
+ virtual void endRequestBuffer() = 0;
+
+ // Returns how long the implementation should wait for a buffer to be returned
+ virtual nsecs_t getWaitDuration() = 0;
+
+ virtual ~RequestBufferInterface() {}
+ };
+
+ class FlushBufferInterface {
+ public:
+ virtual void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out) = 0;
+
+ virtual void getInflightRequestBufferKeys(std::vector<uint64_t>* out) = 0;
+
+ virtual std::vector<sp<Camera3StreamInterface>> getAllStreams() = 0;
+
+ virtual ~FlushBufferInterface() {}
+ };
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
new file mode 100644
index 0000000..64af91e
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutStrmIntf"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+status_t StreamSet::add(
+ int streamId, sp<camera3::Camera3OutputStreamInterface> stream) {
+ if (stream == nullptr) {
+ ALOGE("%s: cannot add null stream", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.add(streamId, stream);
+}
+
+ssize_t StreamSet::remove(int streamId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.removeItem(streamId);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::get(int streamId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ ssize_t idx = mData.indexOfKey(streamId);
+ if (idx == NAME_NOT_FOUND) {
+ return nullptr;
+ }
+ return mData.editValueAt(idx);
+}
+
+sp<camera3::Camera3OutputStreamInterface> StreamSet::operator[] (size_t index) {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.editValueAt(index);
+}
+
+size_t StreamSet::size() const {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.size();
+}
+
+void StreamSet::clear() {
+ std::lock_guard<std::mutex> lock(mLock);
+ return mData.clear();
+}
+
+std::vector<int> StreamSet::getStreamIds() {
+ std::lock_guard<std::mutex> lock(mLock);
+ std::vector<int> streamIds(mData.size());
+ for (size_t i = 0; i < mData.size(); i++) {
+ streamIds[i] = mData.keyAt(i);
+ }
+ return streamIds;
+}
+
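+// Lock the source set while copying so the new set gets a consistent snapshot.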
+StreamSet::StreamSet(const StreamSet& other) {
+ std::lock_guard<std::mutex> lock(other.mLock);
+ mData = other.mData;
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 2bde949..7f5c87a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -97,6 +97,26 @@
virtual const String8& getPhysicalCameraId() const = 0;
};
+// Helper class to organize a synchronized mapping of stream IDs to stream instances
+class StreamSet {
+ public:
+ status_t add(int streamId, sp<camera3::Camera3OutputStreamInterface>);
+ ssize_t remove(int streamId);
+ sp<camera3::Camera3OutputStreamInterface> get(int streamId);
+ // get by (underlying) vector index
+ sp<camera3::Camera3OutputStreamInterface> operator[] (size_t index);
+ size_t size() const;
+ std::vector<int> getStreamIds();
+ void clear();
+
+ StreamSet() {};
+ StreamSet(const StreamSet& other);
+
+ private:
+ mutable std::mutex mLock;
+ KeyedVector<int, sp<camera3::Camera3OutputStreamInterface>> mData;
+};
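+// Illustrative usage only (names are hypothetical):
+//   StreamSet streams;
+//   streams.add(streamId, outputStream);
+//   sp<Camera3OutputStreamInterface> s = streams.get(streamId);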
+
} // namespace camera3
} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
new file mode 100644
index 0000000..eb7b666
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -0,0 +1,1414 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-OutputUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState( \
+ "%s: " fmt, __FUNCTION__, \
+ ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+
+#include <camera_metadata_hidden.h>
+
+#include "device3/Camera3OutputUtils.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
+status_t fixupMonochromeTags(
+ CaptureOutputStates& states,
+ const CameraMetadata& deviceInfo,
+ CameraMetadata& resultMetadata) {
+ status_t res = OK;
+ if (!states.needFixupMonoChrome) {
+ return res;
+ }
+
+ // Remove tags that are not applicable to monochrome camera.
+ int32_t tagsToRemove[] = {
+ ANDROID_SENSOR_GREEN_SPLIT,
+ ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+ ANDROID_COLOR_CORRECTION_MODE,
+ ANDROID_COLOR_CORRECTION_TRANSFORM,
+ ANDROID_COLOR_CORRECTION_GAINS,
+ };
+ for (auto tag : tagsToRemove) {
+ res = resultMetadata.erase(tag);
+ if (res != OK) {
+ ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
+ return res;
+ }
+ }
+
+ // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
+ camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
+ for (size_t i = 1; i < blEntry.count; i++) {
+ blEntry.data.f[i] = blEntry.data.f[0];
+ }
+
+ // ANDROID_SENSOR_NOISE_PROFILE
+ camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
+ if (npEntry.count > 0 && npEntry.count % 2 == 0) {
+ double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
+ res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
+ if (res != OK) {
+ ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ // ANDROID_STATISTICS_LENS_SHADING_MAP
+ camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
+ camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
+ if (lsSizeEntry.count == 2 && lsEntry.count > 0
+ && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
+ for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
+ lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
+ lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
+ lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
+ }
+ }
+
+ // ANDROID_TONEMAP_CURVE_BLUE
+ // ANDROID_TONEMAP_CURVE_GREEN
+ // ANDROID_TONEMAP_CURVE_RED
+ camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
+ camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
+ camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
+ if (tcbEntry.count > 0
+ && tcbEntry.count == tcgEntry.count
+ && tcbEntry.count == tcrEntry.count) {
+ for (size_t i = 0; i < tcbEntry.count; i++) {
+ tcbEntry.data.f[i] = tcrEntry.data.f[i];
+ tcgEntry.data.f[i] = tcrEntry.data.f[i];
+ }
+ }
+
+ return res;
+}
+
+void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
+ if (result == nullptr) return;
+
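+ // Attach the vendor tag id to the result metadata, then record the frame
+ // number and request id before queueing the result.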
+ camera_metadata_t *meta = const_cast<camera_metadata_t *>(
+ result->mMetadata.getAndLock());
+ set_camera_metadata_vendor_id(meta, states.vendorTagId);
+ result->mMetadata.unlock(meta);
+
+ if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+ (int32_t*)&frameNumber, 1) != OK) {
+ SET_ERR("Failed to set frame number %d in metadata", frameNumber);
+ return;
+ }
+
+ if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
+ SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
+ return;
+ }
+
+ // Update vendor tag id for physical metadata
+ for (auto& physicalMetadata : result->mPhysicalMetadatas) {
+ camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
+ physicalMetadata.mPhysicalCameraMetadata.getAndLock());
+ set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+ physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
+ }
+
+ // Valid result, insert into queue
+ List<CaptureResult>::iterator queuedResult =
+ states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
+ ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32, __FUNCTION__,
+ queuedResult->mResultExtras.requestId,
+ queuedResult->mResultExtras.frameNumber,
+ queuedResult->mResultExtras.burstId);
+
+ states.resultSignal.notify_one();
+}
+
+
+void sendPartialCaptureResult(CaptureOutputStates& states,
+ const camera_metadata_t * partialResult,
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+ ATRACE_CALL();
+ std::lock_guard<std::mutex> l(states.outputLock);
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = partialResult;
+
+ // Fix up result metadata for monochrome camera.
+ status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+
+ insertResultLocked(states, &captureResult, frameNumber);
+}
+
+void sendCaptureResult(
+ CaptureOutputStates& states,
+ CameraMetadata &pendingMetadata,
+ CaptureResultExtras &resultExtras,
+ CameraMetadata &collectedPartialResult,
+ uint32_t frameNumber,
+ bool reprocess, bool zslStillCapture,
+ const std::set<std::string>& cameraIdsWithZoom,
+ const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
+ ATRACE_CALL();
+ if (pendingMetadata.isEmpty())
+ return;
+
+ std::lock_guard<std::mutex> l(states.outputLock);
+
+ // TODO: need to track errors for tighter bounds on expected frame number
+ if (reprocess) {
+ if (frameNumber < states.nextReprocResultFrameNum) {
+ SET_ERR("Out-of-order reprocess capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextReprocResultFrameNum);
+ return;
+ }
+ states.nextReprocResultFrameNum = frameNumber + 1;
+ } else if (zslStillCapture) {
+ if (frameNumber < states.nextZslResultFrameNum) {
+ SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextZslResultFrameNum);
+ return;
+ }
+ states.nextZslResultFrameNum = frameNumber + 1;
+ } else {
+ if (frameNumber < states.nextResultFrameNum) {
+ SET_ERR("Out-of-order capture result metadata submitted! "
+ "(got frame number %d, expecting %d)",
+ frameNumber, states.nextResultFrameNum);
+ return;
+ }
+ states.nextResultFrameNum = frameNumber + 1;
+ }
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = pendingMetadata;
+ captureResult.mPhysicalMetadatas = physicalMetadatas;
+
+ // Append any previous partials to form a complete result
+ if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
+ captureResult.mMetadata.append(collectedPartialResult);
+ }
+
+ captureResult.mMetadata.sort();
+
+ // Check that there's a timestamp in the result metadata
+ camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (timestamp.count == 0) {
+ SET_ERR("No timestamp provided by HAL for frame %d!",
+ frameNumber);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ camera_metadata_entry timestamp =
+ physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (timestamp.count == 0) {
+ SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
+ String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
+ return;
+ }
+ }
+
+ // Fix up some result metadata to account for HAL-level distortion correction
+ status_t res =
+ states.distortionMappers[states.cameraId.c_str()].correctCaptureResult(
+ &captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+
+ // Fix up result metadata to account for zoom ratio availabilities between
+ // HAL and app.
+ bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
+ res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
+ &captureResult.mMetadata, zoomRatioIs1);
+ if (res != OK) {
+ SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+ if (states.distortionMappers.find(cameraId8.c_str()) != states.distortionMappers.end()) {
+ res = states.distortionMappers[cameraId8.c_str()].correctCaptureResult(
+ &physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
+ frameNumber, strerror(-res), res);
+ return;
+ }
+ }
+
+ zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
+ res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
+ &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+ if (res != OK) {
+ SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
+ "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
+ return;
+ }
+ }
+
+ // Fix up result metadata for monochrome camera.
+ res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+ res = fixupMonochromeTags(states,
+ states.physicalDeviceInfoMap.at(cameraId8.c_str()),
+ physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
+ return;
+ }
+ }
+
+ std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
+ for (auto& m : physicalMetadatas) {
+ monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
+ CameraMetadata(m.mPhysicalCameraMetadata));
+ }
+ states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
+ frameNumber, timestamp.data.i64[0], captureResult.mMetadata,
+ monitoredPhysicalMetadata);
+
+ insertResultLocked(states, &captureResult, frameNumber);
+}
+
+// Read one camera metadata blob, either via the FMQ or from the result argument.
+// Assumes the FMQ is already protected by a lock.
+status_t readOneCameraMetadataLocked(
+ std::unique_ptr<ResultMetadataQueue>& fmq,
+ uint64_t fmqResultSize,
+ hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
+ const hardware::camera::device::V3_2::CameraMetadata& result) {
+ if (fmqResultSize > 0) {
+ resultMetadata.resize(fmqResultSize);
+ if (fmq == nullptr) {
+ return NO_MEMORY; // logged in initialize()
+ }
+ if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
+ ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
+ __FUNCTION__, fmqResultSize);
+ return INVALID_OPERATION;
+ }
+ } else {
+ resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
+ result.size());
+ }
+
+ if (resultMetadata.size() != 0) {
+ status_t res;
+ const camera_metadata_t* metadata =
+ reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+ size_t expected_metadata_size = resultMetadata.size();
+ if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
+ ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
+
+ return OK;
+}
+
+void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
+ ATRACE_CALL();
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
+ inflightMap.removeItemsAt(idx, 1);
+
+ states.inflightIntf.onInflightEntryRemovedLocked(duration);
+}
+
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ const InFlightRequest &request = inflightMap.valueAt(idx);
+ const uint32_t frameNumber = inflightMap.keyAt(idx);
+
+ nsecs_t sensorTimestamp = request.sensorTimestamp;
+ nsecs_t shutterTimestamp = request.shutterTimestamp;
+
+ // Check if it's okay to remove the request from InFlightMap:
+ // In the case of a successful request:
+ // all input and output buffers, all result metadata, shutter callback
+ // arrived.
+ // In the case of an unsuccessful request:
+ // all input and output buffers arrived.
+ if (request.numBuffersLeft == 0 &&
+ (request.skipResultMetadata ||
+ (request.haveResultMetadata && shutterTimestamp != 0))) {
+ if (request.stillCapture) {
+ ATRACE_ASYNC_END("still capture", frameNumber);
+ }
+
+ ATRACE_ASYNC_END("frame capture", frameNumber);
+
+ // Sanity check: the sensor timestamp should match the shutter timestamp
+ // when the request has a callback.
+ if (request.hasCallback && request.requestStatus == OK &&
+ sensorTimestamp != shutterTimestamp) {
+ SET_ERR("sensor timestamp (%" PRId64
+ ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
+ sensorTimestamp, frameNumber, shutterTimestamp);
+ }
+
+ // For an unsuccessful request, there may be pending output buffers to
+ // return.
+ assert(request.requestStatus != OK ||
+ request.pendingOutputBuffers.size() == 0);
+
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
+ request.outputSurfaces, request.resultExtras);
+
+ removeInFlightMapEntryLocked(states, idx);
+ ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
+ }
+
+ states.inflightIntf.checkInflightMapLengthLocked();
+}
+
+void processCaptureResult(CaptureOutputStates& states, const camera3_capture_result *result) {
+ ATRACE_CALL();
+
+ status_t res;
+
+ uint32_t frameNumber = result->frame_number;
+ if (result->result == NULL && result->num_output_buffers == 0 &&
+ result->input_buffer == NULL) {
+ SET_ERR("No result data provided by HAL for frame %d",
+ frameNumber);
+ return;
+ }
+
+ if (!states.usePartialResult &&
+ result->result != NULL &&
+ result->partial_result != 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
+ " if partial result is not supported",
+ frameNumber, result->partial_result);
+ return;
+ }
+
+ bool isPartialResult = false;
+ CameraMetadata collectedPartialResult;
+ bool hasInputBufferInRequest = false;
+
+ // Get shutter timestamp and resultExtras from list of in-flight requests,
+ // where it was added by the shutter notification for this frame. If the
+ // shutter timestamp isn't received yet, append the output buffers to the
+ // in-flight request and they will be returned when the shutter timestamp
+ // arrives. Update the in-flight status and remove the in-flight entry if
+ // all result data and shutter timestamp have been received.
+ nsecs_t shutterTimestamp = 0;
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
+ if (idx == NAME_NOT_FOUND) {
+ SET_ERR("Unknown frame number for capture result: %d",
+ frameNumber);
+ return;
+ }
+ InFlightRequest &request = states.inflightMap.editValueAt(idx);
+ ALOGVV("%s: got InFlightRequest requestId = %" PRId32
+ ", frameNumber = %" PRId64 ", burstId = %" PRId32
+ ", partialResultCount = %d, hasCallback = %d",
+ __FUNCTION__, request.resultExtras.requestId,
+ request.resultExtras.frameNumber, request.resultExtras.burstId,
+ result->partial_result, request.hasCallback);
+ // Always update the partial count to the latest one if it's not 0
+ // (buffers only). When framework aggregates adjacent partial results
+ // into one, the latest partial count will be used.
+ if (result->partial_result != 0)
+ request.resultExtras.partialResultCount = result->partial_result;
+
+ // Check if this result carries only partial metadata
+ if (states.usePartialResult && result->result != NULL) {
+ if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
+ SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
+ " the range of [1, %d] when metadata is included in the result",
+ frameNumber, result->partial_result, states.numPartialResults);
+ return;
+ }
+ isPartialResult = (result->partial_result < states.numPartialResults);
+ if (isPartialResult && result->num_physcam_metadata) {
+ SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
+ " physical camera result", frameNumber);
+ return;
+ }
+ if (isPartialResult) {
+ request.collectedPartialResult.append(result->result);
+ }
+
+ if (isPartialResult && request.hasCallback) {
+ // Send partial capture result
+ sendPartialCaptureResult(states, result->result, request.resultExtras,
+ frameNumber);
+ }
+ }
+
+ shutterTimestamp = request.shutterTimestamp;
+ hasInputBufferInRequest = request.hasInputBuffer;
+
+ // Did we get the (final) result metadata for this capture?
+ if (result->result != NULL && !isPartialResult) {
+ if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
+ SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
+ request.physicalCameraIds.size(), result->num_physcam_metadata);
+ return;
+ }
+ if (request.haveResultMetadata) {
+ SET_ERR("Called multiple times with metadata for frame %d",
+ frameNumber);
+ return;
+ }
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ String8 physicalId(result->physcam_ids[i]);
+ std::set<String8>::iterator cameraIdIter =
+ request.physicalCameraIds.find(physicalId);
+ if (cameraIdIter != request.physicalCameraIds.end()) {
+ request.physicalCameraIds.erase(cameraIdIter);
+ } else {
+ SET_ERR("Total result for frame %d has already returned for camera %s",
+ frameNumber, physicalId.c_str());
+ return;
+ }
+ }
+ if (states.usePartialResult &&
+ !request.collectedPartialResult.isEmpty()) {
+ collectedPartialResult.acquire(
+ request.collectedPartialResult);
+ }
+ request.haveResultMetadata = true;
+ }
+
+ uint32_t numBuffersReturned = result->num_output_buffers;
+ if (result->input_buffer != NULL) {
+ if (hasInputBufferInRequest) {
+ numBuffersReturned += 1;
+ } else {
+ ALOGW("%s: Input buffer should be NULL if there is no input"
+ " buffer sent in the request",
+ __FUNCTION__);
+ }
+ }
+ request.numBuffersLeft -= numBuffersReturned;
+ if (request.numBuffersLeft < 0) {
+ SET_ERR("Too many buffers returned for frame %d",
+ frameNumber);
+ return;
+ }
+
+ camera_metadata_ro_entry_t entry;
+ res = find_camera_metadata_ro_entry(result->result,
+ ANDROID_SENSOR_TIMESTAMP, &entry);
+ if (res == OK && entry.count == 1) {
+ request.sensorTimestamp = entry.data.i64[0];
+ }
+
+ // If shutter event isn't received yet, append the output buffers to
+ // the in-flight request. Otherwise, return the output buffers to
+ // streams.
+ if (shutterTimestamp == 0) {
+ request.pendingOutputBuffers.appendArray(result->output_buffers,
+ result->num_output_buffers);
+ } else {
+ bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+ returnOutputBuffers(states.useHalBufManager, states.listener,
+ result->output_buffers, result->num_output_buffers,
+ shutterTimestamp, timestampIncreasing,
+ request.outputSurfaces, request.resultExtras);
+ }
+
+ if (result->result != NULL && !isPartialResult) {
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ CameraMetadata physicalMetadata;
+ physicalMetadata.append(result->physcam_metadata[i]);
+ request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
+ physicalMetadata});
+ }
+ if (shutterTimestamp == 0) {
+ request.pendingMetadata = result->result;
+ request.collectedPartialResult = collectedPartialResult;
+ } else if (request.hasCallback) {
+ CameraMetadata metadata;
+ metadata = result->result;
+ sendCaptureResult(states, metadata, request.resultExtras,
+ collectedPartialResult, frameNumber,
+ hasInputBufferInRequest, request.zslCapture && request.stillCapture,
+ request.cameraIdsWithZoom, request.physicalMetadatas);
+ }
+ }
+ removeInFlightRequestIfReadyLocked(states, idx);
+ } // scope for states.inflightLock
+
+ if (result->input_buffer != NULL) {
+ if (hasInputBufferInRequest) {
+ Camera3Stream *stream =
+ Camera3Stream::cast(result->input_buffer->stream);
+ res = stream->returnInputBuffer(*(result->input_buffer));
+ // Note: stream may be deallocated at this point, if this buffer was the
+ // last reference to it.
+ if (res != OK) {
+ ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
+ " its stream:%s (%d)", __FUNCTION__,
+ frameNumber, strerror(-res), res);
+ }
+ } else {
+ ALOGW("%s: Input buffer should be NULL if there is no input"
+ " buffer sent in the request, skipping input buffer return.",
+ __FUNCTION__);
+ }
+ }
+}
+
+void processOneCaptureResultLocked(
+ CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
+ using hardware::camera::device::V3_2::StreamBuffer;
+ using hardware::camera::device::V3_2::BufferStatus;
+ std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
+ BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
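+ // Translate the HIDL CaptureResult into the legacy camera3_capture_result
+ // layout consumed by processCaptureResult() below.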
+ camera3_capture_result r;
+ status_t res;
+ r.frame_number = result.frameNumber;
+
+ // Read and validate the result metadata.
+ hardware::camera::device::V3_2::CameraMetadata resultMetadata;
+ res = readOneCameraMetadataLocked(
+ fmq, result.fmqResultSize,
+ resultMetadata, result.result);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata",
+ __FUNCTION__, result.frameNumber);
+ return;
+ }
+ r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+
+ // Read and validate physical camera metadata
+ size_t physResultCount = physicalCameraMetadata.size();
+ std::vector<const char*> physCamIds(physResultCount);
+ std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
+ std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
+ physResultMetadata.resize(physResultCount);
+ for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
+ res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
+ physResultMetadata[i], physicalCameraMetadata[i].metadata);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
+ __FUNCTION__, result.frameNumber,
+ physicalCameraMetadata[i].physicalCameraId.c_str());
+ return;
+ }
+ physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
+ phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
+ physResultMetadata[i].data());
+ }
+ r.num_physcam_metadata = physResultCount;
+ r.physcam_ids = physCamIds.data();
+ r.physcam_metadata = phyCamMetadatas.data();
+
+ std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+ std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
+ for (size_t i = 0; i < result.outputBuffers.size(); i++) {
+ auto& bDst = outputBuffers[i];
+ const StreamBuffer &bSrc = result.outputBuffers[i];
+
+ sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return;
+ }
+ bDst.stream = stream->asHalStream();
+
+ bool noBufferReturned = false;
+ buffer_handle_t *buffer = nullptr;
+ if (states.useHalBufManager) {
+ // This is suspicious most of the time, but can be correct during flush, where the
+ // HAL has to return a capture result before a buffer is requested
+ if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
+ if (bSrc.status == BufferStatus::OK) {
+ ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ // Still proceed so other buffers can be returned
+ }
+ noBufferReturned = true;
+ }
+ if (noBufferReturned) {
+ res = OK;
+ } else {
+ res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
+ }
+ } else {
+ res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return;
+ }
+
+ bDst.buffer = buffer;
+ bDst.status = mapHidlBufferStatus(bSrc.status);
+ bDst.acquire_fence = -1;
+ if (bSrc.releaseFence == nullptr) {
+ bDst.release_fence = -1;
+ } else if (bSrc.releaseFence->numFds == 1) {
+ if (noBufferReturned) {
+ ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
+ }
+ bDst.release_fence = dup(bSrc.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
+ return;
+ }
+ }
+ r.num_output_buffers = outputBuffers.size();
+ r.output_buffers = outputBuffers.data();
+
+ camera3_stream_buffer_t inputBuffer;
+ if (result.inputBuffer.streamId == -1) {
+ r.input_buffer = nullptr;
+ } else {
+ if (states.inputStream->getId() != result.inputBuffer.streamId) {
+ ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
+ result.frameNumber, result.inputBuffer.streamId);
+ return;
+ }
+ inputBuffer.stream = states.inputStream->asHalStream();
+ buffer_handle_t *buffer;
+ res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
+ &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
+ return;
+ }
+ inputBuffer.buffer = buffer;
+ inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
+ inputBuffer.acquire_fence = -1;
+ if (result.inputBuffer.releaseFence == nullptr) {
+ inputBuffer.release_fence = -1;
+ } else if (result.inputBuffer.releaseFence->numFds == 1) {
+ inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
+ return;
+ }
+ r.input_buffer = &inputBuffer;
+ }
+
+ r.partial_result = result.partialResult;
+
+ processCaptureResult(states, &r);
+}
+
+void returnOutputBuffers(
+ bool useHalBufManager,
+ sp<NotificationListener> listener,
+ const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
+ nsecs_t timestamp, bool timestampIncreasing,
+ const SurfaceMap& outputSurfaces,
+ const CaptureResultExtras &inResultExtras) {
+
+    for (size_t i = 0; i < numBuffers; i++) {
+ if (outputBuffers[i].buffer == nullptr) {
+ if (!useHalBufManager) {
+                // With the HAL buffer management API, the HAL sometimes has to return buffers
+                // that have not yet had an output buffer handle filled in. This is, however,
+                // illegal if the HAL buffer management API is not being used.
+ ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
+ }
+ continue;
+ }
+
+ Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
+ int streamId = stream->getId();
+ const auto& it = outputSurfaces.find(streamId);
+ status_t res = OK;
+ if (it != outputSurfaces.end()) {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, it->second,
+ inResultExtras.frameNumber);
+ } else {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
+ inResultExtras.frameNumber);
+ }
+
+ // Note: stream may be deallocated at this point, if this buffer was
+ // the last reference to it.
+ if (res == NO_INIT || res == DEAD_OBJECT) {
+ ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ } else if (res != OK) {
+ ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ }
+
+        // Long-processing consumers can cause a returnBuffer timeout for a shared stream.
+        // If that happens, cancel the buffer and send a buffer error to the client.
+ if (it != outputSurfaces.end() && res == TIMED_OUT &&
+ outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
+ // cancel the buffer
+ camera3_stream_buffer_t sb = outputBuffers[i];
+ sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+ stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
+ inResultExtras.frameNumber);
+
+ if (listener != nullptr) {
+ CaptureResultExtras extras = inResultExtras;
+ extras.errorStreamId = streamId;
+ listener->notifyError(
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
+ extras);
+ }
+ }
+ }
+}
+
+void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
+ ATRACE_CALL();
+ ssize_t idx;
+
+ // Set timestamp for the request in the in-flight tracking
+ // and get the request ID to send upstream
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ InFlightRequestMap& inflightMap = states.inflightMap;
+ idx = inflightMap.indexOfKey(msg.frame_number);
+ if (idx >= 0) {
+ InFlightRequest &r = inflightMap.editValueAt(idx);
+
+ // Verify ordering of shutter notifications
+ {
+ std::lock_guard<std::mutex> l(states.outputLock);
+ // TODO: need to track errors for tighter bounds on expected frame number.
+ if (r.hasInputBuffer) {
+ if (msg.frame_number < states.nextReprocShutterFrameNum) {
+ SET_ERR("Reprocess shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextReprocShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextReprocShutterFrameNum = msg.frame_number + 1;
+ } else if (r.zslCapture && r.stillCapture) {
+ if (msg.frame_number < states.nextZslShutterFrameNum) {
+ SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextZslShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextZslShutterFrameNum = msg.frame_number + 1;
+ } else {
+ if (msg.frame_number < states.nextShutterFrameNum) {
+ SET_ERR("Shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ states.nextShutterFrameNum, msg.frame_number);
+ return;
+ }
+ states.nextShutterFrameNum = msg.frame_number + 1;
+ }
+ }
+
+ r.shutterTimestamp = msg.timestamp;
+ if (r.hasCallback) {
+ ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+ states.cameraId.string(), __FUNCTION__,
+ msg.frame_number, r.resultExtras.requestId, msg.timestamp);
+ // Call listener, if any
+ if (states.listener != nullptr) {
+ states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+ }
+ // send pending result and buffers
+ sendCaptureResult(states,
+ r.pendingMetadata, r.resultExtras,
+ r.collectedPartialResult, msg.frame_number,
+ r.hasInputBuffer, r.zslCapture && r.stillCapture,
+ r.cameraIdsWithZoom, r.physicalMetadatas);
+ }
+ bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ r.pendingOutputBuffers.array(),
+ r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
+ r.outputSurfaces, r.resultExtras);
+ r.pendingOutputBuffers.clear();
+
+ removeInFlightRequestIfReadyLocked(states, idx);
+ }
+ }
+ if (idx < 0) {
+ SET_ERR("Shutter notification for non-existent frame number %d",
+ msg.frame_number);
+ }
+}
+
+void notifyError(CaptureOutputStates& states, const camera3_error_msg_t &msg) {
+ ATRACE_CALL();
+ // Map camera HAL error codes to ICameraDeviceCallback error codes
+ // Index into this with the HAL error code
+ static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
+ // 0 = Unused error code
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
+ // 1 = CAMERA3_MSG_ERROR_DEVICE
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+ // 2 = CAMERA3_MSG_ERROR_REQUEST
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ // 3 = CAMERA3_MSG_ERROR_RESULT
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
+ // 4 = CAMERA3_MSG_ERROR_BUFFER
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
+ };
+
+ int32_t errorCode =
+ ((msg.error_code >= 0) &&
+ (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
+ halErrorMap[msg.error_code] :
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
+
+ int streamId = 0;
+ String16 physicalCameraId;
+ if (msg.error_stream != nullptr) {
+ Camera3Stream *stream =
+ Camera3Stream::cast(msg.error_stream);
+ streamId = stream->getId();
+ physicalCameraId = String16(stream->physicalCameraId());
+ }
+ ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
+ states.cameraId.string(), __FUNCTION__, msg.frame_number,
+ streamId, msg.error_code);
+
+ CaptureResultExtras resultExtras;
+ switch (errorCode) {
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
+ // SET_ERR calls into listener to notify application
+ SET_ERR("Camera HAL reported serious device error");
+ break;
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+ {
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
+ if (idx >= 0) {
+ InFlightRequest &r = states.inflightMap.editValueAt(idx);
+ r.requestStatus = msg.error_code;
+ resultExtras = r.resultExtras;
+ bool logicalDeviceResultError = false;
+ if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
+ errorCode) {
+ if (physicalCameraId.size() > 0) {
+ String8 cameraId(physicalCameraId);
+ auto iter = r.physicalCameraIds.find(cameraId);
+ if (iter == r.physicalCameraIds.end()) {
+ ALOGE("%s: Reported result failure for physical camera device: %s "
+ " which is not part of the respective request!",
+ __FUNCTION__, cameraId.string());
+ break;
+ }
+ r.physicalCameraIds.erase(iter);
+ resultExtras.errorPhysicalCameraId = physicalCameraId;
+ } else {
+ logicalDeviceResultError = true;
+ }
+ }
+
+ if (logicalDeviceResultError
+ || hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
+ errorCode) {
+ r.skipResultMetadata = true;
+ }
+ if (logicalDeviceResultError) {
+                    // In case of a missing result, check whether the buffers
+                    // have been returned. If they have, remove the in-flight
+                    // request.
+                    // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
+                    //       Otherwise we are depending on the HAL to send the buffers back
+                    //       after calling notifyError. Not sure if that's in the spec.
+ removeInFlightRequestIfReadyLocked(states, idx);
+ }
+ } else {
+ resultExtras.frameNumber = msg.frame_number;
+ ALOGE("Camera %s: %s: cannot find in-flight request on "
+ "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
+ resultExtras.frameNumber);
+ }
+ }
+ resultExtras.errorStreamId = streamId;
+ if (states.listener != nullptr) {
+ states.listener->notifyError(errorCode, resultExtras);
+ } else {
+ ALOGE("Camera %s: %s: no listener available",
+ states.cameraId.string(), __FUNCTION__);
+ }
+ break;
+ default:
+ // SET_ERR calls notifyError
+ SET_ERR("Unknown error message from HAL: %d", msg.error_code);
+ break;
+ }
+}
+
+void notify(CaptureOutputStates& states, const camera3_notify_msg *msg) {
+ switch (msg->type) {
+ case CAMERA3_MSG_ERROR: {
+ notifyError(states, msg->message.error);
+ break;
+ }
+ case CAMERA3_MSG_SHUTTER: {
+ notifyShutter(states, msg->message.shutter);
+ break;
+ }
+ default:
+ SET_ERR("Unknown notify message from HAL: %d",
+ msg->type);
+ }
+}
+
+void notify(CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::NotifyMsg& msg) {
+ using android::hardware::camera::device::V3_2::MsgType;
+ using android::hardware::camera::device::V3_2::ErrorCode;
+
+ ATRACE_CALL();
+ camera3_notify_msg m;
+ switch (msg.type) {
+ case MsgType::ERROR:
+ m.type = CAMERA3_MSG_ERROR;
+ m.message.error.frame_number = msg.msg.error.frameNumber;
+ if (msg.msg.error.errorStreamId >= 0) {
+ sp<Camera3StreamInterface> stream =
+ states.outputStreams.get(msg.msg.error.errorStreamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
+ m.message.error.frame_number, msg.msg.error.errorStreamId);
+ return;
+ }
+ m.message.error.error_stream = stream->asHalStream();
+ } else {
+ m.message.error.error_stream = nullptr;
+ }
+ switch (msg.msg.error.errorCode) {
+ case ErrorCode::ERROR_DEVICE:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+ break;
+ case ErrorCode::ERROR_REQUEST:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+ break;
+ case ErrorCode::ERROR_RESULT:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
+ break;
+ case ErrorCode::ERROR_BUFFER:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+ break;
+ }
+ break;
+ case MsgType::SHUTTER:
+ m.type = CAMERA3_MSG_SHUTTER;
+ m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
+ m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+ break;
+ }
+ notify(states, &m);
+}
+
+void requestStreamBuffers(RequestBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
+ using android::hardware::camera::device::V3_2::BufferStatus;
+ using android::hardware::camera::device::V3_2::StreamBuffer;
+ using android::hardware::camera::device::V3_5::BufferRequestStatus;
+ using android::hardware::camera::device::V3_5::StreamBufferRet;
+ using android::hardware::camera::device::V3_5::StreamBufferRequestError;
+
+ std::lock_guard<std::mutex> lock(states.reqBufferLock);
+
+ hardware::hidl_vec<StreamBufferRet> bufRets;
+ if (!states.useHalBufManager) {
+ ALOGE("%s: Camera %s does not support HAL buffer management",
+ __FUNCTION__, states.cameraId.string());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ SortedVector<int32_t> streamIds;
+ ssize_t sz = streamIds.setCapacity(bufReqs.size());
+ if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
+ ALOGE("%s: failed to allocate memory for %zu buffer requests",
+ __FUNCTION__, bufReqs.size());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ if (bufReqs.size() > states.outputStreams.size()) {
+ ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
+ __FUNCTION__, bufReqs.size(), states.outputStreams.size());
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+
+ // Check for repeated streamId
+ for (const auto& bufReq : bufReqs) {
+ if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
+            ALOGE("%s: Stream %d appears multiple times in buffer requests",
+ __FUNCTION__, bufReq.streamId);
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
+ return;
+ }
+ streamIds.add(bufReq.streamId);
+ }
+
+ if (!states.reqBufferIntf.startRequestBuffer()) {
+ ALOGE("%s: request buffer disallowed while camera service is configuring",
+ __FUNCTION__);
+ _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
+ return;
+ }
+
+ bufRets.resize(bufReqs.size());
+
+ bool allReqsSucceeds = true;
+ bool oneReqSucceeds = false;
+ for (size_t i = 0; i < bufReqs.size(); i++) {
+ const auto& bufReq = bufReqs[i];
+ auto& bufRet = bufRets[i];
+ int32_t streamId = bufReq.streamId;
+ sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
+ if (outputStream == nullptr) {
+ ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
+ hardware::hidl_vec<StreamBufferRet> emptyBufRets;
+ _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
+ states.reqBufferIntf.endRequestBuffer();
+ return;
+ }
+
+ if (outputStream->isAbandoned()) {
+ bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ allReqsSucceeds = false;
+ continue;
+ }
+
+ bufRet.streamId = streamId;
+ size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
+ uint32_t numBuffersRequested = bufReq.numBuffersRequested;
+ size_t totalHandout = handOutBufferCount + numBuffersRequested;
+ uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
+ if (totalHandout > maxBuffers) {
+            // Not able to allocate enough buffers. Exit early for this stream.
+            ALOGE("%s: requesting too many buffers for stream %d: at HAL: %zu + requesting: %d"
+ " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
+ numBuffersRequested, maxBuffers);
+ bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
+ allReqsSucceeds = false;
+ continue;
+ }
+
+ hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
+ bool currentReqSucceeds = true;
+ std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
+ size_t numAllocatedBuffers = 0;
+ size_t numPushedInflightBuffers = 0;
+ for (size_t b = 0; b < numBuffersRequested; b++) {
+ camera3_stream_buffer_t& sb = streamBuffers[b];
+            // Since this method can run concurrently with the request thread,
+            // we need to update the wait duration every time we call getBuffer.
+ nsecs_t waitDuration = states.reqBufferIntf.getWaitDuration();
+ status_t res = outputStream->getBuffer(&sb, waitDuration);
+ if (res != OK) {
+ if (res == NO_INIT || res == DEAD_OBJECT) {
+ ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ } else {
+ ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ if (res == TIMED_OUT || res == NO_MEMORY) {
+ bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+ } else {
+ bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+ }
+ }
+ currentReqSucceeds = false;
+ break;
+ }
+ numAllocatedBuffers++;
+
+ buffer_handle_t *buffer = sb.buffer;
+ auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
+ bool isNewBuffer = pair.first;
+ uint64_t bufferId = pair.second;
+ StreamBuffer& hBuf = tmpRetBuffers[b];
+
+ hBuf.streamId = streamId;
+ hBuf.bufferId = bufferId;
+ hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
+ hBuf.status = BufferStatus::OK;
+ hBuf.releaseFence = nullptr;
+
+ native_handle_t *acquireFence = nullptr;
+ if (sb.acquire_fence != -1) {
+ acquireFence = native_handle_create(1,0);
+ acquireFence->data[0] = sb.acquire_fence;
+ }
+ hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
+ hBuf.releaseFence = nullptr;
+
+ res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
+ if (res != OK) {
+                ALOGE("%s: Can't register request buffer for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
+ currentReqSucceeds = false;
+ break;
+ }
+ numPushedInflightBuffers++;
+ }
+ if (currentReqSucceeds) {
+ bufRet.val.buffers(std::move(tmpRetBuffers));
+ oneReqSucceeds = true;
+ } else {
+ allReqsSucceeds = false;
+ for (size_t b = 0; b < numPushedInflightBuffers; b++) {
+ StreamBuffer& hBuf = tmpRetBuffers[b];
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+ hBuf.bufferId, &buffer);
+ if (res != OK) {
+ SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
+ __FUNCTION__, streamId, strerror(-res), res);
+ }
+ }
+ for (size_t b = 0; b < numAllocatedBuffers; b++) {
+ camera3_stream_buffer_t& sb = streamBuffers[b];
+ sb.acquire_fence = -1;
+ sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+ streamBuffers.data(), numAllocatedBuffers, 0);
+ }
+ }
+
+ _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
+ oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
+ BufferRequestStatus::FAILED_UNKNOWN,
+ bufRets);
+ states.reqBufferIntf.endRequestBuffer();
+}
+
+void returnStreamBuffers(ReturnBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+ if (!states.useHalBufManager) {
+        ALOGE("%s: Camera %s does not support HAL buffer management",
+ __FUNCTION__, states.cameraId.string());
+ return;
+ }
+
+ for (const auto& buf : buffers) {
+ if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
+ ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
+ continue;
+ }
+
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
+
+ if (res != OK) {
+ ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
+ __FUNCTION__, buf.bufferId, buf.streamId);
+ continue;
+ }
+
+ camera3_stream_buffer_t streamBuffer;
+ streamBuffer.buffer = buffer;
+ streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ streamBuffer.acquire_fence = -1;
+ streamBuffer.release_fence = -1;
+
+ if (buf.releaseFence == nullptr) {
+ streamBuffer.release_fence = -1;
+ } else if (buf.releaseFence->numFds == 1) {
+ streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Invalid release fence, fd count is %d, not 1",
+ __FUNCTION__, buf.releaseFence->numFds);
+ continue;
+ }
+
+ sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
+ if (stream == nullptr) {
+ ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
+ continue;
+ }
+ streamBuffer.stream = stream->asHalStream();
+ returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+ &streamBuffer, /*size*/1, /*timestamp*/ 0);
+ }
+}
+
+void flushInflightRequests(FlushInflightReqStates& states) {
+ ATRACE_CALL();
+ { // First return buffers cached in mInFlightMap
+ std::lock_guard<std::mutex> l(states.inflightLock);
+ for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
+ const InFlightRequest &request = states.inflightMap.valueAt(idx);
+ returnOutputBuffers(
+ states.useHalBufManager, states.listener,
+ request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(), 0,
+ /*timestampIncreasing*/true, request.outputSurfaces,
+ request.resultExtras);
+ }
+ states.inflightMap.clear();
+ states.inflightIntf.onInflightMapFlushedLocked();
+ }
+
+ // Then return all inflight buffers not returned by HAL
+ std::vector<std::pair<int32_t, int32_t>> inflightKeys;
+ states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);
+
+ // Inflight buffers for HAL buffer manager
+ std::vector<uint64_t> inflightRequestBufferKeys;
+ states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);
+
+ // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
+ // frameNumber will be -1 for buffers from HAL buffer manager
+ std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
+ inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
+
+ for (auto& pair : inflightKeys) {
+ int32_t frameNumber = pair.first;
+ int32_t streamId = pair.second;
+ buffer_handle_t* buffer;
+ status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
+ __FUNCTION__, frameNumber, streamId);
+ continue;
+ }
+ inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
+ }
+
+ for (auto& bufferId : inflightRequestBufferKeys) {
+ int32_t streamId = -1;
+ buffer_handle_t* buffer = nullptr;
+ status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+ bufferId, &buffer, &streamId);
+ if (res != OK) {
+ ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
+ continue;
+ }
+ inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
+ }
+
+ std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();
+
+ for (auto& tuple : inflightBuffers) {
+ status_t res = OK;
+ int32_t streamId = std::get<0>(tuple);
+ int32_t frameNumber = std::get<1>(tuple);
+ buffer_handle_t* buffer = std::get<2>(tuple);
+
+ camera3_stream_buffer_t streamBuffer;
+ streamBuffer.buffer = buffer;
+ streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ streamBuffer.acquire_fence = -1;
+ streamBuffer.release_fence = -1;
+
+ for (auto& stream : streams) {
+ if (streamId == stream->getId()) {
+ // Return buffer to deleted stream
+ camera3_stream* halStream = stream->asHalStream();
+ streamBuffer.stream = halStream;
+ switch (halStream->stream_type) {
+ case CAMERA3_STREAM_OUTPUT:
+ res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
+ /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
+ if (res != OK) {
+ ALOGE("%s: Can't return output buffer for frame %d to"
+ " stream %d: %s (%d)", __FUNCTION__,
+ frameNumber, streamId, strerror(-res), res);
+ }
+ break;
+ case CAMERA3_STREAM_INPUT:
+ res = stream->returnInputBuffer(streamBuffer);
+ if (res != OK) {
+ ALOGE("%s: Can't return input buffer for frame %d to"
+ " stream %d: %s (%d)", __FUNCTION__,
+ frameNumber, streamId, strerror(-res), res);
+ }
+ break;
+                    default: // Bi-directional stream is deprecated
+ ALOGE("%s: stream %d has unknown stream type %d",
+ __FUNCTION__, streamId, halStream->stream_type);
+ break;
+ }
+ break;
+ }
+ }
+ }
+}
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
new file mode 100644
index 0000000..47d8095
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_UTILS_H
+
+#include <memory>
+#include <mutex>
+
+#include <cutils/native_handle.h>
+
+#include <fmq/MessageQueue.h>
+
+#include <common/CameraDeviceBase.h>
+
+#include "device3/BufferUtils.h"
+#include "device3/DistortionMapper.h"
+#include "device3/ZoomRatioMapper.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3Stream.h"
+#include "device3/Camera3OutputStreamInterface.h"
+#include "utils/TagMonitor.h"
+
+namespace android {
+
+using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+
+namespace camera3 {
+
+ /**
+ * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
+ */
+    // Helper function to return output buffers to their output streams.
+ void returnOutputBuffers(
+ bool useHalBufManager,
+ sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
+ const camera3_stream_buffer_t *outputBuffers,
+ size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
+ // The following arguments are only meant for surface sharing use case
+ const SurfaceMap& outputSurfaces = SurfaceMap{},
+ // Used to send buffer error callback when failing to return buffer
+ const CaptureResultExtras &resultExtras = CaptureResultExtras{});
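+    //
+    // Typical call (illustrative sketch, mirroring notifyShutter(); assumes a populated
+    // CaptureOutputStates states and an InFlightRequest request):
+    //   returnOutputBuffers(states.useHalBufManager, states.listener,
+    //           request.pendingOutputBuffers.array(), request.pendingOutputBuffers.size(),
+    //           request.shutterTimestamp, timestampIncreasing,
+    //           request.outputSurfaces, request.resultExtras);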
+
+ // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
+ // callbacks
+ struct CaptureOutputStates {
+ const String8& cameraId;
+ std::mutex& inflightLock;
+ InFlightRequestMap& inflightMap; // end of inflightLock scope
+ std::mutex& outputLock;
+ List<CaptureResult>& resultQueue;
+ std::condition_variable& resultSignal;
+ uint32_t& nextShutterFrameNum;
+ uint32_t& nextReprocShutterFrameNum;
+ uint32_t& nextZslShutterFrameNum;
+ uint32_t& nextResultFrameNum;
+ uint32_t& nextReprocResultFrameNum;
+ uint32_t& nextZslResultFrameNum; // end of outputLock scope
+ const bool useHalBufManager;
+ const bool usePartialResult;
+ const bool needFixupMonoChrome;
+ const uint32_t numPartialResults;
+ const metadata_vendor_id_t vendorTagId;
+ const CameraMetadata& deviceInfo;
+ const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap;
+ std::unique_ptr<ResultMetadataQueue>& fmq;
+ std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers;
+ std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers;
+ TagMonitor& tagMonitor;
+ sp<Camera3Stream> inputStream;
+ StreamSet& outputStreams;
+ sp<NotificationListener> listener;
+ SetErrorInterface& setErrIntf;
+ InflightRequestUpdateInterface& inflightIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ };
+
+    // Handle one capture result. Assumes callers hold the lock to serialize all
+    // processCaptureResult calls.
+ void processOneCaptureResultLocked(
+ CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
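+    // (The HIDL result and physical camera metadata are translated into a
+    // camera3_capture_result_t, which is then handed to processCaptureResult().)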
+
+ // Handle one notify message
+ void notify(CaptureOutputStates& states,
+ const hardware::camera::device::V3_2::NotifyMsg& msg);
+
+ struct RequestBufferStates {
+ const String8& cameraId;
+ std::mutex& reqBufferLock; // lock to serialize request buffer calls
+ const bool useHalBufManager;
+ StreamSet& outputStreams;
+ SetErrorInterface& setErrIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ RequestBufferInterface& reqBufferIntf;
+ };
+
+ void requestStreamBuffers(RequestBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+ hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb);
+
+ struct ReturnBufferStates {
+ const String8& cameraId;
+ const bool useHalBufManager;
+ StreamSet& outputStreams;
+ BufferRecordsInterface& bufferRecordsIntf;
+ };
+
+ void returnStreamBuffers(ReturnBufferStates& states,
+ const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers);
+
+ struct FlushInflightReqStates {
+ const String8& cameraId;
+ std::mutex& inflightLock;
+ InFlightRequestMap& inflightMap; // end of inflightLock scope
+ const bool useHalBufManager;
+ sp<NotificationListener> listener;
+ InflightRequestUpdateInterface& inflightIntf;
+ BufferRecordsInterface& bufferRecordsIntf;
+ FlushBufferInterface& flushBufferIntf;
+ };
+
+ void flushInflightRequests(FlushInflightReqStates& states);
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index b5e37c2..e645e05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -65,6 +65,12 @@
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+ virtual bool getOfflineProcessingSupport() const {
+ // As per the camera spec, shared streams currently do not support
+ // offline mode.
+ return false;
+ }
+
private:
static const size_t kMaxOutputs = 4;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index f707ef8..7916ddb 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -151,6 +151,14 @@
return mPhysicalCameraId;
}
+void Camera3Stream::setOfflineProcessingSupport(bool support) {
+ mSupportOfflineProcessing = support;
+}
+
+bool Camera3Stream::getOfflineProcessingSupport() const {
+ return mSupportOfflineProcessing;
+}
+
status_t Camera3Stream::forceToIdle() {
ATRACE_CALL();
Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 805df82..d768d3d 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,9 @@
android_dataspace getOriginalDataSpace() const;
const String8& physicalCameraId() const;
+ void setOfflineProcessingSupport(bool) override;
+ bool getOfflineProcessingSupport() const override;
+
camera3_stream* asHalStream() override {
return this;
}
@@ -592,6 +595,8 @@
String8 mPhysicalCameraId;
nsecs_t mLastTimestamp;
+
+ bool mSupportOfflineProcessing = false;
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 73f501a..667e3bb 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -23,6 +23,7 @@
#include "Camera3StreamBufferListener.h"
#include "Camera3StreamBufferFreedListener.h"
+struct camera3_stream;
struct camera3_stream_buffer;
namespace android {
@@ -54,6 +55,7 @@
android_dataspace dataSpace;
uint64_t consumerUsage;
bool finalized = false;
+ bool supportsOffline = false;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0) {}
@@ -99,6 +101,12 @@
virtual android_dataspace getOriginalDataSpace() const = 0;
/**
+ * Offline processing
+ */
+ virtual void setOfflineProcessingSupport(bool support) = 0;
+ virtual bool getOfflineProcessingSupport() const = 0;
+
+ /**
* Get a HAL3 handle for the stream, without starting stream configuration.
*/
virtual camera3_stream* asHalStream() = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index a255003..7dcb67b 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -36,6 +36,15 @@
public:
DistortionMapper();
+ DistortionMapper(const DistortionMapper& other) :
+ mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
+ mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
+ mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
+ mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
+ mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
+ mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
+ mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+
/**
* Check whether distortion correction is supported by the camera HAL
*/
@@ -173,7 +182,7 @@
// pre-calculated inverses for speed
float mInvFx, mInvFy;
// radial/tangential distortion parameters
- float mK[5];
+ std::array<float, 5> mK;
// pre-correction active array dimensions
float mArrayWidth, mArrayHeight;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
new file mode 100644
index 0000000..ceeaa71
--- /dev/null
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+#define ANDROID_SERVERS_CAMERA3_INFLIGHT_REQUEST_H
+
+#include <set>
+
+#include <camera/CaptureResult.h>
+#include <camera/CameraMetadata.h>
+#include <utils/String8.h>
+#include <utils/Timers.h>
+
+#include "hardware/camera3.h"
+
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+namespace camera3 {
+
+struct InFlightRequest {
+ // Set by notify() SHUTTER call.
+ nsecs_t shutterTimestamp;
+ // Set by process_capture_result().
+ nsecs_t sensorTimestamp;
+ int requestStatus;
+ // Set by process_capture_result call with valid metadata
+ bool haveResultMetadata;
+ // Decremented by calls to process_capture_result with valid output
+ // and input buffers
+ int numBuffersLeft;
+ CaptureResultExtras resultExtras;
+ // If this request has any input buffer
+ bool hasInputBuffer;
+
+    // The last metadata that the framework has received from the HAL but
+    // not yet sent out because the shutter event hasn't arrived.
+    // It's added by process_capture_result and sent when the framework
+    // receives the shutter event.
+ CameraMetadata pendingMetadata;
+
+    // The metadata of the partial results that the framework has received from the HAL
+    // so far and has sent out.
+ CameraMetadata collectedPartialResult;
+
+    // Buffers are added by process_capture_result when output buffers
+    // return from the HAL but the framework has not yet received the shutter
+    // event. They will be returned to the streams when the framework receives
+    // the shutter event.
+ Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+
+ // Whether this inflight request's shutter and result callback are to be
+ // called. The policy is that if the request is the last one in the constrained
+ // high speed recording request list, this flag will be true. If the request list
+ // is not for constrained high speed recording, this flag will also be true.
+ bool hasCallback;
+
+ // Maximum expected frame duration for this request.
+ // For manual captures, equal to the max of requested exposure time and frame duration
+ // For auto-exposure modes, equal to 1/(lower end of target FPS range)
+ nsecs_t maxExpectedDuration;
+
+ // Whether the result metadata for this request is to be skipped. The
+ // result metadata should be skipped in the case of
+ // REQUEST/RESULT error.
+ bool skipResultMetadata;
+
+ // The physical camera ids being requested.
+ std::set<String8> physicalCameraIds;
+
+ // Map of physicalCameraId <-> Metadata
+ std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
+
+ // Indicates a still capture request.
+ bool stillCapture;
+
+ // Indicates a ZSL capture request
+ bool zslCapture;
+
+ // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
+ std::set<std::string> cameraIdsWithZoom;
+
+ // What shared surfaces an output should go to
+ SurfaceMap outputSurfaces;
+
+ // TODO: dedupe
+ static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+
+ // Default constructor needed by KeyedVector
+ InFlightRequest() :
+ shutterTimestamp(0),
+ sensorTimestamp(0),
+ requestStatus(OK),
+ haveResultMetadata(false),
+ numBuffersLeft(0),
+ hasInputBuffer(false),
+ hasCallback(true),
+ maxExpectedDuration(kDefaultExpectedDuration),
+ skipResultMetadata(false),
+ stillCapture(false),
+ zslCapture(false) {
+ }
+
+ InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
+ bool hasAppCallback, nsecs_t maxDuration,
+ const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+ bool isZslCapture, const std::set<std::string>& idsWithZoom,
+ const SurfaceMap& outSurfaces = SurfaceMap{}) :
+ shutterTimestamp(0),
+ sensorTimestamp(0),
+ requestStatus(OK),
+ haveResultMetadata(false),
+ numBuffersLeft(numBuffers),
+ resultExtras(extras),
+ hasInputBuffer(hasInput),
+ hasCallback(hasAppCallback),
+ maxExpectedDuration(maxDuration),
+ skipResultMetadata(false),
+ physicalCameraIds(physicalCameraIdSet),
+ stillCapture(isStillCapture),
+ zslCapture(isZslCapture),
+ cameraIdsWithZoom(idsWithZoom),
+ outputSurfaces(outSurfaces) {
+ }
+};
+
+// Map from frame number to the in-flight request state
+typedef KeyedVector<uint32_t, InFlightRequest> InFlightRequestMap;
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 675ad24..bf89ca5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -201,8 +201,9 @@
return HStatus::ILLEGAL_ARGUMENT;
}
+ std::vector<int> offlineStreamIds;
binder::Status ret = mDeviceRemote->endConfigure(convertFromHidl(operatingMode),
- cameraMetadata);
+ cameraMetadata, &offlineStreamIds);
return B2HStatus(ret);
}
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 4037a66..262f962 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -33,6 +33,16 @@
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID)
{}
+TagMonitor::TagMonitor(const TagMonitor& other):
+ mMonitoringEnabled(other.mMonitoringEnabled.load()),
+ mMonitoredTagList(other.mMonitoredTagList),
+ mLastMonitoredRequestValues(other.mLastMonitoredRequestValues),
+ mLastMonitoredResultValues(other.mLastMonitoredResultValues),
+ mLastMonitoredPhysicalRequestKeys(other.mLastMonitoredPhysicalRequestKeys),
+ mLastMonitoredPhysicalResultKeys(other.mLastMonitoredPhysicalResultKeys),
+ mMonitoringEvents(other.mMonitoringEvents),
+ mVendorTagId(other.mVendorTagId) {}
+
const String16 TagMonitor::kMonitorOption = String16("-m");
const char* TagMonitor::k3aTags =
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 1b7b033..413f502 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -50,6 +50,8 @@
TagMonitor();
+ TagMonitor(const TagMonitor& other);
+
void initialize(metadata_vendor_id_t id) { mVendorTagId = id; }
// Parse tag name list (comma-separated) and if valid, enable monitoring
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/AnalyticsState.h
index 4711bb6..290ed21 100644
--- a/services/mediametrics/AnalyticsState.h
+++ b/services/mediametrics/AnalyticsState.h
@@ -56,6 +56,24 @@
}
/**
+ * Returns the TimeMachine.
+ *
+ * The TimeMachine object is internally locked, so access is safe and defined,
+ * but multithreaded access may change results after the call returns.
+ */
+ TimeMachine& timeMachine() { return mTimeMachine; }
+ const TimeMachine& timeMachine() const { return mTimeMachine; }
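+ // Usage sketch (illustrative; mirrors the lookups in AudioAnalytics.cpp and assumes the
+ // key and the AMEDIAMETRICS_PROP_OUTPUTDEVICES property are present in the TimeMachine):
+ //   std::string outputDevices;
+ //   mAnalyticsState->timeMachine().get(key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);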
+
+ /**
+ * Returns the TransactionLog.
+ *
+ * The TransactionLog object is internally locked, so access is safe and defined,
+ * but multithreaded access may change results after the call returns.
+ */
+ TransactionLog& transactionLog() { return mTransactionLog; }
+ const TransactionLog& transactionLog() const { return mTransactionLog; }
+
+ /**
* Returns a pair consisting of the dump string, and the number of lines in the string.
*
* The number of lines in the returned pair is used as an optimization
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index f7ee051..126e501 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -32,16 +32,60 @@
// This triggers on an item of "audio.flinger"
// with a property "event" set to "AudioFlinger" (the constructor).
mActions.addAction(
- "audio.flinger.event",
- std::string("AudioFlinger"),
+ AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
std::make_shared<AnalyticsActions::Function>(
- [this](const std::shared_ptr<const android::mediametrics::Item> &){
- ALOGW("Audioflinger() constructor event detected");
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ ALOGW("(key=%s) AudioFlinger constructor event detected", item->getKey().c_str());
mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
*mAnalyticsState.get()));
// Note: get returns shared_ptr temp, whose lifetime is extended
// to end of full expression.
mAnalyticsState->clear(); // TODO: filter the analytics state.
+ // Perhaps report this.
+ }));
+
+ // Check underruns
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN),
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ std::string threadId = item->getKey().substr(
+ sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) - 1);
+ std::string outputDevices;
+ mAnalyticsState->timeMachine().get(
+ item->getKey(), AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+ ALOGD("(key=%s) Thread underrun event detected on io handle:%s device:%s",
+ item->getKey().c_str(), threadId.c_str(), outputDevices.c_str());
+ if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+ // report this for Bluetooth
+ }
+ }));
+
+ // Check latencies, playback and startup
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_LATENCYMS,
+ std::monostate{}, // accept any value
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ double latencyMs{};
+ double startupMs{};
+ if (!item->get(AMEDIAMETRICS_PROP_LATENCYMS, &latencyMs)
+ || !item->get(AMEDIAMETRICS_PROP_STARTUPMS, &startupMs)) return;
+
+ std::string trackId = item->getKey().substr(
+ sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) - 1);
+ std::string thread = getThreadFromTrack(item->getKey());
+ std::string outputDevices;
+ mAnalyticsState->timeMachine().get(
+ thread, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+ ALOGD("(key=%s) Track latencyMs:%lf startupMs:%lf detected on port:%s device:%s",
+ item->getKey().c_str(), latencyMs, startupMs,
+ trackId.c_str(), outputDevices.c_str());
+ if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+ // report this for Bluetooth
+ }
}));
}
@@ -53,7 +97,7 @@
status_t AudioAnalytics::submit(
const std::shared_ptr<const mediametrics::Item>& item, bool isTrusted)
{
- if (!startsWith(item->getKey(), "audio.")) return BAD_VALUE;
+ if (!startsWith(item->getKey(), AMEDIAMETRICS_KEY_PREFIX_AUDIO)) return BAD_VALUE;
status_t status = mAnalyticsState->submit(item, isTrusted);
if (status != NO_ERROR) return status; // may not be permitted.
@@ -94,4 +138,16 @@
}
}
+// HELPER METHODS
+
+std::string AudioAnalytics::getThreadFromTrack(const std::string& track) const
+{
+ int32_t threadId_int32{};
+ if (mAnalyticsState->timeMachine().get(
+ track, AMEDIAMETRICS_PROP_THREADID, &threadId_int32) != NO_ERROR) {
+ return {};
+ }
+ return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) + std::to_string(threadId_int32);
+}
+
} // namespace android
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index be885ec..4a42e22 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -72,6 +72,14 @@
*/
void checkActions(const std::shared_ptr<const mediametrics::Item>& item);
+ // HELPER METHODS
+ /**
+ * Return the audio thread associated with an audio track name.
+ * e.g. "audio.track.32" -> "audio.thread.10" if the associated
+ * threadId for the audio track is 10.
+ */
+ std::string getThreadFromTrack(const std::string& track) const;
+
// Actions is individually locked
AnalyticsActions mActions;
diff --git a/services/mediametrics/Wrap.h b/services/mediametrics/Wrap.h
index 99963fb..3584e08 100644
--- a/services/mediametrics/Wrap.h
+++ b/services/mediametrics/Wrap.h
@@ -150,11 +150,17 @@
*/
class LockedPointer {
friend LockWrap;
- LockedPointer(T *t, std::mutex *lock)
- : mT(t), mLock(*lock) {}
+ LockedPointer(T *t, std::recursive_mutex *lock, std::atomic<size_t> *recursionDepth)
+ : mT(t), mLock(*lock), mRecursionDepth(recursionDepth) { ++*mRecursionDepth; }
+
T* const mT;
- std::lock_guard<std::mutex> mLock;
+ std::lock_guard<std::recursive_mutex> mLock;
+ std::atomic<size_t>* mRecursionDepth;
public:
+ ~LockedPointer() {
+ --*mRecursionDepth; // Used for testing, we do not check underflow.
+ }
+
const T* operator->() const {
return mT;
}
@@ -163,27 +169,36 @@
}
};
- mutable std::mutex mLock;
+ // We must use a recursive mutex because the end of the full expression may
+ // involve another access through operator->().
+ //
+ // A recursive mutex allows the same thread to acquire it recursively,
+ // but a different thread would block.
+ //
+ // Example which fails with a normal mutex:
+ //
+ // android::mediametrics::LockWrap<std::vector<int>> v{std::initializer_list<int>{1, 2}};
+ // const int sum = v->operator[](0) + v->operator[](1);
+ //
+ mutable std::recursive_mutex mLock;
mutable T mT;
+ mutable std::atomic<size_t> mRecursionDepth{}; // Used for testing.
public:
template <typename... Args>
explicit LockWrap(Args&&... args) : mT(std::forward<Args>(args)...) {}
const LockedPointer operator->() const {
- return LockedPointer(&mT, &mLock);
+ return LockedPointer(&mT, &mLock, &mRecursionDepth);
}
LockedPointer operator->() {
- return LockedPointer(&mT, &mLock);
+ return LockedPointer(&mT, &mLock, &mRecursionDepth);
}
+ // Returns the lock depth of the recursive mutex.
// @TestApi
- bool isLocked() const {
- if (mLock.try_lock()) {
- mLock.unlock();
- return false; // we were able to get the lock.
- }
- return true; // we were NOT able to get the lock.
+ size_t getRecursionDepth() const {
+ return mRecursionDepth;
}
};
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index dea625c..27b72eb 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -112,6 +112,27 @@
s3->operator=("abc");
ASSERT_EQ('b', s3->operator[](1)); // s2[1] = 'b';
+ // Check that we can recursively hold lock.
+ android::mediametrics::LockWrap<std::vector<int>> v{std::initializer_list<int>{1, 2}};
+ v->push_back(3);
+ v->push_back(4);
+ ASSERT_EQ(1, v->operator[](0));
+ ASSERT_EQ(2, v->operator[](1));
+ ASSERT_EQ(3, v->operator[](2));
+ ASSERT_EQ(4, v->operator[](3));
+ // The end of the full expression here requires recursive depth of 4.
+ ASSERT_EQ(10, v->operator[](0) + v->operator[](1) + v->operator[](2) + v->operator[](3));
+
+ // Mikhail's note: a non-recursive lock implementation could be used if one obtains
+ // the LockedPointer helper object like this and directly holds the lock through RAII,
+ // though it is trickier to use.
+ //
+ // We include an example here for completeness.
+ {
+ auto l = v.operator->();
+ ASSERT_EQ(10, l->operator[](0) + l->operator[](1) + l->operator[](2) + l->operator[](3));
+ }
+
// Use Thunk to check whether we have the lock when calling a method through LockWrap.
class Thunk {
@@ -123,11 +144,13 @@
};
android::mediametrics::LockWrap<Thunk> s4([&]{
- ASSERT_EQ(true, s4.isLocked()); // we must be locked when thunk() is called.
+ ASSERT_EQ((size_t)1, s4.getRecursionDepth()); // we must be locked when thunk() is called.
});
+ ASSERT_EQ((size_t)0, s4.getRecursionDepth());
// This will fail if we are not locked during method access.
s4->thunk();
+ ASSERT_EQ((size_t)0, s4.getRecursionDepth());
}
TEST(mediametrics_tests, lock_wrap_multithread) {
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 2680cee..0269896 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -25,10 +25,6 @@
namespace android {
-namespace media {
-
-using android::media::MediaTranscodingService;
-
MediaTranscodingService::MediaTranscodingService() {
ALOGV("MediaTranscodingService is created");
}
@@ -56,7 +52,8 @@
Status MediaTranscodingService::registerClient(
const std::shared_ptr<ITranscodingServiceClient>& /*in_client*/,
- int32_t* /*_aidl_return*/) {
+ const std::string& /* in_opPackageName */, int32_t /* in_clientUid */,
+ int32_t /* in_clientPid */, int32_t* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
@@ -67,8 +64,9 @@
}
Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
- const TranscodingRequest& /*request*/,
- TranscodingJob* /*job*/, int32_t* /*_aidl_return*/) {
+ const TranscodingRequestParcel& /*request*/,
+ TranscodingJobParcel* /*job*/,
+ int32_t* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
@@ -79,11 +77,11 @@
return Status::ok();
}
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/, TranscodingJob* /*out_job*/,
+Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
+ TranscodingJobParcel* /*out_job*/,
bool* /*_aidl_return*/) {
// TODO(hkuang): Add implementation.
return Status::ok();
}
-} // namespace media
} // namespace android
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index c2c30b9..d225f9a 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -22,13 +22,11 @@
namespace android {
-namespace media {
-
using Status = ::ndk::ScopedAStatus;
using ::aidl::android::media::BnMediaTranscodingService;
using ::aidl::android::media::ITranscodingServiceClient;
-using ::aidl::android::media::TranscodingJob;
-using ::aidl::android::media::TranscodingRequest;
+using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::TranscodingRequestParcel;
class MediaTranscodingService : public BnMediaTranscodingService {
public:
@@ -40,23 +38,24 @@
static const char* getServiceName() { return "media.transcoding"; }
Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
- int32_t* _aidl_return) override;
+ const std::string& in_opPackageName, int32_t in_clientUid,
+ int32_t in_clientPid, int32_t* _aidl_return) override;
Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
- Status submitRequest(int32_t in_clientId, const TranscodingRequest& in_request,
- TranscodingJob* out_job, int32_t* _aidl_return) override;
+ Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
+ TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
- Status getJobWithId(int32_t in_jobId, TranscodingJob* out_job, bool* _aidl_return) override;
+ Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
+ bool* _aidl_return) override;
virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
private:
};
-} // namespace media
} // namespace android
#endif // ANDROID_MEDIA_TRANSCODING_SERVICE_H
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/services/mediatranscoding/main_mediatranscodingservice.cpp
index 1978eb4..7d862e6 100644
--- a/services/mediatranscoding/main_mediatranscodingservice.cpp
+++ b/services/mediatranscoding/main_mediatranscodingservice.cpp
@@ -29,7 +29,7 @@
strcpy(argv[0], "media.transcoding");
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
- android::media::MediaTranscodingService::instantiate();
+ android::MediaTranscodingService::instantiate();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index ca1354d..64d835b 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -55,4 +55,12 @@
"libutils",
],
+ header_libs: [
+ "libaudiohal_headers",
+ ],
+
+ include_dirs: [
+ "frameworks/av/media/libnbaio/include_mono",
+ "frameworks/av/media/libnbaio/include",
+ ],
}