Merge "media.metrics changes AString -> std::string"
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 61deb46..c0eb5c1 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -251,6 +251,36 @@
* @see ACameraDevice_createCaptureRequest
*/
TEMPLATE_MANUAL = 6,
+
+ /**
+ * A template for selecting camera parameters that match TEMPLATE_PREVIEW as closely as
+ * possible while improving the camera output for motion tracking use cases.
+ *
+ * <p>This template is best used by applications that are frequently switching between motion
+ * tracking use cases and regular still capture use cases, to minimize the image quality
+ * (IQ) changes when swapping use cases.</p>
+ *
+ * <p>This template is guaranteed to be supported on camera devices that support the
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_MOTION_TRACKING_PREVIEW = 7,
+
+ /**
+ * A template for selecting camera parameters that maximize the quality of camera output for
+ * motion tracking use cases.
+ *
+ * <p>This template is best used by applications dedicated to motion tracking, which aren't
+ * concerned about fast switches between motion tracking and other use cases.</p>
+ *
+ * <p>This template is guaranteed to be supported on camera devices that support the
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_MOTION_TRACKING_BEST = 8,
+
} ACameraDevice_request_template;
/**
@@ -760,4 +790,3 @@
#endif /* _NDK_CAMERA_DEVICE_H */
/** @} */
-
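// Illustrative sketch (not part of this change): how an application might request one of
// the new motion-tracking templates through the existing NDK entry point. The opened
// ACameraDevice and the error handling around it are assumed.
#include <camera/NdkCameraDevice.h>

static ACaptureRequest* makeMotionTrackingRequest(ACameraDevice* device) {
    ACaptureRequest* request = NULL;
    // TEMPLATE_MOTION_TRACKING_PREVIEW stays close to TEMPLATE_PREVIEW for fast switching;
    // TEMPLATE_MOTION_TRACKING_BEST trades that for the best tracking quality.
    camera_status_t status = ACameraDevice_createCaptureRequest(
            device, TEMPLATE_MOTION_TRACKING_PREVIEW, &request);
    if (status != ACAMERA_OK) {
        // Expected to fail on devices without the MOTION_TRACKING capability.
        return NULL;
    }
    return request;
}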
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index e5a6f7d..2c144b7 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -837,10 +837,13 @@
*
* <p>This control (except for MANUAL) is only effective if
* <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
- * <p>ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
- * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
- * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR. Other intent values are
- * always supported.</p>
+ * <p>All intents are supported by all devices, except that:
+ * * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * PRIVATE_REPROCESSING or YUV_REPROCESSING.
+ * * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MANUAL_SENSOR.
+ * * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MOTION_TRACKING.</p>
*
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -2235,34 +2238,31 @@
* </ul></p>
*
* <p>The position of the camera device's lens optical center,
- * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
- * optical center of the largest camera device facing in the
- * same direction as this camera, in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
- * axes</a>. Note that only the axis definitions are shared with
- * the sensor coordinate system, but not the origin.</p>
- * <p>If this device is the largest or only camera device with a
- * given facing, then this position will be <code>(0, 0, 0)</code>; a
- * camera device with a lens optical center located 3 cm from
- * the main sensor along the +X axis (to the right from the
- * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
- * <p>To transform a pixel coordinates between two cameras
- * facing the same direction, first the source camera
- * ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then
- * the source camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs
- * to be applied, followed by the ACAMERA_LENS_POSE_ROTATION
- * of the source camera, the translation of the source camera
- * relative to the destination camera, the
- * ACAMERA_LENS_POSE_ROTATION of the destination camera, and
- * finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION
- * of the destination camera. This obtains a
- * radial-distortion-free coordinate in the destination
- * camera pixel coordinates.</p>
- * <p>To compare this against a real image from the destination
- * camera, the destination camera image then needs to be
- * corrected for radial distortion before comparison or
- * sampling.</p>
+ * as a three-dimensional vector <code>(x,y,z)</code>.</p>
+ * <p>Prior to Android P, or when ACAMERA_LENS_POSE_REFERENCE is PRIMARY_CAMERA, this position
+ * is relative to the optical center of the largest camera device facing in the same
+ * direction as this camera, in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor
+ * coordinate axes</a>. Note that only the axis definitions are shared with the sensor
+ * coordinate system, but not the origin.</p>
+ * <p>If this device is the largest or only camera device with a given facing, then this
+ * position will be <code>(0, 0, 0)</code>; a camera device with a lens optical center located 3 cm
+ * from the main sensor along the +X axis (to the right from the user's perspective) will
+ * report <code>(0.03, 0, 0)</code>.</p>
+ * <p>To transform pixel coordinates between two cameras facing the same direction, first
+ * the source camera ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then the source
+ * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
+ * ACAMERA_LENS_POSE_ROTATION of the source camera, the translation of the source camera
+ * relative to the destination camera, the ACAMERA_LENS_POSE_ROTATION of the destination
+ * camera, and finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION of the destination
+ * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+ * coordinates.</p>
+ * <p>To compare this against a real image from the destination camera, the destination camera
+ * image then needs to be corrected for radial distortion before comparison or sampling.</p>
+ * <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
+ * the center of the primary gyroscope on the device.</p>
*
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_RADIAL_DISTORTION
*/
@@ -2433,6 +2433,26 @@
*/
ACAMERA_LENS_RADIAL_DISTORTION = // float[6]
ACAMERA_LENS_START + 11,
+ /**
+ * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Different calibration methods and use cases can produce better or worse results
+ * depending on the selected coordinate origin.</p>
+ * <p>For devices designed to support the MOTION_TRACKING capability, the GYROSCOPE origin
+ * simplifies device calibration and later use by applications that combine camera and
+ * gyroscope information.</p>
+ */
+ ACAMERA_LENS_POSE_REFERENCE = // byte (acamera_metadata_enum_android_lens_pose_reference_t)
+ ACAMERA_LENS_START + 12,
ACAMERA_LENS_END,
/**
@@ -2895,7 +2915,7 @@
* time-consuming hardware re-configuration or internal camera pipeline
* change. For performance reasons we advise clients to pass their initial
* values as part of
- * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.i
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
* Once the camera capture session is enabled it is also recommended to avoid
* changing them from their initial values set in
* {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
@@ -5717,6 +5737,15 @@
*/
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
+ /**
+ * <p>This request is for a motion tracking use case, where
+ * the application will use camera and inertial sensor data to
+ * locate and track objects in the world.</p>
+ * <p>The camera device auto-exposure routine will limit the exposure time
+ * of the camera to no more than 20 milliseconds, to minimize motion blur.</p>
+ */
+ ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
+
} acamera_metadata_enum_android_control_capture_intent_t;
// ACAMERA_CONTROL_EFFECT_MODE
@@ -6428,6 +6457,28 @@
} acamera_metadata_enum_android_lens_state_t;
+// ACAMERA_LENS_POSE_REFERENCE
+typedef enum acamera_metadata_enum_acamera_lens_pose_reference {
+ /**
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the optical center of
+ * the largest camera device facing the same direction as this camera.</p>
+ * <p>This is the default value for API levels before Android P.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_PRIMARY_CAMERA = 0,
+
+ /**
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the position of the
+ * primary gyroscope of this Android device.</p>
+ * <p>This is the value reported by all devices that support the MOTION_TRACKING capability.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE = 1,
+
+} acamera_metadata_enum_android_lens_pose_reference_t;
+
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
@@ -6760,6 +6811,7 @@
* </ul>
* </li>
* <li>The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.</li>
+ * <li>As of Android P, the ACAMERA_LENS_POSE_REFERENCE entry is listed by this device.</li>
* <li>A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
* normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
* format.</li>
@@ -6775,12 +6827,57 @@
* @see ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
* @see ACAMERA_LENS_FACING
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
* @see ACAMERA_LENS_RADIAL_DISTORTION
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8,
+ /**
+ * <p>The device supports controls and metadata required for accurate motion tracking for
+ * use cases such as augmented reality, electronic image stabilization, and so on.</p>
+ * <p>This means this camera device has accurate optical calibration and timestamps relative
+ * to the inertial sensors.</p>
+ * <p>This capability requires the camera device to support the following:</p>
+ * <ul>
+ * <li>Capture request templates <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_PREVIEW">CameraDevice#TEMPLATE_MOTION_TRACKING_PREVIEW</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_BEST">CameraDevice#TEMPLATE_MOTION_TRACKING_BEST</a> are defined.</li>
+ * <li>The stream configurations listed in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for MOTION_TRACKING are
+ * supported, either at 30 or 60fps maximum frame rate.</li>
+ * <li>The following camera characteristics and capture result metadata are provided:<ul>
+ * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+ * <li>ACAMERA_LENS_RADIAL_DISTORTION</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * <li>ACAMERA_LENS_POSE_REFERENCE with value GYROSCOPE</li>
+ * </ul>
+ * </li>
+ * <li>The ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE field has value <code>REALTIME</code>. When compared to
+ * timestamps from the device's gyroscopes, the clock difference for events occurring at
+ * the same actual time instant will be less than 1 ms.</li>
+ * <li>The value of the ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW field is accurate to within 1 ms.</li>
+ * <li>The value of ACAMERA_SENSOR_EXPOSURE_TIME is guaranteed to be available in the
+ * capture result.</li>
+ * <li>The ACAMERA_CONTROL_CAPTURE_INTENT control supports MOTION_TRACKING to limit maximum
+ * exposure to 20 milliseconds.</li>
+ * <li>The stream configurations required for MOTION_TRACKING (listed at <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) can operate at least at
+ * 30fps; optionally, they can operate at 60fps, and '[60, 60]' is listed in
+ * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.</li>
+ * </ul>
+ *
+ * @see ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
+ * @see ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING = 10,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
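// Illustrative sketch (not part of this change): checking the new ACAMERA_LENS_POSE_REFERENCE
// tag from the camera characteristics. The ACameraManager and camera id come from the caller.
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

static bool usesGyroscopeOrigin(ACameraManager* manager, const char* cameraId) {
    ACameraMetadata* chars = NULL;
    if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
        return false;
    }
    ACameraMetadata_const_entry entry;
    bool gyro = false;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_POSE_REFERENCE, &entry) == ACAMERA_OK) {
        // MOTION_TRACKING devices are expected to report the GYROSCOPE origin.
        gyro = (entry.data.u8[0] == ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE);
    }
    ACameraMetadata_free(chars);
    return gyro;
}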
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 44ed034..bb517aa 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -622,7 +622,7 @@
fprintf(stderr, " -o playback audio\n");
fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n");
fprintf(stderr, " -k seek test\n");
- fprintf(stderr, " -O(verride) name of the component\n");
+ fprintf(stderr, " -N(ame) of the component\n");
fprintf(stderr, " -x display a histogram of decoding times/fps "
"(video only)\n");
fprintf(stderr, " -q don't show progress indicator\n");
@@ -708,7 +708,7 @@
sp<ALooper> looper;
int res;
- while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kO:xSTd:D:")) >= 0) {
+ while ((res = getopt(argc, argv, "haqn:lm:b:ptsrow:kN:xSTd:D:")) >= 0) {
switch (res) {
case 'a':
{
@@ -737,7 +737,7 @@
break;
}
- case 'O':
+ case 'N':
{
gComponentNameOverride.setTo(optarg);
break;
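// Background for the optstring change above, as a self-contained POSIX getopt sketch:
// the trailing ':' in "N:" is what makes -N consume an argument (the component name),
// which then shows up in optarg exactly as stagefright reads it.
#include <stdio.h>
#include <unistd.h>

int main(int argc, char** argv) {
    int res;
    while ((res = getopt(argc, argv, "N:")) >= 0) {
        if (res == 'N') {
            printf("component name override: %s\n", optarg);
        }
    }
    return 0;
}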
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index b9b3685..2114d40 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -332,10 +332,13 @@
return status;
}
secure = false;
- } else {
+ } else if (destination.mType == kDestinationTypeNativeHandle) {
hDestination.type = BufferType::NATIVE_HANDLE;
hDestination.secureMemory = hidl_handle(destination.mHandle);
secure = true;
+ } else {
+ android_errorWriteLog(0x534e4554, "70526702");
+ return UNKNOWN_ERROR;
}
::SharedBuffer hSource;
diff --git a/drm/libmediadrm/ICrypto.cpp b/drm/libmediadrm/ICrypto.cpp
index 8506d95..1d70a4e 100644
--- a/drm/libmediadrm/ICrypto.cpp
+++ b/drm/libmediadrm/ICrypto.cpp
@@ -16,14 +16,14 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ICrypto"
-#include <utils/Log.h>
-
#include <binder/Parcel.h>
#include <binder/IMemory.h>
+#include <cutils/log.h>
#include <media/ICrypto.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
+#include <utils/Log.h>
namespace android {
@@ -362,6 +362,17 @@
reply->writeInt32(BAD_VALUE);
return OK;
}
+ sp<IMemory> dest = destination.mSharedMemory;
+ if (totalSize > dest->size() ||
+ (size_t)dest->offset() > dest->size() - totalSize) {
+ reply->writeInt32(BAD_VALUE);
+ android_errorWriteLog(0x534e4554, "71389378");
+ return OK;
+ }
+ } else {
+ reply->writeInt32(BAD_VALUE);
+ android_errorWriteLog(0x534e4554, "70526702");
+ return OK;
}
AString errorDetailMsg;
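// The bounds check added above is ordered to avoid integer overflow: "offset + totalSize"
// could wrap, so the code first rejects totalSize > size and only then compares offset
// against size - totalSize. A standalone sketch of the same pattern (names are illustrative,
// not taken from ICrypto):
#include <cstddef>

static bool rangeFits(size_t offset, size_t totalSize, size_t size) {
    if (totalSize > size) {
        return false;                 // request larger than the whole buffer
    }
    if (offset > size - totalSize) {
        return false;                 // would run past the end; subtraction cannot underflow here
    }
    return true;
}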
diff --git a/drm/libmediadrm/PluginMetricsReporting.cpp b/drm/libmediadrm/PluginMetricsReporting.cpp
index fcff5ba..cc7fb72 100644
--- a/drm/libmediadrm/PluginMetricsReporting.cpp
+++ b/drm/libmediadrm/PluginMetricsReporting.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "PluginMetricsReporting"
#include <utils/Log.h>
+#include <inttypes.h>
#include <media/PluginMetricsReporting.h>
@@ -81,10 +82,7 @@
analyticsItem.setFinalized(true);
if (!analyticsItem.selfrecord()) {
- // Note the cast to int is because we build on 32 and 64 bit.
- // The cast prevents a peculiar printf problem where one format cannot
- // satisfy both.
- ALOGE("selfrecord() returned false. sessioId %d", (int) sessionId);
+ ALOGE("selfrecord() returned false. sessioId %" PRId64, sessionId);
}
for (int i = 0; i < metricsGroup.metric_sub_group_size(); ++i) {
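// Context for the PRId64 change above: sessionId is an int64_t, and its printf length
// modifier differs between 32- and 64-bit builds, so <inttypes.h> supplies the portable
// format macro and the old truncating cast to int is no longer needed. Minimal illustration:
#include <inttypes.h>
#include <stdio.h>

int main(void) {
    int64_t sessionId = 1234567890123LL;
    printf("sessionId %" PRId64 "\n", sessionId);  // correct on both 32- and 64-bit builds
    return 0;
}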
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index 7c43994..944002d 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -142,25 +142,24 @@
ssize_t index = mByteArrayProperties.indexOfKey(name);
if (index < 0) {
ALOGE("App requested unknown property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
value = mByteArrayProperties.valueAt(index);
return android::OK;
}
status_t DrmPlugin::setPropertyByteArray(
- const String8& name, const Vector<uint8_t>& value) {
+ const String8& name, const Vector<uint8_t>& value)
+{
+ UNUSED(value);
if (0 == name.compare(kDeviceIdKey)) {
ALOGD("Cannot set immutable property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
- ssize_t status = mByteArrayProperties.replaceValueFor(name, value);
- if (status >= 0) {
- return android::OK;
- }
+ // Setting of undefined properties is not supported
ALOGE("Failed to set property byte array, key=%s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
status_t DrmPlugin::getPropertyString(
@@ -168,7 +167,7 @@
ssize_t index = mStringProperties.indexOfKey(name);
if (index < 0) {
ALOGE("App requested unknown property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
value = mStringProperties.valueAt(index);
return android::OK;
@@ -182,12 +181,18 @@
kVendorKey.string(), kVersionKey.string());
if (immutableKeys.contains(name.string())) {
ALOGD("Cannot set immutable property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
+ }
+
+ ssize_t index = mStringProperties.indexOfKey(name);
+ if (index < 0) {
+ ALOGE("Cannot set undefined property string, key=%s", name.string());
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
if (mStringProperties.add(name, value) < 0) {
ALOGE("Failed to set property string, key=%s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_UNKNOWN;
}
return android::OK;
}
diff --git a/include/media/IMediaCodecService.h b/include/media/IMediaCodecService.h
deleted file mode 120000
index 37f6822..0000000
--- a/include/media/IMediaCodecService.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/IMediaCodecService.h
\ No newline at end of file
diff --git a/include/media/MediaDefs.h b/include/media/MediaDefs.h
deleted file mode 120000
index 9850603..0000000
--- a/include/media/MediaDefs.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/MediaDefs.h
\ No newline at end of file
diff --git a/media/OWNERS b/media/OWNERS
index 1605efd..d49eb8d 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -2,6 +2,7 @@
dwkang@google.com
elaurent@google.com
essick@google.com
+hkuang@google.com
hunga@google.com
jmtrivi@google.com
krocard@google.com
diff --git a/media/extractors/aac/AACExtractor.cpp b/media/extractors/aac/AACExtractor.cpp
index 716fe31..dfb54e2 100644
--- a/media/extractors/aac/AACExtractor.cpp
+++ b/media/extractors/aac/AACExtractor.cpp
@@ -288,6 +288,10 @@
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
if (mFrameDurationUs > 0) {
int64_t seekFrame = seekTimeUs / mFrameDurationUs;
+ if (seekFrame < 0 || seekFrame >= (int64_t)mOffsetVector.size()) {
+ android_errorWriteLog(0x534e4554, "70239507");
+ return ERROR_MALFORMED;
+ }
mCurrentTimeUs = seekFrame * mFrameDurationUs;
mOffset = mOffsetVector.itemAt(seekFrame);
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 8823c79..938bd5d 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -5375,11 +5375,6 @@
return NULL;
}
-void MPEG4Extractor::populateMetrics() {
- ALOGV("MPEG4Extractor::populateMetrics");
- // write into mAnalyticsItem
-}
-
static bool LegacySniffMPEG4(
const sp<DataSource> &source, String8 *mimeType, float *confidence) {
uint8_t header[8];
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 76b549d..8bfecaa 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -69,8 +69,6 @@
protected:
virtual ~MPEG4Extractor();
- virtual void populateMetrics();
-
private:
struct PsshInfo {
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 3c23736..00c43dc 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -137,6 +137,149 @@
};
typedef int32_t aaudio_performance_mode_t;
+/**
+ * The USAGE attribute expresses "why" you are playing a sound, i.e. what the sound is used for.
+ * This information is used by certain platforms or routing policies
+ * to make more refined volume or routing decisions.
+ *
+ * Note that these match the equivalent values in AudioAttributes in the Android Java API.
+ */
+enum {
+ /**
+ * Use this for streaming media, music performance, video, podcasts, etcetera.
+ */
+ AAUDIO_USAGE_MEDIA = 1,
+
+ /**
+ * Use this for voice over IP, telephony, etcetera.
+ */
+ AAUDIO_USAGE_VOICE_COMMUNICATION = 2,
+
+ /**
+ * Use this for sounds associated with telephony such as busy tones, DTMF, etcetera.
+ */
+ AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING = 3,
+
+ /**
+ * Use this to demand the user's attention.
+ */
+ AAUDIO_USAGE_ALARM = 4,
+
+ /**
+ * Use this for notifying the user when a message has arrived or some
+ * other background event has occurred.
+ */
+ AAUDIO_USAGE_NOTIFICATION = 5,
+
+ /**
+ * Use this when the phone rings.
+ */
+ AAUDIO_USAGE_NOTIFICATION_RINGTONE = 6,
+
+ /**
+ * Use this to attract the user's attention when, for example, the battery is low.
+ */
+ AAUDIO_USAGE_NOTIFICATION_EVENT = 10,
+
+ /**
+ * Use this for screen readers, etcetera.
+ */
+ AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY = 11,
+
+ /**
+ * Use this for driving or navigation directions.
+ */
+ AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+
+ /**
+ * Use this for user interface sounds, beeps, etcetera.
+ */
+ AAUDIO_USAGE_ASSISTANCE_SONIFICATION = 13,
+
+ /**
+ * Use this for game audio and sound effects.
+ */
+ AAUDIO_USAGE_GAME = 14,
+
+ /**
+ * Use this for audio responses to user queries, audio instructions or help utterances.
+ */
+ AAUDIO_USAGE_ASSISTANT = 16
+};
+typedef int32_t aaudio_usage_t;
+
+/**
+ * The CONTENT_TYPE attribute describes "what" you are playing.
+ * It expresses the general category of the content. This information is optional, but if
+ * it is known (for instance {@link #AAUDIO_CONTENT_TYPE_MOVIE} for a
+ * movie streaming service or {@link #AAUDIO_CONTENT_TYPE_SPEECH} for
+ * an audio book application) it might be used by the audio framework to
+ * enforce audio focus.
+ *
+ * Note that these match the equivalent values in AudioAttributes in the Android Java API.
+ */
+enum {
+
+ /**
+ * Use this for spoken voice, audio books, etcetera.
+ */
+ AAUDIO_CONTENT_TYPE_SPEECH = 1,
+
+ /**
+ * Use this for pre-recorded or live music.
+ */
+ AAUDIO_CONTENT_TYPE_MUSIC = 2,
+
+ /**
+ * Use this for a movie or video soundtrack.
+ */
+ AAUDIO_CONTENT_TYPE_MOVIE = 3,
+
+ /**
+ * Use this for a sound designed to accompany a user action,
+ * such as a click or beep sound made when the user presses a button.
+ */
+ AAUDIO_CONTENT_TYPE_SONIFICATION = 4
+};
+typedef int32_t aaudio_content_type_t;
+
+/**
+ * Defines the audio source.
+ * An audio source defines both a default physical source of audio signal, and a recording
+ * configuration.
+ *
+ * Note that these match the equivalent values in MediaRecorder.AudioSource in the Android Java API.
+ */
+enum {
+ /**
+ * Use this preset when other presets do not apply.
+ */
+ AAUDIO_INPUT_PRESET_GENERIC = 1,
+
+ /**
+ * Use this preset when recording video.
+ */
+ AAUDIO_INPUT_PRESET_CAMCORDER = 5,
+
+ /**
+ * Use this preset when doing speech recognition.
+ */
+ AAUDIO_INPUT_PRESET_VOICE_RECOGNITION = 6,
+
+ /**
+ * Use this preset when doing telephony or voice messaging.
+ */
+ AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION = 7,
+
+ /**
+ * Use this preset to obtain an input with no effects.
+ * Note that this input will not have automatic gain control
+ * so the recorded volume may be very low.
+ */
+ AAUDIO_INPUT_PRESET_UNPROCESSED = 9,
+};
+typedef int32_t aaudio_input_preset_t;
+
typedef struct AAudioStreamStruct AAudioStream;
typedef struct AAudioStreamBuilderStruct AAudioStreamBuilder;
@@ -308,6 +451,52 @@
aaudio_performance_mode_t mode);
/**
+ * Set the intended use case for the stream.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect how volume and focus are handled for the stream.
+ *
+ * The default, if you do not call this function, is AAUDIO_USAGE_MEDIA.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param usage the desired usage, e.g. AAUDIO_USAGE_GAME
+ */
+AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
+ aaudio_usage_t usage);
+
+/**
+ * Set the type of audio data that the stream will carry.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect whether a stream is paused when a notification occurs.
+ *
+ * The default, if you do not call this function, is AAUDIO_CONTENT_TYPE_MUSIC.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param contentType the type of audio data, e.g. AAUDIO_CONTENT_TYPE_SPEECH
+ */
+AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
+ aaudio_content_type_t contentType);
+
+/**
+ * Set the input (capture) preset for the stream.
+ *
+ * The AAudio system will use this information to optimize the
+ * behavior of the stream.
+ * This could, for example, affect which microphones are used and how the
+ * recorded data is processed.
+ *
+ * The default, if you do not call this function, is AAUDIO_INPUT_PRESET_GENERIC.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param inputPreset the desired configuration for recording
+ */
+AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
+ aaudio_input_preset_t inputPreset);
+
+/**
* Return one of these values from the data callback function.
*/
enum {
@@ -820,6 +1009,30 @@
int64_t *framePosition,
int64_t *timeNanoseconds);
+/**
+ * Return the use case for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return frames read
+ */
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream);
+
+/**
+ * Return the content type for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return content type, for example AAUDIO_CONTENT_TYPE_MUSIC
+ */
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream);
+
+/**
+ * Return the input preset for the stream.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return input preset, for example AAUDIO_INPUT_PRESET_CAMCORDER
+ */
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream);
+
#ifdef __cplusplus
}
#endif
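// Illustrative sketch (not part of this change): opening an output stream with the new
// attribute setters. Other stream parameters are left at their defaults and error handling
// is reduced to early returns.
#include <aaudio/AAudio.h>

static aaudio_result_t openGameStream(AAudioStream** streamOut) {
    AAudioStreamBuilder* builder = NULL;
    aaudio_result_t result = AAudio_createStreamBuilder(&builder);
    if (result != AAUDIO_OK) return result;

    AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_GAME);
    AAudioStreamBuilder_setContentType(builder, AAUDIO_CONTENT_TYPE_SONIFICATION);

    result = AAudioStreamBuilder_openStream(builder, streamOut);
    AAudioStreamBuilder_delete(builder);
    if (result != AAUDIO_OK) return result;

    // Attributes that were never set come back as their documented defaults
    // (AAUDIO_USAGE_MEDIA, AAUDIO_CONTENT_TYPE_MUSIC, AAUDIO_INPUT_PRESET_GENERIC).
    (void) AAudioStream_getUsage(*streamOut);
    return AAUDIO_OK;
}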
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index 2ba5250..fca7b50 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -17,6 +17,9 @@
AAudioStreamBuilder_setSharingMode;
AAudioStreamBuilder_setDirection;
AAudioStreamBuilder_setBufferCapacityInFrames;
+ AAudioStreamBuilder_setUsage;
+ AAudioStreamBuilder_setContentType;
+ AAudioStreamBuilder_setInputPreset;
AAudioStreamBuilder_openStream;
AAudioStreamBuilder_delete;
AAudioStream_close;
@@ -42,6 +45,9 @@
AAudioStream_getFormat;
AAudioStream_getSharingMode;
AAudioStream_getDirection;
+ AAudioStream_getUsage;
+ AAudioStream_getContentType;
+ AAudioStream_getInputPreset;
AAudioStream_getFramesWritten;
AAudioStream_getFramesRead;
AAudioStream_getTimestamp;
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index 153fce3..97672a0 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -50,6 +50,12 @@
if (status != NO_ERROR) goto error;
status = parcel->writeInt32(getBufferCapacity());
if (status != NO_ERROR) goto error;
+ status = parcel->writeInt32((int32_t) getUsage());
+ if (status != NO_ERROR) goto error;
+ status = parcel->writeInt32((int32_t) getContentType());
+ if (status != NO_ERROR) goto error;
+ status = parcel->writeInt32((int32_t) getInputPreset());
+ if (status != NO_ERROR) goto error;
return NO_ERROR;
error:
ALOGE("AAudioStreamConfiguration.writeToParcel(): write failed = %d", status);
@@ -69,16 +75,25 @@
setSamplesPerFrame(value);
status = parcel->readInt32(&value);
if (status != NO_ERROR) goto error;
- setSharingMode(value);
+ setSharingMode((aaudio_sharing_mode_t) value);
status = parcel->readInt32(&value);
if (status != NO_ERROR) goto error;
- setFormat(value);
+ setFormat((aaudio_format_t) value);
status = parcel->readInt32(&value);
if (status != NO_ERROR) goto error;
setDirection((aaudio_direction_t) value);
status = parcel->readInt32(&value);
if (status != NO_ERROR) goto error;
setBufferCapacity(value);
+ status = parcel->readInt32(&value);
+ if (status != NO_ERROR) goto error;
+ setUsage((aaudio_usage_t) value);
+ status = parcel->readInt32(&value);
+ if (status != NO_ERROR) goto error;
+ setContentType((aaudio_content_type_t) value);
+ status = parcel->readInt32(&value);
+ if (status != NO_ERROR) goto error;
+ setInputPreset((aaudio_input_preset_t) value);
return NO_ERROR;
error:
ALOGE("AAudioStreamConfiguration.readFromParcel(): read failed = %d", status);
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index bb007ac..9e5ca8e 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -177,6 +177,24 @@
streamBuilder->setSharingMode(sharingMode);
}
+AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
+ aaudio_usage_t usage) {
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ streamBuilder->setUsage(usage);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
+ aaudio_content_type_t contentType) {
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ streamBuilder->setContentType(contentType);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
+ aaudio_input_preset_t inputPreset) {
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ streamBuilder->setInputPreset(inputPreset);
+}
+
AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(AAudioStreamBuilder* builder,
int32_t frames)
{
@@ -447,6 +465,24 @@
return audioStream->getSharingMode();
}
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream)
+{
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getUsage();
+}
+
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream)
+{
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getContentType();
+}
+
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream)
+{
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getInputPreset();
+}
+
AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 6400eb4..23c4eb8 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -42,6 +42,9 @@
mAudioFormat = other.mAudioFormat;
mDirection = other.mDirection;
mBufferCapacity = other.mBufferCapacity;
+ mUsage = other.mUsage;
+ mContentType = other.mContentType;
+ mInputPreset = other.mInputPreset;
}
aaudio_result_t AAudioStreamParameters::validate() const {
@@ -98,6 +101,54 @@
// break;
}
+ switch (mUsage) {
+ case AAUDIO_UNSPECIFIED:
+ case AAUDIO_USAGE_MEDIA:
+ case AAUDIO_USAGE_VOICE_COMMUNICATION:
+ case AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+ case AAUDIO_USAGE_ALARM:
+ case AAUDIO_USAGE_NOTIFICATION:
+ case AAUDIO_USAGE_NOTIFICATION_RINGTONE:
+ case AAUDIO_USAGE_NOTIFICATION_EVENT:
+ case AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+ case AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+ case AAUDIO_USAGE_ASSISTANCE_SONIFICATION:
+ case AAUDIO_USAGE_GAME:
+ case AAUDIO_USAGE_ASSISTANT:
+ break; // valid
+ default:
+ ALOGE("usage not valid = %d", mUsage);
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ // break;
+ }
+
+ switch (mContentType) {
+ case AAUDIO_UNSPECIFIED:
+ case AAUDIO_CONTENT_TYPE_MUSIC:
+ case AAUDIO_CONTENT_TYPE_MOVIE:
+ case AAUDIO_CONTENT_TYPE_SONIFICATION:
+ case AAUDIO_CONTENT_TYPE_SPEECH:
+ break; // valid
+ default:
+ ALOGE("content type not valid = %d", mContentType);
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ // break;
+ }
+
+ switch (mInputPreset) {
+ case AAUDIO_UNSPECIFIED:
+ case AAUDIO_INPUT_PRESET_GENERIC:
+ case AAUDIO_INPUT_PRESET_CAMCORDER:
+ case AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION:
+ case AAUDIO_INPUT_PRESET_VOICE_RECOGNITION:
+ case AAUDIO_INPUT_PRESET_UNPROCESSED:
+ break; // valid
+ default:
+ ALOGE("input preset not valid = %d", mInputPreset);
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ // break;
+ }
+
return AAUDIO_OK;
}
@@ -109,5 +160,8 @@
ALOGD("mAudioFormat = %6d", (int)mAudioFormat);
ALOGD("mDirection = %6d", mDirection);
ALOGD("mBufferCapacity = %6d", mBufferCapacity);
+ ALOGD("mUsage = %6d", mUsage);
+ ALOGD("mContentType = %6d", mContentType);
+ ALOGD("mInputPreset = %6d", mInputPreset);
}
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 5e67c93..0c173f5 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -88,6 +88,30 @@
mDirection = direction;
}
+ aaudio_usage_t getUsage() const {
+ return mUsage;
+ }
+
+ void setUsage(aaudio_usage_t usage) {
+ mUsage = usage;
+ }
+
+ aaudio_content_type_t getContentType() const {
+ return mContentType;
+ }
+
+ void setContentType(aaudio_content_type_t contentType) {
+ mContentType = contentType;
+ }
+
+ aaudio_input_preset_t getInputPreset() const {
+ return mInputPreset;
+ }
+
+ void setInputPreset(aaudio_input_preset_t inputPreset) {
+ mInputPreset = inputPreset;
+ }
+
int32_t calculateBytesPerFrame() const {
return getSamplesPerFrame() * AAudioConvert_formatToSizeInBytes(getFormat());
}
@@ -109,6 +133,9 @@
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
aaudio_format_t mAudioFormat = AAUDIO_FORMAT_UNSPECIFIED;
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
+ aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
+ aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
int32_t mBufferCapacity = AAUDIO_UNSPECIFIED;
};
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 8f5f5d3..289e0db 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -74,15 +74,28 @@
}
// Copy parameters from the Builder because the Builder may be deleted after this call.
+ // TODO AudioStream should be a subclass of AudioStreamParameters
mSamplesPerFrame = builder.getSamplesPerFrame();
mSampleRate = builder.getSampleRate();
mDeviceId = builder.getDeviceId();
mFormat = builder.getFormat();
mSharingMode = builder.getSharingMode();
mSharingModeMatchRequired = builder.isSharingModeMatchRequired();
-
mPerformanceMode = builder.getPerformanceMode();
+ mUsage = builder.getUsage();
+ if (mUsage == AAUDIO_UNSPECIFIED) {
+ mUsage = AAUDIO_USAGE_MEDIA;
+ }
+ mContentType = builder.getContentType();
+ if (mContentType == AAUDIO_UNSPECIFIED) {
+ mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
+ }
+ mInputPreset = builder.getInputPreset();
+ if (mInputPreset == AAUDIO_UNSPECIFIED) {
+ mInputPreset = AAUDIO_INPUT_PRESET_GENERIC;
+ }
+
// callbacks
mFramesPerDataCallback = builder.getFramesPerDataCallback();
mDataCallbackProc = builder.getDataCallbackProc();
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index b5d7fd5..82e7189 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -204,6 +204,18 @@
virtual aaudio_direction_t getDirection() const = 0;
+ aaudio_usage_t getUsage() const {
+ return mUsage;
+ }
+
+ aaudio_content_type_t getContentType() const {
+ return mContentType;
+ }
+
+ aaudio_input_preset_t getInputPreset() const {
+ return mInputPreset;
+ }
+
/**
* This is only valid after setSamplesPerFrame() and setFormat() have been called.
*/
@@ -471,6 +483,9 @@
aaudio_format_t mFormat = AAUDIO_FORMAT_UNSPECIFIED;
aaudio_stream_state_t mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+ aaudio_usage_t mUsage = AAUDIO_USAGE_MEDIA;
+ aaudio_content_type_t mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
+ aaudio_input_preset_t mInputPreset = AAUDIO_INPUT_PRESET_GENERIC;
// callback ----------------------------------
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 55eed92..5f4ab9b 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -104,11 +104,24 @@
? AUDIO_PORT_HANDLE_NONE
: getDeviceId();
+ const audio_content_type_t contentType =
+ AAudioConvert_contentTypeToInternal(builder.getContentType());
+ const audio_source_t source =
+ AAudioConvert_inputPresetToAudioSource(builder.getInputPreset());
+
+ const audio_attributes_t attributes = {
+ .content_type = contentType,
+ .usage = AUDIO_USAGE_UNKNOWN, // only used for output
+ .source = source,
+ .flags = flags, // If attributes are set then the other flags parameter is ignored.
+ .tags = ""
+ };
+
mAudioRecord = new AudioRecord(
mOpPackageName // const String16& opPackageName TODO does not compile
);
mAudioRecord->set(
- AUDIO_SOURCE_VOICE_RECOGNITION,
+ AUDIO_SOURCE_DEFAULT, // ignored because we pass attributes below
getSampleRate(),
format,
channelMask,
@@ -122,7 +135,7 @@
flags,
AUDIO_UID_INVALID, // DEFAULT uid
-1, // DEFAULT pid
- NULL, // DEFAULT audio_attributes_t
+ &attributes,
selectedDeviceId
);
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 5113278..17a8d52 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -121,9 +121,22 @@
? AUDIO_PORT_HANDLE_NONE
: getDeviceId();
+ const audio_content_type_t contentType =
+ AAudioConvert_contentTypeToInternal(builder.getContentType());
+ const audio_usage_t usage =
+ AAudioConvert_usageToInternal(builder.getUsage());
+
+ const audio_attributes_t attributes = {
+ .content_type = contentType,
+ .usage = usage,
+ .source = AUDIO_SOURCE_DEFAULT, // only used for recording
+ .flags = flags, // If attributes are set then the other flags parameter is ignored.
+ .tags = ""
+ };
+
mAudioTrack = new AudioTrack();
mAudioTrack->set(
- (audio_stream_type_t) AUDIO_STREAM_MUSIC,
+ AUDIO_STREAM_DEFAULT, // ignored because we pass attributes below
getSampleRate(),
format,
channelMask,
@@ -139,7 +152,7 @@
NULL, // DEFAULT audio_offload_info_t
AUDIO_UID_INVALID, // DEFAULT uid
-1, // DEFAULT pid
- NULL, // DEFAULT audio_attributes_t
+ &attributes,
// WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
// headphones a few times.
false, // DEFAULT doNotReconnect,
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index f709f41..c6adf33 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -26,6 +26,7 @@
#include "aaudio/AAudio.h"
#include <aaudio/AAudioTesting.h>
#include <math.h>
+#include <system/audio-base.h>
#include "utility/AAudioUtilities.h"
@@ -283,6 +284,61 @@
return aaudioFormat;
}
+// Make a message string from the condition.
+#define STATIC_ASSERT(condition) static_assert(condition, #condition)
+
+audio_usage_t AAudioConvert_usageToInternal(aaudio_usage_t usage) {
+ // The public aaudio_usage_t constants are supposed to have the same
+ // values as the internal audio_usage_t values.
+ STATIC_ASSERT(AAUDIO_USAGE_MEDIA == AUDIO_USAGE_MEDIA);
+ STATIC_ASSERT(AAUDIO_USAGE_VOICE_COMMUNICATION == AUDIO_USAGE_VOICE_COMMUNICATION);
+ STATIC_ASSERT(AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING
+ == AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING);
+ STATIC_ASSERT(AAUDIO_USAGE_ALARM == AUDIO_USAGE_ALARM);
+ STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION == AUDIO_USAGE_NOTIFICATION);
+ STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION_RINGTONE
+ == AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE);
+ STATIC_ASSERT(AAUDIO_USAGE_NOTIFICATION_EVENT == AUDIO_USAGE_NOTIFICATION_EVENT);
+ STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY == AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY);
+ STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE
+ == AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE);
+ STATIC_ASSERT(AAUDIO_USAGE_ASSISTANCE_SONIFICATION == AUDIO_USAGE_ASSISTANCE_SONIFICATION);
+ STATIC_ASSERT(AAUDIO_USAGE_GAME == AUDIO_USAGE_GAME);
+ STATIC_ASSERT(AAUDIO_USAGE_ASSISTANT == AUDIO_USAGE_ASSISTANT);
+ if (usage == AAUDIO_UNSPECIFIED) {
+ usage = AAUDIO_USAGE_MEDIA;
+ }
+ return (audio_usage_t) usage; // same value
+}
+
+audio_content_type_t AAudioConvert_contentTypeToInternal(aaudio_content_type_t contentType) {
+ // The public aaudio_content_type_t constants are supposed to have the same
+ // values as the internal audio_content_type_t values.
+ STATIC_ASSERT(AAUDIO_CONTENT_TYPE_MUSIC == AUDIO_CONTENT_TYPE_MUSIC);
+ STATIC_ASSERT(AAUDIO_CONTENT_TYPE_SPEECH == AUDIO_CONTENT_TYPE_SPEECH);
+ STATIC_ASSERT(AAUDIO_CONTENT_TYPE_SONIFICATION == AUDIO_CONTENT_TYPE_SONIFICATION);
+ STATIC_ASSERT(AAUDIO_CONTENT_TYPE_MOVIE == AUDIO_CONTENT_TYPE_MOVIE);
+ if (contentType == AAUDIO_UNSPECIFIED) {
+ contentType = AAUDIO_CONTENT_TYPE_MUSIC;
+ }
+ return (audio_content_type_t) contentType; // same value
+}
+
+audio_source_t AAudioConvert_inputPresetToAudioSource(aaudio_input_preset_t preset) {
+ // The public aaudio_input_preset_t constants are supposed to have the same
+ // values as the internal audio_source_t values.
+ STATIC_ASSERT(AAUDIO_UNSPECIFIED == AUDIO_SOURCE_DEFAULT);
+ STATIC_ASSERT(AAUDIO_INPUT_PRESET_GENERIC == AUDIO_SOURCE_MIC);
+ STATIC_ASSERT(AAUDIO_INPUT_PRESET_CAMCORDER == AUDIO_SOURCE_CAMCORDER);
+ STATIC_ASSERT(AAUDIO_INPUT_PRESET_VOICE_RECOGNITION == AUDIO_SOURCE_VOICE_RECOGNITION);
+ STATIC_ASSERT(AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION == AUDIO_SOURCE_VOICE_COMMUNICATION);
+ STATIC_ASSERT(AAUDIO_INPUT_PRESET_UNPROCESSED == AUDIO_SOURCE_UNPROCESSED);
+ if (preset == AAUDIO_UNSPECIFIED) {
+ preset = AAUDIO_INPUT_PRESET_GENERIC;
+ }
+ return (audio_source_t) preset; // same value
+}
+
int32_t AAudioConvert_framesToBytes(int32_t numFrames,
int32_t bytesPerFrame,
int32_t *sizeInBytes) {
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 3afa976..f2347f5 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -167,6 +167,29 @@
aaudio_format_t AAudioConvert_androidToAAudioDataFormat(audio_format_t format);
+
+/**
+ * Note that this function does not validate the passed-in value;
+ * that is done in AAudioStreamParameters::validate().
+ * @return internal value
+ */
+
+audio_usage_t AAudioConvert_usageToInternal(aaudio_usage_t usage);
+
+/**
+ * Note that this function does not validate the passed-in value;
+ * that is done in AAudioStreamParameters::validate().
+ * @return internal value
+ */
+audio_content_type_t AAudioConvert_contentTypeToInternal(aaudio_content_type_t contentType);
+
+/**
+ * Note that this function does not validate the passed-in value;
+ * that is done in AAudioStreamParameters::validate().
+ * @return internal audio source
+ */
+audio_source_t AAudioConvert_inputPresetToAudioSource(aaudio_input_preset_t preset);
+
/**
* @return the size of a sample of the given format in bytes or AAUDIO_ERROR_ILLEGAL_ARGUMENT
*/
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 9f80695..33718fc 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -124,3 +124,15 @@
"libutils",
],
}
+
+cc_test {
+ name: "test_attributes",
+ defaults: ["libaaudio_tests_defaults"],
+ srcs: ["test_attributes.cpp"],
+ shared_libs: [
+ "libaaudio",
+ "libbinder",
+ "libcutils",
+ "libutils",
+ ],
+}
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
new file mode 100644
index 0000000..9cbf113
--- /dev/null
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test AAudio attributes such as Usage, ContentType and InputPreset.
+
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+#include <gtest/gtest.h>
+
+constexpr int64_t kNanosPerSecond = 1000000000;
+constexpr int kNumFrames = 256;
+constexpr int kChannelCount = 2;
+
+constexpr int32_t DONT_SET = -1000;
+
+static void checkAttributes(aaudio_performance_mode_t perfMode,
+ aaudio_usage_t usage,
+ aaudio_content_type_t contentType,
+ aaudio_input_preset_t preset = DONT_SET,
+ aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
+
+ float *buffer = new float[kNumFrames * kChannelCount];
+
+ AAudioStreamBuilder *aaudioBuilder = nullptr;
+ AAudioStream *aaudioStream = nullptr;
+
+ // Use an AAudioStreamBuilder to contain requested parameters.
+ ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+ // Request stream properties.
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+ AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
+
+ // Set the attribute in the builder.
+ if (usage != DONT_SET) {
+ AAudioStreamBuilder_setUsage(aaudioBuilder, usage);
+ }
+ if (contentType != DONT_SET) {
+ AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
+ }
+ if (preset != DONT_SET) {
+ AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
+ }
+
+ // Create an AAudioStream using the Builder.
+ ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+ AAudioStreamBuilder_delete(aaudioBuilder);
+
+ // Make sure we get the same attributes back from the stream.
+ aaudio_usage_t expectedUsage =
+ (usage == DONT_SET || usage == AAUDIO_UNSPECIFIED)
+ ? AAUDIO_USAGE_MEDIA // default
+ : usage;
+ EXPECT_EQ(expectedUsage, AAudioStream_getUsage(aaudioStream));
+
+ aaudio_content_type_t expectedContentType =
+ (contentType == DONT_SET || contentType == AAUDIO_UNSPECIFIED)
+ ? AAUDIO_CONTENT_TYPE_MUSIC // default
+ : contentType;
+ EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+
+ aaudio_input_preset_t expectedPreset =
+ (preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
+ ? AAUDIO_INPUT_PRESET_GENERIC // default
+ : preset;
+ EXPECT_EQ(expectedPreset, AAudioStream_getInputPreset(aaudioStream));
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream));
+
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ EXPECT_EQ(kNumFrames,
+ AAudioStream_read(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+ } else {
+ EXPECT_EQ(kNumFrames,
+ AAudioStream_write(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+ }
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
+
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+ delete[] buffer;
+}
+
+static const aaudio_usage_t sUsages[] = {
+ DONT_SET,
+ AAUDIO_UNSPECIFIED,
+ AAUDIO_USAGE_MEDIA,
+ AAUDIO_USAGE_VOICE_COMMUNICATION,
+ AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+ AAUDIO_USAGE_ALARM,
+ AAUDIO_USAGE_NOTIFICATION,
+ AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+ AAUDIO_USAGE_NOTIFICATION_EVENT,
+ AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+ AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+ AAUDIO_USAGE_GAME,
+ AAUDIO_USAGE_ASSISTANT
+};
+
+static const aaudio_content_type_t sContentTypes[] = {
+ DONT_SET,
+ AAUDIO_UNSPECIFIED,
+ AAUDIO_CONTENT_TYPE_SPEECH,
+ AAUDIO_CONTENT_TYPE_MUSIC,
+ AAUDIO_CONTENT_TYPE_MOVIE,
+ AAUDIO_CONTENT_TYPE_SONIFICATION
+};
+
+static const aaudio_input_preset_t sInputPresets[] = {
+ DONT_SET,
+ AAUDIO_UNSPECIFIED,
+ AAUDIO_INPUT_PRESET_GENERIC,
+ AAUDIO_INPUT_PRESET_CAMCORDER,
+ AAUDIO_INPUT_PRESET_VOICE_RECOGNITION,
+ AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+ AAUDIO_INPUT_PRESET_UNPROCESSED,
+};
+
+static void checkAttributesUsage(aaudio_performance_mode_t perfMode) {
+ for (aaudio_usage_t usage : sUsages) {
+ checkAttributes(perfMode, usage, DONT_SET);
+ }
+}
+
+static void checkAttributesContentType(aaudio_performance_mode_t perfMode) {
+ for (aaudio_content_type_t contentType : sContentTypes) {
+ checkAttributes(perfMode, DONT_SET, contentType);
+ }
+}
+
+static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
+ for (aaudio_input_preset_t inputPreset : sInputPresets) {
+ checkAttributes(perfMode,
+ DONT_SET,
+ DONT_SET,
+ inputPreset,
+ AAUDIO_DIRECTION_INPUT);
+ }
+}
+
+TEST(test_attributes, aaudio_usage_perfnone) {
+ checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_content_type_perfnone) {
+ checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_input_preset_perfnone) {
+ checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_attributes, aaudio_usage_lowlat) {
+ checkAttributesUsage(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
+TEST(test_attributes, aaudio_content_type_lowlat) {
+ checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
+TEST(test_attributes, aaudio_input_preset_lowlat) {
+ checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
diff --git a/media/libmedia/nuplayer2/GenericSource.cpp b/media/libmedia/nuplayer2/GenericSource.cpp
index acd54a1..011691a 100644
--- a/media/libmedia/nuplayer2/GenericSource.cpp
+++ b/media/libmedia/nuplayer2/GenericSource.cpp
@@ -354,6 +354,9 @@
mLooper->unregisterHandler(id());
mLooper->stop();
}
+ if (mDataSource != NULL) {
+ mDataSource->close();
+ }
resetDataSource();
}
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.cpp b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
index 14b67cad..e0e3df9 100644
--- a/media/libmedia/nuplayer2/HTTPLiveSource.cpp
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
@@ -103,7 +103,8 @@
if (mLiveLooper == NULL) {
mLiveLooper = new ALooper;
mLiveLooper->setName("http live");
- mLiveLooper->start();
+ mLiveLooper->start(false, /* runOnCallingThread */
+ true /* canCallJava */);
mLiveLooper->registerHandler(this);
}
diff --git a/media/libmediaextractor/Android.bp b/media/libmediaextractor/Android.bp
index c57cd41..dcdb320 100644
--- a/media/libmediaextractor/Android.bp
+++ b/media/libmediaextractor/Android.bp
@@ -1,5 +1,6 @@
cc_library_shared {
name: "libmediaextractor",
+
include_dirs: [
"frameworks/av/include",
"frameworks/av/media/libmediaextractor/include",
@@ -14,7 +15,6 @@
],
shared_libs: [
- "libmediametrics",
"libstagefright_foundation",
"libutils",
"libcutils",
diff --git a/media/libmediaextractor/MediaExtractor.cpp b/media/libmediaextractor/MediaExtractor.cpp
index 6ba7c0e..2241567 100644
--- a/media/libmediaextractor/MediaExtractor.cpp
+++ b/media/libmediaextractor/MediaExtractor.cpp
@@ -19,68 +19,26 @@
#include <utils/Log.h>
#include <pwd.h>
-#include <media/MediaAnalyticsItem.h>
#include <media/MediaExtractor.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MetaData.h>
namespace android {
-// key for media statistics
-static const char *kKeyExtractor = "extractor";
-
MediaExtractor::MediaExtractor() {
if (!LOG_NDEBUG) {
uid_t uid = getuid();
struct passwd *pw = getpwuid(uid);
ALOGV("extractor created in uid: %d (%s)", getuid(), pw->pw_name);
}
-
- mAnalyticsItem = NULL;
- if (MEDIA_LOG) {
- mAnalyticsItem = new MediaAnalyticsItem(kKeyExtractor);
- (void) mAnalyticsItem->generateSessionID();
- }
}
-MediaExtractor::~MediaExtractor() {
-
- // log the current record, provided it has some information worth recording
- if (MEDIA_LOG) {
- if (mAnalyticsItem != NULL) {
- if (mAnalyticsItem->count() > 0) {
- mAnalyticsItem->setFinalized(true);
- mAnalyticsItem->selfrecord();
- }
- }
- }
- if (mAnalyticsItem != NULL) {
- delete mAnalyticsItem;
- mAnalyticsItem = NULL;
- }
-}
+MediaExtractor::~MediaExtractor() {}
sp<MetaData> MediaExtractor::getMetaData() {
return new MetaData;
}
-status_t MediaExtractor::getMetrics(Parcel *reply) {
-
- if (mAnalyticsItem == NULL || reply == NULL) {
- return UNKNOWN_ERROR;
- }
-
- populateMetrics();
- mAnalyticsItem->writeToParcel(reply);
-
- return OK;
-}
-
-void MediaExtractor::populateMetrics() {
- ALOGV("MediaExtractor::populateMetrics");
- // normally overridden in subclasses
-}
-
uint32_t MediaExtractor::flags() const {
return CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_PAUSE | CAN_SEEK;
}
diff --git a/media/libmediaextractor/include/media/MediaExtractor.h b/media/libmediaextractor/include/media/MediaExtractor.h
index 2dcced3..f197b5e 100644
--- a/media/libmediaextractor/include/media/MediaExtractor.h
+++ b/media/libmediaextractor/include/media/MediaExtractor.h
@@ -24,14 +24,10 @@
#include <utils/Errors.h>
#include <utils/RefBase.h>
-// still doing some on/off toggling here.
-#define MEDIA_LOG 1
-
namespace android {
class DataSource;
class IMediaSource;
-class MediaAnalyticsItem;
class MediaExtractorFactory;
class MetaData;
class Parcel;
@@ -55,8 +51,6 @@
// returns an empty metadata object.
virtual sp<MetaData> getMetaData();
- status_t getMetrics(Parcel *reply);
-
enum Flags {
CAN_SEEK_BACKWARD = 1, // the "seek 10secs back button"
CAN_SEEK_FORWARD = 2, // the "seek 10secs forward button"
@@ -123,14 +117,9 @@
MediaExtractor();
virtual ~MediaExtractor();
- MediaAnalyticsItem *mAnalyticsItem;
-
- virtual void populateMetrics();
-
private:
MediaExtractor(const MediaExtractor &);
MediaExtractor &operator=(const MediaExtractor &);
- friend class MediaExtractorFactory;
};
// purposely not defined anywhere so that this will fail to link if
diff --git a/media/libnblog/NBLog.cpp b/media/libnblog/NBLog.cpp
index c8c7195..d6fa3e3 100644
--- a/media/libnblog/NBLog.cpp
+++ b/media/libnblog/NBLog.cpp
@@ -259,7 +259,8 @@
*(int*) (buffer + sizeof(entry) + sizeof(HistTsEntry)) = author;
// Update lengths
buffer[offsetof(entry, length)] = sizeof(HistTsEntryWithAuthor);
- buffer[sizeof(buffer) + Entry::kPreviousLengthOffset] = sizeof(HistTsEntryWithAuthor);
+ buffer[offsetof(entry, data) + sizeof(HistTsEntryWithAuthor) + offsetof(ending, length)]
+ = sizeof(HistTsEntryWithAuthor);
// Write new buffer into FIFO
dst->write(buffer, sizeof(buffer));
return EntryIterator(mEntry).next();
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 26de4ca..484e310 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -117,9 +117,6 @@
"android.hardware.media.omx@1.0",
"android.hardware.graphics.allocator@2.0",
"android.hardware.graphics.mapper@2.0",
-
- // XXX: hack
- "libstagefright_soft_c2avcdec",
],
static_libs: [
@@ -135,7 +132,6 @@
"libstagefright_id3",
"libFLAC",
- // XXX: hack
"libstagefright_codec2_vndk",
],
diff --git a/media/libstagefright/CCodec.cpp b/media/libstagefright/CCodec.cpp
index 068ca5f..0103abd 100644
--- a/media/libstagefright/CCodec.cpp
+++ b/media/libstagefright/CCodec.cpp
@@ -18,11 +18,10 @@
#define LOG_TAG "CCodec"
#include <utils/Log.h>
-// XXX: HACK
-#include "codecs/avcdec/C2SoftAvcDec.h"
-
#include <thread>
+#include <C2PlatformSupport.h>
+
#include <gui/Surface.h>
#include <media/stagefright/CCodec.h>
@@ -181,8 +180,18 @@
// TODO: use C2ComponentStore to create component
mListener.reset(new CCodecListener(mChannel));
- std::shared_ptr<C2Component> comp(new C2SoftAvcDec(componentName.c_str(), 0));
- comp->setListener_vb(mListener, C2_DONT_BLOCK);
+ std::shared_ptr<C2Component> comp;
+ c2_status_t err = GetCodec2PlatformComponentStore()->createComponent(
+ componentName.c_str(), &comp);
+ if (err != C2_OK) {
+ Mutexed<State>::Locked state(mState);
+ state->mState = RELEASED;
+ state.unlock();
+ mCallback->onError(err, ACTION_CODE_FATAL);
+ state.lock();
+ return;
+ }
+ comp->setListener_vb(mListener, C2_MAY_BLOCK);
{
Mutexed<State>::Locked state(mState);
if (state->mState != ALLOCATING) {
@@ -233,6 +242,26 @@
setSurface(surface);
}
+ // XXX: hack
+ bool audio = mime.startsWithIgnoreCase("audio/");
+ if (encoder) {
+ outputFormat->setString("mime", mime);
+ inputFormat->setString("mime", AStringPrintf("%s/raw", audio ? "audio" : "video"));
+ if (audio) {
+ inputFormat->setInt32("channel-count", 1);
+ inputFormat->setInt32("sample-rate", 44100);
+ outputFormat->setInt32("channel-count", 1);
+ outputFormat->setInt32("sample-rate", 44100);
+ }
+ } else {
+ inputFormat->setString("mime", mime);
+ outputFormat->setString("mime", AStringPrintf("%s/raw", audio ? "audio" : "video"));
+ if (audio) {
+ outputFormat->setInt32("channel-count", 2);
+ outputFormat->setInt32("sample-rate", 44100);
+ }
+ }
+
// TODO
return OK;
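With the hard-wired C2SoftAvcDec dependency removed, the component now comes from the platform component store by name. A hedged sketch of that lookup in isolation; the component name and listener are placeholders, and error reporting is reduced to returning null:
#include <C2PlatformSupport.h>
#include <memory>
#include <string>

std::shared_ptr<C2Component> createByName(
        const std::string &name,                               // placeholder, e.g. a "c2." name
        const std::shared_ptr<C2Component::Listener> &listener) {
    std::shared_ptr<C2Component> comp;
    c2_status_t err = GetCodec2PlatformComponentStore()->createComponent(name.c_str(), &comp);
    if (err != C2_OK || !comp) {
        return nullptr;   // CCodec surfaces this as onError(err, ACTION_CODE_FATAL)
    }
    comp->setListener_vb(listener, C2_MAY_BLOCK);
    return comp;
}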
diff --git a/media/libstagefright/CCodecBufferChannel.cpp b/media/libstagefright/CCodecBufferChannel.cpp
index 61f3f3c..eea9c78 100644
--- a/media/libstagefright/CCodecBufferChannel.cpp
+++ b/media/libstagefright/CCodecBufferChannel.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright 2016, The Android Open Source Project
+ * Copyright 2017, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
#include <numeric>
#include <thread>
+#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -47,18 +48,11 @@
using namespace hardware::cas::V1_0;
using namespace hardware::cas::native::V1_0;
+namespace {
+
// TODO: get this info from component
const static size_t kMinBufferArraySize = 16;
-void CCodecBufferChannel::OutputBuffers::flush(
- const std::list<std::unique_ptr<C2Work>> &flushedWork) {
- (void) flushedWork;
- // This is no-op by default unless we're in array mode where we need to keep
- // track of the flushed work.
-}
-
-namespace {
-
template <class T>
ssize_t findBufferSlot(
std::vector<T> *buffers,
@@ -76,16 +70,103 @@
return std::distance(buffers->begin(), it);
}
+sp<Codec2Buffer> allocateLinearBuffer(
+ const std::shared_ptr<C2BlockPool> &pool,
+ const sp<AMessage> &format,
+ size_t size,
+ const C2MemoryUsage &usage) {
+ std::shared_ptr<C2LinearBlock> block;
+
+ status_t err = pool->fetchLinearBlock(
+ size,
+ usage,
+ &block);
+ if (err != OK) {
+ return nullptr;
+ }
+
+ return Codec2Buffer::allocate(format, block);
+}
+
class LinearBuffer : public C2Buffer {
public:
explicit LinearBuffer(C2ConstLinearBlock block) : C2Buffer({ block }) {}
};
+class InputBuffersArray : public CCodecBufferChannel::InputBuffers {
+public:
+ InputBuffersArray() = default;
+
+ void add(
+ size_t index,
+ const sp<MediaCodecBuffer> &clientBuffer,
+ const std::shared_ptr<C2Buffer> &compBuffer,
+ bool available) {
+ if (mBufferArray.size() <= index) {
+ mBufferArray.resize(index + 1);
+ }
+ mBufferArray[index].clientBuffer = clientBuffer;
+ mBufferArray[index].compBuffer = compBuffer;
+ mBufferArray[index].available = available;
+ }
+
+ bool isArrayMode() final { return true; }
+
+ std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+ return nullptr;
+ }
+
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ array->clear();
+ for (const auto &entry : mBufferArray) {
+ array->push(entry.clientBuffer);
+ }
+ }
+
+ bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ if (mBufferArray[i].available) {
+ mBufferArray[i].available = false;
+ *index = i;
+ *buffer = mBufferArray[i].clientBuffer;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+ mBufferArray[i].available = true;
+ return std::move(mBufferArray[i].compBuffer);
+ }
+ }
+ return nullptr;
+ }
+
+ void flush() override {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ mBufferArray[i].available = true;
+ mBufferArray[i].compBuffer.reset();
+ }
+ }
+
+private:
+ struct Entry {
+ sp<MediaCodecBuffer> clientBuffer;
+ std::shared_ptr<C2Buffer> compBuffer;
+ bool available;
+ };
+
+ std::vector<Entry> mBufferArray;
+};
+
class LinearInputBuffers : public CCodecBufferChannel::InputBuffers {
public:
using CCodecBufferChannel::InputBuffers::InputBuffers;
- virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+ bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
*buffer = nullptr;
ssize_t ret = findBufferSlot<wp<Codec2Buffer>>(
&mBuffers, kMinBufferArraySize,
@@ -93,25 +174,20 @@
if (ret < 0) {
return false;
}
- std::shared_ptr<C2LinearBlock> block;
-
- status_t err = mAlloc->fetchLinearBlock(
- // TODO: proper max input size
- 65536,
- { 0, C2MemoryUsage::CPU_WRITE },
- &block);
- if (err != OK) {
+ // TODO: proper max input size and usage
+ // TODO: read usage from intf
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ if (newBuffer == nullptr) {
return false;
}
-
- sp<Codec2Buffer> newBuffer = Codec2Buffer::allocate(mFormat, block);
mBuffers[ret] = newBuffer;
*index = ret;
*buffer = newBuffer;
return true;
}
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
auto it = std::find(mBuffers.begin(), mBuffers.end(), buffer);
if (it == mBuffers.end()) {
return nullptr;
@@ -122,80 +198,358 @@
return std::make_shared<LinearBuffer>(codecBuffer->share());
}
- virtual void flush() override {
+ void flush() override {
+ }
+
+ std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+ std::unique_ptr<InputBuffersArray> array(new InputBuffersArray);
+ // TODO
+ const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ for (size_t i = 0; i < size; ++i) {
+ // mBuffers may still be shorter than |size|; treat missing slots as unallocated.
+ sp<Codec2Buffer> clientBuffer =
+ (i < mBuffers.size()) ? mBuffers[i].promote() : nullptr;
+ bool available = false;
+ if (clientBuffer == nullptr) {
+ // TODO: proper max input size
+ // TODO: read usage from intf
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ clientBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ available = true;
+ }
+ array->add(
+ i,
+ clientBuffer,
+ std::make_shared<LinearBuffer>(clientBuffer->share()),
+ available);
+ }
+ return std::move(array);
}
private:
// Buffers we passed to the client. The index of a buffer matches what
// was passed in BufferCallback::onInputBufferAvailable().
std::vector<wp<Codec2Buffer>> mBuffers;
-
- // Buffer array we passed to the client. This only gets initialized at
- // getInput/OutputBufferArray() and when this is set we can't add more
- // buffers.
- std::vector<sp<Codec2Buffer>> mBufferArray;
};
-class GraphicOutputBuffers : public CCodecBufferChannel::OutputBuffers {
+// TODO: stub
+class GraphicInputBuffers : public CCodecBufferChannel::InputBuffers {
+public:
+ using CCodecBufferChannel::InputBuffers::InputBuffers;
+
+ bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
+ (void)index;
+ (void)buffer;
+ return false;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+ (void)buffer;
+ return nullptr;
+ }
+
+ void flush() override {
+ }
+
+ std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
+ return nullptr;
+ }
+};
+
+class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
public:
using CCodecBufferChannel::OutputBuffers::OutputBuffers;
- virtual bool registerBuffer(
+ void add(
+ size_t index,
+ const sp<MediaCodecBuffer> &clientBuffer,
+ const std::shared_ptr<C2Buffer> &compBuffer,
+ bool available) {
+ if (mBufferArray.size() <= index) {
+ mBufferArray.resize(index + 1);
+ }
+ mBufferArray[index].clientBuffer = clientBuffer;
+ mBufferArray[index].compBuffer = compBuffer;
+ mBufferArray[index].available = available;
+ }
+
+ bool isArrayMode() final { return true; }
+
+ std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() final {
+ return nullptr;
+ }
+
+ bool registerBuffer(
+ const std::shared_ptr<C2Buffer> &buffer,
+ size_t *index,
+ sp<MediaCodecBuffer> *codecBuffer) final {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ if (mBufferArray[i].available && copy(buffer, mBufferArray[i].clientBuffer)) {
+ *index = i;
+ *codecBuffer = mBufferArray[i].clientBuffer;
+ mBufferArray[i].compBuffer = buffer;
+ mBufferArray[i].available = false;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool registerCsd(
+ const C2StreamCsdInfo::output *csd,
+ size_t *index,
+ sp<MediaCodecBuffer> *codecBuffer) final {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ if (mBufferArray[i].available
+ && mBufferArray[i].clientBuffer->capacity() >= csd->flexCount()) {
+ memcpy(mBufferArray[i].clientBuffer->base(), csd->m.value, csd->flexCount());
+ mBufferArray[i].clientBuffer->setRange(0u, csd->flexCount());
+ *index = i;
+ *codecBuffer = mBufferArray[i].clientBuffer;
+ mBufferArray[i].available = false;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) final {
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+ mBufferArray[i].available = true;
+ return std::move(mBufferArray[i].compBuffer);
+ }
+ }
+ return nullptr;
+ }
+
+ void flush(
+ const std::list<std::unique_ptr<C2Work>> &flushedWork) override {
+ (void) flushedWork;
+ for (size_t i = 0; i < mBufferArray.size(); ++i) {
+ mBufferArray[i].available = true;
+ mBufferArray[i].compBuffer.reset();
+ }
+ }
+
+ virtual bool copy(
+ const std::shared_ptr<C2Buffer> &buffer,
+ const sp<MediaCodecBuffer> &clientBuffer) = 0;
+
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ array->clear();
+ for (const auto &entry : mBufferArray) {
+ array->push(entry.clientBuffer);
+ }
+ }
+
+private:
+ struct Entry {
+ sp<MediaCodecBuffer> clientBuffer;
+ std::shared_ptr<C2Buffer> compBuffer;
+ bool available;
+ };
+
+ std::vector<Entry> mBufferArray;
+};
+
+class LinearOutputBuffersArray : public OutputBuffersArray {
+public:
+ using OutputBuffersArray::OutputBuffersArray;
+
+ bool copy(
+ const std::shared_ptr<C2Buffer> &buffer,
+ const sp<MediaCodecBuffer> &clientBuffer) final {
+ if (!buffer) {
+ clientBuffer->setRange(0u, 0u);
+ return true;
+ }
+ C2ReadView view = buffer->data().linearBlocks().front().map().get();
+ if (clientBuffer->capacity() < view.capacity()) {
+ return false;
+ }
+ clientBuffer->setRange(0u, view.capacity());
+ memcpy(clientBuffer->data(), view.data(), view.capacity());
+ return true;
+ }
+};
+
+class GraphicOutputBuffersArray : public OutputBuffersArray {
+public:
+ using OutputBuffersArray::OutputBuffersArray;
+
+ bool copy(
+ const std::shared_ptr<C2Buffer> &buffer,
+ const sp<MediaCodecBuffer> &clientBuffer) final {
+ if (!buffer) {
+ clientBuffer->setRange(0u, 0u);
+ return true;
+ }
+ clientBuffer->setRange(0u, 1u);
+ return true;
+ }
+};
+
+// Flexible in a sense that it does not have fixed array size.
+class FlexOutputBuffers : public CCodecBufferChannel::OutputBuffers {
+public:
+ using CCodecBufferChannel::OutputBuffers::OutputBuffers;
+
+ bool registerBuffer(
const std::shared_ptr<C2Buffer> &buffer,
size_t *index,
sp<MediaCodecBuffer> *codecBuffer) override {
*codecBuffer = nullptr;
ssize_t ret = findBufferSlot<BufferInfo>(
&mBuffers,
- kMinBufferArraySize,
- [] (const auto &elem) { return elem.mClientBuffer.promote() == nullptr; });
+ std::numeric_limits<size_t>::max(),
+ [] (const auto &elem) { return elem.clientBuffer.promote() == nullptr; });
if (ret < 0) {
return false;
}
sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
mFormat,
- buffer == nullptr ? kEmptyBuffer : kDummyBuffer);
+ convert(buffer));
mBuffers[ret] = { newBuffer, buffer };
*index = ret;
*codecBuffer = newBuffer;
return true;
}
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
+ bool registerCsd(
+ const C2StreamCsdInfo::output *csd,
+ size_t *index,
+ sp<MediaCodecBuffer> *codecBuffer) final {
+ *codecBuffer = nullptr;
+ ssize_t ret = findBufferSlot<BufferInfo>(
+ &mBuffers,
+ std::numeric_limits<size_t>::max(),
+ [] (const auto &elem) { return elem.clientBuffer.promote() == nullptr; });
+ if (ret < 0) {
+ return false;
+ }
+ sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
+ mFormat,
+ ABuffer::CreateAsCopy(csd->m.value, csd->flexCount()));
+ mBuffers[ret] = { newBuffer, nullptr };
+ *index = ret;
+ *codecBuffer = newBuffer;
+ return true;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(
+ const sp<MediaCodecBuffer> &buffer) override {
auto it = std::find_if(
mBuffers.begin(), mBuffers.end(),
[buffer] (const auto &elem) {
- return elem.mClientBuffer.promote() == buffer;
+ return elem.clientBuffer.promote() == buffer;
});
if (it == mBuffers.end()) {
return nullptr;
}
- return it->mBufferRef;
+ return std::move(it->bufferRef);
}
-private:
- static const sp<ABuffer> kEmptyBuffer;
- static const sp<ABuffer> kDummyBuffer;
+ void flush(
+ const std::list<std::unique_ptr<C2Work>> &flushedWork) override {
+ (void) flushedWork;
+ // This is a no-op by default unless we're in array mode, where we need to
+ // keep track of the flushed work.
+ }
+ virtual sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) = 0;
+
+protected:
struct BufferInfo {
// wp<> of MediaCodecBuffer for MediaCodec.
- wp<MediaCodecBuffer> mClientBuffer;
- // Buffer reference to hold until mClientBuffer is valid.
- std::shared_ptr<C2Buffer> mBufferRef;
+ wp<MediaCodecBuffer> clientBuffer;
+ // Buffer reference to hold until clientBuffer is valid.
+ std::shared_ptr<C2Buffer> bufferRef;
};
// Buffers we passed to the client. The index of a buffer matches what
// was passed in BufferCallback::onInputBufferAvailable().
std::vector<BufferInfo> mBuffers;
};
-const sp<ABuffer> GraphicOutputBuffers::kEmptyBuffer = new ABuffer(nullptr, 0);
-const sp<ABuffer> GraphicOutputBuffers::kDummyBuffer = new ABuffer(nullptr, 1);
+class LinearOutputBuffers : public FlexOutputBuffers {
+public:
+ using FlexOutputBuffers::FlexOutputBuffers;
+
+ virtual sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) override {
+ if (buffer == nullptr) {
+ return new ABuffer(nullptr, 0);
+ }
+ if (buffer->data().type() != C2BufferData::LINEAR) {
+ // We expect linear output buffers from the component.
+ return nullptr;
+ }
+ if (buffer->data().linearBlocks().size() != 1u) {
+ // We expect one and only one linear block from the component.
+ return nullptr;
+ }
+ C2ReadView view = buffer->data().linearBlocks().front().map().get();
+ if (view.error() != C2_OK) {
+ // Mapping the linear block failed
+ return nullptr;
+ }
+ return new ABuffer(
+ // XXX: the data is supposed to be read-only. We don't have
+ // const equivalent of ABuffer however...
+ const_cast<uint8_t *>(view.data()),
+ view.capacity());
+ }
+
+ std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
+ std::unique_ptr<OutputBuffersArray> array(new LinearOutputBuffersArray);
+
+ const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ for (size_t i = 0; i < size; ++i) {
+ // mBuffers may still be shorter than |size|; treat missing slots as unallocated.
+ sp<MediaCodecBuffer> clientBuffer =
+ (i < mBuffers.size()) ? mBuffers[i].clientBuffer.promote() : nullptr;
+ std::shared_ptr<C2Buffer> compBuffer =
+ (i < mBuffers.size()) ? mBuffers[i].bufferRef : nullptr;
+ bool available = false;
+ if (clientBuffer == nullptr) {
+ // TODO: proper max input size
+ clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(65536));
+ available = true;
+ compBuffer.reset();
+ }
+ array->add(i, clientBuffer, compBuffer, available);
+ }
+ return std::move(array);
+ }
+};
+
+class GraphicOutputBuffers : public FlexOutputBuffers {
+public:
+ using FlexOutputBuffers::FlexOutputBuffers;
+
+ sp<ABuffer> convert(const std::shared_ptr<C2Buffer> &buffer) override {
+ return buffer ? new ABuffer(nullptr, 1) : new ABuffer(nullptr, 0);
+ }
+
+ std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
+ std::unique_ptr<OutputBuffersArray> array(new GraphicOutputBuffersArray);
+
+ const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ for (size_t i = 0; i < size; ++i) {
+ // mBuffers may still be shorter than |size|; treat missing slots as unallocated.
+ sp<MediaCodecBuffer> clientBuffer =
+ (i < mBuffers.size()) ? mBuffers[i].clientBuffer.promote() : nullptr;
+ std::shared_ptr<C2Buffer> compBuffer =
+ (i < mBuffers.size()) ? mBuffers[i].bufferRef : nullptr;
+ bool available = false;
+ if (clientBuffer == nullptr) {
+ clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(nullptr, 1));
+ available = true;
+ compBuffer.reset();
+ }
+ array->add(i, clientBuffer, compBuffer, available);
+ }
+ return std::move(array);
+ }
+};
} // namespace
CCodecBufferChannel::QueueGuard::QueueGuard(
CCodecBufferChannel::QueueSync &sync) : mSync(sync) {
std::unique_lock<std::mutex> l(mSync.mMutex);
+ // At this point it's guaranteed that mSync is not under state transition,
+ // as we are holding its mutex.
if (mSync.mCount == -1) {
mRunning = false;
} else {
@@ -206,6 +560,8 @@
CCodecBufferChannel::QueueGuard::~QueueGuard() {
if (mRunning) {
+ // We are not holding mutex at this point so that QueueSync::stop() can
+ // keep holding the lock until mCount reaches zero.
--mSync.mCount;
}
}
@@ -214,7 +570,7 @@
std::unique_lock<std::mutex> l(mMutex);
// If stopped, it goes to running state; otherwise no-op.
int32_t expected = -1;
- mCount.compare_exchange_strong(expected, 0);
+ (void)mCount.compare_exchange_strong(expected, 0);
}
void CCodecBufferChannel::QueueSync::stop() {
@@ -223,6 +579,11 @@
// no-op
return;
}
+ // Holding mutex here blocks creation of additional QueueGuard objects, so
+ // mCount can only decrement. In other words, threads that acquired the lock
+ // are allowed to finish execution but additional threads trying to acquire
+ // the lock at this point will block, and then get QueueGuard at STOPPED
+ // state.
int32_t expected = 0;
while (!mCount.compare_exchange_weak(expected, -1)) {
std::this_thread::yield();
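The comments above describe a small start/stop gate: mCount counts in-flight guards and -1 means stopped. A self-contained sketch of the same protocol, detached from the CCodec types, to make the state transitions explicit:
#include <atomic>
#include <cstdint>
#include <mutex>
#include <thread>

class Gate {
public:
    bool enter() {                       // QueueGuard constructor
        std::unique_lock<std::mutex> l(mMutex);
        if (mCount == -1) return false;  // stopped: caller must not queue work
        ++mCount;
        return true;
    }
    void leave() { --mCount; }           // QueueGuard destructor; no lock, stop() spins
    void start() {
        std::unique_lock<std::mutex> l(mMutex);
        int32_t expected = -1;
        (void)mCount.compare_exchange_strong(expected, 0);
    }
    void stop() {
        std::unique_lock<std::mutex> l(mMutex);   // blocks new enter() calls
        if (mCount == -1) return;                 // already stopped
        int32_t expected = 0;
        while (!mCount.compare_exchange_weak(expected, -1)) {
            expected = 0;                // only succeed once every in-flight guard has left
            std::this_thread::yield();
        }
    }
private:
    std::mutex mMutex;
    std::atomic<int32_t> mCount{-1};
};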
@@ -232,8 +593,6 @@
CCodecBufferChannel::CCodecBufferChannel(
const std::function<void(status_t, enum ActionCode)> &onError)
: mOnError(onError),
- mInputBuffers(new LinearInputBuffers),
- mOutputBuffers(new GraphicOutputBuffers),
mFrameIndex(0u),
mFirstValidFrameIndex(0u) {
}
@@ -246,12 +605,50 @@
void CCodecBufferChannel::setComponent(const std::shared_ptr<C2Component> &component) {
mComponent = component;
- // TODO: get pool ID from params
- std::shared_ptr<C2BlockPool> pool;
- c2_status_t err = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, component, &pool);
- if (err == C2_OK) {
+ C2StreamFormatConfig::input inputFormat(0u);
+ C2StreamFormatConfig::output outputFormat(0u);
+ c2_status_t err = mComponent->intf()->query_vb(
+ { &inputFormat, &outputFormat },
+ {},
+ C2_DONT_BLOCK,
+ nullptr);
+ if (err != C2_OK) {
+ // TODO: error
+ return;
+ }
+
+ {
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- (*buffers)->setAlloc(pool);
+
+ bool graphic = (inputFormat.value == C2FormatVideo);
+ if (graphic) {
+ buffers->reset(new GraphicInputBuffers);
+ } else {
+ buffers->reset(new LinearInputBuffers);
+ }
+
+ ALOGV("graphic = %s", graphic ? "true" : "false");
+ std::shared_ptr<C2BlockPool> pool;
+ err = GetCodec2BlockPool(
+ graphic ? C2BlockPool::BASIC_GRAPHIC : C2BlockPool::BASIC_LINEAR,
+ component,
+ &pool);
+ if (err == C2_OK) {
+ (*buffers)->setPool(pool);
+ } else {
+ // TODO: error
+ }
+ }
+
+ {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+
+ bool graphic = (outputFormat.value == C2FormatVideo);
+ if (graphic) {
+ buffers->reset(new GraphicOutputBuffers);
+ } else {
+ buffers->reset(new LinearOutputBuffers);
+ }
}
}
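setComponent() now asks the component for its stream formats before picking buffer implementations, so the same code path serves linear (audio/bitstream) and graphic (video) components. The decision, condensed and without the locking:
// |component| is the std::shared_ptr<C2Component> passed to setComponent().
C2StreamFormatConfig::input inputFormat(0u);
C2StreamFormatConfig::output outputFormat(0u);
if (component->intf()->query_vb(
        { &inputFormat, &outputFormat }, {}, C2_DONT_BLOCK, nullptr) == C2_OK) {
    const bool graphicInput = (inputFormat.value == C2FormatVideo);
    std::shared_ptr<C2BlockPool> pool;
    if (GetCodec2BlockPool(
            graphicInput ? C2BlockPool::BASIC_GRAPHIC : C2BlockPool::BASIC_LINEAR,
            component, &pool) == C2_OK) {
        // |pool| then backs LinearInputBuffers / GraphicInputBuffers via setPool().
    }
}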
@@ -314,17 +711,6 @@
status_t CCodecBufferChannel::renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
ALOGV("renderOutputBuffer");
- sp<MediaCodecBuffer> inBuffer;
- size_t index;
- {
- Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
- inBuffer = nullptr;
- }
- }
- if (inBuffer != nullptr) {
- mCallback->onInputBufferAvailable(index, inBuffer);
- }
std::shared_ptr<C2Buffer> c2Buffer;
{
@@ -344,8 +730,9 @@
return UNKNOWN_ERROR;
}
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(blocks.front().handle());
sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(
- blocks.front().handle(),
+ grallocHandle,
GraphicBuffer::CLONE_HANDLE,
blocks.front().width(),
blocks.front().height(),
@@ -355,6 +742,7 @@
(uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
// TODO
blocks.front().width()));
+ native_handle_delete(grallocHandle);
status_t result = (*surface)->attachBuffer(graphicBuffer.get());
if (result != OK) {
@@ -385,85 +773,38 @@
}
status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
- ALOGV("discardBuffer");
+ ALOGV("discardBuffer: %p", buffer.get());
{
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- (void) (*buffers)->releaseBuffer(buffer);
+ (void)(*buffers)->releaseBuffer(buffer);
}
{
Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- (void) (*buffers)->releaseBuffer(buffer);
+ (void)(*buffers)->releaseBuffer(buffer);
}
return OK;
}
-#if 0
-void fillBufferArray_l(Mutexed<Buffers>::Locked &buffers) {
- for (size_t i = 0; i < buffers->mClientBuffer.size(); ++i) {
- sp<Codec2Buffer> buffer(buffers->mClientBuffer.get(i).promote());
- if (buffer == nullptr) {
- buffer = allocateBuffer_l(buffers->mAlloc);
- }
- buffers->mBufferArray.push_back(buffer);
- }
- while (buffers->mBufferArray.size() < kMinBufferArraySize) {
- sp<Codec2Buffer> buffer = allocateBuffer_l(buffers->mAlloc);
- // allocate buffer
- buffers->mBufferArray.push_back(buffer);
- }
-}
-#endif
-
void CCodecBufferChannel::getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
- (void) array;
- // TODO
-#if 0
array->clear();
- Mutexed<Buffers>::Locked buffers(mInputBuffers);
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- if (!buffers->isArrayMode()) {
- // mBufferArray is empty.
- fillBufferArray_l(buffers);
+ if (!(*buffers)->isArrayMode()) {
+ *buffers = (*buffers)->toArrayMode();
}
- for (const auto &buffer : buffers->mBufferArray) {
- array->push_back(buffer);
- }
-#endif
+ (*buffers)->getArray(array);
}
void CCodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
- (void) array;
- // TODO
-#if 0
array->clear();
- Mutexed<Buffers>::Locked buffers(mOutputBuffers);
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if (!buffers->isArrayMode()) {
- if (linear) {
- // mBufferArray is empty.
- fillBufferArray_l(buffers);
-
- // We need to replace the allocator so that the component only returns
- // buffer from the array.
- ArrayModeAllocator::Builder builder(buffers->mBufferArray);
- for (size_t i = 0; i < buffers->mClientBuffer.size(); ++i) {
- if (buffers->mClientBuffer.get(i).promote() != nullptr) {
- builder.markUsing(i);
- }
- }
- buffers->mAlloc.reset(builder.build());
- } else {
- for (int i = 0; i < X; ++i) {
- buffers->mBufferArray.push_back(dummy buffer);
- }
- }
+ if (!(*buffers)->isArrayMode()) {
+ *buffers = (*buffers)->toArrayMode();
}
- for (const auto &buffer : buffers->mBufferArray) {
- array->push_back(buffer);
- }
-#endif
+ (*buffers)->getArray(array);
}
void CCodecBufferChannel::start(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
@@ -513,6 +854,19 @@
void CCodecBufferChannel::onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems) {
for (const auto &work : workItems) {
+ sp<MediaCodecBuffer> inBuffer;
+ size_t index;
+ {
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+ if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
+ ALOGW("no new buffer available");
+ inBuffer = nullptr;
+ }
+ }
+ if (inBuffer != nullptr) {
+ mCallback->onInputBufferAvailable(index, inBuffer);
+ }
+
if (work->result != OK) {
ALOGE("work failed to complete: %d", work->result);
mOnError(work->result, ACTION_CODE_FATAL);
@@ -539,7 +893,16 @@
}
const std::shared_ptr<C2Buffer> &buffer = worklet->output.buffers[0];
- // TODO: transfer infos() into buffer metadata
+ const C2StreamCsdInfo::output *csdInfo = nullptr;
+ if (buffer) {
+ // TODO: transfer infos() into buffer metadata
+ }
+ for (const auto &info : worklet->output.infos) {
+ if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
+ ALOGV("csd found");
+ csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
+ }
+ }
int32_t flags = 0;
if (worklet->output.flags & C2BufferPack::FLAG_END_OF_STREAM) {
@@ -547,15 +910,43 @@
ALOGV("output EOS");
}
- size_t index;
sp<MediaCodecBuffer> outBuffer;
- Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
- ALOGE("unable to register output buffer");
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ if (csdInfo != nullptr) {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
+ outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp);
+ outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ ALOGV("csd index = %zu", index);
+
+ buffers.unlock();
+ mCallback->onOutputBufferAvailable(index, outBuffer);
+ buffers.lock();
+ } else {
+ ALOGE("unable to register output buffer");
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
+ continue;
+ }
+ }
+
+ if (!buffer && !flags) {
+ ALOGV("Not reporting output buffer");
continue;
}
+ {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
+ ALOGE("unable to register output buffer");
+
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
+ continue;
+ }
+ }
+
outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp);
outBuffer->meta()->setInt32("flags", flags);
ALOGV("index = %zu", index);
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6ad11f4..7cfa4ce 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -552,7 +552,7 @@
//static
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, bool nameIsType) {
static bool ccodecEnabled = property_get_bool("debug.stagefright.ccodec", false);
- if (ccodecEnabled && !nameIsType && name.startsWithIgnoreCase("codec2.")) {
+ if (ccodecEnabled && !nameIsType && name.startsWithIgnoreCase("c2.")) {
return new CCodec;
} else if (nameIsType || name.startsWithIgnoreCase("omx.")) {
// at this time only ACodec specifies a mime type.
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index e1e04eb..8a90e93 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -37,11 +37,6 @@
namespace android {
-// attrs for media statistics
-static const char *kExtractorMime = "android.media.mediaextractor.mime";
-static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
-static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
-
// static
sp<IMediaExtractor> MediaExtractorFactory::Create(
const sp<DataSource> &source, const char *mime) {
@@ -133,31 +128,6 @@
mime, confidence);
MediaExtractor *ret = creator(source, meta);
-
- if (ret != NULL) {
- // track the container format (mpeg, aac, wvm, etc)
- if (MEDIA_LOG) {
- if (ret->mAnalyticsItem != NULL) {
- size_t ntracks = ret->countTracks();
- ret->mAnalyticsItem->setCString(kExtractorFormat, ret->name());
- // tracks (size_t)
- ret->mAnalyticsItem->setInt32(kExtractorTracks, ntracks);
- // metadata
- sp<MetaData> pMetaData = ret->getMetaData();
- if (pMetaData != NULL) {
- String8 xx = pMetaData->toString();
- // 'titl' -- but this verges into PII
- // 'mime'
- const char *mime = NULL;
- if (pMetaData->findCString(kKeyMIMEType, &mime)) {
- ret->mAnalyticsItem->setCString(kExtractorMime, mime);
- }
- // what else is interesting and not already available?
- }
- }
- }
- }
-
return ret;
}
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 1dd7986..2a16e16 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -14,16 +14,71 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RemoteMediaExtractor"
+#include <utils/Log.h>
+
#include <media/stagefright/InterfaceUtils.h>
+#include <media/MediaAnalyticsItem.h>
#include <media/MediaSource.h>
#include <media/stagefright/RemoteMediaExtractor.h>
+// still doing some on/off toggling here.
+#define MEDIA_LOG 1
+
namespace android {
-RemoteMediaExtractor::RemoteMediaExtractor(const sp<MediaExtractor> &extractor)
- :mExtractor(extractor) {}
+// key for media statistics
+static const char *kKeyExtractor = "extractor";
-RemoteMediaExtractor::~RemoteMediaExtractor() {}
+// attrs for media statistics
+static const char *kExtractorMime = "android.media.mediaextractor.mime";
+static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
+static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
+
+RemoteMediaExtractor::RemoteMediaExtractor(const sp<MediaExtractor> &extractor)
+ :mExtractor(extractor) {
+
+ mAnalyticsItem = nullptr;
+ if (MEDIA_LOG) {
+ mAnalyticsItem = new MediaAnalyticsItem(kKeyExtractor);
+ (void) mAnalyticsItem->generateSessionID();
+
+ // track the container format (mpeg, aac, wvm, etc)
+ size_t ntracks = extractor->countTracks();
+ mAnalyticsItem->setCString(kExtractorFormat, extractor->name());
+ // tracks (size_t)
+ mAnalyticsItem->setInt32(kExtractorTracks, ntracks);
+ // metadata
+ sp<MetaData> pMetaData = extractor->getMetaData();
+ if (pMetaData != nullptr) {
+ String8 xx = pMetaData->toString();
+ // 'titl' -- but this verges into PII
+ // 'mime'
+ const char *mime = nullptr;
+ if (pMetaData->findCString(kKeyMIMEType, &mime)) {
+ mAnalyticsItem->setCString(kExtractorMime, mime);
+ }
+ // what else is interesting and not already available?
+ }
+ }
+}
+
+RemoteMediaExtractor::~RemoteMediaExtractor() {
+ // log the current record, provided it has some information worth recording
+ if (MEDIA_LOG) {
+ if (mAnalyticsItem != nullptr) {
+ if (mAnalyticsItem->count() > 0) {
+ mAnalyticsItem->setFinalized(true);
+ mAnalyticsItem->selfrecord();
+ }
+ }
+ }
+ if (mAnalyticsItem != nullptr) {
+ delete mAnalyticsItem;
+ mAnalyticsItem = nullptr;
+ }
+}
size_t RemoteMediaExtractor::countTracks() {
return mExtractor->countTracks();
@@ -43,7 +98,12 @@
}
status_t RemoteMediaExtractor::getMetrics(Parcel *reply) {
- return mExtractor->getMetrics(reply);
+ if (mAnalyticsItem == nullptr || reply == nullptr) {
+ return UNKNOWN_ERROR;
+ }
+
+ mAnalyticsItem->writeToParcel(reply);
+ return OK;
}
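The analytics record now lives exactly as long as the remote wrapper: attributes are gathered in the constructor, and the destructor submits the record only if it holds something worth reporting. The same lifetime pattern in a self-contained form, with a hypothetical Recorder standing in for MediaAnalyticsItem:
#include <cstdio>
#include <map>
#include <string>

struct Recorder {                       // hypothetical stand-in for MediaAnalyticsItem
    std::map<std::string, std::string> attrs;
    void set(const std::string &k, const std::string &v) { attrs[k] = v; }
    size_t count() const { return attrs.size(); }
    void submit() const {
        for (const auto &kv : attrs) printf("%s=%s\n", kv.first.c_str(), kv.second.c_str());
    }
};

class ScopedMetrics {
public:
    ScopedMetrics() : mRec(new Recorder) { mRec->set("fmt", "mp4"); }  // gather up front
    ~ScopedMetrics() {
        if (mRec->count() > 0) {        // only submit records that carry information
            mRec->submit();
        }
        delete mRec;
    }
private:
    Recorder *mRec;
};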
uint32_t RemoteMediaExtractor::flags() const {
diff --git a/media/libstagefright/codec2/Android.bp b/media/libstagefright/codec2/Android.bp
index 74609e8..ee5c3eb 100644
--- a/media/libstagefright/codec2/Android.bp
+++ b/media/libstagefright/codec2/Android.bp
@@ -42,27 +42,22 @@
"optional",
],
- srcs: ["SimpleC2Component.cpp"],
+ srcs: [
+ "SimpleC2Component.cpp",
+ "SimpleC2Interface.cpp",
+ ],
include_dirs: [
- "frameworks/av/media/libstagefright/codec2/include",
],
shared_libs: [
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.mapper@2.0",
- "libhidlbase",
- "libion",
"liblog",
"libstagefright_codec2",
+ "libstagefright_codec2_vndk",
"libstagefright_foundation",
"libutils",
],
- static_libs: [
- "libstagefright_codec2_vndk",
- ],
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
diff --git a/media/libstagefright/codec2/SimpleC2Component.cpp b/media/libstagefright/codec2/SimpleC2Component.cpp
index 0e4a354..4d75a31 100644
--- a/media/libstagefright/codec2/SimpleC2Component.cpp
+++ b/media/libstagefright/codec2/SimpleC2Component.cpp
@@ -18,12 +18,39 @@
#define LOG_TAG "SimpleC2Component"
#include <media/stagefright/foundation/ADebug.h>
-#include <C2PlatformSupport.h>
+#include <inttypes.h>
+#include <C2PlatformSupport.h>
#include <SimpleC2Component.h>
namespace android {
+std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
+ std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
+ mQueue.pop_front();
+ return work;
+}
+
+void SimpleC2Component::WorkQueue::push_back(std::unique_ptr<C2Work> work) {
+ mQueue.push_back({ std::move(work), NO_DRAIN });
+}
+
+bool SimpleC2Component::WorkQueue::empty() const {
+ return mQueue.empty();
+}
+
+void SimpleC2Component::WorkQueue::clear() {
+ mQueue.clear();
+}
+
+uint32_t SimpleC2Component::WorkQueue::drainMode() const {
+ return mQueue.front().drainMode;
+}
+
+void SimpleC2Component::WorkQueue::markDrain(uint32_t drainMode) {
+ mQueue.push_back({ nullptr, drainMode });
+}
+
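Drain requests are queued in-band as entries whose work pointer is null, carrying only a drain mode. Because pop_front() returns just the work, the mode of the front entry has to be read before popping, which is what processQueue() does below; a compact sketch of the consumer side:
// With the WorkQueue lock held (names as in processQueue() below):
uint32_t drainMode = queue->drainMode();           // read before pop_front() drops it
std::unique_ptr<C2Work> work = queue->pop_front();
if (!work) {
    // A null entry can only come from markDrain(), so this is a drain request.
    (void)drain(drainMode, mOutputBlockPool);
} else {
    process(work, mOutputBlockPool);
}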
SimpleC2Component::SimpleC2Component(
const std::shared_ptr<C2ComponentInterface> &intf)
: mIntf(intf) {
@@ -55,7 +82,7 @@
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
while (!items->empty()) {
- queue->mQueue.push_back(std::move(items->front()));
+ queue->push_back(std::move(items->front()));
items->pop_front();
}
queue->mCondition.broadcast();
@@ -79,10 +106,12 @@
}
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
- ++queue->mGeneration;
- while (!queue->mQueue.empty()) {
- flushedWork->push_back(std::move(queue->mQueue.front()));
- queue->mQueue.pop_front();
+ queue->incGeneration();
+ while (!queue->empty()) {
+ std::unique_ptr<C2Work> work = queue->pop_front();
+ if (work) {
+ flushedWork->push_back(std::move(work));
+ }
}
}
{
@@ -96,8 +125,10 @@
return onFlush_sm();
}
-c2_status_t SimpleC2Component::drain_nb(drain_mode_t drainThrough) {
- (void) drainThrough;
+c2_status_t SimpleC2Component::drain_nb(drain_mode_t drainMode) {
+ if (drainMode == DRAIN_CHAIN) {
+ return C2_OMITTED;
+ }
{
Mutexed<ExecState>::Locked state(mExecState);
if (state->mState != RUNNING) {
@@ -106,14 +137,11 @@
}
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
- if (!queue->mQueue.empty()) {
- const std::unique_ptr<C2Work> &work = queue->mQueue.back();
- work->input.flags = (C2BufferPack::flags_t)(work->input.flags | C2BufferPack::FLAG_END_OF_STREAM);
- return C2_OK;
- }
+ queue->markDrain(drainMode);
+ queue->mCondition.broadcast();
}
- return onDrain_nb();
+ return C2_OK;
}
c2_status_t SimpleC2Component::start() {
@@ -161,7 +189,7 @@
}
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
- queue->mQueue.clear();
+ queue->clear();
}
{
Mutexed<PendingWork>::Locked pending(mPendingWork);
@@ -181,7 +209,7 @@
}
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
- queue->mQueue.clear();
+ queue->clear();
}
{
Mutexed<PendingWork>::Locked pending(mPendingWork);
@@ -192,11 +220,13 @@
}
c2_status_t SimpleC2Component::release() {
+ std::thread releasing;
{
Mutexed<ExecState>::Locked state(mExecState);
- mExitRequested = true;
- state->mThread.join();
+ releasing = std::move(state->mThread);
}
+ mExitRequested = true;
+ releasing.join();
onRelease();
return C2_OK;
}
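release() now takes ownership of the worker thread while holding the state lock, but performs the join after dropping it, so the worker can still acquire the lock while winding down. The same shape in isolation:
#include <atomic>
#include <mutex>
#include <thread>

struct Worker {
    std::mutex mLock;
    std::thread mThread;                 // normally guarded by mLock
    std::atomic_bool mExit{false};

    void release() {
        std::thread joining;
        {
            std::lock_guard<std::mutex> l(mLock);
            joining = std::move(mThread);   // take the handle under the lock
        }
        mExit = true;                       // tell the loop to stop
        joining.join();                     // join without holding mLock
    }
};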
@@ -221,6 +251,7 @@
{
Mutexed<PendingWork>::Locked pending(mPendingWork);
if (pending->count(frameIndex) == 0) {
+ ALOGW("unknown frame index: %" PRIu64, frameIndex);
return;
}
work = std::move(pending->at(frameIndex));
@@ -230,34 +261,56 @@
fillWork(work);
Mutexed<ExecState>::Locked state(mExecState);
state->mListener->onWorkDone_nb(shared_from_this(), vec(work));
+ ALOGV("returning pending work");
}
}
void SimpleC2Component::processQueue() {
std::unique_ptr<C2Work> work;
uint64_t generation;
+ uint32_t drainMode;
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
nsecs_t deadline = systemTime() + ms2ns(250);
- while (queue->mQueue.empty()) {
- status_t err = queue.waitForConditionRelative(
- queue->mCondition, std::max(deadline - systemTime(), (nsecs_t)0));
+ while (queue->empty()) {
+ nsecs_t now = systemTime();
+ if (now >= deadline) {
+ return;
+ }
+ status_t err = queue.waitForConditionRelative(queue->mCondition, deadline - now);
if (err == TIMED_OUT) {
return;
}
}
- generation = queue->mGeneration;
- work = std::move(queue->mQueue.front());
- queue->mQueue.pop_front();
- }
- if (!work) {
- return;
+ generation = queue->generation();
+ drainMode = queue->drainMode();
+ work = queue->pop_front();
}
- // TODO: grab pool ID from intf
if (!mOutputBlockPool) {
- c2_status_t err = GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, shared_from_this(), &mOutputBlockPool);
+ c2_status_t err = [this] {
+ // TODO: don't use query_vb
+ C2StreamFormatConfig::output outputFormat(0u);
+ c2_status_t err = intf()->query_vb(
+ { &outputFormat },
+ {},
+ C2_DONT_BLOCK,
+ nullptr);
+ if (err != C2_OK) {
+ return err;
+ }
+ err = GetCodec2BlockPool(
+ (outputFormat.value == C2FormatVideo)
+ ? C2BlockPool::BASIC_GRAPHIC
+ : C2BlockPool::BASIC_LINEAR,
+ shared_from_this(),
+ &mOutputBlockPool);
+ if (err != C2_OK) {
+ return err;
+ }
+ return C2_OK;
+ }();
if (err != C2_OK) {
Mutexed<ExecState>::Locked state(mExecState);
state->mListener->onError_nb(shared_from_this(), err);
@@ -265,10 +318,20 @@
}
}
- bool done = process(work, mOutputBlockPool);
+ if (!work) {
+ c2_status_t err = drain(drainMode, mOutputBlockPool);
+ if (err != C2_OK) {
+ Mutexed<ExecState>::Locked state(mExecState);
+ state->mListener->onError_nb(shared_from_this(), err);
+ }
+ return;
+ }
+
+ process(work, mOutputBlockPool);
{
Mutexed<WorkQueue>::Locked queue(mWorkQueue);
- if (queue->mGeneration != generation) {
+ if (queue->generation() != generation) {
+ ALOGW("work form old generation: was %" PRIu64 " now %" PRIu64, queue->generation(), generation);
work->result = C2_NOT_FOUND;
queue.unlock();
{
@@ -279,10 +342,12 @@
return;
}
}
- if (done) {
+ if (work->worklets_processed != 0u) {
Mutexed<ExecState>::Locked state(mExecState);
+ ALOGV("returning this work");
state->mListener->onWorkDone_nb(shared_from_this(), vec(work));
} else {
+ ALOGV("queue pending work");
std::unique_ptr<C2Work> unexpected;
{
Mutexed<PendingWork>::Locked pending(mPendingWork);
@@ -301,4 +366,45 @@
}
}
+namespace {
+
+class GraphicBuffer : public C2Buffer {
+public:
+ GraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block,
+ const C2Rect &crop)
+ : C2Buffer({ block->share(crop, ::android::C2Fence()) }) {}
+};
+
+
+class LinearBuffer : public C2Buffer {
+public:
+ LinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size)
+ : C2Buffer({ block->share(offset, size, ::android::C2Fence()) }) {}
+};
+
+} // namespace
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block) {
+ return createLinearBuffer(block, block->offset(), block->size());
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
+ return std::make_shared<LinearBuffer>(block, offset, size);
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block) {
+ return createGraphicBuffer(block, C2Rect(0, 0, block->width(), block->height()));
+}
+
+std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block,
+ const C2Rect &crop) {
+ return std::make_shared<GraphicBuffer>(block, crop);
+}
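These helpers give SimpleC2Component subclasses a one-liner for wrapping a block into a C2Buffer when finishing work. A hedged sketch of how a subclass's process() might use the linear variant; the fetch call mirrors the one in CCodecBufferChannel.cpp earlier in this change, and the payload-writing step is component specific, so it is elided:
// Inside a SimpleC2Component subclass; |pool| and |work| are the process() arguments.
std::shared_ptr<C2LinearBlock> block;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
if (pool->fetchLinearBlock(4096 /* assumed size */, usage, &block) == C2_OK) {
    // ... write |bytesWritten| bytes of output into the block ...
    work->worklets.front()->output.buffers.push_back(
            createLinearBuffer(block, 0 /* offset */, bytesWritten));
    work->worklets_processed = 1u;
}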
+
} // namespace android
diff --git a/media/libstagefright/codec2/SimpleC2Interface.cpp b/media/libstagefright/codec2/SimpleC2Interface.cpp
new file mode 100644
index 0000000..f9cab26
--- /dev/null
+++ b/media/libstagefright/codec2/SimpleC2Interface.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimpleC2Interface"
+#include <utils/Log.h>
+
+#include <SimpleC2Interface.h>
+
+namespace android {
+
+c2_status_t SimpleC2Interface::query_vb(
+ const std::vector<C2Param* const> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+ (void)mayBlock;
+
+ for (C2Param* const param : stackParams) {
+ if (param->coreIndex() != C2StreamFormatConfig::CORE_INDEX
+ || !param->forStream()
+ || param->stream() != 0u) {
+ param->invalidate();
+ continue;
+ }
+ if (param->forInput()) {
+ param->updateFrom(mInputFormat);
+ } else {
+ param->updateFrom(mOutputFormat);
+ }
+ }
+ if (heapParams) {
+ heapParams->clear();
+ for (const auto &index : heapParamIndices) {
+ if (index.coreIndex() != C2StreamFormatConfig::CORE_INDEX
+ || !index.forStream()
+ || index.stream() != 0u) {
+ heapParams->push_back(nullptr);
+ continue;
+ }
+ if (index.forInput()) {
+ heapParams->push_back(C2Param::Copy(mInputFormat));
+ } else {
+ heapParams->push_back(C2Param::Copy(mOutputFormat));
+ }
+ }
+ }
+
+ return C2_OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index bbbf338..83cb72c 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -66,6 +66,8 @@
kParamIndexMaxVideoSizeHint,
kParamIndexVideoSizeTuning,
+ kParamIndexCsd,
+
// video info
kParamIndexStructStart = 0x1,
@@ -129,6 +131,8 @@
typedef C2PortParam<C2Tuning, C2Uint64Array, kParamIndexBlockPools> C2PortBlockPoolsTuning;
+typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexCsd> C2StreamCsdInfo;
+
/*
Component description fields:
diff --git a/media/libstagefright/codec2/include/SimpleC2Component.h b/media/libstagefright/codec2/include/SimpleC2Component.h
index 48b8382..a4b6ee1 100644
--- a/media/libstagefright/codec2/include/SimpleC2Component.h
+++ b/media/libstagefright/codec2/include/SimpleC2Component.h
@@ -84,23 +84,28 @@
virtual c2_status_t onFlush_sm() = 0;
/**
- * Drain the component.
- */
- virtual c2_status_t onDrain_nb() = 0;
-
- /**
* Process the given work and finish pending work using finish().
*
* \param[in,out] work the work to process
* \param[in] pool the pool to use for allocating output blocks.
- *
- * \retval true |work| is done and ready for return to client
- * \retval false more data is needed for the |work| to be done;
- * mark |work| as pending.
*/
- virtual bool process(
+ virtual void process(
const std::unique_ptr<C2Work> &work,
- std::shared_ptr<C2BlockPool> pool) = 0;
+ const std::shared_ptr<C2BlockPool> &pool) = 0;
+
+ /**
+ * Drain the component and finish pending work using finish().
+ *
+ * \param[in] drainMode mode of drain.
+ * \param[in] pool the pool to use for allocating output blocks.
+ *
+ * \retval C2_OK The component has drained all pending output
+ * work.
+ * \retval C2_OMITTED Unsupported mode (e.g. DRAIN_CHAIN)
+ */
+ virtual c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) = 0;
// for derived classes
/**
@@ -116,6 +121,21 @@
*/
void finish(uint64_t frameIndex, std::function<void(const std::unique_ptr<C2Work> &)> fillWork);
+ std::shared_ptr<C2Buffer> createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block);
+
+ std::shared_ptr<C2Buffer> createLinearBuffer(
+ const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
+
+ std::shared_ptr<C2Buffer> createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block);
+
+ std::shared_ptr<C2Buffer> createGraphicBuffer(
+ const std::shared_ptr<C2GraphicBlock> &block,
+ const C2Rect &crop);
+
+ static constexpr uint32_t NO_DRAIN = ~0u;
+
private:
const std::shared_ptr<C2ComponentInterface> mIntf;
std::atomic_bool mExitRequested;
@@ -135,10 +155,30 @@
};
Mutexed<ExecState> mExecState;
- struct WorkQueue {
+ class WorkQueue {
+ public:
+ inline WorkQueue() : mGeneration(0ul) {}
+
+ inline uint64_t generation() const { return mGeneration; }
+ inline void incGeneration() { ++mGeneration; }
+
+ std::unique_ptr<C2Work> pop_front();
+ void push_back(std::unique_ptr<C2Work> work);
+ bool empty() const;
+ uint32_t drainMode() const;
+ void markDrain(uint32_t drainMode);
+ void clear();
+
Condition mCondition;
- std::list<std::unique_ptr<C2Work>> mQueue;
+
+ private:
+ struct Entry {
+ std::unique_ptr<C2Work> work;
+ uint32_t drainMode;
+ };
+
uint64_t mGeneration;
+ std::list<Entry> mQueue;
};
Mutexed<WorkQueue> mWorkQueue;
diff --git a/media/libstagefright/codec2/include/SimpleC2Interface.h b/media/libstagefright/codec2/include/SimpleC2Interface.h
new file mode 100644
index 0000000..3796b0b
--- /dev/null
+++ b/media/libstagefright/codec2/include/SimpleC2Interface.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_C2_INTERFACE_H_
+#define SIMPLE_C2_INTERFACE_H_
+
+#include <C2Component.h>
+
+namespace android {
+
+class SimpleC2Interface : public C2ComponentInterface {
+public:
+ class Builder {
+ public:
+ inline Builder(
+ const char *name,
+ c2_node_id_t id)
+ : mIntf(new SimpleC2Interface(name, id)) {}
+
+ inline Builder(
+ const char *name,
+ c2_node_id_t id,
+ std::function<void(::android::SimpleC2Interface*)> deleter)
+ : mIntf(new SimpleC2Interface(name, id), deleter) {}
+
+ inline Builder &inputFormat(C2FormatKind input) {
+ mIntf->mInputFormat.value = input;
+ return *this;
+ }
+
+ inline Builder &outputFormat(C2FormatKind output) {
+ mIntf->mOutputFormat.value = output;
+ return *this;
+ }
+
+ inline std::shared_ptr<SimpleC2Interface> build() {
+ return mIntf;
+ }
+ private:
+ std::shared_ptr<SimpleC2Interface> mIntf;
+ };
+
+ virtual ~SimpleC2Interface() = default;
+
+ // From C2ComponentInterface
+ inline C2String getName() const override { return mName; }
+ inline c2_node_id_t getId() const override { return mId; }
+ c2_status_t query_vb(
+ const std::vector<C2Param* const> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+ inline c2_status_t config_vb(
+ const std::vector<C2Param* const> &,
+ c2_blocking_t,
+ std::vector<std::unique_ptr<C2SettingResult>>* const) override {
+ return C2_OMITTED;
+ }
+ inline c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+ inline c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+ inline c2_status_t querySupportedParams_nb(
+ std::vector<std::shared_ptr<C2ParamDescriptor>> * const) const override {
+ return C2_OMITTED;
+ }
+ c2_status_t querySupportedValues_vb(
+ std::vector<C2FieldSupportedValuesQuery> &,
+ c2_blocking_t) const override {
+ return C2_OMITTED;
+ }
+
+private:
+ inline SimpleC2Interface(const char *name, c2_node_id_t id)
+ : mName(name), mId(id), mInputFormat(0u), mOutputFormat(0u) {}
+
+ const C2String mName;
+ const c2_node_id_t mId;
+ C2StreamFormatConfig::input mInputFormat;
+ C2StreamFormatConfig::output mOutputFormat;
+
+ SimpleC2Interface() = delete;
+};
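The builder is the only way to construct the interface, so a decoder-shaped component would set it up roughly as below. The component name is a placeholder, and C2FormatCompressed is assumed to exist alongside the C2FormatVideo kind used elsewhere in this change:
std::shared_ptr<SimpleC2Interface> intf =
        SimpleC2Interface::Builder("c2.sample.decoder" /* placeholder */, 0 /* node id */)
                .inputFormat(C2FormatCompressed)   // assumed format kind
                .outputFormat(C2FormatVideo)
                .build();
// query_vb() on |intf| now answers C2StreamFormatConfig queries for stream 0 only.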
+
+} // namespace android
+
+#endif // SIMPLE_C2_INTERFACE_H_
diff --git a/media/libstagefright/codec2/tests/Android.bp b/media/libstagefright/codec2/tests/Android.bp
index cf75061..f26fbd0 100644
--- a/media/libstagefright/codec2/tests/Android.bp
+++ b/media/libstagefright/codec2/tests/Android.bp
@@ -42,23 +42,14 @@
],
include_dirs: [
- "frameworks/av/media/libstagefright/codec2/include",
- "frameworks/av/media/libstagefright/codec2/vndk/include",
],
shared_libs: [
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.mapper@2.0",
"libcutils",
- "libhidlbase",
- "libion",
"liblog",
"libstagefright_codec2",
- "libutils",
- ],
-
- static_libs: [
"libstagefright_codec2_vndk",
+ "libutils",
],
cflags: [
@@ -80,22 +71,15 @@
],
include_dirs: [
- "frameworks/av/media/libstagefright/codec2/include",
- "frameworks/av/media/libstagefright/codec2/vndk/include",
"frameworks/native/include/media/openmax",
],
shared_libs: [
"libcutils",
- "libhidlbase",
- "libion",
"liblog",
"libstagefright_codec2",
- "libutils",
- ],
-
- static_libs: [
"libstagefright_codec2_vndk",
+ "libutils",
],
cflags: [
diff --git a/media/libstagefright/codec2/vndk/Android.bp b/media/libstagefright/codec2/vndk/Android.bp
index fb469d7..cc79dc0 100644
--- a/media/libstagefright/codec2/vndk/Android.bp
+++ b/media/libstagefright/codec2/vndk/Android.bp
@@ -1,4 +1,4 @@
-cc_library_static {
+cc_library {
name: "libstagefright_codec2_vndk",
srcs: [
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
index b5ca90d..18db3e9 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
@@ -20,6 +20,7 @@
#include <android/hardware/graphics/allocator/2.0/IAllocator.h>
#include <android/hardware/graphics/mapper/2.0/IMapper.h>
+#include <cutils/native_handle.h>
#include <hardware/gralloc.h>
#include <C2AllocatorGralloc.h>
@@ -50,6 +51,111 @@
return C2_CORRUPTED;
}
+static
+bool native_handle_is_invalid(const native_handle_t *const handle) {
+ // perform basic validation of a native handle
+ if (handle == nullptr) {
+ // null handle is considered valid
+ return false;
+ }
+ return ((size_t)handle->version != sizeof(native_handle_t) ||
+ handle->numFds < 0 ||
+ handle->numInts < 0 ||
+ // for sanity assume handles must occupy less memory than INT_MAX bytes
+ handle->numFds > int((INT_MAX - handle->version) / sizeof(int)) - handle->numInts);
+}
+
+class C2HandleGralloc : public C2Handle {
+private:
+ struct ExtraData {
+ uint32_t width;
+ uint32_t height;
+ uint32_t format;
+ uint32_t usage_lo;
+ uint32_t usage_hi;
+ uint32_t magic;
+ };
+
+ enum {
+ NUM_INTS = sizeof(ExtraData) / sizeof(int),
+ };
+ const static uint32_t MAGIC = '\xc2gr\x00';
+
+ static
+ const ExtraData* getExtraData(const C2Handle *const handle) {
+ if (handle == nullptr
+ || native_handle_is_invalid(handle)
+ || handle->numInts < NUM_INTS) {
+ return nullptr;
+ }
+ return reinterpret_cast<const ExtraData*>(
+ &handle->data[handle->numFds + handle->numInts - NUM_INTS]);
+ }
+
+ static
+ ExtraData *getExtraData(C2Handle *const handle) {
+ return const_cast<ExtraData *>(getExtraData(const_cast<const C2Handle *const>(handle)));
+ }
+
+public:
+ static bool isValid(const C2Handle *const o) {
+ if (o == nullptr) { // null handle is always valid
+ return true;
+ }
+ const ExtraData *xd = getExtraData(o);
+ // we cannot validate width/height/format/usage without accessing gralloc driver
+ return xd != nullptr && xd->magic == MAGIC;
+ }
+
+ static C2HandleGralloc* WrapNativeHandle(
+ const native_handle_t *const handle,
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage) {
+ //CHECK(handle != nullptr);
+ if (native_handle_is_invalid(handle) ||
+ handle->numInts > int((INT_MAX - handle->version) / sizeof(int)) - NUM_INTS - handle->numFds) {
+ return nullptr;
+ }
+ ExtraData xd = { width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32), MAGIC };
+ native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
+ if (res != nullptr) {
+ memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
+ *getExtraData(res) = xd;
+ }
+ return reinterpret_cast<C2HandleGralloc *>(res);
+ }
+
+ static native_handle_t* UnwrapNativeHandle(const C2Handle *const handle) {
+ const ExtraData *xd = getExtraData(handle);
+ if (xd == nullptr || xd->magic != MAGIC) {
+ return nullptr;
+ }
+ native_handle_t *res = native_handle_create(handle->numFds, handle->numInts - NUM_INTS);
+ if (res != nullptr) {
+ memcpy(&res->data, &handle->data, sizeof(int) * (res->numFds + res->numInts));
+ }
+ return res;
+ }
+
+ static const C2HandleGralloc* Import(
+ const C2Handle *const handle,
+ uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage) {
+ const ExtraData *xd = getExtraData(handle);
+ if (xd == nullptr) {
+ return nullptr;
+ }
+ *width = xd->width;
+ *height = xd->height;
+ *format = xd->format;
+ *usage = xd->usage_lo | (uint64_t(xd->usage_hi) << 32);
+
+ return reinterpret_cast<const C2HandleGralloc *>(handle);
+ }
+};
+
+native_handle_t* UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
+ return C2HandleGralloc::UnwrapNativeHandle(handle);
+}
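C2HandleGralloc keeps the original handle's fds and ints and appends a single ExtraData record at the tail, so wrapping and unwrapping are symmetric within this file. A short round trip using the entry points above; the width/height/format/usage values are placeholders, and neither call dups the fds, so only the structs themselves are deleted:
// |grallocHandle| is a valid native_handle_t* obtained from gralloc.
C2HandleGralloc *wrapped = C2HandleGralloc::WrapNativeHandle(
        grallocHandle, 1280 /* width */, 720 /* height */, 1 /* format */, 0 /* usage */);
native_handle_t *unwrapped = UnwrapNativeCodec2GrallocHandle(wrapped);
// ... hand |unwrapped| to consumers such as GraphicBuffer (see CCodecBufferChannel) ...
native_handle_delete(unwrapped);
native_handle_delete(reinterpret_cast<native_handle_t *>(wrapped));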
+
class C2AllocationGralloc : public C2GraphicAllocation {
public:
virtual ~C2AllocationGralloc() override;
@@ -59,7 +165,7 @@
C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
virtual c2_status_t unmap(C2Fence *fenceFd /* nullable */) override;
virtual bool isValid() const override { return true; }
- virtual const C2Handle *handle() const override { return mHandle; }
+ virtual const C2Handle *handle() const override { return mLockedHandle ? : mHandle; }
virtual bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const override;
// internal methods
@@ -67,26 +173,31 @@
C2AllocationGralloc(
const IMapper::BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
- hidl_handle &handle);
+ hidl_handle &hidlHandle,
+ const C2HandleGralloc *const handle);
int dup() const;
c2_status_t status() const;
private:
const IMapper::BufferDescriptorInfo mInfo;
const sp<IMapper> mMapper;
- const hidl_handle mHandle;
+ const hidl_handle mHidlHandle;
+ const C2HandleGralloc *mHandle;
buffer_handle_t mBuffer;
+ const C2HandleGralloc *mLockedHandle;
bool mLocked;
};
C2AllocationGralloc::C2AllocationGralloc(
const IMapper::BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
- hidl_handle &handle)
+ hidl_handle &hidlHandle,
+ const C2HandleGralloc *const handle)
: C2GraphicAllocation(info.width, info.height),
mInfo(info),
mMapper(mapper),
- mHandle(std::move(handle)),
+ mHidlHandle(std::move(hidlHandle)),
+ mHandle(handle),
mBuffer(nullptr),
+ mLockedHandle(nullptr),
mLocked(false) {}
@@ -117,7 +228,7 @@
c2_status_t err = C2_OK;
if (!mBuffer) {
mMapper->importBuffer(
- mHandle, [&err, this](const auto &maperr, const auto &buffer) {
+ mHidlHandle, [&err, this](const auto &maperr, const auto &buffer) {
err = maperr2error(maperr);
if (err == C2_OK) {
mBuffer = static_cast<buffer_handle_t>(buffer);
@@ -126,6 +237,11 @@
if (err != C2_OK) {
return err;
}
+ if (mBuffer == nullptr) {
+ return C2_CORRUPTED;
+ }
+ mLockedHandle = C2HandleGralloc::WrapNativeHandle(
+ mBuffer, mInfo.width, mInfo.height, (uint32_t)mInfo.format, mInfo.usage);
}
if (mInfo.format == PixelFormat::YCBCR_420_888 || mInfo.format == PixelFormat::YV12) {
@@ -321,17 +437,30 @@
return err;
}
- allocation->reset(new C2AllocationGralloc(info, mMapper, buffer));
+
+ allocation->reset(new C2AllocationGralloc(
+ info, mMapper, buffer,
+ C2HandleGralloc::WrapNativeHandle(
+ buffer.getNativeHandle(),
+ info.width, info.height, (uint32_t)info.format, info.usage)));
return C2_OK;
}
c2_status_t C2AllocatorGralloc::Impl::priorGraphicAllocation(
const C2Handle *handle,
std::shared_ptr<C2GraphicAllocation> *allocation) {
- (void) handle;
+ IMapper::BufferDescriptorInfo info;
+ info.layerCount = 1u;
+ const C2HandleGralloc *grallocHandle = C2HandleGralloc::Import(
+ handle,
+ &info.width, &info.height, (uint32_t *)&info.format, (uint64_t *)&info.usage);
+ if (grallocHandle == nullptr) {
+ return C2_BAD_VALUE;
+ }
- // TODO: need to figure out BufferDescriptorInfo from the handle.
- allocation->reset();
+ hidl_handle hidlHandle = C2HandleGralloc::UnwrapNativeHandle(grallocHandle);
+
+ allocation->reset(new C2AllocationGralloc(info, mMapper, hidlHandle, grallocHandle));
return C2_OMITTED;
}
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
index 3a95118..34c68bb 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorIon.cpp
@@ -27,35 +27,67 @@
namespace android {
+/* size_t <=> int(lo), int(hi) conversions */
+constexpr inline int size2intLo(size_t s) {
+ return int(s & 0xFFFFFFFF);
+}
+
+constexpr inline int size2intHi(size_t s) {
+ // cast to uint64_t as size_t may be 32 bits wide
+ return int((uint64_t(s) >> 32) & 0xFFFFFFFF);
+}
+
+constexpr inline size_t ints2size(int intLo, int intHi) {
+ // convert in 2 stages to 64 bits as intHi may be negative
+ return size_t(unsigned(intLo)) | size_t(uint64_t(unsigned(intHi)) << 32);
+}
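+
+// For example, with a 64-bit size_t a value such as 0x140000000 (5 GiB) splits into
+// lo = 0x40000000 and hi = 0x1, and ints2size(0x40000000, 0x1) reassembles the original
+// value even though the halves are stored as signed ints in the native handle.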
+
/* ========================================= ION HANDLE ======================================== */
+/**
+ * ION handle
+ *
+ * There can be only one ion client per process; this is captured in the ion fd that is passed
+ * to the constructor, but it should be managed by the ion buffer allocator/mapper.
+ *
+ * ion uses ion_user_handle_t for buffers. We don't store this in the native handle as
+ * it requires an ion_free to decref. Instead, we share the buffer to get an fd that also holds
+ * a refcount.
+ *
+ * This handle will not capture mapped fd-s as updating that would require a global mutex.
+ */
+
struct C2HandleIon : public C2Handle {
- C2HandleIon(int ionFd, ion_user_handle_t buffer) : C2Handle(cHeader),
- mFds{ ionFd, buffer },
- mInts{ kMagic } { }
+ // ion handle owns the shared bufferFd; the ion client fd is owned by the allocation Impl
+ C2HandleIon(int bufferFd, size_t size)
+ : C2Handle(cHeader),
+ mFds{ bufferFd },
+ mInts{ size2intLo(size), size2intHi(size), kMagic } { }
static bool isValid(const C2Handle * const o);
- int ionFd() const { return mFds.mIon; }
- ion_user_handle_t buffer() const { return mFds.mBuffer; }
-
- void setBuffer(ion_user_handle_t bufferFd) { mFds.mBuffer = bufferFd; }
+ int bufferFd() const { return mFds.mBuffer; }
+ size_t size() const {
+ return ints2size(mInts.mSizeLo, mInts.mSizeHi);
+ }
protected:
struct {
- int mIon;
- int mBuffer; // ion_user_handle_t
+ int mBuffer; // shared ion buffer
} mFds;
struct {
+ int mSizeLo; // low 32-bits of size
+ int mSizeHi; // high 32-bits of size
int mMagic;
} mInts;
private:
typedef C2HandleIon _type;
enum {
- kMagic = 'ion1',
+ kMagic = '\xc2io\x00',
numFds = sizeof(mFds) / sizeof(int),
numInts = sizeof(mInts) / sizeof(int),
- version = sizeof(C2Handle) + sizeof(mFds) + sizeof(mInts)
+ version = sizeof(C2Handle)
};
//constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
const static C2Handle cHeader;
@@ -82,6 +114,7 @@
/* ======================================= ION ALLOCATION ====================================== */
class C2AllocationIon : public C2LinearAllocation {
public:
+ /* Interface methods */
virtual c2_status_t map(
size_t offset, size_t size, C2MemoryUsage usage, int *fence,
void **addr /* nonnull */) override;
@@ -94,57 +127,108 @@
// internal methods
C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags);
C2AllocationIon(int ionFd, size_t size, int shareFd);
- int dup() const;
+
c2_status_t status() const;
protected:
class Impl;
Impl *mImpl;
+
+ // TODO: we could make this encapsulate shared_ptr and copiable
+ C2_DO_NOT_COPY(C2AllocationIon);
};
class C2AllocationIon::Impl {
-public:
- // NOTE: using constructor here instead of a factory method as we will need the
- // error value and this simplifies the error handling by the wrapper.
- Impl(int ionFd, size_t capacity, size_t align, unsigned heapMask, unsigned flags)
- : mInit(C2_OK),
- mHandle(ionFd, -1),
+private:
+ /**
+ * Constructs an ion allocation.
+ *
+ * \note We always create an ion allocation, even if the allocation or import fails
+ * so that we can capture the error.
+ *
+ * \param ionFd ion client (ownership transferred to created object)
+ * \param capacity size of allocation
+ * \param bufferFd buffer handle (ownership transferred to created object). Must be
+ * invalid if err is not 0.
+ * \param buffer ion buffer user handle (ownership transferred to created object). Must be
+ * invalid if err is not 0.
+ * \param err errno during buffer allocation or import
+ */
+ Impl(int ionFd, size_t capacity, int bufferFd, ion_user_handle_t buffer, int err)
+ : mIonFd(ionFd),
+ mHandle(bufferFd, capacity),
+ mBuffer(buffer),
+ mInit(c2_map_errno<ENOMEM, EACCES, EINVAL>(err)),
mMapFd(-1),
- mCapacity(capacity) {
- ion_user_handle_t buffer = -1;
- int ret = ion_alloc(mHandle.ionFd(), mCapacity, align, heapMask, flags, &buffer);
- if (ret == 0) {
- mHandle.setBuffer(buffer);
- } else {
- mInit = c2_map_errno<ENOMEM, EACCES, EINVAL>(-ret);
+ mMapSize(0) {
+ if (mInit != C2_OK) {
+ // close ionFd now on error
+ if (mIonFd >= 0) {
+ close(mIonFd);
+ mIonFd = -1;
+ }
+ // C2_CHECK(bufferFd < 0);
+ // C2_CHECK(buffer < 0);
}
}
- Impl(int ionFd, size_t capacity, int shareFd)
- : mInit(C2_OK),
- mHandle(ionFd, -1),
- mMapFd(-1),
- mCapacity(capacity) {
- ion_user_handle_t buffer;
- int ret = ion_import(mHandle.ionFd(), shareFd, &buffer);
- switch (-ret) {
- case 0:
- mHandle.setBuffer(buffer);
- break;
- case EBADF: // bad ion handle - should not happen
- case ENOTTY: // bad ion driver
- mInit = C2_CORRUPTED;
- break;
- default:
- mInit = c2_map_errno<ENOMEM, EACCES, EINVAL>(-ret);
- break;
+public:
+ /**
+ * Constructs an ion allocation by importing a shared buffer fd.
+ *
+ * \param ionFd ion client (ownership transferred to created object)
+ * \param capacity size of allocation
+ * \param bufferFd buffer handle (ownership transferred to created object)
+ *
+ * \return created ion allocation (implementation) which may be invalid if the
+ * import failed.
+ */
+ static Impl *Import(int ionFd, size_t capacity, int bufferFd) {
+ ion_user_handle_t buffer = -1;
+ int ret = ion_import(ionFd, bufferFd, &buffer);
+ return new Impl(ionFd, capacity, bufferFd, buffer, ret);
+ }
+
+ /**
+ * Constructs an ion allocation by allocating an ion buffer.
+ *
+ * \param ionFd ion client (ownership transferred to created object)
+ * \param size size of allocation
+ * \param align desired alignment of allocation
+ * \param heapMask mask of heaps considered
+ * \param flags ion allocation flags
+ *
+ * \return created ion allocation (implementation) which may be invalid if the
+ * allocation failed.
+ */
+ static Impl *Alloc(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags) {
+ int bufferFd = -1;
+ ion_user_handle_t buffer = -1;
+ int ret = ion_alloc(ionFd, size, align, heapMask, flags, &buffer);
+ if (ret == 0) {
+ // get buffer fd for native handle constructor
+ ret = ion_share(ionFd, buffer, &bufferFd);
+ if (ret != 0) {
+ ion_free(ionFd, buffer);
+ buffer = -1;
+ }
}
- (void)mCapacity; // TODO
+ return new Impl(ionFd, size, bufferFd, buffer, ret);
}
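+
+ // The private constructor is reached only through Import() and Alloc() above. The
+ // allocator hands each allocation its own dup()-ed ion client fd, e.g.
+ // Alloc(dup(ionFd), capacity, align, heapMask, flags) for a new buffer, or
+ // Import(dup(ionFd), handle->size(), handle->bufferFd()) when re-creating an
+ // allocation from a prior C2HandleIon (see C2AllocatorIon below).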
c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
(void)fenceFd; // TODO: wait for fence
*addr = nullptr;
+ if (mMapSize > 0) {
+ // TODO: technically we should return DUPLICATE here, but our block views don't
+ // actually unmap, so we end up remapping an ion buffer multiple times.
+ //
+ // return C2_DUPLICATE;
+ }
+ if (size == 0) {
+ return C2_BAD_VALUE;
+ }
+
int prot = PROT_NONE;
int flags = MAP_PRIVATE;
if (usage.consumer & C2MemoryUsage::CPU_READ) {
@@ -161,7 +245,7 @@
c2_status_t err = C2_OK;
if (mMapFd == -1) {
- int ret = ion_map(mHandle.ionFd(), mHandle.buffer(), mapSize, prot,
+ int ret = ion_map(mIonFd, mBuffer, mapSize, prot,
flags, mapOffset, (unsigned char**)&mMapAddr, &mMapFd);
if (ret) {
mMapFd = -1;
@@ -187,6 +271,9 @@
}
c2_status_t unmap(void *addr, size_t size, int *fenceFd) {
+ if (mMapFd < 0 || mMapSize == 0) {
+ return C2_NOT_FOUND;
+ }
if (addr != (uint8_t *)mMapAddr + mMapAlignmentBytes ||
size + mMapAlignmentBytes != mMapSize) {
return C2_BAD_VALUE;
@@ -196,44 +283,43 @@
return c2_map_errno<EINVAL>(errno);
}
if (fenceFd) {
- *fenceFd = -1;
+ *fenceFd = -1; // not using fences
}
+ mMapSize = 0;
return C2_OK;
}
~Impl() {
- if (mMapFd != -1) {
+ if (mMapFd >= 0) {
close(mMapFd);
mMapFd = -1;
}
-
- (void)ion_free(mHandle.ionFd(), mHandle.buffer());
+ if (mInit == C2_OK) {
+ (void)ion_free(mIonFd, mBuffer);
+ }
+ if (mIonFd >= 0) {
+ close(mIonFd);
+ }
+ native_handle_close(&mHandle);
}
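+
+ // Teardown order above: the mapping fd is closed first, the ion buffer reference is
+ // dropped only if the allocation/import actually succeeded, then the per-allocation
+ // ion client fd is closed, and finally native_handle_close() releases the shared
+ // buffer fd held inside the C2HandleIon.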
c2_status_t status() const {
return mInit;
}
- const C2Handle * handle() const {
+ const C2Handle *handle() const {
return &mHandle;
}
- int dup() const {
- int fd = -1;
- if (mInit != 0 || ion_share(mHandle.ionFd(), mHandle.buffer(), &fd) != 0) {
- fd = -1;
- }
- return fd;
- }
-
private:
- c2_status_t mInit;
+ int mIonFd;
C2HandleIon mHandle;
+ ion_user_handle_t mBuffer;
+ c2_status_t mInit;
int mMapFd; // only one for now
void *mMapAddr;
size_t mMapAlignmentBytes;
size_t mMapSize;
- size_t mCapacity;
};
c2_status_t C2AllocationIon::map(
@@ -268,15 +354,11 @@
C2AllocationIon::C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags)
: C2LinearAllocation(size),
- mImpl(new Impl(ionFd, size, align, heapMask, flags)) { }
+ mImpl(Impl::Alloc(ionFd, size, align, heapMask, flags)) { }
C2AllocationIon::C2AllocationIon(int ionFd, size_t size, int shareFd)
: C2LinearAllocation(size),
- mImpl(new Impl(ionFd, size, shareFd)) { }
-
-int C2AllocationIon::dup() const {
- return mImpl->dup();
-}
+ mImpl(Impl::Import(ionFd, size, shareFd)) { }
/* ======================================= ION ALLOCATOR ====================================== */
C2AllocatorIon::C2AllocatorIon() : mInit(C2_OK), mIonFd(ion_open()) {
@@ -328,7 +410,7 @@
#endif
std::shared_ptr<C2AllocationIon> alloc
- = std::make_shared<C2AllocationIon>(mIonFd, capacity, align, heapMask, flags);
+ = std::make_shared<C2AllocationIon>(dup(mIonFd), capacity, align, heapMask, flags);
c2_status_t ret = alloc->status();
if (ret == C2_OK) {
*allocation = alloc;
@@ -350,7 +432,7 @@
// TODO: get capacity and validate it
const C2HandleIon *h = static_cast<const C2HandleIon*>(handle);
std::shared_ptr<C2AllocationIon> alloc
- = std::make_shared<C2AllocationIon>(mIonFd, 0 /* capacity */, h->buffer());
+ = std::make_shared<C2AllocationIon>(dup(mIonFd), h->size(), h->bufferFd());
c2_status_t ret = alloc->status();
if (ret == C2_OK) {
*allocation = alloc;
diff --git a/media/libstagefright/codec2/vndk/C2Store.cpp b/media/libstagefright/codec2/vndk/C2Store.cpp
index 204f895..eb72d17 100644
--- a/media/libstagefright/codec2/vndk/C2Store.cpp
+++ b/media/libstagefright/codec2/vndk/C2Store.cpp
@@ -404,6 +404,7 @@
C2PlatformComponentStore::C2PlatformComponentStore() {
// TODO: move this also into a .so so it can be updated
mComponents.emplace("c2.google.avc.decoder", "libstagefright_soft_c2avcdec.so");
+ mComponents.emplace("c2.google.aac.decoder", "libstagefright_soft_c2aacdec.so");
}
c2_status_t C2PlatformComponentStore::copyBuffer(
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
index 374b0ed..5311747 100644
--- a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
@@ -24,12 +24,18 @@
namespace android {
+/**
+ * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorGralloc.
+ *
+ * @param handle a handle allocated by C2AllocatorGralloc. This includes the handle of a
+ * graphic block allocation returned by this allocator.
+ *
+ * @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
+ */
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
+
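+// For example (a sketch -- it mirrors the usage in codecs/cmds/codec2.cpp, with
+// grallocUsage standing in for whatever gralloc usage flags apply), a consumer unwraps
+// the handle, wraps it into a GraphicBuffer with CLONE_HANDLE, then deletes the
+// unwrapped handle since ownership of the underlying fds stays with the C2 handle:
+//
+//   native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(block.handle());
+//   sp<GraphicBuffer> gb(new GraphicBuffer(nh, GraphicBuffer::CLONE_HANDLE,
+//                                          block.width(), block.height(),
+//                                          HAL_PIXEL_FORMAT_YV12, 1 /* layerCount */,
+//                                          grallocUsage, block.width() /* stride */));
+//   native_handle_delete(nh);
+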
class C2AllocatorGralloc : public C2Allocator {
public:
- // (usage, capacity) => (align, heapMask, flags)
- typedef std::function<int (C2MemoryUsage, size_t,
- /* => */ size_t*, unsigned*, unsigned*)> usage_mapper_fn;
-
virtual id_t getId() const override;
virtual C2String getName() const override;
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
index 21c00a1..abf3b1c 100644
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ b/media/libstagefright/codecs/aacdec/Android.bp
@@ -1,4 +1,45 @@
cc_library_shared {
+ name: "libstagefright_soft_c2aacdec",
+// vendor_available: true,
+// vndk: {
+// enabled: true,
+// },
+
+ srcs: [
+ "C2SoftAac.cpp",
+ "DrcPresModeWrap.cpp",
+ ],
+
+ cflags: ["-Werror"],
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ "unsigned-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+
+ static_libs: [
+ "libFraunhoferAAC",
+ "libstagefright_codec2_vndk"
+ ],
+
+ shared_libs: [
+ "libcutils",
+ "libion",
+ "liblog",
+ "libstagefright_codec2",
+ "libstagefright_foundation",
+ "libstagefright_simple_c2component",
+ "libutils",
+ ],
+}
+
+cc_library_shared {
name: "libstagefright_soft_aacdec",
vendor_available: true,
vndk: {
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
new file mode 100644
index 0000000..390f36c
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
@@ -0,0 +1,710 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAac"
+#include <utils/Log.h>
+
+#include "C2SoftAac.h"
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
+#include <inttypes.h>
+#include <math.h>
+#include <numeric>
+
+#define FILEREAD_MAX_LAYERS 2
+
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum boost of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
+// names of properties that can be used to override the default DRC settings
+#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
+#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
+#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
+
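+// These defaults apply unless the corresponding system property is set; e.g. on a
+// debuggable build the mobile DRC profile can be tuned without rebuilding:
+//
+//   adb shell setprop aac_drc_cut 64
+//   adb shell setprop aac_drc_boost 96
+//
+// initDecoder() below reads the properties with property_get() and falls back to the
+// DRC_DEFAULT_MOBILE_* values otherwise.
+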
+namespace android {
+
+C2SoftAac::C2SoftAac(const char *name, c2_node_id_t id)
+ : SimpleC2Component(
+ SimpleC2Interface::Builder(name, id)
+ .inputFormat(C2FormatCompressed)
+ .outputFormat(C2FormatAudio)
+ .build()),
+ mAACDecoder(NULL),
+ mStreamInfo(NULL),
+ mIsADTS(false),
+ mSignalledError(false),
+ mOutputDelayRingBuffer(NULL) {
+}
+
+C2SoftAac::~C2SoftAac() {
+ onRelease();
+}
+
+c2_status_t C2SoftAac::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAac::onStop() {
+ drainDecoder();
+ // reset the "configured" state
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+ mBuffersInfo.clear();
+
+ // To make the codec behave the same before and after a reset, we need to invalidate the
+ // streaminfo struct. This does that:
+ mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
+
+ mSignalledError = false;
+
+ return C2_OK;
+}
+
+void C2SoftAac::onReset() {
+ (void)onStop();
+}
+
+void C2SoftAac::onRelease() {
+ if (mAACDecoder) {
+ aacDecoder_Close(mAACDecoder);
+ mAACDecoder = NULL;
+ }
+ if (mOutputDelayRingBuffer) {
+ delete[] mOutputDelayRingBuffer;
+ mOutputDelayRingBuffer = NULL;
+ }
+}
+
+status_t C2SoftAac::initDecoder() {
+ ALOGV("initDecoder()");
+ status_t status = UNKNOWN_ERROR;
+ mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
+ if (mAACDecoder != NULL) {
+ mStreamInfo = aacDecoder_GetStreamInfo(mAACDecoder);
+ if (mStreamInfo != NULL) {
+ status = OK;
+ }
+ }
+
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+ mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+
+ if (mAACDecoder == NULL) {
+ ALOGE("AAC decoder is null. TODO: Can not call aacDecoder_SetParam in the following code");
+ }
+
+ //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+ //init DRC wrapper
+ mDrcWrap.setDecoderHandle(mAACDecoder);
+ mDrcWrap.submitStreamData(mStreamInfo);
+
+ // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+ // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
+ char value[PROPERTY_VALUE_MAX];
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
+ if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
+ unsigned refLevel = atoi(value);
+ ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
+ DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
+ if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
+ unsigned cut = atoi(value);
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
+ DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
+ if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
+ unsigned boost = atoi(value);
+ ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
+ DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
+ unsigned heavy = atoi(value);
+ ALOGV("AAC decoder using desried DRC heavy compression switch of %d instead of %d", heavy,
+ DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ }
+ // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+ if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
+ unsigned encoderRefLevel = atoi(value);
+ ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
+ encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+ }
+
+ // By default, the decoder creates a 5.1 channel downmix signal.
+ // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+
+ return status;
+}
+
+bool C2SoftAac::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+ if (numSamples == 0) {
+ return true;
+ }
+ if (outputDelayRingBufferSpaceLeft() < numSamples) {
+ ALOGE("RING BUFFER WOULD OVERFLOW");
+ return false;
+ }
+ if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+ || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+ }
+
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow C2SoftAac::outputDelayRingBufferPutSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+ mOutputDelayRingBufferWritePos++;
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ mOutputDelayRingBufferFilled += numSamples;
+ return true;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+
+ if (numSamples > mOutputDelayRingBufferFilled) {
+ ALOGE("RING BUFFER WOULD UNDERRUN");
+ return -1;
+ }
+
+ if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+ || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ if (samples != 0) {
+ for (int32_t i = 0; i < numSamples; i++) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+ }
+ } else {
+ mOutputDelayRingBufferReadPos += numSamples;
+ }
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow C2SoftAac::outputDelayRingBufferGetSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ if (samples != 0) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+ }
+ mOutputDelayRingBufferReadPos++;
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ mOutputDelayRingBufferFilled -= numSamples;
+ return numSamples;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferSamplesAvailable() {
+ return mOutputDelayRingBufferFilled;
+}
+
+int32_t C2SoftAac::outputDelayRingBufferSpaceLeft() {
+ return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
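+
+// Note on the ring buffer above: it holds mOutputDelayRingBufferSize samples
+// (2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax, sized in initDecoder()); put/get keep
+// 0 <= mOutputDelayRingBufferFilled <= mOutputDelayRingBufferSize, with the read and
+// write positions wrapping modulo the buffer size.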
+
+void C2SoftAac::drainRingBuffer(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool,
+ bool eos) {
+ while (!mBuffersInfo.empty() && outputDelayRingBufferSamplesAvailable()
+ >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ Info &outInfo = mBuffersInfo.front();
+ ALOGV("outInfo.frameIndex = %" PRIu64, outInfo.frameIndex);
+ int samplesize = mStreamInfo->numChannels * sizeof(int16_t);
+
+ int available = outputDelayRingBufferSamplesAvailable();
+ int numFrames = outInfo.decodedSizes.size();
+ int numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels);
+ if (available < numSamples) {
+ if (eos) {
+ numSamples = available;
+ } else {
+ break;
+ }
+ }
+ ALOGV("%d samples available (%d), or %d frames",
+ numSamples, available, numFrames);
+ ALOGV("getting %d from ringbuffer", numSamples);
+
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(numSamples * sizeof(int16_t), usage, &block);
+ if (err != C2_OK) {
+ ALOGE("err = %d", err);
+ }
+
+ C2WriteView wView = block->map().get();
+ // TODO
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(wView.data());
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples);
+ if (ns != numSamples) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ auto fillWork = [buffer = createLinearBuffer(block)](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets_processed = 1u;
+ };
+ if (work && work->input.ordinal.frame_index == outInfo.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(outInfo.frameIndex, fillWork);
+ }
+
+ ALOGV("out timestamp %" PRIu64 " / %u", outInfo.timestamp, block->capacity());
+ mBuffersInfo.pop_front();
+ }
+}
+
+void C2SoftAac::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ work->worklets_processed = 0u;
+ if (mSignalledError) {
+ return;
+ }
+
+ UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
+ UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
+ UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
+
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+ C2ReadView view = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ size_t offset = 0u;
+ size_t size = view.capacity();
+
+ bool eos = (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) != 0;
+ bool codecConfig = (work->input.flags & C2BufferPack::FLAG_CODEC_CONFIG) != 0;
+
+ //TODO
+#if 0
+ if (mInputBufferCount == 0 && !codecConfig) {
+ ALOGW("first buffer should have FLAG_CODEC_CONFIG set");
+ codecConfig = true;
+ }
+#endif
+ if (codecConfig) {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+ inBufferLength[0] = size;
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_ConfigRaw(mAACDecoder,
+ inBuffer,
+ inBufferLength);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGE("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
+ mSignalledError = true;
+ // TODO: error
+ return;
+ }
+
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(nullptr);
+
+ return;
+ }
+
+ Info inInfo;
+ inInfo.frameIndex = work->input.ordinal.frame_index;
+ inInfo.timestamp = work->input.ordinal.timestamp;
+ inInfo.bufferSize = size;
+ inInfo.decodedSizes.clear();
+ while (size > 0u) {
+ ALOGV("size = %zu", size);
+ if (mIsADTS) {
+ size_t adtsHeaderSize = 0;
+ // skip 30 bits, aac_frame_length follows.
+ // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+
+ const uint8_t *adtsHeader = view.data() + offset;
+
+ bool signalError = false;
+ if (size < 7) {
+ ALOGE("Audio data too short to contain even the ADTS header. "
+ "Got %zu bytes.", size);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ bool protectionAbsent = (adtsHeader[1] & 1);
+
+ unsigned aac_frame_length =
+ ((adtsHeader[3] & 3) << 11)
+ | (adtsHeader[4] << 3)
+ | (adtsHeader[5] >> 5);
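+ // Worked example: for adtsHeader[3..5] = 0x00, 0x19, 0xE0 this yields
+ // (0 << 11) | (0x19 << 3) | (0xE0 >> 5) = 0 + 200 + 7 = 207 bytes,
+ // a length that includes the 7- or 9-byte ADTS header itself.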
+
+ if (size < aac_frame_length) {
+ ALOGE("Not enough audio data for the complete frame. "
+ "Got %zu bytes, frame size according to the ADTS "
+ "header is %u bytes.",
+ size, aac_frame_length);
+ hexdump(adtsHeader, size);
+ signalError = true;
+ } else {
+ adtsHeaderSize = (protectionAbsent ? 7 : 9);
+ if (aac_frame_length < adtsHeaderSize) {
+ signalError = true;
+ } else {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(adtsHeader + adtsHeaderSize);
+ inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+ offset += adtsHeaderSize;
+ size -= adtsHeaderSize;
+ }
+ }
+ }
+
+ if (signalError) {
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
+ return;
+ }
+ } else {
+ // const_cast because of libAACdec method signature.
+ inBuffer[0] = const_cast<UCHAR *>(view.data() + offset);
+ inBufferLength[0] = size;
+ }
+
+ // Fill and decode
+ bytesValid[0] = inBufferLength[0];
+
+ INT prevSampleRate = mStreamInfo->sampleRate;
+ INT prevNumChannels = mStreamInfo->numChannels;
+
+ aacDecoder_Fill(mAACDecoder,
+ inBuffer,
+ inBufferLength,
+ bytesValid);
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+ size -= inBufferUsedLength;
+ offset += inBufferUsedLength;
+
+ AAC_DECODER_ERROR decoderErr;
+ do {
+ if (outputDelayRingBufferSpaceLeft() <
+ (mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ ALOGV("skipping decode: not enough space left in ringbuffer");
+ break;
+ }
+
+ int numConsumed = mStreamInfo->numTotalBytes;
+ decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ 0 /* flags */);
+
+ numConsumed = mStreamInfo->numTotalBytes - numConsumed;
+
+ if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+ break;
+ }
+ inInfo.decodedSizes.push_back(numConsumed);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ if (bytesValid[0] != 0) {
+ ALOGE("bytesValid[0] != 0 should never happen");
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+ if (decoderErr == AAC_DEC_OK) {
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+ } else {
+ ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
+
+ memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
+
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+
+ // Discard input buffer.
+ size = 0;
+
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+
+ // After an error, replace bufferSize with the sum of the
+ // decodedSizes to resynchronize the in/out lists.
+ inInfo.decodedSizes.pop_back();
+ inInfo.bufferSize = std::accumulate(
+ inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0);
+
+ // fall through
+ }
+
+ /*
+ * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+ * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
+ * rate system and the sampling rate in the final output is actually
+ * doubled compared with the core AAC decoder sampling rate.
+ *
+ * Explicit signalling is done by explicitly defining SBR audio object
+ * type in the bitstream. Implicit signalling is done by embedding
+ * SBR content in AAC extension payload specific to SBR, and hence
+ * requires an AAC decoder to perform pre-checks on actual audio frames.
+ *
+ * Thus, we cannot say for sure whether a stream is
+ * AAC+/eAAC+ until the first data frame is decoded.
+ */
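+ // For example, an HE-AAC stream with a 24 kHz AAC core is only reported as
+ // 48 kHz output once the first frame has been decoded and SBR detected.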
+ if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+ // TODO:
+#if 0
+ if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) {
+ ALOGW("Invalid AAC stream");
+ mSignalledError = true;
+ // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return false;
+ }
+#endif
+ }
+ ALOGV("size = %zu", size);
+ } while (decoderErr == AAC_DEC_OK);
+ }
+
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ mBuffersInfo.push_back(std::move(inInfo));
+
+ if (!eos && mOutputDelayCompensated < outputDelay) {
+ // discard outputDelay at the beginning
+ int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+ int32_t discard = outputDelayRingBufferSamplesAvailable();
+ if (discard > toCompensate) {
+ discard = toCompensate;
+ }
+ int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
+ mOutputDelayCompensated += discarded;
+ return;
+ }
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ } else {
+ drainRingBuffer(work, pool, false /* not EOS */);
+ }
+}
+
+c2_status_t C2SoftAac::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ bool eos = (drainMode == DRAIN_COMPONENT_WITH_EOS);
+
+ drainDecoder();
+ drainRingBuffer(work, pool, eos);
+
+ if (eos) {
+ auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.emplace_back(nullptr);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets_processed = 1u;
+ };
+ while (mBuffersInfo.size() > 1u) {
+ finish(mBuffersInfo.front().frameIndex, fillEmptyWork);
+ mBuffersInfo.pop_front();
+ }
+ if (work->worklets_processed == 0u) {
+ fillEmptyWork(work);
+ }
+ mBuffersInfo.clear();
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAac::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+c2_status_t C2SoftAac::onFlush_sm() {
+ drainDecoder();
+ mBuffersInfo.clear();
+
+ int avail;
+ while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
+ if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ avail = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ }
+ int32_t ns = outputDelayRingBufferGetSamples(0, avail);
+ if (ns != avail) {
+ ALOGW("not a complete frame of samples available");
+ break;
+ }
+ }
+ mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
+
+ return C2_OK;
+}
+
+void C2SoftAac::drainDecoder() {
+ // flush decoder until outputDelay is compensated
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
+}
+
+class C2SoftAacDecFactory : public C2ComponentFactory {
+public:
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(::android::C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(new C2SoftAac("aac", id), deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(::android::C2ComponentInterface*)> deleter) override {
+ *interface =
+ SimpleC2Interface::Builder("aac", id, deleter)
+ .inputFormat(C2FormatCompressed)
+ .outputFormat(C2FormatAudio)
+ .build();
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAacDecFactory() override = default;
+};
+
+} // namespace android
+
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAacDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.h b/media/libstagefright/codecs/aacdec/C2SoftAac.h
new file mode 100644
index 0000000..b877635
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_AAC_H_
+#define C2_SOFT_AAC_H_
+
+#include <SimpleC2Component.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
+
+namespace android {
+
+struct C2SoftAac : public SimpleC2Component {
+ C2SoftAac(const char *name, c2_node_id_t id);
+ virtual ~C2SoftAac();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ enum {
+ kNumDelayBlocksMax = 8,
+ };
+
+ HANDLE_AACDECODER mAACDecoder;
+ CStreamInfo *mStreamInfo;
+ bool mIsADTS;
+ bool mIsFirst;
+ size_t mInputBufferCount;
+ size_t mOutputBufferCount;
+ bool mSignalledError;
+ struct Info {
+ uint64_t frameIndex;
+ size_t bufferSize;
+ uint64_t timestamp;
+ std::vector<int32_t> decodedSizes;
+ };
+ std::list<Info> mBuffersInfo;
+
+ CDrcPresModeWrapper mDrcWrap;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+ void drainDecoder();
+
+ void drainRingBuffer(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool,
+ bool eos);
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+
+// delay compensation
+ bool mEndOfInput;
+ bool mEndOfOutput;
+ int32_t mOutputDelayCompensated;
+ int32_t mOutputDelayRingBufferSize;
+ short *mOutputDelayRingBuffer;
+ int32_t mOutputDelayRingBufferWritePos;
+ int32_t mOutputDelayRingBufferReadPos;
+ int32_t mOutputDelayRingBufferFilled;
+ bool outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples);
+ int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples);
+ int32_t outputDelayRingBufferSamplesAvailable();
+ int32_t outputDelayRingBufferSpaceLeft();
+
+ DISALLOW_EVIL_CONSTRUCTORS(C2SoftAac);
+};
+
+} // namespace android
+
+#endif // C2_SOFT_AAC_H_
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 259fb25..04e5dc1 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -46,7 +46,6 @@
static_libs: [
"libavcdec",
- "libstagefright_codec2_vndk",
],
srcs: ["C2SoftAvcDec.cpp"],
@@ -58,21 +57,16 @@
include_dirs: [
"external/libavc/decoder",
"external/libavc/common",
- "frameworks/av/media/libstagefright/codec2/include",
- "frameworks/av/media/libstagefright/codec2/vndk/include",
],
shared_libs: [
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.mapper@2.0",
- "libhidlbase",
- "libion",
"liblog",
+ "libutils",
"libmedia",
"libstagefright_codec2",
+ "libstagefright_codec2_vndk",
"libstagefright_foundation",
"libstagefright_simple_c2component",
- "libutils",
],
sanitize: {
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
index 01d120e..ffe6332 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-//#define LOG_NDEBUG 0
+#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftAvcDec"
#include <utils/Log.h>
@@ -29,6 +29,7 @@
#include "C2SoftAvcDec.h"
#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
@@ -68,9 +69,9 @@
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
-
namespace {
+#if 0
using SupportedValuesWithFields = C2SoftAvcDecIntf::SupportedValuesWithFields;
struct ValidateParam {
@@ -201,15 +202,23 @@
private:
const char *mExpected;
};
+#endif
-class GraphicBuffer : public C2Buffer {
-public:
- explicit GraphicBuffer(const std::shared_ptr<C2GraphicBlock> &block)
- : C2Buffer({ block->share(C2Rect(block->width(), block->height()), ::android::C2Fence()) }) {}
-};
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if ((work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
+ flags |= C2BufferPack::FLAG_END_OF_STREAM;
+ }
+ work->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.emplace_back(nullptr);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets_processed = 1u;
+}
} // namespace
+#if 0
#define CASE(member) \
case decltype(component->member)::CORE_INDEX: \
return std::unique_ptr<C2StructDescriptor>(new C2StructDescriptor( \
@@ -599,13 +608,18 @@
frameRate.restrictingFields.clear();
frameRate.restrictingFields.insert(fields.begin(), fields.end());
}
+#endif
///////////////////////////////////////////////////////////////////////////////
C2SoftAvcDec::C2SoftAvcDec(
const char *name,
c2_node_id_t id)
- : SimpleC2Component(std::make_shared<C2SoftAvcDecIntf>(name, id)),
+ : SimpleC2Component(
+ SimpleC2Interface::Builder(name, id)
+ .inputFormat(C2FormatCompressed)
+ .outputFormat(C2FormatVideo)
+ .build()),
mCodecCtx(NULL),
mFlushOutBuffer(NULL),
mIvColorFormat(IV_YUV_420P),
@@ -684,11 +698,6 @@
return C2_OK;
}
-c2_status_t C2SoftAvcDec::onDrain_nb() {
- // TODO
- return C2_OK;
-}
-
static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
(void) ctxt;
return memalign(alignment, size);
@@ -875,7 +884,6 @@
ALOGE("Error in create: 0x%x",
s_create_op.s_ivd_create_op_t.u4_error_code);
deInitDecoder();
- mCodecCtx = NULL;
return UNKNOWN_ERROR;
}
}
@@ -914,6 +922,7 @@
s_delete_op.s_ivd_delete_op_t.u4_error_code);
return UNKNOWN_ERROR;
}
+ mCodecCtx = NULL;
}
mChangingResolution = false;
@@ -1018,25 +1027,67 @@
return true;
}
-bool C2SoftAvcDec::process(const std::unique_ptr<C2Work> &work, std::shared_ptr<C2BlockPool> pool) {
- bool isInFlush = false;
- bool eos = false;
+c2_status_t C2SoftAvcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (NULL == mCodecCtx) {
+ if (OK != initDecoder()) {
+ ALOGE("Failed to initialize decoder");
+ // TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+ mSignalledError = true;
+ return C2_CORRUPTED;
+ }
+ }
+ if (mWidth != mStride) {
+ /* Set the run-time (dynamic) parameters */
+ mStride = mWidth;
+ setParams(mStride);
+ }
- bool done = false;
- work->result = C2_OK;
+ if (!mAllocatedBlock) {
+ // TODO: error handling
+ // TODO: format & usage
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ ALOGV("using allocator %u", pool->getAllocatorId());
- const C2ConstLinearBlock &buffer =
- work->input.buffers[0]->data().linearBlocks().front();
- auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
+ (void)pool->fetchGraphicBlock(
+ mWidth, mHeight, format, usage, &mAllocatedBlock);
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mAllocatedBlock->width(), mAllocatedBlock->height(), mWidth, mHeight);
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mAllocatedBlock));
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
uint32_t flags = 0;
- if ((work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
+ if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
flags |= C2BufferPack::FLAG_END_OF_STREAM;
+ ALOGV("EOS");
}
work->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(nullptr);
+ work->worklets.front()->output.buffers.push_back(buffer);
work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets_processed = 1u;
};
+ if (work && index == work->input.ordinal.frame_index) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
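+
+// As in the AAC path, finishWork() attaches the output directly to the work item
+// currently being processed when the frame index matches, and hands it back through
+// finish() when the decoder emits a frame belonging to an earlier, already-queued
+// work item.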
+
+void C2SoftAvcDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ bool eos = false;
+
+ work->result = C2_OK;
+ work->worklets_processed = 0u;
+
+ const C2ConstLinearBlock &buffer =
+ work->input.buffers[0]->data().linearBlocks().front();
if (buffer.capacity() == 0) {
ALOGV("empty input: %llu", (long long)work->input.ordinal.frame_index);
// TODO: result?
@@ -1044,51 +1095,21 @@
if ((work->input.flags & C2BufferPack::FLAG_END_OF_STREAM)) {
eos = true;
}
- done = true;
+ return;
} else if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
ALOGV("input EOS: %llu", (long long)work->input.ordinal.frame_index);
eos = true;
}
- std::unique_ptr<C2ReadView> deferred;
- std::unique_ptr<C2ReadView> input(new C2ReadView(
- work->input.buffers[0]->data().linearBlocks().front().map().get()));
+ C2ReadView input = work->input.buffers[0]->data().linearBlocks().front().map().get();
uint32_t workIndex = work->input.ordinal.frame_index & 0xFFFFFFFF;
size_t inOffset = 0u;
- while (input || isInFlush) {
+ while (inOffset < input.capacity()) {
if (mSignalledError) {
- return done;
+ break;
}
- if (NULL == mCodecCtx) {
- if (OK != initDecoder()) {
- ALOGE("Failed to initialize decoder");
- // TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
- mSignalledError = true;
- return done;
- }
- }
- if (mWidth != mStride) {
- /* Set the run-time (dynamic) parameters */
- mStride = mWidth;
- setParams(mStride);
- }
-
- if (isInFlush) {
- ALOGV("flushing");
- }
-
- if (!mAllocatedBlock) {
- // TODO: error handling
- // TODO: format & usage
- uint32_t format = HAL_PIXEL_FORMAT_YV12;
- C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- ALOGV("using allocator %u", pool->getAllocatorId());
-
- (void)pool->fetchGraphicBlock(
- mWidth, mHeight, format, usage, &mAllocatedBlock);
- ALOGV("provided (%dx%d) required (%dx%d)", mAllocatedBlock->width(), mAllocatedBlock->height(), mWidth, mHeight);
- }
+ (void)ensureDecoderState(pool);
C2GraphicView output = mAllocatedBlock->map().get();
if (output.error() != OK) {
ALOGE("mapped err = %d", output.error());
@@ -1099,11 +1120,11 @@
WORD32 timeDelay, timeTaken;
//size_t sizeY, sizeUV;
- if (!setDecodeArgs(&s_dec_ip, &s_dec_op, input.get(), &output, workIndex, inOffset)) {
+ if (!setDecodeArgs(&s_dec_ip, &s_dec_op, &input, &output, workIndex, inOffset)) {
ALOGE("Decoder arg setup failed");
// TODO: notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
mSignalledError = true;
- return done;
+ break;
}
ALOGV("Decoder arg setup succeeded");
// If input dump is enabled, then write to file
@@ -1116,6 +1137,7 @@
IV_API_CALL_STATUS_T status;
status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ ALOGV("status = %d, error_code = %d", status, (s_dec_op.u4_error_code & 0xFF));
bool unsupportedResolution =
(IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
@@ -1125,7 +1147,7 @@
ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
// TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
mSignalledError = true;
- return done;
+ break;
}
bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
@@ -1133,7 +1155,7 @@
ALOGE("Allocation failure in decoder");
// TODO: notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
mSignalledError = true;
- return done;
+ break;
}
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
@@ -1146,26 +1168,23 @@
PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
s_dec_op.u4_num_bytes_consumed);
- if (input) {
- ALOGV("bytes total=%u", input->capacity());
- }
+ ALOGV("bytes total=%u", input.capacity());
if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
mFlushNeeded = true;
}
- if (1 != s_dec_op.u4_frame_decoded_flag && input) {
+ if (1 != s_dec_op.u4_frame_decoded_flag) {
/* If the input did not contain picture data, return work without
* buffer */
ALOGV("no picture data: %u", workIndex);
fillEmptyWork(work);
- done = true;
}
- // If the decoder is in the changing resolution mode and there is no output present,
- // that means the switching is done and it's ready to reset the decoder and the plugin.
- if (mChangingResolution && !s_dec_op.u4_output_present) {
- ALOGV("changing resolution");
- mChangingResolution = false;
+ if (resChanged) {
+ ALOGV("res changed");
+ if (mFlushNeeded) {
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ }
resetDecoder();
resetPlugin();
mStride = mWidth;
@@ -1173,19 +1192,6 @@
continue;
}
- if (resChanged) {
- ALOGV("res changed");
- mChangingResolution = true;
- if (mFlushNeeded) {
- setFlushMode();
- isInFlush = true;
- deferred = std::move(input);
- }
- continue;
- }
-
- // Combine the resolution change and coloraspects change in one PortSettingChange event
- // if necessary.
if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
uint32_t width = s_dec_op.u4_pic_wd;
uint32_t height = s_dec_op.u4_pic_ht;
@@ -1195,73 +1201,76 @@
mWidth = width;
mHeight = height;
}
- } else if (mUpdateColorAspects) {
+ // TODO: continue?
+ }
+
+ if (mUpdateColorAspects) {
//notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
// kDescribeColorAspectsIndex, NULL);
ALOGV("update color aspect");
mUpdateColorAspects = false;
- continue;
}
if (s_dec_op.u4_output_present) {
ALOGV("output_present: %d", s_dec_op.u4_ts);
- auto fillWork = [this](const std::unique_ptr<C2Work> &work) {
- uint32_t flags = 0;
- if (work->input.flags & C2BufferPack::FLAG_END_OF_STREAM) {
- flags |= C2BufferPack::FLAG_END_OF_STREAM;
- ALOGV("EOS");
- }
- work->worklets.front()->output.flags = (C2BufferPack::flags_t)flags;
- work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(
- std::make_shared<GraphicBuffer>(std::move(mAllocatedBlock)));
- work->worklets.front()->output.ordinal = work->input.ordinal;
- };
- if (s_dec_op.u4_ts != workIndex) {
- finish(s_dec_op.u4_ts, fillWork);
- } else {
- fillWork(work);
- done = true;
- }
- } else if (isInFlush) {
- ALOGV("flush complete");
- /* If in flush mode and no output is returned by the codec,
- * then come out of flush mode */
- isInFlush = false;
-
- /* If EOS was recieved on input port and there is no output
- * from the codec, then signal EOS on output port */
- if (eos) {
- // TODO: It's an error if not done.
-
- resetPlugin();
- return done;
- }
-
- input = std::move(deferred);
+ finishWork(s_dec_op.u4_ts, work);
}
- if (input) {
- inOffset += s_dec_op.u4_num_bytes_consumed;
- if (inOffset >= input->capacity()) {
- /* If input EOS is seen and decoder is not in flush mode,
- * set the decoder in flush mode.
- * There can be a case where EOS is sent along with last picture data
- * In that case, only after decoding that input data, decoder has to be
- * put in flush. This case is handled here */
- if (eos && !isInFlush) {
- setFlushMode();
- isInFlush = true;
- }
- if (isInFlush) {
- input.reset();
- } else {
- break;
- }
- }
+ inOffset += s_dec_op.u4_num_bytes_consumed;
+ }
+ if (inOffset >= input.capacity()) {
+ /* If input EOS is seen, drain the decoder.
+ * There can be a case where EOS is sent along with the last picture data;
+ * in that case the decoder is drained only after that input data has been
+ * decoded. This case is handled here. */
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
}
}
- return done;
+}
+
+c2_status_t C2SoftAvcDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+ setFlushMode();
+
+ while (true) {
+ (void)ensureDecoderState(pool);
+ C2GraphicView output = mAllocatedBlock->map().get();
+ if (output.error() != OK) {
+ ALOGE("mapped err = %d", output.error());
+ }
+
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, &output, 0, 0u);
+
+ (void)ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+
+ if (s_dec_op.u4_output_present) {
+ ALOGV("output_present: %d", s_dec_op.u4_ts);
+ finishWork(s_dec_op.u4_ts, work);
+ } else {
+ break;
+ }
+ }
+
+ if (drainMode == DRAIN_COMPONENT_WITH_EOS
+ && work && work->worklets_processed == 0u) {
+ fillEmptyWork(work);
+ }
+
+ return C2_OK;
}
bool C2SoftAvcDec::colorAspectsDiffer(
@@ -1275,6 +1284,12 @@
return false;
}
+c2_status_t C2SoftAvcDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
void C2SoftAvcDec::updateFinalColorAspects(
const ColorAspects &otherAspects, const ColorAspects &preferredAspects) {
Mutex::Autolock autoLock(mColorAspectsLock);
@@ -1323,7 +1338,11 @@
c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
std::function<void(::android::C2ComponentInterface*)> deleter) override {
*interface =
- std::shared_ptr<C2ComponentInterface>(new C2SoftAvcDecIntf("avc", id), deleter);
+ SimpleC2Interface::Builder("avc", id, deleter)
+ .inputFormat(C2FormatCompressed)
+ .outputFormat(C2FormatVideo)
+ .build();
+// std::shared_ptr<C2ComponentInterface>(new C2SoftAvcDecIntf("avc", id), deleter);
return C2_OK;
}
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
index aa22e23..0e8cf77 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.h
@@ -67,7 +67,7 @@
diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
((end).tv_usec - (start).tv_usec);
-
+#if 0
class C2SoftAvcDecIntf : public C2ComponentInterface {
public:
struct SupportedValuesWithFields {
@@ -139,6 +139,7 @@
void updateSupportedValues();
friend class C2SoftAvcDec;
};
+#endif
class C2SoftAvcDec : public SimpleC2Component {
public:
@@ -146,15 +147,17 @@
virtual ~C2SoftAvcDec();
// From SimpleC2Component
- virtual c2_status_t onInit() override;
- virtual c2_status_t onStop() override;
- virtual void onReset() override;
- virtual void onRelease() override;
- virtual c2_status_t onFlush_sm() override;
- virtual c2_status_t onDrain_nb() override;
- virtual bool process(
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
const std::unique_ptr<C2Work> &work,
- std::shared_ptr<C2BlockPool> pool) override;
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
private:
Mutex mColorAspectsLock;
@@ -216,6 +219,13 @@
status_t resetDecoder();
status_t resetPlugin();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+
bool setDecodeArgs(
ivd_video_decode_ip_t *ps_dec_ip,
ivd_video_decode_op_t *ps_dec_op,
diff --git a/media/libstagefright/codecs/cmds/Android.bp b/media/libstagefright/codecs/cmds/Android.bp
index ad0bd2d..40f1a3d 100644
--- a/media/libstagefright/codecs/cmds/Android.bp
+++ b/media/libstagefright/codecs/cmds/Android.bp
@@ -6,30 +6,21 @@
],
include_dirs: [
- "frameworks/av/media/libstagefright/codec2/include",
- "frameworks/av/media/libstagefright/codec2/vndk/include",
],
shared_libs: [
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.mapper@2.0",
"libbinder",
"libcutils",
"libgui",
- "libhidlbase",
- "libion",
"liblog",
"libstagefright",
"libstagefright_codec2",
+ "libstagefright_codec2_vndk",
"libstagefright_foundation",
"libui",
"libutils",
],
- static_libs: [
- "libstagefright_codec2_vndk",
- ],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/media/libstagefright/codecs/cmds/codec2.cpp b/media/libstagefright/codecs/cmds/codec2.cpp
index 5a225f1..78fb527 100644
--- a/media/libstagefright/codecs/cmds/codec2.cpp
+++ b/media/libstagefright/codecs/cmds/codec2.cpp
@@ -52,6 +52,7 @@
#include <gui/SurfaceComposerClient.h>
#include <util/C2ParamUtils.h>
+#include <C2AllocatorGralloc.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Component.h>
@@ -174,6 +175,7 @@
void SimplePlayer::onWorkDone(
std::weak_ptr<C2Component> component, std::vector<std::unique_ptr<C2Work>> workItems) {
+ ALOGV("SimplePlayer::onWorkDone");
(void) component;
ULock l(mProcessedLock);
for (auto & item : workItems) {
@@ -245,31 +247,36 @@
}
int slot;
sp<Fence> fence;
+ ALOGV("Render: Frame #%" PRId64, work->worklets.front()->output.ordinal.frame_index);
const std::shared_ptr<C2Buffer> &output = work->worklets.front()->output.buffers[0];
- const C2ConstGraphicBlock &block = output->data().graphicBlocks().front();
- sp<GraphicBuffer> buffer(new GraphicBuffer(
- block.handle(),
- GraphicBuffer::CLONE_HANDLE,
- block.width(),
- block.height(),
- HAL_PIXEL_FORMAT_YV12,
- 1,
- (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
- block.width()));
+ if (output) {
+ const C2ConstGraphicBlock &block = output->data().graphicBlocks().front();
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(block.handle());
+ sp<GraphicBuffer> buffer(new GraphicBuffer(
+ grallocHandle,
+ GraphicBuffer::CLONE_HANDLE,
+ block.width(),
+ block.height(),
+ HAL_PIXEL_FORMAT_YV12,
+ 1,
+ (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ block.width()));
+ native_handle_delete(grallocHandle);
- status_t err = igbp->attachBuffer(&slot, buffer);
+ status_t err = igbp->attachBuffer(&slot, buffer);
- IGraphicBufferProducer::QueueBufferInput qbi(
- work->worklets.front()->output.ordinal.timestamp * 1000ll,
- false,
- HAL_DATASPACE_UNKNOWN,
- Rect(block.width(), block.height()),
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW,
- 0,
- Fence::NO_FENCE,
- 0);
- IGraphicBufferProducer::QueueBufferOutput qbo;
- err = igbp->queueBuffer(slot, qbi, &qbo);
+ IGraphicBufferProducer::QueueBufferInput qbi(
+ work->worklets.front()->output.ordinal.timestamp * 1000ll,
+ false,
+ HAL_DATASPACE_UNKNOWN,
+ Rect(block.width(), block.height()),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW,
+ 0,
+ Fence::NO_FENCE,
+ 0);
+ IGraphicBufferProducer::QueueBufferOutput qbo;
+ err = igbp->queueBuffer(slot, qbi, &qbo);
+ }
work->input.buffers.clear();
work->worklets.clear();
@@ -278,6 +285,7 @@
mWorkQueue.push_back(std::move(work));
mQueueCondition.notify_all();
}
+ ALOGV("render loop finished");
});
long numFrames = 0;
@@ -365,11 +373,12 @@
++numFrames;
}
+ ALOGV("main loop finished");
source->stop();
- component->release();
-
running.store(false);
surfaceThread.join();
+
+ component->release();
printf("\n");
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
index 4c7290d..1ea1c85 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
@@ -69,6 +69,13 @@
codecReturn);
return codecReturn;
}
+ codecReturn = vpx_codec_control(mCodecContext, VP9E_SET_ROW_MT, 1);
+ if (codecReturn != VPX_CODEC_OK) {
+ ALOGE("Error setting VP9E_SET_ROW_MT to 1. vpx_codec_control() "
+ "returned %d", codecReturn);
+ return codecReturn;
+ }
+
// For VP9, we always set CPU_USED to 8 (because the realtime default is 0
// which is too slow).
codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8);
diff --git a/media/libstagefright/codecs/tests/Android.mk b/media/libstagefright/codecs/tests/Android.mk
deleted file mode 100644
index ea188ea..0000000
--- a/media/libstagefright/codecs/tests/Android.mk
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- C2SoftAvcDec_test.cpp \
-
-LOCAL_MODULE_TAGS := optional
-LOCAL_MODULE := c2_google_component_test
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libstagefright_codec2 \
- libstagefright_foundation \
- libstagefright_soft_c2avcdec \
- liblog \
-
-LOCAL_C_INCLUDES := \
- frameworks/av/media/libstagefright/codec2/include \
- frameworks/av/media/libstagefright/codec2/vndk/include \
- frameworks/av/media/libstagefright/codecs/avcdec \
-
-LOCAL_CFLAGS += -Werror -Wall -std=c++14
-LOCAL_CLANG := true
-
-include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/codecs/tests/C2SoftAvcDec_test.cpp b/media/libstagefright/codecs/tests/C2SoftAvcDec_test.cpp
deleted file mode 100644
index ca26a1d..0000000
--- a/media/libstagefright/codecs/tests/C2SoftAvcDec_test.cpp
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "C2SoftAvcDec_test"
-#include <utils/Log.h>
-
-#include <gtest/gtest.h>
-
-#include <media/stagefright/foundation/MediaDefs.h>
-
-#include "C2SoftAvcDec.h"
-
-namespace android {
-
-namespace {
-
-template <class T>
-std::unique_ptr<T> alloc_unique_cstr(const char *cstr) {
- std::unique_ptr<T> ptr = T::alloc_unique(strlen(cstr) + 1);
- strcpy(ptr->m.value, cstr);
- return ptr;
-}
-
-} // namespace
-
-
-class C2SoftAvcDecTest : public ::testing::Test {
-public:
- C2SoftAvcDecTest() : mIntf(new C2SoftAvcDecIntf("dummy", 0u)) {}
- ~C2SoftAvcDecTest() = default;
-
- template <typename T>
- void testReadOnlyParam(const T *expected, const T *invalid);
-
- template <typename T>
- void testReadOnlyParamOnStack(const T *expected, const T *invalid);
-
- template <typename T>
- void testReadOnlyParamOnHeap(const T *expected, const T *invalid);
-
- template <typename T>
- void testReadOnlyFlexParam(
- const std::unique_ptr<T> &expected, const std::unique_ptr<T> &invalid);
-
-protected:
- std::shared_ptr<C2SoftAvcDecIntf> mIntf;
-};
-
-template <typename T>
-void C2SoftAvcDecTest::testReadOnlyParam(const T *expected, const T *invalid) {
- testReadOnlyParamOnStack(expected, invalid);
- testReadOnlyParamOnHeap(expected, invalid);
-}
-
-template <typename T>
-void C2SoftAvcDecTest::testReadOnlyParamOnStack(const T *expected, const T *invalid) {
- T param;
- ASSERT_EQ(C2_OK, mIntf->query_vb({&param}, {}, C2_DONT_BLOCK, nullptr));
- ASSERT_EQ(*expected, param);
-
- std::vector<C2Param * const> params{ (C2Param * const)invalid };
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- ASSERT_EQ(C2_BAD_VALUE, mIntf->config_vb(params, C2_DONT_BLOCK, &failures));
-
- // The param must not change after failed config.
- ASSERT_EQ(C2_OK, mIntf->query_vb({&param}, {}, C2_DONT_BLOCK, nullptr));
- ASSERT_EQ(*expected, param);
-}
-
-template <typename T>
-void C2SoftAvcDecTest::testReadOnlyParamOnHeap(const T *expected, const T *invalid) {
- std::vector<std::unique_ptr<C2Param>> heapParams;
-
- uint32_t index = expected->index();
-
- ASSERT_EQ(C2_OK, mIntf->query_vb({}, {index}, C2_DONT_BLOCK, &heapParams));
- ASSERT_EQ(1u, heapParams.size());
- ASSERT_EQ(*expected, *heapParams[0]);
-
- std::vector<C2Param * const> params{ (C2Param * const)invalid };
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- ASSERT_EQ(C2_BAD_VALUE, mIntf->config_vb(params, C2_DONT_BLOCK, &failures));
-
- // The param must not change after failed config.
- heapParams.clear();
- ASSERT_EQ(C2_OK, mIntf->query_vb({}, {index}, C2_DONT_BLOCK, &heapParams));
- ASSERT_EQ(1u, heapParams.size());
- ASSERT_EQ(*expected, *heapParams[0]);
-}
-
-template <typename T>
-void C2SoftAvcDecTest::testReadOnlyFlexParam(
- const std::unique_ptr<T> &expected, const std::unique_ptr<T> &invalid) {
- std::vector<std::unique_ptr<C2Param>> heapParams;
-
- uint32_t index = expected->index();
-
- ASSERT_EQ(C2_OK, mIntf->query_vb({}, {index}, C2_DONT_BLOCK, &heapParams));
- ASSERT_EQ(1u, heapParams.size());
- ASSERT_EQ(*expected, *heapParams[0]);
-
- std::vector<C2Param * const> params{ invalid.get() };
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- ASSERT_EQ(C2_BAD_VALUE, mIntf->config_vb(params, C2_DONT_BLOCK, &failures));
-
- // The param must not change after failed config.
- heapParams.clear();
- ASSERT_EQ(C2_OK, mIntf->query_vb({}, {index}, C2_DONT_BLOCK, &heapParams));
- ASSERT_EQ(1u, heapParams.size());
- ASSERT_EQ(*expected, *heapParams[0]);
-}
-
-
-TEST_F(C2SoftAvcDecTest, TestNameAndId) {
- EXPECT_STREQ("dummy", mIntf->getName().c_str());
- EXPECT_EQ(0u, mIntf->getId());
-}
-
-TEST_F(C2SoftAvcDecTest, TestDomainInfo) {
- C2ComponentDomainInfo expected(C2DomainVideo);
- C2ComponentDomainInfo invalid(C2DomainAudio);
- testReadOnlyParam(&expected, &invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestInputStreamCount) {
- C2PortStreamCountConfig::input expected(1);
- C2PortStreamCountConfig::input invalid(100);
- testReadOnlyParam(&expected, &invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestOutputStreamCount) {
- C2PortStreamCountConfig::output expected(1);
- C2PortStreamCountConfig::output invalid(100);
- testReadOnlyParam(&expected, &invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestInputPortMime) {
- std::unique_ptr<C2PortMimeConfig::input> expected(
- alloc_unique_cstr<C2PortMimeConfig::input>(MEDIA_MIMETYPE_VIDEO_AVC));
- std::unique_ptr<C2PortMimeConfig::input> invalid(
- alloc_unique_cstr<C2PortMimeConfig::input>(MEDIA_MIMETYPE_VIDEO_RAW));
- testReadOnlyFlexParam(expected, invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestOutputPortMime) {
- std::unique_ptr<C2PortMimeConfig::output> expected(
- alloc_unique_cstr<C2PortMimeConfig::output>(MEDIA_MIMETYPE_VIDEO_RAW));
- std::unique_ptr<C2PortMimeConfig::output> invalid(
- alloc_unique_cstr<C2PortMimeConfig::output>(MEDIA_MIMETYPE_VIDEO_AVC));
- testReadOnlyFlexParam(expected, invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestInputStreamFormat) {
- C2StreamFormatConfig::input expected(0u, C2FormatCompressed);
- C2StreamFormatConfig::input invalid(0u, C2FormatVideo);
- testReadOnlyParam(&expected, &invalid);
-}
-
-TEST_F(C2SoftAvcDecTest, TestOutputStreamFormat) {
- C2StreamFormatConfig::output expected(0u, C2FormatVideo);
- C2StreamFormatConfig::output invalid(0u, C2FormatCompressed);
- testReadOnlyParam(&expected, &invalid);
-}
-
-} // namespace android
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 88a8351..c4eaa27 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -398,6 +398,7 @@
}
// TODO: move this into a Video HAL
+const static
ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>> sStandardFallbacks {
{
{ CU::kColorStandardBT601_625, { CA::PrimariesBT709_5, CA::MatrixBT470_6M } },
@@ -420,6 +421,7 @@
}
};
+const static
ALookup<CU::ColorStandard, CA::Primaries> sStandardPrimariesFallbacks {
{
{ CU::kColorStandardFilm, CA::PrimariesGenericFilm },
@@ -430,7 +432,8 @@
}
};
-static ALookup<android_dataspace, android_dataspace> sLegacyDataSpaceToV0 {
+const static
+ALookup<android_dataspace, android_dataspace> sLegacyDataSpaceToV0 {
{
{ HAL_DATASPACE_SRGB, HAL_DATASPACE_V0_SRGB },
{ HAL_DATASPACE_BT709, HAL_DATASPACE_V0_BT709 },
@@ -441,6 +444,73 @@
}
};
+#define GET_HAL_ENUM(class, name) HAL_DATASPACE_##class##name
+#define GET_HAL_BITFIELD(class, name) (GET_HAL_ENUM(class, _##name) >> GET_HAL_ENUM(class, _SHIFT))
+
+const static
+ALookup<CU::ColorStandard, uint32_t> sGfxStandards {
+ {
+ { CU::kColorStandardUnspecified, GET_HAL_BITFIELD(STANDARD, UNSPECIFIED) },
+ { CU::kColorStandardBT709, GET_HAL_BITFIELD(STANDARD, BT709) },
+ { CU::kColorStandardBT601_625, GET_HAL_BITFIELD(STANDARD, BT601_625) },
+ { CU::kColorStandardBT601_625_Unadjusted, GET_HAL_BITFIELD(STANDARD, BT601_625_UNADJUSTED) },
+ { CU::kColorStandardBT601_525, GET_HAL_BITFIELD(STANDARD, BT601_525) },
+ { CU::kColorStandardBT601_525_Unadjusted, GET_HAL_BITFIELD(STANDARD, BT601_525_UNADJUSTED) },
+ { CU::kColorStandardBT2020, GET_HAL_BITFIELD(STANDARD, BT2020) },
+ { CU::kColorStandardBT2020Constant, GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE) },
+ { CU::kColorStandardBT470M, GET_HAL_BITFIELD(STANDARD, BT470M) },
+ { CU::kColorStandardFilm, GET_HAL_BITFIELD(STANDARD, FILM) },
+ { CU::kColorStandardDCI_P3, GET_HAL_BITFIELD(STANDARD, DCI_P3) },
+ }
+};
+
+// verify public values are stable
+static_assert(CU::kColorStandardUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT709 == 1, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT601_625 == 2, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT601_525 == 4, "SDK mismatch"); // N
+static_assert(CU::kColorStandardBT2020 == 6, "SDK mismatch"); // N
+
+const static
+ALookup<CU::ColorTransfer, uint32_t> sGfxTransfers {
+ {
+ { CU::kColorTransferUnspecified, GET_HAL_BITFIELD(TRANSFER, UNSPECIFIED) },
+ { CU::kColorTransferLinear, GET_HAL_BITFIELD(TRANSFER, LINEAR) },
+ { CU::kColorTransferSRGB, GET_HAL_BITFIELD(TRANSFER, SRGB) },
+ { CU::kColorTransferSMPTE_170M, GET_HAL_BITFIELD(TRANSFER, SMPTE_170M) },
+ { CU::kColorTransferGamma22, GET_HAL_BITFIELD(TRANSFER, GAMMA2_2) },
+ { CU::kColorTransferGamma28, GET_HAL_BITFIELD(TRANSFER, GAMMA2_8) },
+ { CU::kColorTransferST2084, GET_HAL_BITFIELD(TRANSFER, ST2084) },
+ { CU::kColorTransferHLG, GET_HAL_BITFIELD(TRANSFER, HLG) },
+ }
+};
+
+// verify public values are stable
+static_assert(CU::kColorTransferUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorTransferLinear == 1, "SDK mismatch"); // N
+static_assert(CU::kColorTransferSRGB == 2, "SDK mismatch"); // N
+static_assert(CU::kColorTransferSMPTE_170M == 3, "SDK mismatch"); // N
+static_assert(CU::kColorTransferST2084 == 6, "SDK mismatch"); // N
+static_assert(CU::kColorTransferHLG == 7, "SDK mismatch"); // N
+
+const static
+ALookup<CU::ColorRange, uint32_t> sGfxRanges {
+ {
+ { CU::kColorRangeUnspecified, GET_HAL_BITFIELD(RANGE, UNSPECIFIED) },
+ { CU::kColorRangeFull, GET_HAL_BITFIELD(RANGE, FULL) },
+ { CU::kColorRangeLimited, GET_HAL_BITFIELD(RANGE, LIMITED) },
+ }
+};
+
+// verify public values are stable
+static_assert(CU::kColorRangeUnspecified == 0, "SDK mismatch"); // N
+static_assert(CU::kColorRangeFull == 1, "SDK mismatch"); // N
+static_assert(CU::kColorRangeLimited == 2, "SDK mismatch"); // N
+
+#undef GET_HAL_BITFIELD
+#undef GET_HAL_ENUM
+
+
bool ColorUtils::convertDataSpaceToV0(android_dataspace &dataSpace) {
(void)sLegacyDataSpaceToV0.lookup(dataSpace, &dataSpace);
return (dataSpace & 0xC000FFFF) == 0;
@@ -507,9 +577,23 @@
}
}
+ // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+ uint32_t gfxRange = range;
+ uint32_t gfxStandard = standard;
+ uint32_t gfxTransfer = transfer;
+ // TRICKY: use & to ensure all three mappings are completed
+ if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
+ & sGfxTransfers.map(transfer, &gfxTransfer))) {
+ ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
+ "graphics dataspace (R:%u S:%u T:%u)",
+ range, asString(range), standard, asString(standard), transfer, asString(transfer),
+ gfxRange, gfxStandard, gfxTransfer);
+ }
+
android_dataspace dataSpace = (android_dataspace)(
- (range << HAL_DATASPACE_RANGE_SHIFT) | (standard << HAL_DATASPACE_STANDARD_SHIFT) |
- (transfer << HAL_DATASPACE_TRANSFER_SHIFT));
+ (gfxRange << HAL_DATASPACE_RANGE_SHIFT) |
+ (gfxStandard << HAL_DATASPACE_STANDARD_SHIFT) |
+ (gfxTransfer << HAL_DATASPACE_TRANSFER_SHIFT));
(void)sLegacyDataSpaceToV0.rlookup(dataSpace, &dataSpace);
if (!mayExpand) {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index b889a02..d6c768d 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -39,26 +39,28 @@
* vendor-extension section so they won't collide with future platform values.
*/
-#define GET_HAL_ENUM(class, name) HAL_DATASPACE_##class##name
-#define GET_HAL_BITFIELD(class, name) (GET_HAL_ENUM(class, _##name) >> GET_HAL_ENUM(class, _SHIFT))
-
+ /**
+ * graphic.h constants changed in Android 8.0, after the ColorStandard values had already been
+ * made public in Android 7.0. We do not deal with that break in graphic.h here; instead we
+ * list the public Android SDK MediaFormat values.
+ */
enum ColorStandard : uint32_t {
- kColorStandardUnspecified = GET_HAL_BITFIELD(STANDARD, UNSPECIFIED),
- kColorStandardBT709 = GET_HAL_BITFIELD(STANDARD, BT709),
- kColorStandardBT601_625 = GET_HAL_BITFIELD(STANDARD, BT601_625),
- kColorStandardBT601_625_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_625_UNADJUSTED),
- kColorStandardBT601_525 = GET_HAL_BITFIELD(STANDARD, BT601_525),
- kColorStandardBT601_525_Unadjusted = GET_HAL_BITFIELD(STANDARD, BT601_525_UNADJUSTED),
- kColorStandardBT2020 = GET_HAL_BITFIELD(STANDARD, BT2020),
- kColorStandardBT2020Constant = GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE),
- kColorStandardBT470M = GET_HAL_BITFIELD(STANDARD, BT470M),
- kColorStandardFilm = GET_HAL_BITFIELD(STANDARD, FILM),
- kColorStandardMax = GET_HAL_BITFIELD(STANDARD, MASK),
+ kColorStandardUnspecified = 0,
+ kColorStandardBT709 = 1,
+ kColorStandardBT601_625 = 2,
+ kColorStandardBT601_625_Unadjusted = 3, // not in SDK
+ kColorStandardBT601_525 = 4,
+ kColorStandardBT601_525_Unadjusted = 5, // not in SDK
+ kColorStandardBT2020 = 6,
+ kColorStandardBT2020Constant = 7, // not in SDK
+ kColorStandardBT470M = 8, // not in SDK
+ kColorStandardFilm = 9, // not in SDK
+ kColorStandardDCI_P3 = 10, // not in SDK, new in Android 8.0
/* This marks a section of color-standard values that are not supported by graphics HAL,
but track defined color primaries-matrix coefficient combinations in media.
These are stable for a given release. */
- kColorStandardExtendedStart = kColorStandardMax + 1,
+ kColorStandardExtendedStart = 64,
/* This marks a section of color-standard values that are not supported by graphics HAL
nor using media defined color primaries or matrix coefficients. These may differ per
@@ -67,19 +69,19 @@
};
enum ColorTransfer : uint32_t {
- kColorTransferUnspecified = GET_HAL_BITFIELD(TRANSFER, UNSPECIFIED),
- kColorTransferLinear = GET_HAL_BITFIELD(TRANSFER, LINEAR),
- kColorTransferSRGB = GET_HAL_BITFIELD(TRANSFER, SRGB),
- kColorTransferSMPTE_170M = GET_HAL_BITFIELD(TRANSFER, SMPTE_170M),
- kColorTransferGamma22 = GET_HAL_BITFIELD(TRANSFER, GAMMA2_2),
- kColorTransferGamma28 = GET_HAL_BITFIELD(TRANSFER, GAMMA2_8),
- kColorTransferST2084 = GET_HAL_BITFIELD(TRANSFER, ST2084),
- kColorTransferHLG = GET_HAL_BITFIELD(TRANSFER, HLG),
- kColorTransferMax = GET_HAL_BITFIELD(TRANSFER, MASK),
+ kColorTransferUnspecified = 0,
+ kColorTransferLinear = 1,
+ kColorTransferSRGB = 2,
+ kColorTransferSMPTE_170M = 3, // not in SDK
+ kColorTransferGamma22 = 4, // not in SDK
+ kColorTransferGamma28 = 5, // not in SDK
+ kColorTransferST2084 = 6,
+ kColorTransferHLG = 7,
+ kColorTransferGamma26 = 8, // not in SDK, new in Android 8.0
/* This marks a section of color-transfer values that are not supported by graphics HAL,
but track media-defined color-transfer. These are stable for a given release. */
- kColorTransferExtendedStart = kColorTransferMax + 1,
+ kColorTransferExtendedStart = 32,
/* This marks a section of color-transfer values that are not supported by graphics HAL
nor defined by media. These may differ per device. */
@@ -87,23 +89,19 @@
};
enum ColorRange : uint32_t {
- kColorRangeUnspecified = GET_HAL_BITFIELD(RANGE, UNSPECIFIED),
- kColorRangeFull = GET_HAL_BITFIELD(RANGE, FULL),
- kColorRangeLimited = GET_HAL_BITFIELD(RANGE, LIMITED),
- kColorRangeMax = GET_HAL_BITFIELD(RANGE, MASK),
+ kColorRangeUnspecified = 0,
+ kColorRangeFull = 1,
+ kColorRangeLimited = 2,
/* This marks a section of color-transfer values that are not supported by graphics HAL,
but track media-defined color-transfer. These are stable for a given release. */
- kColorRangeExtendedStart = kColorRangeMax + 1,
+ kColorRangeExtendedStart = 8,
/* This marks a section of color-transfer values that are not supported by graphics HAL
nor defined by media. These may differ per device. */
kColorRangeVendorStart = 0x10000,
};
-#undef GET_HAL_BITFIELD
-#undef GET_HAL_ENUM
-
/*
* Static utilities for codec support
*/
@@ -197,7 +195,8 @@
case ColorUtils::kColorStandardBT2020Constant: return "BT2020Constant";
case ColorUtils::kColorStandardBT470M: return "BT470M";
case ColorUtils::kColorStandardFilm: return "Film";
- default: return def;
+ case ColorUtils::kColorStandardDCI_P3: return "DCI_P3";
+ default: return def;
}
}
@@ -212,7 +211,8 @@
case ColorUtils::kColorTransferGamma28: return "Gamma28";
case ColorUtils::kColorTransferST2084: return "ST2084";
case ColorUtils::kColorTransferHLG: return "HLG";
- default: return def;
+ case ColorUtils::kColorTransferGamma26: return "Gamma26";
+ default: return def;
}
}
@@ -222,7 +222,7 @@
case ColorUtils::kColorRangeUnspecified: return "Unspecified";
case ColorUtils::kColorRangeFull: return "Full";
case ColorUtils::kColorRangeLimited: return "Limited";
- default: return def;
+ default: return def;
}
}
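To make the dataspace composition in the ColorUtils.cpp hunk above concrete: once the sGfxRanges/sGfxStandards/sGfxTransfers tables have mapped the public MediaFormat values to graphics-HAL bitfields, the three fields are shifted into a single android_dataspace. Below is a minimal sketch for one common combination; everything other than the HAL constants is local to the sketch, and it assumes an AOSP tree where <system/graphics.h> provides the HAL_DATASPACE_*_SHIFT values.

// Minimal sketch: BT.709 standard, SMPTE 170M transfer, limited range.
// For these particular values the sGfx* lookups above are identity mappings.
#include <system/graphics.h>
#include <cstdint>
#include <cstdio>

int main() {
    uint32_t gfxRange = 2;      // kColorRangeLimited
    uint32_t gfxStandard = 1;   // kColorStandardBT709
    uint32_t gfxTransfer = 3;   // kColorTransferSMPTE_170M
    android_dataspace dataSpace = (android_dataspace)(
            (gfxRange << HAL_DATASPACE_RANGE_SHIFT) |
            (gfxStandard << HAL_DATASPACE_STANDARD_SHIFT) |
            (gfxTransfer << HAL_DATASPACE_TRANSFER_SHIFT));
    // For this combination the result equals HAL_DATASPACE_V0_BT709 (0x10c10000).
    printf("dataspace = 0x%08x\n", (unsigned)dataSpace);
    return 0;
}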
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 1e2e684..7eff8eb 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1014,7 +1014,8 @@
mFetcherLooper = new ALooper();
mFetcherLooper->setName("Fetcher");
- mFetcherLooper->start(false, false);
+ mFetcherLooper->start(false, /* runOnCallingThread */
+ true /* canCallJava */);
}
// create fetcher to fetch the master playlist
diff --git a/media/libstagefright/include/CCodecBufferChannel.h b/media/libstagefright/include/CCodecBufferChannel.h
index 354cee2..c5062d6 100644
--- a/media/libstagefright/include/CCodecBufferChannel.h
+++ b/media/libstagefright/include/CCodecBufferChannel.h
@@ -33,24 +33,36 @@
namespace android {
/**
- * BufferChannelBase implementation for ACodec.
+ * BufferChannelBase implementation for CCodec.
*/
class CCodecBufferChannel : public BufferChannelBase {
public:
+ /**
+ * Base class for representation of buffers at one port.
+ */
class Buffers {
public:
Buffers() = default;
virtual ~Buffers() = default;
- inline void setAlloc(const std::shared_ptr<C2BlockPool> &alloc) { mAlloc = alloc; }
+ /**
+ * Set format for MediaCodec-facing buffers.
+ */
inline void setFormat(const sp<AMessage> &format) { mFormat = format; }
- inline const std::shared_ptr<C2BlockPool> &getAlloc() { return mAlloc; }
+
+ /**
+ * Returns true if the buffers are operating under array mode.
+ */
+ virtual bool isArrayMode() { return false; }
+
+ /**
+ * Fills the vector with MediaCodecBuffer objects if in array mode;
+ * otherwise, this is a no-op.
+ */
+ virtual void getArray(Vector<sp<MediaCodecBuffer>> *) {}
protected:
- // Input: this object uses it to allocate input buffers with which the
- // client fills.
- // Output: this object passes it to the component.
- std::shared_ptr<C2BlockPool> mAlloc;
+ // Format to be used for creating MediaCodec-facing buffers.
sp<AMessage> mFormat;
private:
@@ -62,10 +74,41 @@
using Buffers::Buffers;
virtual ~InputBuffers() = default;
+ /**
+ * Set a block pool to obtain input memory blocks.
+ */
+ inline void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
+
+ /**
+ * Get a new MediaCodecBuffer for input and its corresponding index.
+ * Returns false if no new buffer can be obtained at the moment.
+ */
virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
+
+ /**
+ * Release the buffer obtained from requestNewBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from requestNewBuffer() is valid.
+ */
virtual void flush() = 0;
+ /**
+ * Return array-backed version of input buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls of requestNewBuffer().
+ */
+ virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
+
+ protected:
+ // Pool to obtain blocks for input buffers.
+ std::shared_ptr<C2BlockPool> mPool;
+
private:
DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
};
@@ -75,12 +118,46 @@
using Buffers::Buffers;
virtual ~OutputBuffers() = default;
+ /**
+ * Register output C2Buffer from the component and obtain corresponding
+ * index and MediaCodecBuffer object. Returns false if registration
+ * fails.
+ */
virtual bool registerBuffer(
const std::shared_ptr<C2Buffer> &buffer,
size_t *index,
sp<MediaCodecBuffer> *codecBuffer) = 0;
+
+ /**
+ * Register codec specific data as a buffer to be consistent with
+ * MediaCodec behavior.
+ */
+ virtual bool registerCsd(
+ const C2StreamCsdInfo::output * /* csd */,
+ size_t * /* index */,
+ sp<MediaCodecBuffer> * /* codecBuffer */) {
+ return false;
+ }
+
+ /**
+ * Release the buffer obtained from registerBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
- virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from registerBuffer() is valid.
+ */
+ virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
+
+ /**
+ * Return array-backed version of output buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls of registerBuffer().
+ */
+ virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
private:
DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
@@ -151,12 +228,34 @@
private:
class QueueGuard;
+ /**
+ * Special mutex-like object with the following properties:
+ *
+ * - At STOPPED state (initial, or after stop())
+ * - QueueGuard object gets created at STOPPED state, and the client is
+ * supposed to return immediately.
+ * - At RUNNING state (after start())
+ * - Each QueueGuard object created in this state is counted, and
+ * stop() waits until all of them are destroyed.
+ */
class QueueSync {
public:
+ /**
+ * At construction the sync object is in STOPPED state.
+ */
inline QueueSync() : mCount(-1) {}
~QueueSync() = default;
+ /**
+ * Transition to RUNNING state when stopped. No-op if already in RUNNING
+ * state.
+ */
void start();
+
+ /**
+ * At RUNNING state, wait until all QueueGuard objects created during
+ * RUNNING state are destroyed, and then transition to STOPPED state.
+ * No-op if already in STOPPED state.
+ */
void stop();
private:
@@ -186,6 +285,7 @@
std::function<void(status_t, enum ActionCode)> mOnError;
std::shared_ptr<C2BlockPool> mInputAllocator;
QueueSync mQueueSync;
+
Mutexed<std::unique_ptr<InputBuffers>> mInputBuffers;
Mutexed<std::unique_ptr<OutputBuffers>> mOutputBuffers;
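The QueueSync/QueueGuard behavior documented above amounts to a gate plus a reference count: stop() waits for every guard created while RUNNING, and guards created while STOPPED tell their caller to return immediately. The patch's implementation is not shown in this hunk; the following is only a hypothetical sketch of that contract, reusing the mCount == -1 convention from the constructor above and using std::mutex in place of whatever primitive the real code uses.

// Hypothetical sketch of the documented QueueSync/QueueGuard contract.
#include <condition_variable>
#include <mutex>

class QueueSyncSketch {
public:
    void start() {
        std::lock_guard<std::mutex> l(mMutex);
        if (mCount < 0) mCount = 0;               // STOPPED -> RUNNING
    }
    void stop() {
        std::unique_lock<std::mutex> l(mMutex);
        if (mCount < 0) return;                   // already STOPPED
        mCondition.wait(l, [this] { return mCount == 0; });
        mCount = -1;                              // RUNNING -> STOPPED
    }
private:
    friend class QueueGuardSketch;
    std::mutex mMutex;
    std::condition_variable mCondition;
    int mCount = -1;                              // -1 means STOPPED
};

class QueueGuardSketch {
public:
    explicit QueueGuardSketch(QueueSyncSketch &sync) : mSync(sync) {
        std::lock_guard<std::mutex> l(mSync.mMutex);
        mRunning = (mSync.mCount >= 0);
        if (mRunning) ++mSync.mCount;             // counted until destruction
    }
    ~QueueGuardSketch() {
        if (!mRunning) return;
        std::lock_guard<std::mutex> l(mSync.mMutex);
        if (--mSync.mCount == 0) mSync.mCondition.notify_all();
    }
    bool isRunning() const { return mRunning; }   // false: caller should bail out
private:
    QueueSyncSketch &mSync;
    bool mRunning;
};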
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
index b5a4b34..98b8b4d 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -22,6 +22,8 @@
namespace android {
+class MediaAnalyticsItem;
+
// IMediaExtractor wrapper to the MediaExtractor.
class RemoteMediaExtractor : public BnMediaExtractor {
public:
@@ -43,6 +45,8 @@
private:
sp<MediaExtractor> mExtractor;
+ MediaAnalyticsItem *mAnalyticsItem;
+
explicit RemoteMediaExtractor(const sp<MediaExtractor> &extractor);
DISALLOW_EVIL_CONSTRUCTORS(RemoteMediaExtractor);
diff --git a/packages/MediaUpdate/Android.mk b/packages/MediaUpdate/Android.mk
index e757098..4a71401 100644
--- a/packages/MediaUpdate/Android.mk
+++ b/packages/MediaUpdate/Android.mk
@@ -25,4 +25,10 @@
# TODO: create a separate key for this package.
LOCAL_CERTIFICATE := platform
+# TODO: Use System SDK once public APIs are approved
+# LOCAL_SDK_VERSION := system_current
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+LOCAL_PROGUARD_FLAG_FILES := proguard.cfg
+
include $(BUILD_PACKAGE)
diff --git a/packages/MediaUpdate/proguard.cfg b/packages/MediaUpdate/proguard.cfg
new file mode 100644
index 0000000..874dbf5
--- /dev/null
+++ b/packages/MediaUpdate/proguard.cfg
@@ -0,0 +1,20 @@
+#
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Keep entry point for updatable Java classes
+-keep public class com.android.media.update.ApiFactory {
+ public static java.lang.Object initialize(android.content.Context);
+}
diff --git a/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
new file mode 100644
index 0000000..1cdd177
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.update;
+
+import android.content.Context;
+import android.media.update.MediaController2Provider;
+import android.media.update.StaticProvider;
+import android.media.update.ViewProvider;
+import android.widget.MediaController2;
+
+import com.android.widget.MediaController2Impl;
+
+public class ApiFactory implements StaticProvider {
+ private final Context mContext;
+
+ public ApiFactory(Context context) {
+ mContext = context;
+ }
+
+ public static Object initialize(Context context) throws ReflectiveOperationException {
+ return new ApiFactory(context);
+ }
+
+ @Override
+ public MediaController2Provider createMediaController2(
+ MediaController2 instance, ViewProvider superProvider) {
+ return new MediaController2Impl(instance, superProvider);
+ }
+}
diff --git a/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
new file mode 100644
index 0000000..d322a20
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.widget;
+
+import android.graphics.Canvas;
+import android.media.session.MediaController;
+import android.media.update.MediaController2Provider;
+import android.media.update.ViewProvider;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.MediaController2;
+
+public class MediaController2Impl implements MediaController2Provider {
+ private final MediaController2 mInstance;
+ private final ViewProvider mSuperProvider;
+
+ public MediaController2Impl(MediaController2 instance, ViewProvider superProvider) {
+ mInstance = instance;
+ mSuperProvider = superProvider;
+
+ // TODO: Implement
+ }
+
+ @Override
+ public void setController_impl(MediaController controller) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void setAnchorView_impl(View view) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void show_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void show_impl(int timeout) {
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean isShowing_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public void hide_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void setPrevNextListeners_impl(OnClickListener next, OnClickListener prev) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void showCCButton_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean isPlaying_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public int getCurrentPosition_impl() {
+ // TODO: Implement
+ return 0;
+ }
+
+ @Override
+ public int getBufferPercentage_impl() {
+ // TODO: Implement
+ return 0;
+ }
+
+ @Override
+ public boolean canPause_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public boolean canSeekBackward_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public boolean canSeekForward_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public void showSubtitle_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void hideSubtitle_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void onAttachedToWindow_impl() {
+ mSuperProvider.onAttachedToWindow_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public void onDetachedFromWindow_impl() {
+ mSuperProvider.onDetachedFromWindow_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public void onLayout_impl(boolean changed, int left, int top, int right, int bottom) {
+ mSuperProvider.onLayout_impl(changed, left, top, right, bottom);
+ // TODO: Implement
+ }
+
+ @Override
+ public void draw_impl(Canvas canvas) {
+ mSuperProvider.draw_impl(canvas);
+ // TODO: Implement
+ }
+
+ @Override
+ public CharSequence getAccessibilityClassName_impl() {
+ // TODO: Implement
+ return MediaController2.class.getName();
+ }
+
+ @Override
+ public boolean onTouchEvent_impl(MotionEvent ev) {
+ // TODO: Implement
+ return mSuperProvider.onTouchEvent_impl(ev);
+ }
+
+ @Override
+ public boolean onTrackballEvent_impl(MotionEvent ev) {
+ // TODO: Implement
+ return mSuperProvider.onTrackballEvent_impl(ev);
+ }
+
+ @Override
+ public boolean onKeyDown_impl(int keyCode, KeyEvent event) {
+ // TODO: Implement
+ return mSuperProvider.onKeyDown_impl(keyCode, event);
+ }
+
+ @Override
+ public void onFinishInflate_impl() {
+ mSuperProvider.onFinishInflate_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean dispatchKeyEvent_impl(KeyEvent event) {
+ // TODO: Implement
+ return mSuperProvider.dispatchKeyEvent_impl(event);
+ }
+
+ @Override
+ public void setEnabled_impl(boolean enabled) {
+ mSuperProvider.setEnabled_impl(enabled);
+ // TODO: Implement
+ }
+}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3c975c3..4d5e094 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2597,6 +2597,7 @@
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
+ Mutex::Autolock _l(t->mLock);
for (size_t j = 0; j < t->mEffectChains.size(); j++) {
sp<EffectChain> ec = t->mEffectChains[j];
if (ec->sessionId() > AUDIO_SESSION_OUTPUT_MIX) {
@@ -2606,6 +2607,7 @@
}
for (size_t i = 0; i < mRecordThreads.size(); i++) {
sp<RecordThread> t = mRecordThreads.valueAt(i);
+ Mutex::Autolock _l(t->mLock);
for (size_t j = 0; j < t->mEffectChains.size(); j++) {
sp<EffectChain> ec = t->mEffectChains[j];
chains.push(ec);
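The AudioFlinger.cpp change above takes each thread's mLock before walking its mEffectChains, relying on scoped RAII locking so the lock is released at the end of each loop iteration. A tiny standalone illustration of the same pattern, with std::lock_guard standing in for android::Mutex::Autolock and placeholder types instead of AudioFlinger's thread and effect-chain classes:

// Standalone illustration of per-iteration scoped locking.
#include <mutex>
#include <vector>

struct ThreadLike {
    std::mutex mLock;
    std::vector<int> mEffectChains;  // stands in for Vector<sp<EffectChain>>
};

void collectChains(std::vector<ThreadLike> &threads, std::vector<int> *chains) {
    for (auto &t : threads) {
        std::lock_guard<std::mutex> _l(t.mLock);  // held only for this iteration
        for (int ec : t.mEffectChains) {
            chains->push_back(ec);
        }
    }
}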
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 3bb5803..d5def48 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4301,10 +4301,16 @@
// because we're about to decrement the last sp<> on those tracks.
block = FastMixerStateQueue::BLOCK_UNTIL_ACKED;
} else {
- LOG_ALWAYS_FATAL("fast track %d should have been active; "
+ // ALOGW rather than LOG_ALWAYS_FATAL because there appear to be cases where an
+ // AudioTrack may start (possibly via a write() after underrun rather than an
+ // explicit start()) and then be paused or released immediately. In that case
+ // the FastTrack state hasn't had time to update.
+ // TODO Remove the ALOGW when this theory is confirmed.
+ ALOGW("fast track %d should have been active; "
"mState=%d, mTrackMask=%#x, recentUnderruns=%u, isShared=%d",
j, track->mState, state->mTrackMask, recentUnderruns,
track->sharedBuffer() != 0);
+ // Since the FastMixer state already has the track inactive, do nothing here.
}
tracksToRemove->add(track);
// Avoids a misleading display in dumpsys
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
index d520937..caf3c02 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h
@@ -84,12 +84,7 @@
bool hasDynamicAudioProfile() const { return mProfiles.hasDynamicProfile(); }
// searches for an exact match
- status_t checkExactAudioProfile(uint32_t samplingRate,
- audio_channel_mask_t channelMask,
- audio_format_t format) const
- {
- return mProfiles.checkExactProfile(samplingRate, channelMask, format);
- }
+ virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
// searches for a compatible match, currently implemented for input
// parameters are input|output, returned value is the best match.
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
index d6ea698..094ff65 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
@@ -137,6 +137,26 @@
}
}
+status_t AudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
+{
+ status_t status = NO_ERROR;
+ auto config_mask = config->config_mask;
+ if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+ config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+ status = checkGain(&config->gain, config->gain.index);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ }
+ if (config_mask != 0) {
+ // TODO should we check sample_rate / channel_mask / format separately?
+ status = mProfiles.checkExactProfile(config->sample_rate,
+ config->channel_mask,
+ config->format);
+ }
+ return status;
+}
+
void AudioPort::pickSamplingRate(uint32_t &pickedRate,const SampleRateVector &samplingRates) const
{
pickedRate = 0;
@@ -388,9 +408,7 @@
status = NO_INIT;
goto exit;
}
- status = audioport->checkExactAudioProfile(config->sample_rate,
- config->channel_mask,
- config->format);
+ status = audioport->checkExactAudioProfile(config);
if (status != NO_ERROR) {
goto exit;
}
@@ -404,10 +422,6 @@
mFormat = config->format;
}
if (config->config_mask & AUDIO_PORT_CONFIG_GAIN) {
- status = audioport->checkGain(&config->gain, config->gain.index);
- if (status != NO_ERROR) {
- goto exit;
- }
mGain = config->gain;
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index e8980b5..69dd06b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -71,7 +71,13 @@
return false;
}
} else {
- if (checkExactAudioProfile(samplingRate, channelMask, format) != NO_ERROR) {
+ const struct audio_port_config config = {
+ .config_mask = AUDIO_PORT_CONFIG_ALL & ~AUDIO_PORT_CONFIG_GAIN,
+ .sample_rate = samplingRate,
+ .channel_mask = channelMask,
+ .format = format,
+ };
+ if (checkExactAudioProfile(&config) != NO_ERROR) {
return false;
}
}
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index ca31691..8e5b260 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -2,7 +2,11 @@
# service executable
include $(CLEAR_VARS)
+# seccomp is not required for coverage builds.
+ifneq ($(NATIVE_COVERAGE),true)
LOCAL_REQUIRED_MODULES_arm := mediacodec.policy
+LOCAL_REQUIRED_MODULES_x86 := mediacodec.policy
+endif
LOCAL_SRC_FILES := main_codecservice.cpp
LOCAL_SHARED_LIBRARIES := \
libmedia_omx \
@@ -28,7 +32,7 @@
include $(BUILD_EXECUTABLE)
# service seccomp policy
-ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), arm arm64))
+ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), x86 x86_64 arm arm64))
include $(CLEAR_VARS)
LOCAL_MODULE := mediacodec.policy
LOCAL_MODULE_CLASS := ETC
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86.policy b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
new file mode 100644
index 0000000..dc2c04f
--- /dev/null
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
@@ -0,0 +1,69 @@
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+read: 1
+mprotect: 1
+prctl: 1
+openat: 1
+getuid32: 1
+writev: 1
+ioctl: 1
+close: 1
+mmap2: 1
+fstat64: 1
+madvise: 1
+fstatat64: 1
+futex: 1
+munmap: 1
+faccessat: 1
+_llseek: 1
+lseek: 1
+clone: 1
+sigaltstack: 1
+setpriority: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+ugetrlimit: 1
+readlinkat: 1
+_llseek: 1
+fstatfs64: 1
+pread64: 1
+mremap: 1
+dup: 1
+set_tid_address: 1
+write: 1
+nanosleep: 1
+
+# for attaching to debuggerd on process crash
+socketcall: 1
+sigaction: 1
+tgkill: 1
+rt_sigprocmask: 1
+fcntl64: 1
+rt_tgsigqueueinfo: 1
+geteuid32: 1
+getgid32: 1
+getegid32: 1
+getgroups32: 1
+getdents64: 1
+pipe2: 1
+ppoll: 1
+
+# Required by AddressSanitizer
+gettid: 1
+sched_yield: 1
+getpid: 1
+gettid: 1
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index a61994d..8db1761 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -72,13 +72,6 @@
aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAUDIO_OK;
- const audio_attributes_t attributes = {
- .content_type = AUDIO_CONTENT_TYPE_MUSIC,
- .usage = AUDIO_USAGE_MEDIA,
- .source = AUDIO_SOURCE_VOICE_RECOGNITION,
- .flags = AUDIO_FLAG_LOW_LATENCY,
- .tags = ""
- };
audio_config_base_t config;
audio_port_handle_t deviceId;
@@ -87,6 +80,24 @@
copyFrom(request.getConstantConfiguration());
+ aaudio_direction_t direction = getDirection();
+
+ const audio_content_type_t contentType =
+ AAudioConvert_contentTypeToInternal(getContentType());
+ const audio_usage_t usage = (direction == AAUDIO_DIRECTION_OUTPUT)
+ ? AAudioConvert_usageToInternal(getUsage())
+ : AUDIO_USAGE_UNKNOWN;
+ const audio_source_t source = (direction == AAUDIO_DIRECTION_INPUT)
+ ? AAudioConvert_inputPresetToAudioSource(getInputPreset())
+ : AUDIO_SOURCE_DEFAULT;
+
+ const audio_attributes_t attributes = {
+ .content_type = contentType,
+ .usage = usage,
+ .source = source,
+ .flags = AUDIO_FLAG_LOW_LATENCY,
+ .tags = ""
+ };
mMmapClient.clientUid = request.getUserId();
mMmapClient.clientPid = request.getProcessId();
mMmapClient.packageName.setTo(String16(""));
@@ -108,7 +119,6 @@
int32_t aaudioSamplesPerFrame = getSamplesPerFrame();
- aaudio_direction_t direction = getDirection();
if (direction == AAUDIO_DIRECTION_OUTPUT) {
config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
? AUDIO_CHANNEL_OUT_STEREO