Merge "Fix performance-for-range-copy warnings in frameworks/av."
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 8b76cdf..629d75a 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -115,11 +115,13 @@
* <p>The mode control selects how the image data is converted from the
* sensor's native color into linear sRGB color.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_color_correction_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
* control is overridden by the AWB routine. When AWB is disabled, the
@@ -164,17 +166,19 @@
* @see ACAMERA_COLOR_CORRECTION_TRANSFORM
* @see ACAMERA_CONTROL_AWB_MODE
*/
- ACAMERA_COLOR_CORRECTION_MODE = // byte (enum)
+ ACAMERA_COLOR_CORRECTION_MODE = // byte (acamera_metadata_enum_android_color_correction_mode_t)
ACAMERA_COLOR_CORRECTION_START,
/**
* <p>A color transform matrix to use to transform
* from sensor RGB color space to output linear sRGB color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is either set by the camera device when the request
* ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
@@ -196,11 +200,13 @@
* <p>Gains applying to Bayer raw color channels for
* white-balance.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>These per-channel gains are either set by the camera device
* when the request ACAMERA_COLOR_CORRECTION_MODE is not
@@ -221,11 +227,13 @@
/**
* <p>Mode of operation for the chromatic aberration correction algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
* can not focus on the same point after exiting from the lens. This metadata defines
@@ -239,7 +247,7 @@
* applying aberration correction.</p>
* <p>LEGACY devices will always be in FAST mode.</p>
*/
- ACAMERA_COLOR_CORRECTION_ABERRATION_MODE = // byte (enum)
+ ACAMERA_COLOR_CORRECTION_ABERRATION_MODE = // byte (acamera_metadata_enum_android_color_correction_aberration_mode_t)
ACAMERA_COLOR_CORRECTION_START + 3,
/**
* <p>List of aberration correction modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE that are
@@ -247,10 +255,12 @@
*
* @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE. If no
* aberration correction modes are available for a device, this list will solely include
@@ -269,11 +279,13 @@
* <p>The desired setting for the camera device's auto-exposure
* algorithm's antibanding compensation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Some kinds of lighting fixtures, such as some fluorescent
* lights, flicker at the rate of the power supply frequency
@@ -310,17 +322,19 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_STATISTICS_SCENE_FLICKER
*/
- ACAMERA_CONTROL_AE_ANTIBANDING_MODE = // byte (enum)
+ ACAMERA_CONTROL_AE_ANTIBANDING_MODE = // byte (acamera_metadata_enum_android_control_ae_antibanding_mode_t)
ACAMERA_CONTROL_START,
/**
* <p>Adjustment to auto-exposure (AE) target image
* brightness.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The adjustment is measured as a count of steps, with the
* step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
@@ -350,11 +364,13 @@
* <p>Whether auto-exposure (AE) is currently locked to its latest
* calculated values.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
* and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
@@ -398,17 +414,19 @@
* @see ACAMERA_SENSOR_EXPOSURE_TIME
* @see ACAMERA_SENSOR_SENSITIVITY
*/
- ACAMERA_CONTROL_AE_LOCK = // byte (enum)
+ ACAMERA_CONTROL_AE_LOCK = // byte (acamera_metadata_enum_android_control_ae_lock_t)
ACAMERA_CONTROL_START + 2,
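
As a usage illustration for the AE lock entry above (not part of this patch), a minimal sketch that freezes auto-exposure on an existing request; the request handle is assumed to come from ACameraDevice_createCaptureRequest():

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Lock AE so exposure parameters stop changing until set back to OFF. */
    static camera_status_t lock_auto_exposure(ACaptureRequest* request) {
        uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
        return ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_LOCK,
                                           /*count*/ 1, &aeLock);
    }
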
/**
* <p>The desired mode for the camera device's
* auto-exposure routine.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control is only effective if ACAMERA_CONTROL_MODE is
* AUTO.</p>
@@ -436,16 +454,18 @@
* @see ACAMERA_SENSOR_FRAME_DURATION
* @see ACAMERA_SENSOR_SENSITIVITY
*/
- ACAMERA_CONTROL_AE_MODE = // byte (enum)
+ ACAMERA_CONTROL_AE_MODE = // byte (acamera_metadata_enum_android_control_ae_mode_t)
ACAMERA_CONTROL_START + 3,
/**
* <p>List of metering areas to use for auto-exposure adjustment.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAe is 0.
* Otherwise will always be present.</p>
@@ -486,11 +506,13 @@
* adjust the capture frame rate to maintain good
* exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Only constrains auto-exposure (AE) algorithm, not
* manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
@@ -505,11 +527,13 @@
* <p>Whether the camera device will trigger a precapture
* metering sequence when it processes this request.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry is normally set to IDLE, or is not
* included at all in the request settings. When included and
@@ -563,17 +587,19 @@
* @see ACAMERA_CONTROL_AF_TRIGGER
* @see ACAMERA_CONTROL_CAPTURE_INTENT
*/
- ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER = // byte (enum)
+ ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER = // byte (acamera_metadata_enum_android_control_ae_precapture_trigger_t)
ACAMERA_CONTROL_START + 6,
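
A hedged sketch of how the precapture trigger above is typically used before a still capture: submit one request with the trigger set to START, then watch ACAMERA_CONTROL_AE_STATE in the results until it converges. Only the entry-setting step is shown; the capture session plumbing is assumed.

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Kick off the AE precapture metering sequence on a single request. */
    static camera_status_t start_ae_precapture(ACaptureRequest* request) {
        uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
        return ACaptureRequest_setEntry_u8(request,
                                           ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER,
                                           1, &trigger);
    }
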
/**
* <p>Whether auto-focus (AF) is currently enabled, and what
* mode it is set to.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
* (i.e. <code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0</code>). Also note that
@@ -590,16 +616,18 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_CONTROL_AF_MODE = // byte (enum)
+ ACAMERA_CONTROL_AF_MODE = // byte (acamera_metadata_enum_android_control_af_mode_t)
ACAMERA_CONTROL_START + 7,
/**
* <p>List of metering areas to use for auto-focus.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAf is 0.
* Otherwise will always be present.</p>
@@ -638,11 +666,13 @@
/**
* <p>Whether the camera device will trigger autofocus for this request.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_trigger_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry is normally set to IDLE, or is not
* included at all in the request settings.</p>
@@ -665,17 +695,19 @@
* @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
* @see ACAMERA_CONTROL_AF_STATE
*/
- ACAMERA_CONTROL_AF_TRIGGER = // byte (enum)
+ ACAMERA_CONTROL_AF_TRIGGER = // byte (acamera_metadata_enum_android_control_af_trigger_t)
ACAMERA_CONTROL_START + 9,
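
For the AF mode and trigger entries above, a small illustrative helper (assuming the request already exists and the lens is not fixed-focus) that arms single-shot autofocus and starts a scan; progress is then reported through ACAMERA_CONTROL_AF_STATE.

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Select single-shot AF and start a focus scan in the same request. */
    static camera_status_t trigger_autofocus(ACaptureRequest* request) {
        uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
        uint8_t afTrigger = ACAMERA_CONTROL_AF_TRIGGER_START;
        camera_status_t status = ACaptureRequest_setEntry_u8(
                request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
        if (status != ACAMERA_OK) return status;
        return ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER,
                                           1, &afTrigger);
    }
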
/**
* <p>Whether auto-white balance (AWB) is currently locked to its
* latest calculated values.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
* and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
@@ -699,18 +731,20 @@
*
* @see ACAMERA_CONTROL_AWB_MODE
*/
- ACAMERA_CONTROL_AWB_LOCK = // byte (enum)
+ ACAMERA_CONTROL_AWB_LOCK = // byte (acamera_metadata_enum_android_control_awb_lock_t)
ACAMERA_CONTROL_START + 10,
/**
* <p>Whether auto-white balance (AWB) is currently setting the color
* transform fields, and what its illumination target
* is.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
* <p>When set to the ON mode, the camera device's auto-white balance
@@ -739,17 +773,19 @@
* @see ACAMERA_CONTROL_AWB_LOCK
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_CONTROL_AWB_MODE = // byte (enum)
+ ACAMERA_CONTROL_AWB_MODE = // byte (acamera_metadata_enum_android_control_awb_mode_t)
ACAMERA_CONTROL_START + 11,
/**
* <p>List of metering areas to use for auto-white-balance illuminant
* estimation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[5*area_count]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Not available if android.control.maxRegionsAwb is 0.
* Otherwise will always be present.</p>
@@ -791,11 +827,13 @@
* of this capture, to help the camera device to decide optimal 3A
* strategy.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_capture_intent_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control (except for MANUAL) is only effective if
* <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
@@ -807,16 +845,18 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
*/
- ACAMERA_CONTROL_CAPTURE_INTENT = // byte (enum)
+ ACAMERA_CONTROL_CAPTURE_INTENT = // byte (acamera_metadata_enum_android_control_capture_intent_t)
ACAMERA_CONTROL_START + 13,
/**
* <p>A special color effect to apply.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_effect_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When this mode is set, a color effect will be applied
* to images produced by the camera device. The interpretation
@@ -825,17 +865,19 @@
* depended on to be consistent (or present) across all
* devices.</p>
*/
- ACAMERA_CONTROL_EFFECT_MODE = // byte (enum)
+ ACAMERA_CONTROL_EFFECT_MODE = // byte (acamera_metadata_enum_android_control_effect_mode_t)
ACAMERA_CONTROL_START + 14,
/**
* <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
* routines.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This is a top-level 3A control switch. When set to OFF, all 3A control
* by the camera device is disabled. The application must set the fields for
@@ -856,16 +898,18 @@
*
* @see ACAMERA_CONTROL_AF_MODE
*/
- ACAMERA_CONTROL_MODE = // byte (enum)
+ ACAMERA_CONTROL_MODE = // byte (acamera_metadata_enum_android_control_mode_t)
ACAMERA_CONTROL_START + 15,
/**
* <p>Control for which scene mode is currently active.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_scene_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Scene modes are custom camera modes optimized for a certain set of conditions and
* capture settings.</p>
@@ -883,17 +927,19 @@
* @see ACAMERA_CONTROL_AWB_MODE
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_CONTROL_SCENE_MODE = // byte (enum)
+ ACAMERA_CONTROL_SCENE_MODE = // byte (acamera_metadata_enum_android_control_scene_mode_t)
ACAMERA_CONTROL_START + 16,
/**
* <p>Whether video stabilization is
* active.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Video stabilization automatically warps images from
* the camera in order to stabilize motion between consecutive frames.</p>
@@ -923,7 +969,7 @@
* @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
* @see ACAMERA_SCALER_CROP_REGION
*/
- ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE = // byte (enum)
+ ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE = // byte (acamera_metadata_enum_android_control_video_stabilization_mode_t)
ACAMERA_CONTROL_START + 17,
/**
* <p>List of auto-exposure antibanding modes for ACAMERA_CONTROL_AE_ANTIBANDING_MODE that are
@@ -931,10 +977,12 @@
*
* @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all of the auto-exposure anti-banding modes may be
* supported by a given camera device. This field lists the
@@ -952,10 +1000,12 @@
*
* @see ACAMERA_CONTROL_AE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-exposure modes may be supported by a
* given camera device, especially if no flash unit is
@@ -980,10 +1030,12 @@
*
* @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>For devices at the LEGACY level or above:</p>
* <ul>
@@ -1025,12 +1077,13 @@
* @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
* @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_CONTROL_AE_COMPENSATION_RANGE = // int32[2]
ACAMERA_CONTROL_START + 21,
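
To make the range/step relationship concrete, a sketch (helper name and error fallback are ours) that converts a desired EV adjustment into ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION steps using the rational step size and clamps it to the advertised range:

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* E.g. with a 1/3 EV step, +2 EV maps to +6 compensation steps. */
    static int32_t ev_to_compensation_steps(const ACameraMetadata* chars,
                                            float desiredEv) {
        ACameraMetadata_const_entry range = {0};
        ACameraMetadata_const_entry step = {0};
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AE_COMPENSATION_RANGE, &range) != ACAMERA_OK ||
            ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AE_COMPENSATION_STEP, &step) != ACAMERA_OK) {
            return 0;  /* no compensation if either key is missing */
        }
        float evPerStep = (float)step.data.r[0].numerator /
                          (float)step.data.r[0].denominator;
        int32_t steps = (int32_t)(desiredEv / evPerStep);
        if (steps < range.data.i32[0]) steps = range.data.i32[0];
        if (steps > range.data.i32[1]) steps = range.data.i32[1];
        return steps;
    }
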
@@ -1038,10 +1091,12 @@
* <p>Smallest step by which the exposure compensation
* can be changed.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
* a value of <code>1/2</code>, then a setting of <code>-2</code> for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
@@ -1059,10 +1114,12 @@
*
* @see ACAMERA_CONTROL_AF_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-focus modes may be supported by a
* given camera device. This entry lists the valid modes for
@@ -1086,10 +1143,12 @@
*
* @see ACAMERA_CONTROL_EFFECT_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains the color effect modes that can be applied to
* images produced by the camera device.
@@ -1111,10 +1170,12 @@
*
* @see ACAMERA_CONTROL_SCENE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains scene modes that can be set for the camera device.
* Only scene modes that have been fully implemented for the
@@ -1136,10 +1197,12 @@
*
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>OFF will always be listed.</p>
*/
@@ -1151,10 +1214,12 @@
*
* @see ACAMERA_CONTROL_AWB_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Not all the auto-white-balance modes may be supported by a
* given camera device. This entry lists the valid modes for
@@ -1183,22 +1248,25 @@
* @see ACAMERA_CONTROL_AF_REGIONS
* @see ACAMERA_CONTROL_AWB_REGIONS
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_CONTROL_MAX_REGIONS = // int32[3]
ACAMERA_CONTROL_START + 28,
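
The region keys above share one layout: five int32 values per region, (xmin, ymin, xmax, ymax, weight), in active-array coordinates. A hedged example that meters exposure on the central part of the frame (the rectangle and weight are arbitrary placeholders, and availability depends on android.control.maxRegionsAe):

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Meter AE on a centered rectangle covering the middle of the active array. */
    static camera_status_t meter_on_center(ACaptureRequest* request,
                                           int32_t activeW, int32_t activeH) {
        int32_t region[5] = {
            activeW / 4,     activeH / 4,      /* xmin, ymin */
            activeW * 3 / 4, activeH * 3 / 4,  /* xmax, ymax */
            1000                               /* weight; 0 means "ignore" */
        };
        return ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS,
                                            5, region);
    }
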
/**
* <p>Current state of the auto-exposure (AE) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
* resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1257,15 +1325,17 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AE_STATE = // byte (enum)
+ ACAMERA_CONTROL_AE_STATE = // byte (acamera_metadata_enum_android_control_ae_state_t)
ACAMERA_CONTROL_START + 31,
/**
* <p>Current state of auto-focus (AF) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_af_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
* resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1357,15 +1427,17 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AF_STATE = // byte (enum)
+ ACAMERA_CONTROL_AF_STATE = // byte (acamera_metadata_enum_android_control_af_state_t)
ACAMERA_CONTROL_START + 32,
/**
* <p>Current state of auto-white balance (AWB) algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
* resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
@@ -1408,37 +1480,41 @@
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
- ACAMERA_CONTROL_AWB_STATE = // byte (enum)
+ ACAMERA_CONTROL_AWB_STATE = // byte (acamera_metadata_enum_android_control_awb_state_t)
ACAMERA_CONTROL_START + 34,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AE_LOCK</p>
*
* @see ACAMERA_CONTROL_AE_LOCK
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_lock_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
* list <code>true</code>. This includes FULL devices.</p>
*/
- ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (enum)
+ ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (acamera_metadata_enum_android_control_ae_lock_available_t)
ACAMERA_CONTROL_START + 36,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AWB_LOCK</p>
*
* @see ACAMERA_CONTROL_AWB_LOCK
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_awb_lock_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
* always list <code>true</code>. This includes FULL devices.</p>
*/
- ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (enum)
+ ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (acamera_metadata_enum_android_control_awb_lock_available_t)
ACAMERA_CONTROL_START + 37,
/**
* <p>List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
@@ -1446,10 +1522,12 @@
*
* @see ACAMERA_CONTROL_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains control modes that can be set for the camera device.
* LEGACY mode devices will always support AUTO mode. LIMITED and FULL
@@ -1463,10 +1541,12 @@
*
* @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Devices that support post RAW sensitivity boost will advertise the
* ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
@@ -1484,11 +1564,13 @@
* <p>The amount of additional sensitivity boost applied to output images
* after RAW sensor data is captured.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Some camera devices support additional digital sensitivity boosting in the
* camera processing pipeline after sensor RAW image is captured.
@@ -1521,11 +1603,13 @@
*
* @see ACAMERA_CONTROL_CAPTURE_INTENT
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_enable_zsl_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
* STILL_CAPTURE capture intent. The camera device may use images captured in the past to
@@ -1552,7 +1636,7 @@
* @see ACAMERA_CONTROL_CAPTURE_INTENT
* @see ACAMERA_SENSOR_TIMESTAMP
*/
- ACAMERA_CONTROL_ENABLE_ZSL = // byte (enum)
+ ACAMERA_CONTROL_ENABLE_ZSL = // byte (acamera_metadata_enum_android_control_enable_zsl_t)
ACAMERA_CONTROL_START + 41,
ACAMERA_CONTROL_END,
@@ -1560,11 +1644,13 @@
* <p>Operation mode for edge
* enhancement.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_edge_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Edge enhancement improves sharpness and details in the captured image. OFF means
* no enhancement will be applied by the camera device.</p>
@@ -1586,7 +1672,7 @@
* The camera device may adjust its internal edge enhancement parameters for best
* image quality based on the android.reprocess.effectiveExposureFactor, if it is set.</p>
*/
- ACAMERA_EDGE_MODE = // byte (enum)
+ ACAMERA_EDGE_MODE = // byte (acamera_metadata_enum_android_edge_mode_t)
ACAMERA_EDGE_START,
/**
* <p>List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
@@ -1594,10 +1680,12 @@
*
* @see ACAMERA_EDGE_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Full-capability camera devices must always support OFF; camera devices that support
* YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
@@ -1610,11 +1698,13 @@
/**
* <p>The desired mode for the camera device's flash control.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control is only effective when a flash unit is available
* (<code>ACAMERA_FLASH_INFO_AVAILABLE == true</code>).</p>
@@ -1635,16 +1725,18 @@
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_STATE
*/
- ACAMERA_FLASH_MODE = // byte (enum)
+ ACAMERA_FLASH_MODE = // byte (acamera_metadata_enum_android_flash_mode_t)
ACAMERA_FLASH_START + 2,
/**
* <p>Current state of the flash
* unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>When the camera device doesn't have a flash unit
* (i.e. <code>ACAMERA_FLASH_INFO_AVAILABLE == false</code>), this state will always be UNAVAILABLE.
@@ -1664,7 +1756,7 @@
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_MODE
*/
- ACAMERA_FLASH_STATE = // byte (enum)
+ ACAMERA_FLASH_STATE = // byte (acamera_metadata_enum_android_flash_state_t)
ACAMERA_FLASH_START + 5,
ACAMERA_FLASH_END,
@@ -1672,33 +1764,37 @@
* <p>Whether this camera device has a
* flash unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_flash_info_available_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Will be <code>false</code> if no flash is available.</p>
* <p>If there is no flash unit, none of the flash controls do
* anything.</p>
*/
- ACAMERA_FLASH_INFO_AVAILABLE = // byte (enum)
+ ACAMERA_FLASH_INFO_AVAILABLE = // byte (acamera_metadata_enum_android_flash_info_available_t)
ACAMERA_FLASH_INFO_START,
ACAMERA_FLASH_INFO_END,
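
A short sketch for the flash keys above: query ACAMERA_FLASH_INFO_AVAILABLE from the static characteristics before attempting to set ACAMERA_FLASH_MODE (the chars handle is assumed to come from ACameraManager_getCameraCharacteristics()):

    #include <stdbool.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Returns true only when the device reports a flash unit. */
    static bool has_flash_unit(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry = {0};
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_FLASH_INFO_AVAILABLE,
                                          &entry) != ACAMERA_OK ||
            entry.count < 1) {
            return false;
        }
        return entry.data.u8[0] == ACAMERA_FLASH_INFO_AVAILABLE_TRUE;
    }
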
/**
* <p>Operational mode for hot pixel correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_hot_pixel_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Hotpixel correction interpolates out, or otherwise removes, pixels
* that do not accurately measure the incoming light (i.e. pixels that
* are stuck at an arbitrary value or are oversensitive).</p>
*/
- ACAMERA_HOT_PIXEL_MODE = // byte (enum)
+ ACAMERA_HOT_PIXEL_MODE = // byte (acamera_metadata_enum_android_hot_pixel_mode_t)
ACAMERA_HOT_PIXEL_START,
/**
* <p>List of hot pixel correction modes for ACAMERA_HOT_PIXEL_MODE that are supported by this
@@ -1706,10 +1802,12 @@
*
* @see ACAMERA_HOT_PIXEL_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>FULL mode camera devices will always support FAST.</p>
*/
@@ -1721,13 +1819,14 @@
* <p>GPS coordinates to include in output JPEG
* EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: double[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_COORDINATES = // double[3]
ACAMERA_JPEG_START,
@@ -1735,13 +1834,14 @@
* <p>32 characters describing GPS algorithm to
* include in EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte
ACAMERA_JPEG_START + 1,
@@ -1749,24 +1849,27 @@
* <p>Time GPS fix was made to include in
* EXIF.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_GPS_TIMESTAMP = // int64
ACAMERA_JPEG_START + 2,
/**
* <p>The orientation for a JPEG image.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The clockwise rotation angle in degrees, relative to the orientation
* of the camera, that the JPEG picture needs to be rotated by, to be viewed
@@ -1805,11 +1908,13 @@
* <p>Compression quality of the final JPEG
* image.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>85-95 is typical usage range.</p>
*/
@@ -1819,24 +1924,27 @@
* <p>Compression quality of JPEG
* thumbnail.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte
ACAMERA_JPEG_START + 5,
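
As a usage sketch for the ACAMERA_JPEG_* request keys above; all concrete values below are placeholders, not recommendations:

    #include <stdint.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Fill in the EXIF-related JPEG fields on a still-capture request. */
    static void set_jpeg_exif_fields(ACaptureRequest* request) {
        uint8_t quality = 90;                         /* 85-95 is typical    */
        uint8_t thumbQuality = 80;
        int64_t gpsTimestamp = 0;                     /* time of the GPS fix */
        double gpsCoords[3] = {37.42, -122.08, 30.0}; /* placeholder fix     */

        ACaptureRequest_setEntry_u8(request, ACAMERA_JPEG_QUALITY, 1, &quality);
        ACaptureRequest_setEntry_u8(request, ACAMERA_JPEG_THUMBNAIL_QUALITY,
                                    1, &thumbQuality);
        ACaptureRequest_setEntry_i64(request, ACAMERA_JPEG_GPS_TIMESTAMP,
                                     1, &gpsTimestamp);
        ACaptureRequest_setEntry_double(request, ACAMERA_JPEG_GPS_COORDINATES,
                                        3, gpsCoords);
    }
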
/**
* <p>Resolution of embedded JPEG thumbnail.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
* but the captured JPEG will still be a valid image.</p>
@@ -1871,10 +1979,12 @@
*
* @see ACAMERA_JPEG_THUMBNAIL_SIZE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
* thumbnail should be generated.</p>
@@ -1902,11 +2012,13 @@
* <p>The desired lens aperture size, as a ratio of lens focal length to the
* effective aperture diameter.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Setting this value is only supported on the camera devices that have a variable
* aperture lens.</p>
@@ -1934,11 +2046,13 @@
/**
* <p>The desired setting for the lens neutral density filter(s).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control will not be supported on most camera devices.</p>
* <p>Lens filters are typically used to lower the amount of light the
@@ -1960,11 +2074,13 @@
/**
* <p>The desired lens focal length; used for optical zoom.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This setting controls the physical focal length of the camera
* device's lens. Changing the focal length changes the field of
@@ -1986,11 +2102,13 @@
* <p>Desired distance to plane of sharpest focus,
* measured from frontmost surface of the lens.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Should be zero for fixed-focus cameras.</p>
*/
@@ -2000,11 +2118,13 @@
* <p>Sets whether the camera device uses optical image stabilization (OIS)
* when capturing images.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>OIS is used to compensate for motion blur due to small
* movements of the camera during capture. Unlike digital image
@@ -2027,30 +2147,33 @@
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
* @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
*/
- ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (enum)
+ ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
ACAMERA_LENS_START + 4,
/**
* <p>Direction the camera faces relative to
* device screen.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_facing_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
- ACAMERA_LENS_FACING = // byte (enum)
+ ACAMERA_LENS_FACING = // byte (acamera_metadata_enum_android_lens_facing_t)
ACAMERA_LENS_START + 5,
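
For the ACAMERA_LENS_FACING key above, a hedged enumeration sketch that picks the first back-facing camera; error handling is abbreviated, and the returned pointer is only valid while the id list stays alive:

    #include <stddef.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Find a back-facing camera id; caller frees *idListOut afterwards. */
    static const char* find_back_camera(ACameraManager* mgr,
                                        ACameraIdList** idListOut) {
        ACameraIdList* idList = NULL;
        if (ACameraManager_getCameraIdList(mgr, &idList) != ACAMERA_OK) {
            return NULL;
        }
        const char* found = NULL;
        for (int i = 0; i < idList->numCameras && found == NULL; i++) {
            ACameraMetadata* chars = NULL;
            if (ACameraManager_getCameraCharacteristics(
                    mgr, idList->cameraIds[i], &chars) != ACAMERA_OK) {
                continue;
            }
            ACameraMetadata_const_entry facing = {0};
            if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_FACING,
                                              &facing) == ACAMERA_OK &&
                facing.data.u8[0] == ACAMERA_LENS_FACING_BACK) {
                found = idList->cameraIds[i];
            }
            ACameraMetadata_free(chars);
        }
        *idListOut = idList;  /* free with ACameraManager_deleteCameraIdList() */
        return found;
    }
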
/**
* <p>The orientation of the camera relative to the sensor
* coordinate system.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The four coefficients that describe the quaternion
* rotation from the Android sensor coordinate system to a
@@ -2084,11 +2207,13 @@
/**
* <p>Position of the camera optical center.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The position of the camera device's lens optical center,
* as a three-dimensional vector <code>(x,y,z)</code>, relative to the
@@ -2129,10 +2254,12 @@
* <p>The range of scene distances that are in
* sharp focus (depth of field).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>If variable focus is not supported, the camera device can still report a
* fixed depth of field range.</p>
@@ -2142,10 +2269,12 @@
/**
* <p>Current lens status.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_state_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
* ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
@@ -2176,17 +2305,19 @@
* @see ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_LENS_STATE = // byte (enum)
+ ACAMERA_LENS_STATE = // byte (acamera_metadata_enum_android_lens_state_t)
ACAMERA_LENS_START + 9,
/**
* <p>The parameters for this camera device's intrinsic
* calibration.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[5]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The five calibration parameters that describe the
* transform from camera-centric 3D coordinates to sensor
@@ -2245,11 +2376,13 @@
* <p>The correction coefficients to correct for this camera device's
* radial and tangential lens distortion.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[6]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
* kappa_3]</code> and two tangential distortion coefficients
@@ -2290,10 +2423,12 @@
*
* @see ACAMERA_LENS_APERTURE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the camera device doesn't support a variable lens aperture,
* this list will contain only one value, which is the fixed aperture size.</p>
@@ -2308,10 +2443,12 @@
*
* @see ACAMERA_LENS_FILTER_DENSITY
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If a neutral density filter is not supported by this camera device,
* this list will contain only 0. Otherwise, this list will include every
@@ -2325,10 +2462,12 @@
*
* @see ACAMERA_LENS_FOCAL_LENGTH
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If optical zoom is not supported, this list will only contain
* a single value corresponding to the fixed focal length of the
@@ -2343,10 +2482,12 @@
*
* @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If OIS is not supported by a given camera device, this list will
* contain only OFF.</p>
@@ -2356,10 +2497,12 @@
/**
* <p>Hyperfocal distance for this lens.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the lens is not fixed focus, the camera device will report this
* field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.</p>
@@ -2372,10 +2515,12 @@
* <p>Shortest distance from frontmost surface
* of the lens that can be brought into sharp focus.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the lens is fixed-focus, this will be
* 0.</p>
@@ -2385,10 +2530,12 @@
/**
* <p>Dimensions of lens shading map.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The map should be on the order of 30-40 rows and columns, and
* must be smaller than 64x64.</p>
@@ -2398,10 +2545,12 @@
/**
* <p>The lens focus distance calibration quality.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The lens focus distance calibration quality determines the reliability of
* focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
@@ -2422,18 +2571,20 @@
* @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
* @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
*/
- ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION = // byte (enum)
+ ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION = // byte (acamera_metadata_enum_android_lens_info_focus_distance_calibration_t)
ACAMERA_LENS_INFO_START + 7,
ACAMERA_LENS_INFO_END,
/**
* <p>Mode of operation for the noise reduction algorithm.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_noise_reduction_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The noise reduction algorithm attempts to improve image quality by removing
* excessive noise added by the capture process, especially in dark conditions.</p>
@@ -2463,7 +2614,7 @@
*
* @see ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
*/
- ACAMERA_NOISE_REDUCTION_MODE = // byte (enum)
+ ACAMERA_NOISE_REDUCTION_MODE = // byte (acamera_metadata_enum_android_noise_reduction_mode_t)
ACAMERA_NOISE_REDUCTION_START,
/**
* <p>List of noise reduction modes for ACAMERA_NOISE_REDUCTION_MODE that are supported
@@ -2471,10 +2622,12 @@
*
* @see ACAMERA_NOISE_REDUCTION_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Full-capability camera devices will always support OFF and FAST.</p>
* <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
@@ -2489,10 +2642,12 @@
* <p>The maximum numbers of different types of output streams
* that can be configured and used simultaneously by a camera device.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is a 3-element tuple that contains the max number of simultaneous output
* streams for raw sensor, processed (but not stalling), and processed (and stalling)
@@ -2523,10 +2678,12 @@
* through from when it was exposed to when the final completed result
* was available to the framework.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Depending on what settings are used in the request, and
* what streams are configured, the data may undergo less processing,
@@ -2542,10 +2699,12 @@
* has to go through from when it's exposed to when it's available
* to the framework.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A typical minimum value for this is 2 (one stage to expose,
* one stage to readout) from the sensor. The ISP then usually adds
@@ -2568,10 +2727,12 @@
* <p>Defines how many sub-components
* a result will be composed of.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>In order to combat the pipeline latency, partial results
* may be delivered to the application layer from the camera device as
@@ -2592,10 +2753,12 @@
* <p>List of capabilities that this camera device
* advertises as fully supporting.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A capability is a contract that the camera device makes in order
* to be able to satisfy one or more use cases.</p>
@@ -2620,16 +2783,18 @@
* @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
* @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
*/
- ACAMERA_REQUEST_AVAILABLE_CAPABILITIES = // byte[n] (enum)
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES = // byte[n] (acamera_metadata_enum_android_request_available_capabilities_t)
ACAMERA_REQUEST_START + 12,
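
A brief sketch for the capabilities list above: scan the byte array for a specific capability value, for example ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR.

    #include <stdbool.h>
    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* True if the device lists the given capability value. */
    static bool has_capability(const ACameraMetadata* chars, uint8_t capability) {
        ACameraMetadata_const_entry caps = {0};
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &caps) != ACAMERA_OK) {
            return false;
        }
        for (uint32_t i = 0; i < caps.count; i++) {
            if (caps.data.u8[i] == capability) {
                return true;
            }
        }
        return false;
    }
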
/**
* <p>A list of all keys that the camera device has available
* to use with {@link ACaptureRequest}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to set a key into a CaptureRequest that is not
* listed here will result in an invalid request and will be rejected
@@ -2648,10 +2813,12 @@
* to query with {@link ACameraMetadata} from
* {@link ACameraCaptureSession_captureCallback_result}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to get a key from a CaptureResult that is not
* listed here will always return a <code>null</code> value. Getting a key from
@@ -2679,10 +2846,12 @@
* to query with {@link ACameraMetadata} from
* {@link ACameraManager_getCameraCharacteristics}.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This entry follows the same rules as
* ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
@@ -2698,11 +2867,13 @@
/**
* <p>The desired region of the sensor to read out for this capture.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>This control can be used to implement digital zoom.</p>
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
@@ -2748,10 +2919,12 @@
*
* @see ACAMERA_SCALER_CROP_REGION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This represents the maximum amount of zooming possible by
* the camera device, or equivalently, the minimum cropping
@@ -2767,10 +2940,12 @@
* camera device supports
* (i.e. format, width, height, output/input stream).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The configurations are listed as <code>(format, width, height, input?)</code>
* tuples.</p>
@@ -2805,16 +2980,18 @@
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
*/
- ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
+ ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_t)
ACAMERA_SCALER_START + 10,
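
To illustrate walking the (format, width, height, input?) tuples above, a sketch that returns the largest output resolution for a given image format; passing AIMAGE_FORMAT_JPEG from <media/NdkImage.h> for JPEG outputs is our assumption here:

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    /* Largest output (width, height) advertised for `format`; 0x0 if none. */
    static void largest_output_size(const ACameraMetadata* chars, int32_t format,
                                    int32_t* outW, int32_t* outH) {
        *outW = 0;
        *outH = 0;
        ACameraMetadata_const_entry cfg = {0};
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                &cfg) != ACAMERA_OK) {
            return;
        }
        for (uint32_t i = 0; i + 3 < cfg.count; i += 4) {
            if (cfg.data.i32[i] != format ||
                cfg.data.i32[i + 3] !=
                    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                continue;
            }
            int64_t area = (int64_t)cfg.data.i32[i + 1] * cfg.data.i32[i + 2];
            if (area > (int64_t)(*outW) * (*outH)) {
                *outW = cfg.data.i32[i + 1];
                *outH = cfg.data.i32[i + 2];
            }
        }
    }
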
/**
* <p>This lists the minimum frame duration for each
* format/size combination.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This should correspond to the frame duration when only that
* stream is active, with all processing (typically in android.*.mode)
@@ -2836,10 +3013,12 @@
* <p>This lists the maximum stall duration for each
* output format/size combination.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A stall duration is how much extra time would get added
* to the normal minimum frame duration for a repeating request
@@ -2904,10 +3083,12 @@
/**
* <p>The crop type that this camera device supports.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_scaler_cropping_type_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
* device that only supports CENTER_ONLY cropping, the camera device will move the
@@ -2922,7 +3103,7 @@
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
*/
- ACAMERA_SCALER_CROPPING_TYPE = // byte (enum)
+ ACAMERA_SCALER_CROPPING_TYPE = // byte (acamera_metadata_enum_android_scaler_cropping_type_t)
ACAMERA_SCALER_START + 13,
ACAMERA_SCALER_END,
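
Tying the crop-region and cropping-type keys together, a hedged digital-zoom sketch: crop a centered rectangle that is 1/zoom of the active array. The active-array dimensions are assumed inputs, and zoom should not exceed ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM.

    #include <stdint.h>
    #include <camera/NdkCameraError.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    /* Apply digital zoom via a centered (left, top, width, height) crop. */
    static camera_status_t apply_digital_zoom(ACaptureRequest* request,
                                              int32_t activeW, int32_t activeH,
                                              float zoom) {
        if (zoom < 1.0f) {
            zoom = 1.0f;
        }
        int32_t cropW = (int32_t)((float)activeW / zoom);
        int32_t cropH = (int32_t)((float)activeH / zoom);
        int32_t crop[4] = {
            (activeW - cropW) / 2,  /* left   */
            (activeH - cropH) / 2,  /* top    */
            cropW,                  /* width  */
            cropH                   /* height */
        };
        return ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION,
                                            4, crop);
    }
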
@@ -2930,11 +3111,13 @@
* <p>Duration each pixel is exposed to
* light.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If the sensor can't expose this exact duration, it will shorten the
* duration exposed to the nearest possible value (rather than expose longer).
@@ -2951,11 +3134,13 @@
* <p>Duration from start of frame exposure to
* start of next frame exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The maximum frame rate that can be supported by a camera subsystem is
* a function of many factors:</p>
@@ -3037,11 +3222,13 @@
* <p>The amount of gain applied to sensor data
* before processing.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The sensitivity is the standard ISO sensitivity value,
* as defined in ISO 12232:2006.</p>
@@ -3072,10 +3259,12 @@
* @see ACAMERA_SENSOR_COLOR_TRANSFORM1
* @see ACAMERA_SENSOR_FORWARD_MATRIX1
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The values in this key correspond to the values defined for the
* EXIF LightSource tag. These illuminants are standard light sources
@@ -3092,7 +3281,7 @@
* @see ACAMERA_SENSOR_FORWARD_MATRIX1
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
- ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 = // byte (enum)
+ ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 = // byte (acamera_metadata_enum_android_sensor_reference_illuminant1_t)
ACAMERA_SENSOR_START + 3,
/**
* <p>The standard reference illuminant used as the scene light source when
@@ -3104,10 +3293,12 @@
* @see ACAMERA_SENSOR_COLOR_TRANSFORM2
* @see ACAMERA_SENSOR_FORWARD_MATRIX2
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.</p>
* <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
@@ -3125,10 +3316,12 @@
* <p>A per-device calibration transform matrix that maps from the
* reference sensor colorspace to the actual device sensor colorspace.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to correct for per-device variations in the
* sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3148,10 +3341,12 @@
* reference sensor colorspace to the actual device sensor colorspace
* (this is the colorspace of the raw buffer data).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to correct for per-device variations in the
* sensor colorspace, and is used for processing raw buffer data.</p>
@@ -3172,10 +3367,12 @@
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert from the standard CIE XYZ color
* space to the reference sensor colorspace, and is used when processing
@@ -3198,10 +3395,12 @@
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert from the standard CIE XYZ color
* space to the reference sensor colorspace, and is used when processing
@@ -3226,10 +3425,12 @@
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
* is used when processing raw buffer data.</p>
@@ -3250,10 +3451,12 @@
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3*3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
* is used when processing raw buffer data.</p>
@@ -3276,10 +3479,12 @@
* <p>A fixed black level offset for each of the color filter arrangement
* (CFA) mosaic channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This key specifies the zero light value for each of the CFA mosaic
* channels in the camera sensor. The maximal value output by the
@@ -3310,10 +3515,12 @@
* <p>Maximum sensitivity that is implemented
* purely through analog gain.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>For ACAMERA_SENSOR_SENSITIVITY values less than or
* equal to this, all applied gain must be analog. For
@@ -3328,10 +3535,12 @@
* <p>Clockwise angle through which the output image needs to be rotated to be
* upright on the device screen in its native orientation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
* the sensor's coordinate system.</p>
@@ -3342,10 +3551,12 @@
* <p>Time at start of exposure of first
* row of the image sensor active array, in nanoseconds.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The timestamps are also included in all image
* buffers produced for the same capture, and will be identical
@@ -3374,10 +3585,12 @@
* <p>The estimated camera neutral color in the native sensor colorspace at
* the time of capture.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: rational[3]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This value gives the neutral color point encoded as an RGB value in the
* native sensor color space. The neutral color point indicates the
@@ -3391,10 +3604,12 @@
/**
* <p>Noise model coefficients for each CFA mosaic channel.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: double[2*CFA Channels]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This key contains two noise model coefficients for each CFA channel
* corresponding to the sensor amplification (S) and sensor readout
@@ -3421,10 +3636,12 @@
/**
* <p>The worst-case divergence between Bayer green channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This value is an estimate of the worst case split between the
* Bayer green channels in the red and blue rows in the sensor color
@@ -3465,11 +3682,13 @@
*
* @see ACAMERA_SENSOR_TEST_PATTERN_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Each color channel is treated as an unsigned 32-bit integer.
* The camera device then uses the most significant X bits
@@ -3484,11 +3703,13 @@
* <p>When enabled, the sensor sends a test pattern instead of
* doing a real exposure from the camera.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When a test pattern is enabled, all manual sensor controls specified
* by ACAMERA_SENSOR_* will be ignored. All other controls should
@@ -3498,7 +3719,7 @@
* would not actually affect it).</p>
* <p>Defaults to OFF.</p>
*/
- ACAMERA_SENSOR_TEST_PATTERN_MODE = // int32 (enum)
+ ACAMERA_SENSOR_TEST_PATTERN_MODE = // int32 (acamera_metadata_enum_android_sensor_test_pattern_mode_t)
ACAMERA_SENSOR_START + 24,
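    // Illustrative usage sketch (not part of this patch): selecting a test pattern on a
    // capture request via the NDK, assuming a valid ACaptureRequest* named `request`.
    //
    //     int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS;
    //     camera_status_t status = ACaptureRequest_setEntry_i32(
    //             request, ACAMERA_SENSOR_TEST_PATTERN_MODE, 1, &mode);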
/**
* <p>List of sensor test pattern modes for ACAMERA_SENSOR_TEST_PATTERN_MODE
@@ -3506,10 +3727,12 @@
*
* @see ACAMERA_SENSOR_TEST_PATTERN_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Defaults to OFF, and always includes OFF if defined.</p>
*/
@@ -3519,10 +3742,12 @@
* <p>Duration between the start of first row exposure
* and the start of last row exposure.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the exposure time skew between the first and last
* row exposure start times. The first row and the last row are
@@ -3539,10 +3764,12 @@
* <p>List of disjoint rectangles indicating the sensor
* optically shielded black pixel regions.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4*num_regions]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>In most camera sensors, the active array is surrounded by some
* optically shielded pixel areas. By blocking light, these pixels
@@ -3569,10 +3796,12 @@
* <p>A per-frame dynamic black level offset for each of the color filter
* arrangement (CFA) mosaic channels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Camera sensor black levels may vary dramatically for different
* capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
@@ -3610,10 +3839,12 @@
/**
* <p>Maximum raw value output by sensor for this frame.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
* capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
@@ -3637,10 +3868,12 @@
* <p>The area of the image sensor which corresponds to active pixels after any geometric
* distortion correction has been applied.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the rectangle representing the size of the active region of the sensor (i.e.
* the region that actually receives light from the scene) after any geometric correction
@@ -3668,10 +3901,12 @@
*
* @see ACAMERA_SENSOR_SENSITIVITY
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The values are the standard ISO sensitivity values,
* as defined in ISO 12232:2006.</p>
@@ -3683,14 +3918,15 @@
* represents the colors in the top-left 2x2 section of
* the sensor, in reading order.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
- ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT = // byte (enum)
+ ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT = // byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)
ACAMERA_SENSOR_INFO_START + 2,
/**
* <p>The range of image exposure times for ACAMERA_SENSOR_EXPOSURE_TIME supported
@@ -3698,12 +3934,13 @@
*
* @see ACAMERA_SENSOR_EXPOSURE_TIME
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE = // int64[2]
ACAMERA_SENSOR_INFO_START + 3,
@@ -3713,10 +3950,12 @@
*
* @see ACAMERA_SENSOR_FRAME_DURATION
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Attempting to use frame durations beyond the maximum will result in the frame
* duration being clipped to the maximum. See that control for a full definition of frame
@@ -3731,10 +3970,12 @@
* <p>The physical dimensions of the full pixel
* array.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This is the physical size of the sensor pixel
* array defined by ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
@@ -3747,10 +3988,12 @@
* <p>Dimensions of the full pixel array, possibly
* including black calibration pixels.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The pixel count of the full pixel array of the image sensor, which covers
* ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
@@ -3773,10 +4016,12 @@
/**
* <p>Maximum raw value output by sensor.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This specifies the fully-saturated encoding level for the raw
* sample values from the sensor. This is typically caused by the
@@ -3802,26 +4047,30 @@
/**
* <p>The time base source for sensor capture start timestamps.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
* may not be based on a time source that can be compared to other system time sources.</p>
* <p>This characteristic defines the source for the timestamps, and therefore whether they
* can be compared against other system time sources/timestamps.</p>
*/
- ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE = // byte (enum)
+ ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE = // byte (acamera_metadata_enum_android_sensor_info_timestamp_source_t)
ACAMERA_SENSOR_INFO_START + 8,
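    // Illustrative usage sketch (not part of this patch): querying the timestamp source from
    // the static characteristics, assuming a valid ACameraMetadata* named `chars`.
    //
    //     ACameraMetadata_const_entry entry;
    //     if (ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE,
    //                                       &entry) == ACAMERA_OK &&
    //             entry.data.u8[0] == ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
    //         // Capture timestamps are comparable to other CLOCK_BOOTTIME-based timestamps.
    //     }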
/**
* <p>Whether the RAW images output from this camera device are subject to
* lens shading correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If TRUE, all images produced by the camera device in the RAW image formats will
* have lens shading correction already applied to it. If FALSE, the images will
@@ -3830,16 +4079,18 @@
* <p>This key will be <code>null</code> for all devices that do not report this information.
* Devices with RAW capability will always report this information in this key.</p>
*/
- ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED = // byte (enum)
+ ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED = // byte (acamera_metadata_enum_android_sensor_info_lens_shading_applied_t)
ACAMERA_SENSOR_INFO_START + 9,
/**
* <p>The area of the image sensor which corresponds to active pixels prior to the
* application of any geometric distortion correction.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
* <p>This is the rectangle representing the size of the active region of the sensor (i.e.
@@ -3906,11 +4157,13 @@
* <p>Quality of lens shading correction applied
* to the image data.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_shading_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to OFF mode, no lens shading correction will be applied by the
* camera device, and an identity lens shading map data will be provided
@@ -3940,17 +4193,19 @@
* @see ACAMERA_CONTROL_AWB_MODE
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
*/
- ACAMERA_SHADING_MODE = // byte (enum)
+ ACAMERA_SHADING_MODE = // byte (acamera_metadata_enum_android_shading_mode_t)
ACAMERA_SHADING_START,
/**
* <p>List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.</p>
*
* @see ACAMERA_SHADING_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This list contains lens shading modes that can be set for the camera device.
* Camera devices that support the MANUAL_POST_PROCESSING capability will always
@@ -3965,41 +4220,47 @@
* <p>Operating mode for the face detector
* unit.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Whether face detection is enabled, and whether it
* should output just the basic fields or the full set of
* fields.</p>
*/
- ACAMERA_STATISTICS_FACE_DETECT_MODE = // byte (enum)
+ ACAMERA_STATISTICS_FACE_DETECT_MODE = // byte (acamera_metadata_enum_android_statistics_face_detect_mode_t)
ACAMERA_STATISTICS_START,
/**
* <p>Operating mode for hot pixel map generation.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>If set to <code>true</code>, a hot pixel map is returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.
* If set to <code>false</code>, no hot pixel map will be returned.</p>
*
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
*/
- ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE = // byte (enum)
+ ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE = // byte (acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t)
ACAMERA_STATISTICS_START + 3,
/**
* <p>List of unique IDs for detected faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Each detected face is given a unique ID that is valid for as long as the face is visible
* to the camera device. A face that leaves the field of view and later returns may be
@@ -4014,10 +4275,12 @@
* <p>List of landmarks for detected
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*6]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
* <code>(0, 0)</code> being the top-left pixel of the active array.</p>
@@ -4032,10 +4295,12 @@
* <p>List of the bounding rectangles for detected
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
* <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
@@ -4051,10 +4316,12 @@
* <p>List of the face confidence scores for
* detected faces</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
*
@@ -4067,10 +4334,12 @@
* that lists the coefficients used to correct for vignetting and color shading,
* for each Bayer color channel of RAW image data.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[4*n*m]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>The map provided here is the same map that is used by the camera device to
* correct both color shading and vignetting for output non-RAW images.</p>
@@ -4144,10 +4413,12 @@
* <p>The camera device estimated scene illumination lighting
* frequency.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_scene_flicker_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>Many light sources, such as most fluorescent lights, flicker at a rate
* that depends on the local utility power standards. This flicker must be
@@ -4167,15 +4438,17 @@
* @see ACAMERA_CONTROL_AE_MODE
* @see ACAMERA_CONTROL_MODE
*/
- ACAMERA_STATISTICS_SCENE_FLICKER = // byte (enum)
+ ACAMERA_STATISTICS_SCENE_FLICKER = // byte (acamera_metadata_enum_android_statistics_scene_flicker_t)
ACAMERA_STATISTICS_START + 14,
/**
* <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[2*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code>, and
* <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
@@ -4193,11 +4466,13 @@
* <p>Whether the camera device will output the lens
* shading map in output result metadata.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When set to ON,
* ACAMERA_STATISTICS_LENS_SHADING_MAP will be provided in
@@ -4206,7 +4481,7 @@
*
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP
*/
- ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (enum)
+ ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
ACAMERA_STATISTICS_START + 16,
ACAMERA_STATISTICS_END,
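    // Illustrative usage sketch (not part of this patch): turning the shading map on in a
    // request and reading it back from a capture result, assuming valid `request`/`result`.
    //
    //     uint8_t on = ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    //     ACaptureRequest_setEntry_u8(request, ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE, 1, &on);
    //     // Later, in the capture result callback:
    //     ACameraMetadata_const_entry map;
    //     if (ACameraMetadata_getConstEntry(result, ACAMERA_STATISTICS_LENS_SHADING_MAP,
    //                                       &map) == ACAMERA_OK) {
    //         // map.data.f holds the float[4*n*m] per-channel gains described above.
    //     }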
@@ -4216,10 +4491,12 @@
*
* @see ACAMERA_STATISTICS_FACE_DETECT_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>OFF is always supported.</p>
*/
@@ -4229,12 +4506,13 @@
* <p>The maximum number of simultaneously detectable
* faces.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
- * <p>None</p>
*/
ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT = // int32
ACAMERA_STATISTICS_INFO_START + 2,
@@ -4244,10 +4522,12 @@
*
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If no hotpixel map output is available for this camera device, this will contain only
* <code>false</code>.</p>
@@ -4261,10 +4541,12 @@
*
* @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If no lens shading map output is available for this camera device, this key will
* contain only OFF.</p>
@@ -4282,11 +4564,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
*
@@ -4301,11 +4585,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
*
@@ -4320,11 +4606,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float[n*2]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Each channel's curve is defined by an array of control points:</p>
* <pre><code>ACAMERA_TONEMAP_CURVE_RED =
@@ -4375,11 +4663,13 @@
/**
* <p>High-level global contrast/gamma/tonemapping control.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_tonemap_mode_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>When switching to an application-defined contrast curve by setting
* ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
@@ -4402,16 +4692,18 @@
*
* @see ACAMERA_TONEMAP_MODE
*/
- ACAMERA_TONEMAP_MODE = // byte (enum)
+ ACAMERA_TONEMAP_MODE = // byte (acamera_metadata_enum_android_tonemap_mode_t)
ACAMERA_TONEMAP_START + 3,
/**
* <p>Maximum number of supported points in the
* tonemap curve that can be used for android.tonemap.curve.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If the actual number of points provided by the application (in ACAMERA_TONEMAPCURVE_*) is
* less than this maximum, the camera device will resample the curve to its internal
@@ -4428,10 +4720,12 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
* at least one of below mode combinations:</p>
@@ -4449,11 +4743,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: float</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The tonemap curve will be defined by the following formula:
* * OUT = pow(IN, 1.0 / gamma)
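* (For example, with gamma = 2.2, an input of 0.5 maps to pow(0.5, 1.0 / 2.2) ≈ 0.73.)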
@@ -4474,11 +4770,13 @@
*
* @see ACAMERA_TONEMAP_MODE
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_tonemap_preset_curve_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>The tonemap curve will be defined by the specified standard.</p>
* <p>sRGB (approximated by 16 control points):</p>
@@ -4488,17 +4786,19 @@
* <p>Note that above figures show a 16 control points approximation of preset
* curves. Camera devices may apply a different approximation to the curve.</p>
*/
- ACAMERA_TONEMAP_PRESET_CURVE = // byte (enum)
+ ACAMERA_TONEMAP_PRESET_CURVE = // byte (acamera_metadata_enum_android_tonemap_preset_curve_t)
ACAMERA_TONEMAP_START + 7,
ACAMERA_TONEMAP_END,
/**
* <p>Generally classifies the overall set of the camera device functionality.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_info_supported_hardware_level_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>The supported hardware level is a high-level description of the camera device's
* capabilities, summarizing several capabilities into one field. Each level adds additional
@@ -4551,7 +4851,7 @@
* @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
* @see ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
*/
- ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (enum)
+ ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
ACAMERA_INFO_START,
ACAMERA_INFO_END,
@@ -4559,11 +4859,13 @@
* <p>Whether black-level compensation is locked
* to its current values, or is free to vary.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_black_level_lock_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* <li>ACaptureRequest</li>
- * </ul>
+ * </ul></p>
*
* <p>Whether the black level offset was locked for this frame. Should be
* ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
@@ -4572,7 +4874,7 @@
*
* @see ACAMERA_BLACK_LEVEL_LOCK
*/
- ACAMERA_BLACK_LEVEL_LOCK = // byte (enum)
+ ACAMERA_BLACK_LEVEL_LOCK = // byte (acamera_metadata_enum_android_black_level_lock_t)
ACAMERA_BLACK_LEVEL_START,
ACAMERA_BLACK_LEVEL_END,
@@ -4581,10 +4883,12 @@
* with which the output result (metadata + buffers) has been fully
* synchronized.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64 (acamera_metadata_enum_android_sync_frame_number_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * </ul>
+ * </ul></p>
*
* <p>When a request is submitted to the camera device, there is usually a
* delay of several frames before the controls get applied. A camera
@@ -4638,17 +4942,19 @@
* @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
* @see ACAMERA_SYNC_FRAME_NUMBER
*/
- ACAMERA_SYNC_FRAME_NUMBER = // int64 (enum)
+ ACAMERA_SYNC_FRAME_NUMBER = // int64 (acamera_metadata_enum_android_sync_frame_number_t)
ACAMERA_SYNC_START,
/**
* <p>The maximum number of frames that can occur after a request
* (different than the previous) has been submitted, and before the
* result's state becomes synchronized.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32 (acamera_metadata_enum_android_sync_max_latency_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This defines the maximum distance (in number of metadata results),
* between the frame number of the request that has new controls to apply
@@ -4657,7 +4963,7 @@
* must occur before the camera device knows for a fact that the new
* submitted camera settings have been applied in outgoing frames.</p>
*/
- ACAMERA_SYNC_MAX_LATENCY = // int32 (enum)
+ ACAMERA_SYNC_MAX_LATENCY = // int32 (acamera_metadata_enum_android_sync_max_latency_t)
ACAMERA_SYNC_START + 1,
ACAMERA_SYNC_END,
@@ -4666,10 +4972,12 @@
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>These are output stream configurations for use with
* dataSpace HAL_DATASPACE_DEPTH. The configurations are
@@ -4683,16 +4991,18 @@
* android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
* the entries for HAL_PIXEL_FORMAT_Y16.</p>
*/
- ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
+ ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_t)
ACAMERA_DEPTH_START + 1,
/**
* <p>This lists the minimum frame duration for each
* format/size combination for depth output formats.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>This should correspond to the frame duration when only that
* stream is active, with all processing (typically in android.*.mode)
@@ -4714,10 +5024,12 @@
* <p>This lists the maximum stall duration for each
* output format/size combination for depth streams.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>A stall duration is how much extra time would get added
* to the normal minimum frame duration for a repeating request
@@ -4737,10 +5049,12 @@
* DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
* YUV_420_888, JPEG, or RAW) simultaneously.</p>
*
- * <p>This tag may appear in:</p>
+ * <p>Type: byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)</p>
+ *
+ * <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul>
+ * </ul></p>
*
* <p>If TRUE, including both depth and color outputs in a single
* capture request is not supported. An application must interleave color
@@ -4751,7 +5065,7 @@
* measure depth values, which causes the color image to be
* corrupted during depth measurement.</p>
*/
- ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE = // byte (enum)
+ ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE = // byte (acamera_metadata_enum_android_depth_depth_is_exclusive_t)
ACAMERA_DEPTH_START + 4,
ACAMERA_DEPTH_END,
@@ -6966,6 +7280,7 @@
} acamera_metadata_enum_android_depth_depth_is_exclusive_t;
+
#endif /* __ANDROID_API__ >= 24 */
__END_DECLS
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index de0167a..bc32bbe 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -78,7 +78,7 @@
static bool gWantFrameTime = false; // do we want times on each frame?
static uint32_t gVideoWidth = 0; // default width+height
static uint32_t gVideoHeight = 0;
-static uint32_t gBitRate = 4000000; // 4Mbps
+static uint32_t gBitRate = 20000000; // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
// Set by signal handler to stop recording.
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index d7c2e87..d70282b 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -870,7 +870,9 @@
sp<IMemory> mem =
retriever->getFrameAtTime(-1,
- MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+ MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+ HAL_PIXEL_FORMAT_RGB_565,
+ false /*metaOnly*/);
if (mem != NULL) {
failed = false;
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index bac60ff..bc37557 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -257,6 +257,11 @@
plugin = hPlugin;
}
);
+
+ if (!hResult.isOk()) {
+ ALOGE("createPlugin remote call failed");
+ }
+
return plugin;
}
diff --git a/include/OWNERS b/include/OWNERS
index 3cb6d9c..d6bd998 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,5 +1,5 @@
elaurent@google.com
-gkasten@android.com
+gkasten@google.com
hunga@google.com
jtinker@google.com
lajos@google.com
diff --git a/include/common_time/OWNERS b/include/common_time/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/include/common_time/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/include/private/media/OWNERS b/include/private/media/OWNERS
new file mode 100644
index 0000000..21723ba
--- /dev/null
+++ b/include/private/media/OWNERS
@@ -0,0 +1,3 @@
+elaurent@google.com
+gkasten@google.com
+hunga@google.com
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 51050cd..a9d4dd1 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -30,14 +30,41 @@
class VideoFrame
{
public:
- VideoFrame(): mWidth(0), mHeight(0), mDisplayWidth(0), mDisplayHeight(0), mSize(0),
- mRotationAngle(0), mData(0) {}
+    // Construct a VideoFrame object with the specified parameters;
+    // will allocate a frame buffer if |allocate| is set to true, and will
+    // allocate a buffer to hold ICC data if |iccData| and |iccSize|
+    // indicate its presence.
+ VideoFrame(uint32_t width, uint32_t height,
+ uint32_t displayWidth, uint32_t displayHeight,
+ uint32_t angle, uint32_t bpp, bool allocate,
+ const void *iccData, size_t iccSize):
+ mWidth(width), mHeight(height),
+ mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
+ mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
+ mSize(0), mIccSize(0), mReserved(0), mData(0), mIccData(0) {
+ if (allocate) {
+ mSize = mRowBytes * mHeight;
+ mData = new uint8_t[mSize];
+ if (mData == NULL) {
+ mSize = 0;
+ }
+ }
+ if (iccData != NULL && iccSize > 0) {
+ mIccSize = iccSize;
+ mIccData = new uint8_t[iccSize];
+ if (mIccData != NULL) {
+ memcpy(mIccData, iccData, iccSize);
+ } else {
+ mIccSize = 0;
+ }
+ }
+ }
+
+ // Deep copy of both the information fields and the frame data
VideoFrame(const VideoFrame& copy) {
- mWidth = copy.mWidth;
- mHeight = copy.mHeight;
- mDisplayWidth = copy.mDisplayWidth;
- mDisplayHeight = copy.mDisplayHeight;
+ copyInfoOnly(copy);
+
mSize = copy.mSize;
mData = NULL; // initialize it first
if (mSize > 0 && copy.mData != NULL) {
@@ -48,26 +75,99 @@
mSize = 0;
}
}
- mRotationAngle = copy.mRotationAngle;
+
+ mIccSize = copy.mIccSize;
+ mIccData = NULL; // initialize it first
+ if (mIccSize > 0 && copy.mIccData != NULL) {
+ mIccData = new uint8_t[mIccSize];
+ if (mIccData != NULL) {
+ memcpy(mIccData, copy.mIccData, mIccSize);
+ } else {
+ mIccSize = 0;
+ }
+ }
}
~VideoFrame() {
if (mData != 0) {
delete[] mData;
}
+ if (mIccData != 0) {
+ delete[] mIccData;
+ }
+ }
+
+    // Copy |copy| to a flattened VideoFrame in IMemory; 'this' must point to
+    // a chunk of memory backed by IMemory of size at least getFlattenedSize()
+ // of |copy|.
+ void copyFlattened(const VideoFrame& copy) {
+ copyInfoOnly(copy);
+
+ mSize = copy.mSize;
+ mData = NULL; // initialize it first
+ if (copy.mSize > 0 && copy.mData != NULL) {
+ memcpy(getFlattenedData(), copy.mData, copy.mSize);
+ }
+
+ mIccSize = copy.mIccSize;
+ mIccData = NULL; // initialize it first
+ if (copy.mIccSize > 0 && copy.mIccData != NULL) {
+ memcpy(getFlattenedIccData(), copy.mIccData, copy.mIccSize);
+ }
+ }
+
+ // Calculate the flattened size to put it in IMemory
+ size_t getFlattenedSize() const {
+ return sizeof(VideoFrame) + mSize + mIccSize;
+ }
+
+ // Get the pointer to the frame data in a flattened VideoFrame in IMemory
+ uint8_t* getFlattenedData() const {
+ return (uint8_t*)this + sizeof(VideoFrame);
+ }
+
+ // Get the pointer to the ICC data in a flattened VideoFrame in IMemory
+ uint8_t* getFlattenedIccData() const {
+ return (uint8_t*)this + sizeof(VideoFrame) + mSize;
}
// Intentional public access modifier:
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mDisplayWidth;
- uint32_t mDisplayHeight;
+ uint32_t mWidth; // Decoded image width before rotation
+ uint32_t mHeight; // Decoded image height before rotation
+ uint32_t mDisplayWidth; // Display width before rotation
+ uint32_t mDisplayHeight; // Display height before rotation
+ int32_t mRotationAngle; // Rotation angle, clockwise, should be multiple of 90
+ uint32_t mBytesPerPixel; // Number of bytes per pixel
+ uint32_t mRowBytes; // Number of bytes per row before rotation
uint32_t mSize; // Number of bytes in mData
- int32_t mRotationAngle; // rotation angle, clockwise, should be multiple of 90
- // mData should be 64 bit aligned to prevent additional padding
+ uint32_t mIccSize; // Number of bytes in mIccData
+ uint32_t mReserved; // (padding to make mData 64-bit aligned)
+
+ // mData should be 64-bit aligned to prevent additional padding
uint8_t* mData; // Actual binary data
- // pad structure so it's the same size on 64 bit and 32 bit
+ // pad structure so it's the same size on 64-bit and 32-bit
char mPadding[8 - sizeof(mData)];
+
+ // mIccData should be 64-bit aligned to prevent additional padding
+ uint8_t* mIccData; // Actual binary data
+ // pad structure so it's the same size on 64-bit and 32-bit
+ char mIccPadding[8 - sizeof(mIccData)];
+
+private:
+ //
+ // Utility methods used only within VideoFrame struct
+ //
+
+ // Copy the information fields only
+ void copyInfoOnly(const VideoFrame& copy) {
+ mWidth = copy.mWidth;
+ mHeight = copy.mHeight;
+ mDisplayWidth = copy.mDisplayWidth;
+ mDisplayHeight = copy.mDisplayHeight;
+ mRotationAngle = copy.mRotationAngle;
+ mBytesPerPixel = copy.mBytesPerPixel;
+ mRowBytes = copy.mRowBytes;
+ }
};
}; // namespace android
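// Illustrative sketch (not part of this patch) of how the new flattening helpers are meant to
// be used with an IMemory region; the MemoryHeapBase allocation and the `frame` pointer to an
// already-populated VideoFrame are assumptions made for the example.
//
//     sp<MemoryHeapBase> heap = new MemoryHeapBase(frame->getFlattenedSize(), 0, "VideoFrame");
//     sp<MemoryBase> mem = new MemoryBase(heap, 0, frame->getFlattenedSize());
//     VideoFrame* flattened = static_cast<VideoFrame*>(mem->pointer());
//     flattened->copyFlattened(*frame);  // copies info fields plus pixel and ICC data
//     // On the receiving side, pixel data is reachable via flattened->getFlattenedData()
//     // and ICC data via flattened->getFlattenedIccData().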
diff --git a/include/soundtrigger/OWNERS b/include/soundtrigger/OWNERS
new file mode 100644
index 0000000..e83f6b9
--- /dev/null
+++ b/include/soundtrigger/OWNERS
@@ -0,0 +1,2 @@
+elaurent@google.com
+thorntonc@google.com
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
index 3ee7494..0777890 100644
--- a/media/audioserver/Android.mk
+++ b/media/audioserver/Android.mk
@@ -3,7 +3,8 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- main_audioserver.cpp
+ main_audioserver.cpp \
+ ../libaudioclient/aidl/android/media/IAudioRecord.aidl
LOCAL_SHARED_LIBRARIES := \
libaaudioservice \
@@ -36,6 +37,9 @@
$(call include-path-for, audio-utils) \
external/sonic \
+LOCAL_AIDL_INCLUDES := \
+ frameworks/av/media/libaudioclient/aidl
+
# If AUDIOSERVER_MULTILIB in device.mk is non-empty then it is used to control
# the LOCAL_MULTILIB for all audioserver exclusive libraries.
# This is relevant for 64 bit architectures where either or both
diff --git a/media/audioserver/OWNERS b/media/audioserver/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/audioserver/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/common_time/OWNERS b/media/common_time/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/common_time/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 0125c0f..87fb40b 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -16,11 +16,14 @@
// Play sine waves using AAudio.
-#include <stdio.h>
-#include <stdlib.h>
-#include <math.h>
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <asm/fcntl.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
#include "AAudioExampleUtils.h"
#include "AAudioSimplePlayer.h"
#include "AAudioArgsParser.h"
@@ -48,6 +51,8 @@
float *floatData = nullptr;
int16_t *shortData = nullptr;
+ int testFd = -1;
+
// Make printf print immediately so that debug info is not stuck
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
@@ -95,6 +100,9 @@
goto finish;
}
+ testFd = open("/data/aaudio_temp.raw", O_CREAT | O_RDWR, S_IRWXU);
+ printf("testFd = %d, pid = %d\n", testFd, getpid());
+
// Start the stream.
printf("call player.start()\n");
result = player.start();
@@ -176,7 +184,17 @@
}
finish:
+ printf("testFd = %d, fcntl before aaudio close returns 0x%08X\n",
+ testFd, fcntl(testFd, F_GETFD));
player.close();
+ printf("testFd = %d, fcntl after aaudio close returns 0x%08X\n",
+ testFd, fcntl(testFd, F_GETFD));
+ if (::close(testFd) != 0) {
+ printf("ERROR SharedMemoryParcelable::close() of testFd = %d, errno = %s\n",
+ testFd, strerror(errno));
+ }
+ printf("testFd = %d, fcntl after close() returns 0x%08X\n", testFd, fcntl(testFd, F_GETFD));
+
delete[] floatData;
delete[] shortData;
printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 30fbdd6..3c23736 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -234,6 +234,15 @@
int32_t channelCount);
/**
+ * Identical to AAudioStreamBuilder_setChannelCount().
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param samplesPerFrame Number of samples in a frame.
+ */
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+ int32_t samplesPerFrame);
+
+/**
* Request a sample data format, for example AAUDIO_FORMAT_PCM_I16.
*
* The default, if you do not call this function, is AAUDIO_UNSPECIFIED.
@@ -721,6 +730,14 @@
AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream);
/**
+ * Identical to AAudioStream_getChannelCount().
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of samples per frame
+ */
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream);
+
+/**
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return actual device ID
*/
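// Illustrative usage sketch (not part of this patch): the new samplesPerFrame entry points are
// straight aliases for the channel-count APIs above.
//
//     AAudioStreamBuilder *builder = NULL;
//     AAudio_createStreamBuilder(&builder);
//     AAudioStreamBuilder_setSamplesPerFrame(builder, 2);  // same as setChannelCount(builder, 2)
//     AAudioStream *stream = NULL;
//     if (AAudioStreamBuilder_openStream(builder, &stream) == AAUDIO_OK) {
//         int32_t samplesPerFrame = AAudioStream_getSamplesPerFrame(stream);
//         AAudioStream_close(stream);
//     }
//     AAudioStreamBuilder_delete(builder);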
diff --git a/media/libaaudio/libaaudio.map.txt b/media/libaaudio/libaaudio.map.txt
index b9012e5..2ba5250 100644
--- a/media/libaaudio/libaaudio.map.txt
+++ b/media/libaaudio/libaaudio.map.txt
@@ -11,6 +11,7 @@
AAudioStreamBuilder_setErrorCallback;
AAudioStreamBuilder_setFramesPerDataCallback;
AAudioStreamBuilder_setSampleRate;
+ AAudioStreamBuilder_setSamplesPerFrame;
AAudioStreamBuilder_setChannelCount;
AAudioStreamBuilder_setFormat;
AAudioStreamBuilder_setSharingMode;
@@ -34,6 +35,7 @@
AAudioStream_getBufferCapacityInFrames;
AAudioStream_getXRunCount;
AAudioStream_getSampleRate;
+ AAudioStream_getSamplesPerFrame;
AAudioStream_getChannelCount;
AAudioStream_getPerformanceMode;
AAudioStream_getDeviceId;
diff --git a/media/libaaudio/src/Android.mk b/media/libaaudio/src/Android.mk
index cfcf27a..c13fa67 100644
--- a/media/libaaudio/src/Android.mk
+++ b/media/libaaudio/src/Android.mk
@@ -27,6 +27,8 @@
$(LOCAL_PATH)/legacy \
$(LOCAL_PATH)/utility
+LOCAL_AIDL_INCLUDES := frameworks/av/media/libaudioclient/aidl
+
# If you add a file here then also add it below in the SHARED target
LOCAL_SRC_FILES = \
core/AudioStream.cpp \
@@ -57,7 +59,8 @@
binding/IAAudioService.cpp \
binding/RingBufferParcelable.cpp \
binding/SharedMemoryParcelable.cpp \
- binding/SharedRegionParcelable.cpp
+ binding/SharedRegionParcelable.cpp \
+ ../../libaudioclient/aidl/android/media/IAudioRecord.aidl
LOCAL_CFLAGS += -Wno-unused-parameter -Wall -Werror
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index d05abb0..1a97555 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -28,6 +28,7 @@
#include "binding/RingBufferParcelable.h"
#include "binding/AudioEndpointParcelable.h"
+using android::base::unique_fd;
using android::NO_ERROR;
using android::status_t;
using android::Parcel;
@@ -49,7 +50,8 @@
* Add the file descriptor to the table.
* @return index in table or negative error
*/
-int32_t AudioEndpointParcelable::addFileDescriptor(int fd, int32_t sizeInBytes) {
+int32_t AudioEndpointParcelable::addFileDescriptor(const unique_fd& fd,
+ int32_t sizeInBytes) {
if (mNumSharedMemories >= MAX_SHARED_MEMORIES) {
return AAUDIO_ERROR_OUT_OF_RANGE;
}
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index 993075c7..aa8573f 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -20,6 +20,7 @@
#include <stdint.h>
//#include <sys/mman.h>
+#include <android-base/unique_fd.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
@@ -47,7 +48,7 @@
* Add the file descriptor to the table.
* @return index in table or negative error
*/
- int32_t addFileDescriptor(int fd, int32_t sizeInBytes);
+ int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
virtual status_t writeToParcel(Parcel* parcel) const override;
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 899eb04..90217ab 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -24,11 +24,13 @@
#include <sys/mman.h>
#include <aaudio/AAudio.h>
+#include <android-base/unique_fd.h>
#include <binder/Parcelable.h>
#include <utility/AAudioUtilities.h>
#include "binding/SharedMemoryParcelable.h"
+using android::base::unique_fd;
using android::NO_ERROR;
using android::status_t;
using android::Parcel;
@@ -39,17 +41,19 @@
SharedMemoryParcelable::SharedMemoryParcelable() {}
SharedMemoryParcelable::~SharedMemoryParcelable() {};
-void SharedMemoryParcelable::setup(int fd, int32_t sizeInBytes) {
- mFd = fd;
+void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
+ mFd.reset(dup(fd.get())); // store a duplicate fd
+ ALOGV("SharedMemoryParcelable::setup(%d -> %d, %d) this = %p\n",
+ fd.get(), mFd.get(), sizeInBytes, this);
mSizeInBytes = sizeInBytes;
-
}
status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
status_t status = parcel->writeInt32(mSizeInBytes);
if (status != NO_ERROR) return status;
if (mSizeInBytes > 0) {
- status = parcel->writeDupFileDescriptor(mFd);
+ ALOGV("SharedMemoryParcelable::writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
+ status = parcel->writeUniqueFileDescriptor(mFd);
ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
status);
}
@@ -62,15 +66,16 @@
return status;
}
if (mSizeInBytes > 0) {
- // Keep the original FD until you are done with the mFd.
- // If you close it in here then it will prevent mFd from working.
- mOriginalFd = parcel->readFileDescriptor();
- ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mOriginalFd = %d\n", mOriginalFd);
- mFd = fcntl(mOriginalFd, F_DUPFD_CLOEXEC, 0);
- ALOGV("SharedMemoryParcelable::readFromParcel() LEAK? mFd = %d\n", mFd);
- if (mFd == -1) {
- status = -errno;
- ALOGE("SharedMemoryParcelable readFromParcel fcntl() failed : %d", status);
+ // The Parcel owns the file descriptor and will close it later.
+ unique_fd mmapFd;
+ status = parcel->readUniqueFileDescriptor(&mmapFd);
+ if (status != NO_ERROR) {
+ ALOGE("SharedMemoryParcelable::readFromParcel() readUniqueFileDescriptor() failed : %d",
+ status);
+ } else {
+ // Resolve the memory now while we still have the FD from the Parcel.
+ // Closing the FD will not affect the shared memory once mmap() has been called.
+ status = AAudioConvert_androidToAAudioResult(resolveSharedMemory(mmapFd));
}
}
return status;
@@ -85,45 +90,50 @@
}
mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
}
- if (mFd != -1) {
- ALOGV("SharedMemoryParcelable::close() LEAK? mFd = %d\n", mFd);
- ::close(mFd);
- mFd = -1;
- }
- if (mOriginalFd != -1) {
- ALOGV("SharedMemoryParcelable::close() LEAK? mOriginalFd = %d\n", mOriginalFd);
- ::close(mOriginalFd);
- mOriginalFd = -1;
+ return AAUDIO_OK;
+}
+
+aaudio_result_t SharedMemoryParcelable::resolveSharedMemory(const unique_fd& fd) {
+ mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
+ MAP_SHARED, fd.get(), 0);
+ if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
+ ALOGE("SharedMemoryParcelable mmap() failed for fd = %d, errno = %s",
+ fd.get(), strerror(errno));
+ return AAUDIO_ERROR_INTERNAL;
}
return AAUDIO_OK;
}
aaudio_result_t SharedMemoryParcelable::resolve(int32_t offsetInBytes, int32_t sizeInBytes,
void **regionAddressPtr) {
-
if (offsetInBytes < 0) {
ALOGE("SharedMemoryParcelable illegal offsetInBytes = %d", offsetInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
} else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
ALOGE("SharedMemoryParcelable out of range, offsetInBytes = %d, "
- "sizeInBytes = %d, mSizeInBytes = %d",
+ "sizeInBytes = %d, mSizeInBytes = %d",
offsetInBytes, sizeInBytes, mSizeInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
+
+ aaudio_result_t result = AAUDIO_OK;
+
if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
- mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ|PROT_WRITE,
- MAP_SHARED, mFd, 0);
- if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
- ALOGE("SharedMemoryParcelable mmap failed for fd = %d, errno = %s",
- mFd, strerror(errno));
- return AAUDIO_ERROR_INTERNAL;
+ if (mFd.get() != -1) {
+ result = resolveSharedMemory(mFd);
+ } else {
+ ALOGE("SharedMemoryParcelable has no file descriptor for shared memory.");
+ result = AAUDIO_ERROR_INTERNAL;
}
}
- *regionAddressPtr = mResolvedAddress + offsetInBytes;
- ALOGV("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
- ALOGV("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
- offsetInBytes, *regionAddressPtr);
- return AAUDIO_OK;
+
+ if (result == AAUDIO_OK && mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
+ *regionAddressPtr = mResolvedAddress + offsetInBytes;
+ ALOGV("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
+ ALOGV("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
+ offsetInBytes, *regionAddressPtr);
+ }
+ return result;
}
int32_t SharedMemoryParcelable::getSizeInBytes() {
@@ -135,17 +145,11 @@
ALOGE("SharedMemoryParcelable invalid mSizeInBytes = %d", mSizeInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
- if (mSizeInBytes > 0) {
- if (mFd == -1) {
- ALOGE("SharedMemoryParcelable uninitialized mFd = %d", mFd);
- return AAUDIO_ERROR_INTERNAL;
- }
- }
return AAUDIO_OK;
}
void SharedMemoryParcelable::dump() {
- ALOGD("SharedMemoryParcelable mFd = %d", mFd);
+ ALOGD("SharedMemoryParcelable mFd = %d", mFd.get());
ALOGD("SharedMemoryParcelable mSizeInBytes = %d", mSizeInBytes);
ALOGD("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
}
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4b94b46..2a634e0 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -18,15 +18,12 @@
#define ANDROID_AAUDIO_SHARED_MEMORY_PARCELABLE_H
#include <stdint.h>
-
#include <sys/mman.h>
+
+#include <android-base/unique_fd.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
namespace aaudio {
// Arbitrary limits for sanity checks. TODO remove after debugging.
@@ -37,17 +34,24 @@
/**
* This is a parcelable description of a shared memory referenced by a file descriptor.
* It may be divided into several regions.
+ * The memory can be shared using Binder or simply shared between threads.
*/
-class SharedMemoryParcelable : public Parcelable {
+class SharedMemoryParcelable : public android::Parcelable {
public:
SharedMemoryParcelable();
virtual ~SharedMemoryParcelable();
- void setup(int fd, int32_t sizeInBytes);
+ /**
+ * Make a dup() of the fd and store it for later use.
+ *
+ * @param fd
+ * @param sizeInBytes
+ */
+ void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
- virtual status_t writeToParcel(Parcel* parcel) const override;
+ virtual android::status_t writeToParcel(android::Parcel* parcel) const override;
- virtual status_t readFromParcel(const Parcel* parcel) override;
+ virtual android::status_t readFromParcel(const android::Parcel* parcel) override;
// mmap() shared memory
aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
@@ -55,8 +59,6 @@
// munmap() any mapped memory
aaudio_result_t close();
- bool isFileDescriptorSafe();
-
int32_t getSizeInBytes();
aaudio_result_t validate();
@@ -67,10 +69,11 @@
#define MMAP_UNRESOLVED_ADDRESS reinterpret_cast<uint8_t*>(MAP_FAILED)
- int mFd = -1;
- int mOriginalFd = -1;
- int32_t mSizeInBytes = 0;
- uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+ aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
+
+ android::base::unique_fd mFd;
+ int32_t mSizeInBytes = 0;
+ uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
};
} /* namespace aaudio */
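// Illustrative usage sketch (not part of this patch): how the unique_fd-based setup() is
// expected to be used with an ashmem region; the region name and size are example assumptions.
//
//     android::base::unique_fd fd(ashmem_create_region("AAudioExample", 4096));
//     aaudio::SharedMemoryParcelable shm;
//     shm.setup(fd, 4096);  // setup() dup()s the descriptor, so `fd` remains owned by the caller
//     void *addr = nullptr;
//     if (shm.resolve(0, 4096, &addr) == AAUDIO_OK) {
//         // `addr` now points into the mmap()ed shared region.
//     }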
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 6ec285f..3ee450a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -32,14 +32,17 @@
#define RIDICULOUSLY_LARGE_FRAME_SIZE 4096
AudioEndpoint::AudioEndpoint()
- : mFreeRunning(false)
+ : mUpCommandQueue(nullptr)
+ , mDataQueue(nullptr)
+ , mFreeRunning(false)
, mDataReadCounter(0)
, mDataWriteCounter(0)
{
}
-AudioEndpoint::~AudioEndpoint()
-{
+AudioEndpoint::~AudioEndpoint() {
+ delete mDataQueue;
+ delete mUpCommandQueue;
}
static aaudio_result_t AudioEndpoint_validateQueueDescriptor(const char *type,
@@ -254,3 +257,7 @@
ALOGD("AudioEndpoint: data readCounter = %lld", (long long) mDataQueue->getReadCounter());
ALOGD("AudioEndpoint: data writeCounter = %lld", (long long) mDataQueue->getWriteCounter());
}
+
+void AudioEndpoint::eraseDataMemory() {
+ mDataQueue->eraseMemory();
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 81a4f7b..f5b67e8 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -86,6 +86,11 @@
int32_t getBufferCapacityInFrames() const;
+ /**
+ * Write zeros to the data queue memory.
+ */
+ void eraseDataMemory();
+
void dump() const;
private:
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index a136c0d..41d4909 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -440,10 +440,14 @@
setState(AAUDIO_STREAM_STATE_CLOSED);
break;
case AAUDIO_SERVICE_EVENT_DISCONNECTED:
+ // Prevent hardware from looping on old data and making buzzing sounds.
+ if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+ mAudioEndpoint.eraseDataMemory();
+ }
result = AAUDIO_ERROR_DISCONNECTED;
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
ALOGW("WARNING - AudioStreamInternal::onEventFromServer()"
- " AAUDIO_SERVICE_EVENT_DISCONNECTED");
+ " AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared");
break;
case AAUDIO_SERVICE_EVENT_VOLUME:
mStreamVolume = (float)message->event.dataDouble;
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 82d96e0..5089b00 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -101,7 +101,6 @@
*/
static aaudio_policy_t s_MMapPolicy = AAUDIO_UNSPECIFIED;
-
static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
{
return (AudioStream*) stream;
@@ -144,12 +143,18 @@
}
AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
- int32_t channelCount)
+ int32_t channelCount)
{
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
streamBuilder->setSamplesPerFrame(channelCount);
}
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+ int32_t channelCount)
+{
+ AAudioStreamBuilder_setChannelCount(builder, channelCount);
+}
+
AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
aaudio_direction_t direction)
{
@@ -350,6 +355,11 @@
return audioStream->getSamplesPerFrame();
}
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
+{
+ return AAudioStream_getChannelCount(stream);
+}
+
AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
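
For context (not applied by this patch): the two wrappers added above keep the older *_setSamplesPerFrame / *_getSamplesPerFrame names working as thin aliases for the channel-count API. A hedged usage sketch; the function name is illustrative.

    #include <aaudio/AAudio.h>

    // Either spelling configures the same samples-per-frame value, so legacy
    // callers keep working while new code uses the ChannelCount names.
    aaudio_result_t openStereoStreamExample(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) {
            return result;
        }
        AAudioStreamBuilder_setSamplesPerFrame(builder, 2);  // legacy alias ...
        AAudioStreamBuilder_setChannelCount(builder, 2);     // ... same setter
        result = AAudioStreamBuilder_openStream(builder, streamOut);
        AAudioStreamBuilder_delete(builder);
        if (result == AAUDIO_OK) {
            // Both getters report the same value for the opened stream.
            int32_t channels = AAudioStream_getChannelCount(*streamOut);
            int32_t samples  = AAudioStream_getSamplesPerFrame(*streamOut);
            (void) channels;
            (void) samples;
        }
        return result;
    }
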
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 8d2c62d..a869886 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -210,3 +210,9 @@
return mFifo->getCapacity();
}
+void FifoBuffer::eraseMemory() {
+ int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
+ if (numBytes > 0) {
+ memset(mStorage, 0, (size_t) numBytes);
+ }
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index a94e9b0..f5a9e27 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -111,6 +111,11 @@
mFifo->setWriteCounter(n);
}
+    /*
+     * Write zeros to the entire buffer memory.
+     * This is generally only called before or after the buffer is used.
+     */
+ void eraseMemory();
+
private:
void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
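
For context (not applied by this patch): eraseMemory() above zeros only the backing storage and leaves the read/write counters alone, which is what lets the disconnect path in AudioStreamInternal.cpp silence a free-running output instead of letting the hardware replay stale samples. A self-contained sketch of the same pattern; RingStorage is a hypothetical stand-in, not the real FifoBuffer.

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Hypothetical stand-in for FifoBuffer: counters untouched, storage zeroed.
    struct RingStorage {
        std::vector<uint8_t> storage;
        uint64_t readCounter = 0;
        uint64_t writeCounter = 0;

        explicit RingStorage(size_t capacityBytes) : storage(capacityBytes) {}

        void eraseMemory() {
            if (!storage.empty()) {
                memset(storage.data(), 0, storage.size());  // consumers now read silence
            }
        }
    };
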
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index 79beed6..c51fbce 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -19,6 +19,7 @@
#include <stdlib.h>
#include <math.h>
+#include <android-base/unique_fd.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
#include <cutils/ashmem.h>
@@ -28,6 +29,7 @@
#include <aaudio/AAudio.h>
#include <binding/AudioEndpointParcelable.h>
+using android::base::unique_fd;
using namespace android;
using namespace aaudio;
@@ -48,7 +50,7 @@
SharedMemoryParcelable sharedMemoryA;
SharedMemoryParcelable sharedMemoryB;
const size_t memSizeBytes = 840;
- int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
ASSERT_LE(0, fd);
sharedMemoryA.setup(fd, memSizeBytes);
void *region1;
@@ -81,7 +83,7 @@
SharedRegionParcelable sharedRegionA;
SharedRegionParcelable sharedRegionB;
const size_t memSizeBytes = 840;
- int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
ASSERT_LE(0, fd);
sharedMemories[0].setup(fd, memSizeBytes);
int32_t regionOffset1 = 32;
@@ -119,7 +121,7 @@
const int32_t counterSizeBytes = sizeof(int64_t);
const size_t memSizeBytes = dataSizeBytes + (2 * counterSizeBytes);
- int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ unique_fd fd(ashmem_create_region("TestMarshalling Z", memSizeBytes));
ASSERT_LE(0, fd);
sharedMemories[0].setup(fd, memSizeBytes);
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 61c946c..a02311b 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -6,7 +6,21 @@
cc_library_shared {
name: "libaudioclient",
+
+ aidl: {
+ export_aidl_headers: true,
+ local_include_dirs: ["aidl"],
+ include_dirs: [
+ "frameworks/av/media/libaudioclient/aidl",
+ ],
+ },
+
srcs: [
+ // AIDL files for audioclient interfaces
+ // The headers for these interfaces will be available to any modules that
+ // include libaudioclient, at the path "aidl/package/path/BnFoo.h"
+ "aidl/android/media/IAudioRecord.aidl",
+
"AudioEffect.cpp",
"AudioPolicy.cpp",
"AudioRecord.cpp",
@@ -17,7 +31,6 @@
"IAudioFlingerClient.cpp",
"IAudioPolicyService.cpp",
"IAudioPolicyServiceClient.cpp",
- "IAudioRecord.cpp",
"IAudioTrack.cpp",
"IEffect.cpp",
"IEffectClient.cpp",
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 611cde7..6402bbb 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -323,7 +323,7 @@
status_t status = NO_ERROR;
if (!(flags & CBLK_INVALID)) {
- status = mAudioRecord->start(event, triggerSession);
+ status = mAudioRecord->start(event, triggerSession).transactionError();
if (status == DEAD_OBJECT) {
flags |= CBLK_INVALID;
}
@@ -652,22 +652,22 @@
sp<IMemory> iMem; // for cblk
sp<IMemory> bufferMem;
- sp<IAudioRecord> record = audioFlinger->openRecord(input,
- mSampleRate,
- mFormat,
- mChannelMask,
- opPackageName,
- &temp,
- &flags,
- mClientPid,
- tid,
- mClientUid,
- &mSessionId,
- ¬ificationFrames,
- iMem,
- bufferMem,
- &status,
- mPortId);
+ sp<media::IAudioRecord> record = audioFlinger->openRecord(input,
+ mSampleRate,
+ mFormat,
+ mChannelMask,
+ opPackageName,
+ &temp,
+ &flags,
+ mClientPid,
+ tid,
+ mClientUid,
+ &mSessionId,
+ ¬ificationFrames,
+ iMem,
+ bufferMem,
+ &status,
+ mPortId);
ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
"session ID changed from %d to %d", originalSessionId, mSessionId);
@@ -1219,7 +1219,8 @@
if (mActive) {
// callback thread or sync event hasn't changed
// FIXME this fails if we have a new AudioFlinger instance
- result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE);
+ result = mAudioRecord->start(
+ AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE).transactionError();
}
mFramesReadServerOffset = mFramesRead; // server resets to zero so we need an offset.
}
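
For context (not applied by this patch): the generated start() now returns android::binder::Status, and transactionError() maps it back to the status_t these call sites expect; the existing CBLK_INVALID handling only cares about transport failures such as DEAD_OBJECT, which transactionError() surfaces. A hedged sketch of that conversion; the helper name and parameters are illustrative.

    #include <binder/Status.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>
    #include "android/media/IAudioRecord.h"

    // Sketch of the pattern used above: convert binder::Status to status_t and
    // flag the control block as invalid when the remote side has died.
    static android::status_t startOrInvalidate(
            const android::sp<android::media::IAudioRecord>& record,
            int32_t event, int32_t triggerSession, bool *cblkInvalid) {
        android::status_t status =
                record->start(event, triggerSession).transactionError();
        if (status == android::DEAD_OBJECT) {
            *cblkInvalid = true;   // mirrors the CBLK_INVALID handling above
        }
        return status;
    }
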
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 14feada..fc8c11a 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -175,7 +175,7 @@
return track;
}
- virtual sp<IAudioRecord> openRecord(
+ virtual sp<media::IAudioRecord> openRecord(
audio_io_handle_t input,
uint32_t sampleRate,
audio_format_t format,
@@ -194,7 +194,7 @@
audio_port_handle_t portId)
{
Parcel data, reply;
- sp<IAudioRecord> record;
+ sp<media::IAudioRecord> record;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) input);
data.writeInt32(sampleRate);
@@ -238,7 +238,7 @@
*notificationFrames = lNotificationFrames;
}
lStatus = reply.readInt32();
- record = interface_cast<IAudioRecord>(reply.readStrongBinder());
+ record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
cblk = interface_cast<IMemory>(reply.readStrongBinder());
if (cblk != 0 && cblk->pointer() == NULL) {
cblk.clear();
@@ -1025,7 +1025,7 @@
sp<IMemory> cblk;
sp<IMemory> buffers;
status_t status = NO_ERROR;
- sp<IAudioRecord> record = openRecord(input,
+ sp<media::IAudioRecord> record = openRecord(input,
sampleRate, format, channelMask, opPackageName, &frameCount, &flags,
pid, tid, clientUid, &sessionId, ¬ificationFrames, cblk, buffers,
&status, portId);
diff --git a/media/libaudioclient/IAudioRecord.cpp b/media/libaudioclient/IAudioRecord.cpp
deleted file mode 100644
index 1331c0d..0000000
--- a/media/libaudioclient/IAudioRecord.cpp
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IAudioRecord"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioRecord.h>
-
-namespace android {
-
-enum {
- UNUSED_WAS_GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
- START,
- STOP
-};
-
-class BpAudioRecord : public BpInterface<IAudioRecord>
-{
-public:
- explicit BpAudioRecord(const sp<IBinder>& impl)
- : BpInterface<IAudioRecord>(impl)
- {
- }
-
- virtual status_t start(int /*AudioSystem::sync_event_t*/ event, audio_session_t triggerSession)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
- data.writeInt32(event);
- data.writeInt32(triggerSession);
- status_t status = remote()->transact(START, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- } else {
- ALOGW("start() error: %s", strerror(-status));
- }
- return status;
- }
-
- virtual void stop()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
- remote()->transact(STOP, data, &reply);
- }
-
-};
-
-IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioRecord::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case START: {
- CHECK_INTERFACE(IAudioRecord, data, reply);
- int /*AudioSystem::sync_event_t*/ event = data.readInt32();
- audio_session_t triggerSession = (audio_session_t) data.readInt32();
- reply->writeInt32(start(event, triggerSession));
- return NO_ERROR;
- } break;
- case STOP: {
- CHECK_INTERFACE(IAudioRecord, data, reply);
- stop();
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-} // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
new file mode 100644
index 0000000..50ce78f
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+interface IAudioRecord {
+
+    /* After it's created, the track is not active. Call start() to
+     * make it active.
+     */
+ void start(int /*AudioSystem::sync_event_t*/ event,
+ int /*audio_session_t*/ triggerSession);
+
+    /* Stop a track. If a callback is set, it will cease being called and
+     * obtainBuffer will return an error. Buffers that are already released
+     * will be processed, unless flush() is called.
+     */
+ void stop();
+}
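
For context (not applied by this patch): the AIDL compiler turns this file into android::media::IAudioRecord plus the BnAudioRecord/BpAudioRecord classes that replace the hand-written binder code deleted above. A hedged sketch of what a service-side implementation might look like; the class is illustrative, not AudioFlinger's actual record handle.

    #include <binder/Status.h>
    #include "android/media/BnAudioRecord.h"   // generated server-side stub

    // Illustrative implementation: each void method in the .aidl becomes a
    // binder::Status-returning override in C++.
    class FakeRecordHandle : public android::media::BnAudioRecord {
    public:
        android::binder::Status start(int32_t event, int32_t triggerSession) override {
            (void) event;
            (void) triggerSession;
            mActive = true;
            return android::binder::Status::ok();
        }

        android::binder::Status stop() override {
            mActive = false;
            return android::binder::Status::ok();
        }

    private:
        bool mActive = false;
    };
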
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index e6a5efb..74a626e 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -17,13 +17,16 @@
#ifndef ANDROID_AUDIORECORD_H
#define ANDROID_AUDIORECORD_H
+#include <binder/IMemory.h>
#include <cutils/sched_policy.h>
#include <media/AudioSystem.h>
#include <media/AudioTimestamp.h>
-#include <media/IAudioRecord.h>
#include <media/Modulo.h>
+#include <utils/RefBase.h>
#include <utils/threads.h>
+#include "android/media/IAudioRecord.h"
+
namespace android {
// ----------------------------------------------------------------------------
@@ -624,7 +627,7 @@
// Next 5 fields may be changed if IAudioRecord is re-created, but always != 0
// provided the initial set() was successful
- sp<IAudioRecord> mAudioRecord;
+ sp<media::IAudioRecord> mAudioRecord;
sp<IMemory> mCblkMemory;
audio_track_cblk_t* mCblk; // re-load after mLock.unlock()
sp<IMemory> mBufferMemory;
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 0ad4231..133d6c9 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -25,7 +25,6 @@
#include <utils/Errors.h>
#include <binder/IInterface.h>
#include <media/IAudioTrack.h>
-#include <media/IAudioRecord.h>
#include <media/IAudioFlingerClient.h>
#include <system/audio.h>
#include <system/audio_effect.h>
@@ -34,6 +33,8 @@
#include <media/IEffectClient.h>
#include <utils/String8.h>
+#include "android/media/IAudioRecord.h"
+
namespace android {
// ----------------------------------------------------------------------------
@@ -69,7 +70,7 @@
status_t *status,
audio_port_handle_t portId) = 0;
- virtual sp<IAudioRecord> openRecord(
+ virtual sp<media::IAudioRecord> openRecord(
// On successful return, AudioFlinger takes over the handle
// reference and will release it when the track is destroyed.
// However on failure, the client is responsible for release.
diff --git a/media/libaudioclient/include/media/IAudioRecord.h b/media/libaudioclient/include/media/IAudioRecord.h
deleted file mode 100644
index 7768176..0000000
--- a/media/libaudioclient/include/media/IAudioRecord.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef IAUDIORECORD_H_
-#define IAUDIORECORD_H_
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <binder/IMemory.h>
-#include <system/audio.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioRecord : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioRecord);
-
- /* After it's created the track is not active. Call start() to
- * make it active.
- */
- virtual status_t start(int /*AudioSystem::sync_event_t*/ event,
- audio_session_t triggerSession) = 0;
-
- /* Stop a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
- * will be processed, unless flush() is called.
- */
- virtual void stop() = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnAudioRecord : public BnInterface<IAudioRecord>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif /*IAUDIORECORD_H_*/
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index b49b975..61fb6bab 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -40,7 +40,7 @@
namespace android {
EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
- : mEffect(effect), mEffectId(effectId), mBuffersChanged(true) {
+ : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
}
EffectHalHidl::~EffectHalHidl() {
@@ -49,6 +49,9 @@
mEffect.clear();
hardware::IPCThreadState::self()->flushCommands();
}
+ if (mEfGroup) {
+ EventFlag::deleteEventFlag(&mEfGroup);
+ }
}
// static
diff --git a/media/libaudioprocessing/OWNERS b/media/libaudioprocessing/OWNERS
new file mode 100644
index 0000000..96d0ea0
--- /dev/null
+++ b/media/libaudioprocessing/OWNERS
@@ -0,0 +1,3 @@
+gkasten@google.com
+hunga@google.com
+rago@google.com
diff --git a/media/libcpustats/OWNERS b/media/libcpustats/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/libcpustats/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index 7e3de13..7f9ae81 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,3 +1,4 @@
+hunga@google.com
krocard@google.com
mnaganov@google.com
rago@google.com
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index 519f4a8..b4a1d77 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -563,6 +563,10 @@
return -EINVAL;
}
effect_param_t *cmd = (effect_param_t *) pCmdData;
+ if (cmd->psize != sizeof(int32_t)) {
+ android_errorWriteLog(0x534e4554, "63662938");
+ return -EINVAL;
+ }
*(int *)pReplyData = Downmix_setParameter(pDownmixer, *(int32_t *)cmd->data,
cmd->vsize, cmd->data + sizeof(int32_t));
break;
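
For context (not applied by this patch): the added psize check rejects SET_PARAM payloads whose declared parameter-id size is not the int32_t the code is about to read, and logs the attempt to the security event log. A hedged, self-contained sketch of the same guard; the struct mimics only the psize/vsize/data layout visible in the hunk and is not the real effect_param_t.

    #include <errno.h>
    #include <stdint.h>
    #include <string.h>

    // Hypothetical stand-in for the fixed header of effect_param_t.
    struct ExampleParamHeader {
        uint32_t psize;   // size of the parameter-id block that follows
        uint32_t vsize;   // size of the value block after the parameter id
    };

    static int exampleSetParameter(const void *pCmdData, uint32_t cmdSize) {
        if (pCmdData == nullptr ||
                cmdSize < sizeof(ExampleParamHeader) + sizeof(int32_t)) {
            return -EINVAL;
        }
        const ExampleParamHeader *cmd = (const ExampleParamHeader *) pCmdData;
        if (cmd->psize != sizeof(int32_t)) {   // same guard as the patch above
            return -EINVAL;                    // reject malformed payloads
        }
        int32_t paramId;
        memcpy(&paramId,
               (const char *) pCmdData + sizeof(ExampleParamHeader),
               sizeof(paramId));
        (void) paramId;
        return 0;
    }
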
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
index f32ed30..4ecaf14 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
@@ -534,246 +534,246 @@
/* Coefficients for centre frequency 55Hz */
#define HPF_Fs8000_Fc55_A0 0.958849f
-#define HPF_Fs8000_Fc55_A1 -1.917698f
+#define HPF_Fs8000_Fc55_A1 (-1.917698f)
#define HPF_Fs8000_Fc55_A2 0.958849f
-#define HPF_Fs8000_Fc55_B1 -1.939001f
+#define HPF_Fs8000_Fc55_B1 (-1.939001f)
#define HPF_Fs8000_Fc55_B2 0.940807f
#define HPF_Fs11025_Fc55_A0 0.966909f
-#define HPF_Fs11025_Fc55_A1 -1.933818f
+#define HPF_Fs11025_Fc55_A1 (-1.933818f)
#define HPF_Fs11025_Fc55_A2 0.966909f
-#define HPF_Fs11025_Fc55_B1 -1.955732f
+#define HPF_Fs11025_Fc55_B1 (-1.955732f)
#define HPF_Fs11025_Fc55_B2 0.956690f
#define HPF_Fs12000_Fc55_A0 0.968650f
-#define HPF_Fs12000_Fc55_A1 -1.937300f
+#define HPF_Fs12000_Fc55_A1 (-1.937300f)
#define HPF_Fs12000_Fc55_A2 0.968650f
-#define HPF_Fs12000_Fc55_B1 -1.959327f
+#define HPF_Fs12000_Fc55_B1 (-1.959327f)
#define HPF_Fs12000_Fc55_B2 0.960138f
#define HPF_Fs16000_Fc55_A0 0.973588f
-#define HPF_Fs16000_Fc55_A1 -1.947176f
+#define HPF_Fs16000_Fc55_A1 (-1.947176f)
#define HPF_Fs16000_Fc55_A2 0.973588f
-#define HPF_Fs16000_Fc55_B1 -1.969494f
+#define HPF_Fs16000_Fc55_B1 (-1.969494f)
#define HPF_Fs16000_Fc55_B2 0.969952f
#define HPF_Fs22050_Fc55_A0 0.977671f
-#define HPF_Fs22050_Fc55_A1 -1.955343f
+#define HPF_Fs22050_Fc55_A1 (-1.955343f)
#define HPF_Fs22050_Fc55_A2 0.977671f
-#define HPF_Fs22050_Fc55_B1 -1.977863f
+#define HPF_Fs22050_Fc55_B1 (-1.977863f)
#define HPF_Fs22050_Fc55_B2 0.978105f
#define HPF_Fs24000_Fc55_A0 0.978551f
-#define HPF_Fs24000_Fc55_A1 -1.957102f
+#define HPF_Fs24000_Fc55_A1 (-1.957102f)
#define HPF_Fs24000_Fc55_A2 0.978551f
-#define HPF_Fs24000_Fc55_B1 -1.979662f
+#define HPF_Fs24000_Fc55_B1 (-1.979662f)
#define HPF_Fs24000_Fc55_B2 0.979866f
#define HPF_Fs32000_Fc55_A0 0.981042f
-#define HPF_Fs32000_Fc55_A1 -1.962084f
+#define HPF_Fs32000_Fc55_A1 (-1.962084f)
#define HPF_Fs32000_Fc55_A2 0.981042f
-#define HPF_Fs32000_Fc55_B1 -1.984746f
+#define HPF_Fs32000_Fc55_B1 (-1.984746f)
#define HPF_Fs32000_Fc55_B2 0.984861f
#define HPF_Fs44100_Fc55_A0 0.983097f
-#define HPF_Fs44100_Fc55_A1 -1.966194f
+#define HPF_Fs44100_Fc55_A1 (-1.966194f)
#define HPF_Fs44100_Fc55_A2 0.983097f
-#define HPF_Fs44100_Fc55_B1 -1.988931f
+#define HPF_Fs44100_Fc55_B1 (-1.988931f)
#define HPF_Fs44100_Fc55_B2 0.988992f
#define HPF_Fs48000_Fc55_A0 0.983539f
-#define HPF_Fs48000_Fc55_A1 -1.967079f
+#define HPF_Fs48000_Fc55_A1 (-1.967079f)
#define HPF_Fs48000_Fc55_A2 0.983539f
-#define HPF_Fs48000_Fc55_B1 -1.989831f
+#define HPF_Fs48000_Fc55_B1 (-1.989831f)
#define HPF_Fs48000_Fc55_B2 0.989882f
#ifdef HIGHER_FS
#define HPF_Fs96000_Fc55_A0 0.986040f
-#define HPF_Fs96000_Fc55_A1 -1.972080f
+#define HPF_Fs96000_Fc55_A1 (-1.972080f)
#define HPF_Fs96000_Fc55_A2 0.986040f
-#define HPF_Fs96000_Fc55_B1 -1.994915f
+#define HPF_Fs96000_Fc55_B1 (-1.994915f)
#define HPF_Fs96000_Fc55_B2 0.994928f
#define HPF_Fs192000_Fc55_A0 0.987294f
-#define HPF_Fs192000_Fc55_A1 -1.974588f
+#define HPF_Fs192000_Fc55_A1 (-1.974588f)
#define HPF_Fs192000_Fc55_A2 0.987294f
-#define HPF_Fs192000_Fc55_B1 -1.997458f
+#define HPF_Fs192000_Fc55_B1 (-1.997458f)
#define HPF_Fs192000_Fc55_B2 0.997461f
#endif
/* Coefficients for centre frequency 66Hz */
#define HPF_Fs8000_Fc66_A0 0.953016f
-#define HPF_Fs8000_Fc66_A1 -1.906032f
+#define HPF_Fs8000_Fc66_A1 (-1.906032f)
#define HPF_Fs8000_Fc66_A2 0.953016f
-#define HPF_Fs8000_Fc66_B1 -1.926810f
+#define HPF_Fs8000_Fc66_B1 (-1.926810f)
#define HPF_Fs8000_Fc66_B2 0.929396f
#define HPF_Fs11025_Fc66_A0 0.962638f
-#define HPF_Fs11025_Fc66_A1 -1.925275f
+#define HPF_Fs11025_Fc66_A1 (-1.925275f)
#define HPF_Fs11025_Fc66_A2 0.962638f
-#define HPF_Fs11025_Fc66_B1 -1.946881f
+#define HPF_Fs11025_Fc66_B1 (-1.946881f)
#define HPF_Fs11025_Fc66_B2 0.948256f
#define HPF_Fs12000_Fc66_A0 0.964718f
-#define HPF_Fs12000_Fc66_A1 -1.929435f
+#define HPF_Fs12000_Fc66_A1 (-1.929435f)
#define HPF_Fs12000_Fc66_A2 0.964718f
-#define HPF_Fs12000_Fc66_B1 -1.951196f
+#define HPF_Fs12000_Fc66_B1 (-1.951196f)
#define HPF_Fs12000_Fc66_B2 0.952359f
#define HPF_Fs16000_Fc66_A0 0.970622f
-#define HPF_Fs16000_Fc66_A1 -1.941244f
+#define HPF_Fs16000_Fc66_A1 (-1.941244f)
#define HPF_Fs16000_Fc66_A2 0.970622f
-#define HPF_Fs16000_Fc66_B1 -1.963394f
+#define HPF_Fs16000_Fc66_B1 (-1.963394f)
#define HPF_Fs16000_Fc66_B2 0.964052f
#define HPF_Fs22050_Fc66_A0 0.975509f
-#define HPF_Fs22050_Fc66_A1 -1.951019f
+#define HPF_Fs22050_Fc66_A1 (-1.951019f)
#define HPF_Fs22050_Fc66_A2 0.975509f
-#define HPF_Fs22050_Fc66_B1 -1.973436f
+#define HPF_Fs22050_Fc66_B1 (-1.973436f)
#define HPF_Fs22050_Fc66_B2 0.973784f
#define HPF_Fs24000_Fc66_A0 0.976563f
-#define HPF_Fs24000_Fc66_A1 -1.953125f
+#define HPF_Fs24000_Fc66_A1 (-1.953125f)
#define HPF_Fs24000_Fc66_A2 0.976563f
-#define HPF_Fs24000_Fc66_B1 -1.975594f
+#define HPF_Fs24000_Fc66_B1 (-1.975594f)
#define HPF_Fs24000_Fc66_B2 0.975889f
#define HPF_Fs32000_Fc66_A0 0.979547f
-#define HPF_Fs32000_Fc66_A1 -1.959093f
+#define HPF_Fs32000_Fc66_A1 (-1.959093f)
#define HPF_Fs32000_Fc66_A2 0.979547f
-#define HPF_Fs32000_Fc66_B1 -1.981695f
+#define HPF_Fs32000_Fc66_B1 (-1.981695f)
#define HPF_Fs32000_Fc66_B2 0.981861f
#define HPF_Fs44100_Fc66_A0 0.982010f
-#define HPF_Fs44100_Fc66_A1 -1.964019f
+#define HPF_Fs44100_Fc66_A1 (-1.964019f)
#define HPF_Fs44100_Fc66_A2 0.982010f
-#define HPF_Fs44100_Fc66_B1 -1.986718f
+#define HPF_Fs44100_Fc66_B1 (-1.986718f)
#define HPF_Fs44100_Fc66_B2 0.986805f
#define HPF_Fs48000_Fc66_A0 0.982540f
-#define HPF_Fs48000_Fc66_A1 -1.965079f
+#define HPF_Fs48000_Fc66_A1 (-1.965079f)
#define HPF_Fs48000_Fc66_A2 0.982540f
-#define HPF_Fs48000_Fc66_B1 -1.987797f
+#define HPF_Fs48000_Fc66_B1 (-1.987797f)
#define HPF_Fs48000_Fc66_B2 0.987871f
#ifdef HIGHER_FS
#define HPF_Fs96000_Fc66_A0 0.985539f
-#define HPF_Fs96000_Fc66_A1 -1.971077f
+#define HPF_Fs96000_Fc66_A1 (-1.971077f)
#define HPF_Fs96000_Fc66_A2 0.985539f
-#define HPF_Fs96000_Fc66_B1 -1.993898f
+#define HPF_Fs96000_Fc66_B1 (-1.993898f)
#define HPF_Fs96000_Fc66_B2 0.993917f
#define HPF_Fs192000_Fc66_A0 0.987043f
-#define HPF_Fs192000_Fc66_A1 -1.974086f
+#define HPF_Fs192000_Fc66_A1 (-1.974086f)
#define HPF_Fs192000_Fc66_A2 0.987043f
-#define HPF_Fs192000_Fc66_B1 -1.996949f
+#define HPF_Fs192000_Fc66_B1 (-1.996949f)
#define HPF_Fs192000_Fc66_B2 0.996954f
#endif
/* Coefficients for centre frequency 78Hz */
#define HPF_Fs8000_Fc78_A0 0.946693f
-#define HPF_Fs8000_Fc78_A1 -1.893387f
+#define HPF_Fs8000_Fc78_A1 (-1.893387f)
#define HPF_Fs8000_Fc78_A2 0.946693f
-#define HPF_Fs8000_Fc78_B1 -1.913517f
+#define HPF_Fs8000_Fc78_B1 (-1.913517f)
#define HPF_Fs8000_Fc78_B2 0.917105f
#define HPF_Fs11025_Fc78_A0 0.957999f
-#define HPF_Fs11025_Fc78_A1 -1.915998f
+#define HPF_Fs11025_Fc78_A1 (-1.915998f)
#define HPF_Fs11025_Fc78_A2 0.957999f
-#define HPF_Fs11025_Fc78_B1 -1.937229f
+#define HPF_Fs11025_Fc78_B1 (-1.937229f)
#define HPF_Fs11025_Fc78_B2 0.939140f
#define HPF_Fs12000_Fc78_A0 0.960446f
-#define HPF_Fs12000_Fc78_A1 -1.920892f
+#define HPF_Fs12000_Fc78_A1 (-1.920892f)
#define HPF_Fs12000_Fc78_A2 0.960446f
-#define HPF_Fs12000_Fc78_B1 -1.942326f
+#define HPF_Fs12000_Fc78_B1 (-1.942326f)
#define HPF_Fs12000_Fc78_B2 0.943944f
#define HPF_Fs16000_Fc78_A0 0.967397f
-#define HPF_Fs16000_Fc78_A1 -1.934794f
+#define HPF_Fs16000_Fc78_A1 (-1.934794f)
#define HPF_Fs16000_Fc78_A2 0.967397f
-#define HPF_Fs16000_Fc78_B1 -1.956740f
+#define HPF_Fs16000_Fc78_B1 (-1.956740f)
#define HPF_Fs16000_Fc78_B2 0.957656f
#define HPF_Fs22050_Fc78_A0 0.973156f
-#define HPF_Fs22050_Fc78_A1 -1.946313f
+#define HPF_Fs22050_Fc78_A1 (-1.946313f)
#define HPF_Fs22050_Fc78_A2 0.973156f
-#define HPF_Fs22050_Fc78_B1 -1.968607f
+#define HPF_Fs22050_Fc78_B1 (-1.968607f)
#define HPF_Fs22050_Fc78_B2 0.969092f
#define HPF_Fs24000_Fc78_A0 0.974398f
-#define HPF_Fs24000_Fc78_A1 -1.948797f
+#define HPF_Fs24000_Fc78_A1 (-1.948797f)
#define HPF_Fs24000_Fc78_A2 0.974398f
-#define HPF_Fs24000_Fc78_B1 -1.971157f
+#define HPF_Fs24000_Fc78_B1 (-1.971157f)
#define HPF_Fs24000_Fc78_B2 0.971568f
#define HPF_Fs32000_Fc78_A0 0.977918f
-#define HPF_Fs32000_Fc78_A1 -1.955836f
+#define HPF_Fs32000_Fc78_A1 (-1.955836f)
#define HPF_Fs32000_Fc78_A2 0.977918f
-#define HPF_Fs32000_Fc78_B1 -1.978367f
+#define HPF_Fs32000_Fc78_B1 (-1.978367f)
#define HPF_Fs32000_Fc78_B2 0.978599f
#define HPF_Fs44100_Fc78_A0 0.980824f
-#define HPF_Fs44100_Fc78_A1 -1.961649f
+#define HPF_Fs44100_Fc78_A1 (-1.961649f)
#define HPF_Fs44100_Fc78_A2 0.980824f
-#define HPF_Fs44100_Fc78_B1 -1.984303f
+#define HPF_Fs44100_Fc78_B1 (-1.984303f)
#define HPF_Fs44100_Fc78_B2 0.984425f
#define HPF_Fs48000_Fc78_A0 0.981450f
-#define HPF_Fs48000_Fc78_A1 -1.962900f
+#define HPF_Fs48000_Fc78_A1 (-1.962900f)
#define HPF_Fs48000_Fc78_A2 0.981450f
-#define HPF_Fs48000_Fc78_B1 -1.985578f
+#define HPF_Fs48000_Fc78_B1 (-1.985578f)
#define HPF_Fs48000_Fc78_B2 0.985681f
#ifdef HIGHER_FS
#define HPF_Fs96000_Fc78_A0 0.984992f
-#define HPF_Fs96000_Fc78_A1 -1.969984f
+#define HPF_Fs96000_Fc78_A1 (-1.969984f)
#define HPF_Fs96000_Fc78_A2 0.984992f
-#define HPF_Fs96000_Fc78_B1 -1.992789f
+#define HPF_Fs96000_Fc78_B1 (-1.992789f)
#define HPF_Fs96000_Fc78_B2 0.992815f
#define HPF_Fs192000_Fc78_A0 0.986769f
-#define HPF_Fs192000_Fc78_A1 -1.973539f
+#define HPF_Fs192000_Fc78_A1 (-1.973539f)
#define HPF_Fs192000_Fc78_A2 0.986769f
-#define HPF_Fs192000_Fc78_B1 -1.996394f
+#define HPF_Fs192000_Fc78_B1 (-1.996394f)
#define HPF_Fs192000_Fc78_B2 0.996401f
#endif
/* Coefficients for centre frequency 90Hz */
#define HPF_Fs8000_Fc90_A0 0.940412f
-#define HPF_Fs8000_Fc90_A1 -1.880825f
+#define HPF_Fs8000_Fc90_A1 (-1.880825f)
#define HPF_Fs8000_Fc90_A2 0.940412f
-#define HPF_Fs8000_Fc90_B1 -1.900231f
+#define HPF_Fs8000_Fc90_B1 (-1.900231f)
#define HPF_Fs8000_Fc90_B2 0.904977f
#define HPF_Fs11025_Fc90_A0 0.953383f
-#define HPF_Fs11025_Fc90_A1 -1.906766f
+#define HPF_Fs11025_Fc90_A1 (-1.906766f)
#define HPF_Fs11025_Fc90_A2 0.953383f
-#define HPF_Fs11025_Fc90_B1 -1.927579f
+#define HPF_Fs11025_Fc90_B1 (-1.927579f)
#define HPF_Fs11025_Fc90_B2 0.930111f
#define HPF_Fs12000_Fc90_A0 0.956193f
-#define HPF_Fs12000_Fc90_A1 -1.912387f
+#define HPF_Fs12000_Fc90_A1 (-1.912387f)
#define HPF_Fs12000_Fc90_A2 0.956193f
-#define HPF_Fs12000_Fc90_B1 -1.933459f
+#define HPF_Fs12000_Fc90_B1 (-1.933459f)
#define HPF_Fs12000_Fc90_B2 0.935603f
#define HPF_Fs16000_Fc90_A0 0.964183f
-#define HPF_Fs16000_Fc90_A1 -1.928365f
+#define HPF_Fs16000_Fc90_A1 (-1.928365f)
#define HPF_Fs16000_Fc90_A2 0.964183f
-#define HPF_Fs16000_Fc90_B1 -1.950087f
+#define HPF_Fs16000_Fc90_B1 (-1.950087f)
#define HPF_Fs16000_Fc90_B2 0.951303f
#define HPF_Fs22050_Fc90_A0 0.970809f
-#define HPF_Fs22050_Fc90_A1 -1.941618f
+#define HPF_Fs22050_Fc90_A1 (-1.941618f)
#define HPF_Fs22050_Fc90_A2 0.970809f
-#define HPF_Fs22050_Fc90_B1 -1.963778f
+#define HPF_Fs22050_Fc90_B1 (-1.963778f)
#define HPF_Fs22050_Fc90_B2 0.964423f
#define HPF_Fs24000_Fc90_A0 0.972239f
-#define HPF_Fs24000_Fc90_A1 -1.944477f
+#define HPF_Fs24000_Fc90_A1 (-1.944477f)
#define HPF_Fs24000_Fc90_A2 0.972239f
-#define HPF_Fs24000_Fc90_B1 -1.966721f
+#define HPF_Fs24000_Fc90_B1 (-1.966721f)
#define HPF_Fs24000_Fc90_B2 0.967266f
#define HPF_Fs32000_Fc90_A0 0.976292f
-#define HPF_Fs32000_Fc90_A1 -1.952584f
+#define HPF_Fs32000_Fc90_A1 (-1.952584f)
#define HPF_Fs32000_Fc90_A2 0.976292f
-#define HPF_Fs32000_Fc90_B1 -1.975040f
+#define HPF_Fs32000_Fc90_B1 (-1.975040f)
#define HPF_Fs32000_Fc90_B2 0.975347f
#define HPF_Fs44100_Fc90_A0 0.979641f
-#define HPF_Fs44100_Fc90_A1 -1.959282f
+#define HPF_Fs44100_Fc90_A1 (-1.959282f)
#define HPF_Fs44100_Fc90_A2 0.979641f
-#define HPF_Fs44100_Fc90_B1 -1.981888f
+#define HPF_Fs44100_Fc90_B1 (-1.981888f)
#define HPF_Fs44100_Fc90_B2 0.982050f
#define HPF_Fs48000_Fc90_A0 0.980362f
-#define HPF_Fs48000_Fc90_A1 -1.960724f
+#define HPF_Fs48000_Fc90_A1 (-1.960724f)
#define HPF_Fs48000_Fc90_A2 0.980362f
-#define HPF_Fs48000_Fc90_B1 -1.983359f
+#define HPF_Fs48000_Fc90_B1 (-1.983359f)
#define HPF_Fs48000_Fc90_B2 0.983497f
#ifdef HIGHER_FS
#define HPF_Fs96000_Fc90_A0 0.984446f
-#define HPF_Fs96000_Fc90_A1 -1.968892f
+#define HPF_Fs96000_Fc90_A1 (-1.968892f)
#define HPF_Fs96000_Fc90_A2 0.984446f
-#define HPF_Fs96000_Fc90_B1 -1.991680f
+#define HPF_Fs96000_Fc90_B1 (-1.991680f)
#define HPF_Fs96000_Fc90_B2 0.991714f
#define HPF_Fs192000_Fc90_A0 0.986496f
-#define HPF_Fs192000_Fc90_A1 -1.972992f
+#define HPF_Fs192000_Fc90_A1 (-1.972992f)
#define HPF_Fs192000_Fc90_A2 0.986496f
-#define HPF_Fs192000_Fc90_B1 -1.995840f
+#define HPF_Fs192000_Fc90_B1 (-1.995840f)
#define HPF_Fs192000_Fc90_B2 0.995848f
#endif
@@ -786,244 +786,244 @@
/* Coefficients for centre frequency 55Hz */
#define BPF_Fs8000_Fc55_A0 0.009197f
#define BPF_Fs8000_Fc55_A1 0.000000f
-#define BPF_Fs8000_Fc55_A2 -0.009197f
-#define BPF_Fs8000_Fc55_B1 -1.979545f
+#define BPF_Fs8000_Fc55_A2 (-0.009197f)
+#define BPF_Fs8000_Fc55_B1 (-1.979545f)
#define BPF_Fs8000_Fc55_B2 0.981393f
#define BPF_Fs11025_Fc55_A0 0.006691f
#define BPF_Fs11025_Fc55_A1 0.000000f
-#define BPF_Fs11025_Fc55_A2 -0.006691f
-#define BPF_Fs11025_Fc55_B1 -1.985488f
+#define BPF_Fs11025_Fc55_A2 (-0.006691f)
+#define BPF_Fs11025_Fc55_B1 (-1.985488f)
#define BPF_Fs11025_Fc55_B2 0.986464f
#define BPF_Fs12000_Fc55_A0 0.006150f
#define BPF_Fs12000_Fc55_A1 0.000000f
-#define BPF_Fs12000_Fc55_A2 -0.006150f
-#define BPF_Fs12000_Fc55_B1 -1.986733f
+#define BPF_Fs12000_Fc55_A2 (-0.006150f)
+#define BPF_Fs12000_Fc55_B1 (-1.986733f)
#define BPF_Fs12000_Fc55_B2 0.987557f
#define BPF_Fs16000_Fc55_A0 0.004620f
#define BPF_Fs16000_Fc55_A1 0.000000f
-#define BPF_Fs16000_Fc55_A2 -0.004620f
-#define BPF_Fs16000_Fc55_B1 -1.990189f
+#define BPF_Fs16000_Fc55_A2 (-0.004620f)
+#define BPF_Fs16000_Fc55_B1 (-1.990189f)
#define BPF_Fs16000_Fc55_B2 0.990653f
#define BPF_Fs22050_Fc55_A0 0.003357f
#define BPF_Fs22050_Fc55_A1 0.000000f
-#define BPF_Fs22050_Fc55_A2 -0.003357f
-#define BPF_Fs22050_Fc55_B1 -1.992964f
+#define BPF_Fs22050_Fc55_A2 (-0.003357f)
+#define BPF_Fs22050_Fc55_B1 (-1.992964f)
#define BPF_Fs22050_Fc55_B2 0.993209f
#define BPF_Fs24000_Fc55_A0 0.003085f
#define BPF_Fs24000_Fc55_A1 0.000000f
-#define BPF_Fs24000_Fc55_A2 -0.003085f
-#define BPF_Fs24000_Fc55_B1 -1.993552f
+#define BPF_Fs24000_Fc55_A2 (-0.003085f)
+#define BPF_Fs24000_Fc55_B1 (-1.993552f)
#define BPF_Fs24000_Fc55_B2 0.993759f
#define BPF_Fs32000_Fc55_A0 0.002315f
#define BPF_Fs32000_Fc55_A1 0.000000f
-#define BPF_Fs32000_Fc55_A2 -0.002315f
-#define BPF_Fs32000_Fc55_B1 -1.995199f
+#define BPF_Fs32000_Fc55_A2 (-0.002315f)
+#define BPF_Fs32000_Fc55_B1 (-1.995199f)
#define BPF_Fs32000_Fc55_B2 0.995316f
#define BPF_Fs44100_Fc55_A0 0.001681f
#define BPF_Fs44100_Fc55_A1 0.000000f
-#define BPF_Fs44100_Fc55_A2 -0.001681f
-#define BPF_Fs44100_Fc55_B1 -1.996537f
+#define BPF_Fs44100_Fc55_A2 (-0.001681f)
+#define BPF_Fs44100_Fc55_B1 (-1.996537f)
#define BPF_Fs44100_Fc55_B2 0.996599f
#define BPF_Fs48000_Fc55_A0 0.001545f
#define BPF_Fs48000_Fc55_A1 0.000000f
-#define BPF_Fs48000_Fc55_A2 -0.001545f
-#define BPF_Fs48000_Fc55_B1 -1.996823f
+#define BPF_Fs48000_Fc55_A2 (-0.001545f)
+#define BPF_Fs48000_Fc55_B1 (-1.996823f)
#define BPF_Fs48000_Fc55_B2 0.996875f
#ifdef HIGHER_FS
#define BPF_Fs96000_Fc55_A0 0.000762f
#define BPF_Fs96000_Fc55_A1 0.000000f
-#define BPF_Fs96000_Fc55_A2 -0.000762f
-#define BPF_Fs96000_Fc55_B1 -1.998461f
+#define BPF_Fs96000_Fc55_A2 (-0.000762f)
+#define BPF_Fs96000_Fc55_B1 (-1.998461f)
#define BPF_Fs96000_Fc55_B2 0.998477f
#define BPF_Fs192000_Fc55_A0 0.000381f
#define BPF_Fs192000_Fc55_A1 0.000000f
-#define BPF_Fs192000_Fc55_A2 -0.000381f
-#define BPF_Fs192000_Fc55_B1 -1.999234f
+#define BPF_Fs192000_Fc55_A2 (-0.000381f)
+#define BPF_Fs192000_Fc55_B1 (-1.999234f)
#define BPF_Fs192000_Fc55_B2 0.999238f
#endif
/* Coefficients for centre frequency 66Hz */
#define BPF_Fs8000_Fc66_A0 0.012648f
#define BPF_Fs8000_Fc66_A1 0.000000f
-#define BPF_Fs8000_Fc66_A2 -0.012648f
-#define BPF_Fs8000_Fc66_B1 -1.971760f
+#define BPF_Fs8000_Fc66_A2 (-0.012648f)
+#define BPF_Fs8000_Fc66_B1 (-1.971760f)
#define BPF_Fs8000_Fc66_B2 0.974412f
#define BPF_Fs11025_Fc66_A0 0.009209f
#define BPF_Fs11025_Fc66_A1 0.000000f
-#define BPF_Fs11025_Fc66_A2 -0.009209f
-#define BPF_Fs11025_Fc66_B1 -1.979966f
+#define BPF_Fs11025_Fc66_A2 (-0.009209f)
+#define BPF_Fs11025_Fc66_B1 (-1.979966f)
#define BPF_Fs11025_Fc66_B2 0.981368f
#define BPF_Fs12000_Fc66_A0 0.008468f
#define BPF_Fs12000_Fc66_A1 0.000000f
-#define BPF_Fs12000_Fc66_A2 -0.008468f
-#define BPF_Fs12000_Fc66_B1 -1.981685f
+#define BPF_Fs12000_Fc66_A2 (-0.008468f)
+#define BPF_Fs12000_Fc66_B1 (-1.981685f)
#define BPF_Fs12000_Fc66_B2 0.982869f
#define BPF_Fs16000_Fc66_A0 0.006364f
#define BPF_Fs16000_Fc66_A1 0.000000f
-#define BPF_Fs16000_Fc66_A2 -0.006364f
-#define BPF_Fs16000_Fc66_B1 -1.986457f
+#define BPF_Fs16000_Fc66_A2 (-0.006364f)
+#define BPF_Fs16000_Fc66_B1 (-1.986457f)
#define BPF_Fs16000_Fc66_B2 0.987124f
#define BPF_Fs22050_Fc66_A0 0.004626f
#define BPF_Fs22050_Fc66_A1 0.000000f
-#define BPF_Fs22050_Fc66_A2 -0.004626f
-#define BPF_Fs22050_Fc66_B1 -1.990288f
+#define BPF_Fs22050_Fc66_A2 (-0.004626f)
+#define BPF_Fs22050_Fc66_B1 (-1.990288f)
#define BPF_Fs22050_Fc66_B2 0.990641f
#define BPF_Fs24000_Fc66_A0 0.004252f
#define BPF_Fs24000_Fc66_A1 0.000000f
-#define BPF_Fs24000_Fc66_A2 -0.004252f
-#define BPF_Fs24000_Fc66_B1 -1.991100f
+#define BPF_Fs24000_Fc66_A2 (-0.004252f)
+#define BPF_Fs24000_Fc66_B1 (-1.991100f)
#define BPF_Fs24000_Fc66_B2 0.991398f
#define BPF_Fs32000_Fc66_A0 0.003192f
#define BPF_Fs32000_Fc66_A1 0.000000f
-#define BPF_Fs32000_Fc66_A2 -0.003192f
-#define BPF_Fs32000_Fc66_B1 -1.993374f
+#define BPF_Fs32000_Fc66_A2 (-0.003192f)
+#define BPF_Fs32000_Fc66_B1 (-1.993374f)
#define BPF_Fs32000_Fc66_B2 0.993541f
#define BPF_Fs44100_Fc66_A0 0.002318f
#define BPF_Fs44100_Fc66_A1 0.000000f
-#define BPF_Fs44100_Fc66_A2 -0.002318f
-#define BPF_Fs44100_Fc66_B1 -1.995221f
+#define BPF_Fs44100_Fc66_A2 (-0.002318f)
+#define BPF_Fs44100_Fc66_B1 (-1.995221f)
#define BPF_Fs44100_Fc66_B2 0.995309f
#define BPF_Fs48000_Fc66_A0 0.002131f
#define BPF_Fs48000_Fc66_A1 0.000000f
-#define BPF_Fs48000_Fc66_A2 -0.002131f
-#define BPF_Fs48000_Fc66_B1 -1.995615f
+#define BPF_Fs48000_Fc66_A2 (-0.002131f)
+#define BPF_Fs48000_Fc66_B1 (-1.995615f)
#define BPF_Fs48000_Fc66_B2 0.995690f
#ifdef HIGHER_FS
#define BPF_Fs96000_Fc66_A0 0.001055f
#define BPF_Fs96000_Fc66_A1 0.000000f
-#define BPF_Fs96000_Fc66_A2 -0.001055f
-#define BPF_Fs96000_Fc66_B1 -1.997868f
+#define BPF_Fs96000_Fc66_A2 (-0.001055f)
+#define BPF_Fs96000_Fc66_B1 (-1.997868f)
#define BPF_Fs96000_Fc66_B2 0.997891f
#define BPF_Fs192000_Fc66_A0 0.000528f
#define BPF_Fs192000_Fc66_A1 0.000000f
-#define BPF_Fs192000_Fc66_A2 -0.000528f
-#define BPF_Fs192000_Fc66_B1 -1.998939f
+#define BPF_Fs192000_Fc66_A2 (-0.000528f)
+#define BPF_Fs192000_Fc66_B1 (-1.998939f)
#define BPF_Fs192000_Fc66_B2 0.998945f
#endif
/* Coefficients for centre frequency 78Hz */
#define BPF_Fs8000_Fc78_A0 0.018572f
#define BPF_Fs8000_Fc78_A1 0.000000f
-#define BPF_Fs8000_Fc78_A2 -0.018572f
-#define BPF_Fs8000_Fc78_B1 -1.958745f
+#define BPF_Fs8000_Fc78_A2 (-0.018572f)
+#define BPF_Fs8000_Fc78_B1 (-1.958745f)
#define BPF_Fs8000_Fc78_B2 0.962427f
#define BPF_Fs11025_Fc78_A0 0.013545f
#define BPF_Fs11025_Fc78_A1 0.000000f
-#define BPF_Fs11025_Fc78_A2 -0.013545f
-#define BPF_Fs11025_Fc78_B1 -1.970647f
+#define BPF_Fs11025_Fc78_A2 (-0.013545f)
+#define BPF_Fs11025_Fc78_B1 (-1.970647f)
#define BPF_Fs11025_Fc78_B2 0.972596f
#define BPF_Fs12000_Fc78_A0 0.012458f
#define BPF_Fs12000_Fc78_A1 0.000000f
-#define BPF_Fs12000_Fc78_A2 -0.012458f
-#define BPF_Fs12000_Fc78_B1 -1.973148f
+#define BPF_Fs12000_Fc78_A2 (-0.012458f)
+#define BPF_Fs12000_Fc78_B1 (-1.973148f)
#define BPF_Fs12000_Fc78_B2 0.974795f
#define BPF_Fs16000_Fc78_A0 0.009373f
#define BPF_Fs16000_Fc78_A1 0.000000f
-#define BPF_Fs16000_Fc78_A2 -0.009373f
-#define BPF_Fs16000_Fc78_B1 -1.980108f
+#define BPF_Fs16000_Fc78_A2 (-0.009373f)
+#define BPF_Fs16000_Fc78_B1 (-1.980108f)
#define BPF_Fs16000_Fc78_B2 0.981037f
#define BPF_Fs22050_Fc78_A0 0.006819f
#define BPF_Fs22050_Fc78_A1 0.000000f
-#define BPF_Fs22050_Fc78_A2 -0.006819f
-#define BPF_Fs22050_Fc78_B1 -1.985714f
+#define BPF_Fs22050_Fc78_A2 (-0.006819f)
+#define BPF_Fs22050_Fc78_B1 (-1.985714f)
#define BPF_Fs22050_Fc78_B2 0.986204f
#define BPF_Fs24000_Fc78_A0 0.006268f
#define BPF_Fs24000_Fc78_A1 0.000000f
-#define BPF_Fs24000_Fc78_A2 -0.006268f
-#define BPF_Fs24000_Fc78_B1 -1.986904f
+#define BPF_Fs24000_Fc78_A2 (-0.006268f)
+#define BPF_Fs24000_Fc78_B1 (-1.986904f)
#define BPF_Fs24000_Fc78_B2 0.987318f
#define BPF_Fs32000_Fc78_A0 0.004709f
#define BPF_Fs32000_Fc78_A1 0.000000f
-#define BPF_Fs32000_Fc78_A2 -0.004709f
-#define BPF_Fs32000_Fc78_B1 -1.990240f
+#define BPF_Fs32000_Fc78_A2 (-0.004709f)
+#define BPF_Fs32000_Fc78_B1 (-1.990240f)
#define BPF_Fs32000_Fc78_B2 0.990473f
#define BPF_Fs44100_Fc78_A0 0.003421f
#define BPF_Fs44100_Fc78_A1 0.000000f
-#define BPF_Fs44100_Fc78_A2 -0.003421f
-#define BPF_Fs44100_Fc78_B1 -1.992955f
+#define BPF_Fs44100_Fc78_A2 (-0.003421f)
+#define BPF_Fs44100_Fc78_B1 (-1.992955f)
#define BPF_Fs44100_Fc78_B2 0.993078f
#define BPF_Fs48000_Fc78_A0 0.003144f
#define BPF_Fs48000_Fc78_A1 0.000000f
-#define BPF_Fs48000_Fc78_A2 -0.003144f
-#define BPF_Fs48000_Fc78_B1 -1.993535f
+#define BPF_Fs48000_Fc78_A2 (-0.003144f)
+#define BPF_Fs48000_Fc78_B1 (-1.993535f)
#define BPF_Fs48000_Fc78_B2 0.993639f
#ifdef HIGHER_FS
#define BPF_Fs96000_Fc78_A0 0.001555f
#define BPF_Fs96000_Fc78_A1 0.000000f
-#define BPF_Fs96000_Fc78_A2 -0.0015555f
-#define BPF_Fs96000_Fc78_B1 -1.996860f
+#define BPF_Fs96000_Fc78_A2 (-0.0015555f)
+#define BPF_Fs96000_Fc78_B1 (-1.996860f)
#define BPF_Fs96000_Fc78_B2 0.996891f
#define BPF_Fs192000_Fc78_A0 0.000778f
#define BPF_Fs192000_Fc78_A1 0.000000f
-#define BPF_Fs192000_Fc78_A2 -0.000778f
-#define BPF_Fs192000_Fc78_B1 -1.998437f
+#define BPF_Fs192000_Fc78_A2 (-0.000778f)
+#define BPF_Fs192000_Fc78_B1 (-1.998437f)
#define BPF_Fs192000_Fc78_B2 0.998444f
#endif
/* Coefficients for centre frequency 90Hz */
#define BPF_Fs8000_Fc90_A0 0.022760f
#define BPF_Fs8000_Fc90_A1 0.000000f
-#define BPF_Fs8000_Fc90_A2 -0.022760f
-#define BPF_Fs8000_Fc90_B1 -1.949073f
+#define BPF_Fs8000_Fc90_A2 (-0.022760f)
+#define BPF_Fs8000_Fc90_B1 (-1.949073f)
#define BPF_Fs8000_Fc90_B2 0.953953f
#define BPF_Fs11025_Fc90_A0 0.016619f
#define BPF_Fs11025_Fc90_A1 0.000000f
-#define BPF_Fs11025_Fc90_A2 -0.016619f
-#define BPF_Fs11025_Fc90_B1 -1.963791f
+#define BPF_Fs11025_Fc90_A2 (-0.016619f)
+#define BPF_Fs11025_Fc90_B1 (-1.963791f)
#define BPF_Fs11025_Fc90_B2 0.966377f
#define BPF_Fs12000_Fc90_A0 0.015289f
#define BPF_Fs12000_Fc90_A1 0.000000f
-#define BPF_Fs12000_Fc90_A2 -0.015289f
-#define BPF_Fs12000_Fc90_B1 -1.966882f
+#define BPF_Fs12000_Fc90_A2 (-0.015289f)
+#define BPF_Fs12000_Fc90_B1 (-1.966882f)
#define BPF_Fs12000_Fc90_B2 0.969067f
#define BPF_Fs16000_Fc90_A0 0.011511f
#define BPF_Fs16000_Fc90_A1 0.000000f
-#define BPF_Fs16000_Fc90_A2 -0.011511f
-#define BPF_Fs16000_Fc90_B1 -1.975477f
+#define BPF_Fs16000_Fc90_A2 (-0.011511f)
+#define BPF_Fs16000_Fc90_B1 (-1.975477f)
#define BPF_Fs16000_Fc90_B2 0.976711f
#define BPF_Fs22050_Fc90_A0 0.008379f
#define BPF_Fs22050_Fc90_A1 0.000000f
-#define BPF_Fs22050_Fc90_A2 -0.008379f
-#define BPF_Fs22050_Fc90_B1 -1.982395f
+#define BPF_Fs22050_Fc90_A2 (-0.008379f)
+#define BPF_Fs22050_Fc90_B1 (-1.982395f)
#define BPF_Fs22050_Fc90_B2 0.983047f
#define BPF_Fs24000_Fc90_A0 0.007704f
#define BPF_Fs24000_Fc90_A1 0.000000f
-#define BPF_Fs24000_Fc90_A2 -0.007704f
-#define BPF_Fs24000_Fc90_B1 -1.983863f
+#define BPF_Fs24000_Fc90_A2 (-0.007704f)
+#define BPF_Fs24000_Fc90_B1 (-1.983863f)
#define BPF_Fs24000_Fc90_B2 0.984414f
#define BPF_Fs32000_Fc90_A0 0.005789f
#define BPF_Fs32000_Fc90_A1 0.000000f
-#define BPF_Fs32000_Fc90_A2 -0.005789f
-#define BPF_Fs32000_Fc90_B1 -1.987977f
+#define BPF_Fs32000_Fc90_A2 (-0.005789f)
+#define BPF_Fs32000_Fc90_B1 (-1.987977f)
#define BPF_Fs32000_Fc90_B2 0.988288f
#define BPF_Fs44100_Fc90_A0 0.004207f
#define BPF_Fs44100_Fc90_A1 0.000000f
-#define BPF_Fs44100_Fc90_A2 -0.004207f
-#define BPF_Fs44100_Fc90_B1 -1.991324f
+#define BPF_Fs44100_Fc90_A2 (-0.004207f)
+#define BPF_Fs44100_Fc90_B1 (-1.991324f)
#define BPF_Fs44100_Fc90_B2 0.991488f
#define BPF_Fs48000_Fc90_A0 0.003867f
#define BPF_Fs48000_Fc90_A1 0.000000f
-#define BPF_Fs48000_Fc90_A2 -0.003867f
-#define BPF_Fs48000_Fc90_B1 -1.992038f
+#define BPF_Fs48000_Fc90_A2 (-0.003867f)
+#define BPF_Fs48000_Fc90_B1 (-1.992038f)
#define BPF_Fs48000_Fc90_B2 0.992177f
#ifdef HIGHER_FS
#define BPF_Fs96000_Fc90_A0 0.001913f
#define BPF_Fs96000_Fc90_A1 0.000000f
-#define BPF_Fs96000_Fc90_A2 -0.001913f
-#define BPF_Fs96000_Fc90_B1 -1.996134f
+#define BPF_Fs96000_Fc90_A2 (-0.001913f)
+#define BPF_Fs96000_Fc90_B1 (-1.996134f)
#define BPF_Fs96000_Fc90_B2 0.996174f
#define BPF_Fs192000_Fc90_A0 0.000958f
#define BPF_Fs192000_Fc90_A1 0.000000f
-#define BPF_Fs192000_Fc90_A2 -0.000958f
-#define BPF_Fs192000_Fc90_B1 -1.998075f
+#define BPF_Fs192000_Fc90_A2 (-0.000958f)
+#define BPF_Fs192000_Fc90_B1 (-1.998075f)
#define BPF_Fs192000_Fc90_B2 0.998085f
#endif
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
index 353560c..8c04847 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
@@ -69,55 +69,55 @@
#define HPF_Fs22050_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs22050_Gain7_A0 1.390177
-#define HPF_Fs22050_Gain7_A1 -0.020144
+#define HPF_Fs22050_Gain7_A1 (-0.020144)
#define HPF_Fs22050_Gain7_A2 0.000000
#define HPF_Fs22050_Gain7_B1 0.370033
#define HPF_Fs22050_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs22050_Gain8_A0 1.476219
-#define HPF_Fs22050_Gain8_A1 -0.106187
+#define HPF_Fs22050_Gain8_A1 (-0.106187)
#define HPF_Fs22050_Gain8_A2 0.000000
#define HPF_Fs22050_Gain8_B1 0.370033
#define HPF_Fs22050_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs22050_Gain9_A0 1.572761
-#define HPF_Fs22050_Gain9_A1 -0.202728
+#define HPF_Fs22050_Gain9_A1 (-0.202728)
#define HPF_Fs22050_Gain9_A2 0.000000
#define HPF_Fs22050_Gain9_B1 0.370033
#define HPF_Fs22050_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs22050_Gain10_A0 1.681082
-#define HPF_Fs22050_Gain10_A1 -0.311049
+#define HPF_Fs22050_Gain10_A1 (-0.311049)
#define HPF_Fs22050_Gain10_A2 0.000000
#define HPF_Fs22050_Gain10_B1 0.370033
#define HPF_Fs22050_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs22050_Gain11_A0 1.802620
-#define HPF_Fs22050_Gain11_A1 -0.432588
+#define HPF_Fs22050_Gain11_A1 (-0.432588)
#define HPF_Fs22050_Gain11_A2 0.000000
#define HPF_Fs22050_Gain11_B1 0.370033
#define HPF_Fs22050_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs22050_Gain12_A0 1.938989
-#define HPF_Fs22050_Gain12_A1 -0.568956
+#define HPF_Fs22050_Gain12_A1 (-0.568956)
#define HPF_Fs22050_Gain12_A2 0.000000
#define HPF_Fs22050_Gain12_B1 0.370033
#define HPF_Fs22050_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs22050_Gain13_A0 2.091997
-#define HPF_Fs22050_Gain13_A1 -0.721964
+#define HPF_Fs22050_Gain13_A1 (-0.721964)
#define HPF_Fs22050_Gain13_A2 0.000000
#define HPF_Fs22050_Gain13_B1 0.370033
#define HPF_Fs22050_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs22050_Gain14_A0 2.263674
-#define HPF_Fs22050_Gain14_A1 -0.893641
+#define HPF_Fs22050_Gain14_A1 (-0.893641)
#define HPF_Fs22050_Gain14_A2 0.000000
#define HPF_Fs22050_Gain14_B1 0.370033
#define HPF_Fs22050_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs22050_Gain15_A0 2.456300
-#define HPF_Fs22050_Gain15_A1 -1.086267
+#define HPF_Fs22050_Gain15_A1 (-1.086267)
#define HPF_Fs22050_Gain15_A2 0.000000
#define HPF_Fs22050_Gain15_B1 0.370033
#define HPF_Fs22050_Gain15_B2 0.000000
@@ -148,342 +148,342 @@
#define HPF_Fs24000_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs24000_Gain5_A0 1.284870
-#define HPF_Fs24000_Gain5_A1 -0.016921
+#define HPF_Fs24000_Gain5_A1 (-0.016921)
#define HPF_Fs24000_Gain5_A2 0.000000
#define HPF_Fs24000_Gain5_B1 0.267949
#define HPF_Fs24000_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs24000_Gain6_A0 1.364291
-#define HPF_Fs24000_Gain6_A1 -0.096342
+#define HPF_Fs24000_Gain6_A1 (-0.096342)
#define HPF_Fs24000_Gain6_A2 0.000000
#define HPF_Fs24000_Gain6_B1 0.267949
#define HPF_Fs24000_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs24000_Gain7_A0 1.453403
-#define HPF_Fs24000_Gain7_A1 -0.185454
+#define HPF_Fs24000_Gain7_A1 (-0.185454)
#define HPF_Fs24000_Gain7_A2 0.000000
#define HPF_Fs24000_Gain7_B1 0.267949
#define HPF_Fs24000_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs24000_Gain8_A0 1.553389
-#define HPF_Fs24000_Gain8_A1 -0.285440
+#define HPF_Fs24000_Gain8_A1 (-0.285440)
#define HPF_Fs24000_Gain8_A2 0.000000
#define HPF_Fs24000_Gain8_B1 0.267949
#define HPF_Fs24000_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs24000_Gain9_A0 1.665574
-#define HPF_Fs24000_Gain9_A1 -0.397625
+#define HPF_Fs24000_Gain9_A1 (-0.397625)
#define HPF_Fs24000_Gain9_A2 0.000000
#define HPF_Fs24000_Gain9_B1 0.267949
#define HPF_Fs24000_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs24000_Gain10_A0 1.791449
-#define HPF_Fs24000_Gain10_A1 -0.523499
+#define HPF_Fs24000_Gain10_A1 (-0.523499)
#define HPF_Fs24000_Gain10_A2 0.000000
#define HPF_Fs24000_Gain10_B1 0.267949
#define HPF_Fs24000_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs24000_Gain11_A0 1.932682
-#define HPF_Fs24000_Gain11_A1 -0.664733
+#define HPF_Fs24000_Gain11_A1 (-0.664733)
#define HPF_Fs24000_Gain11_A2 0.000000
#define HPF_Fs24000_Gain11_B1 0.267949
#define HPF_Fs24000_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs24000_Gain12_A0 2.091148
-#define HPF_Fs24000_Gain12_A1 -0.823199
+#define HPF_Fs24000_Gain12_A1 (-0.823199)
#define HPF_Fs24000_Gain12_A2 0.000000
#define HPF_Fs24000_Gain12_B1 0.267949
#define HPF_Fs24000_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs24000_Gain13_A0 2.268950
-#define HPF_Fs24000_Gain13_A1 -1.001001
+#define HPF_Fs24000_Gain13_A1 (-1.001001)
#define HPF_Fs24000_Gain13_A2 0.000000
#define HPF_Fs24000_Gain13_B1 0.267949
#define HPF_Fs24000_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs24000_Gain14_A0 2.468447
-#define HPF_Fs24000_Gain14_A1 -1.200498
+#define HPF_Fs24000_Gain14_A1 (-1.200498)
#define HPF_Fs24000_Gain14_A2 0.000000
#define HPF_Fs24000_Gain14_B1 0.267949
#define HPF_Fs24000_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs24000_Gain15_A0 2.692287
-#define HPF_Fs24000_Gain15_A1 -1.424338
+#define HPF_Fs24000_Gain15_A1 (-1.424338)
#define HPF_Fs24000_Gain15_A2 0.000000
#define HPF_Fs24000_Gain15_B1 0.267949
#define HPF_Fs24000_Gain15_B2 0.000000
/* Coefficients for sample rate 32000Hz */
/* Gain = 1.000000 dB */
#define HPF_Fs32000_Gain1_A0 1.061009
-#define HPF_Fs32000_Gain1_A1 -0.061009
+#define HPF_Fs32000_Gain1_A1 (-0.061009)
#define HPF_Fs32000_Gain1_A2 0.000000
-#define HPF_Fs32000_Gain1_B1 -0.000000
+#define HPF_Fs32000_Gain1_B1 (-0.000000)
#define HPF_Fs32000_Gain1_B2 0.000000
/* Gain = 2.000000 dB */
#define HPF_Fs32000_Gain2_A0 1.129463
-#define HPF_Fs32000_Gain2_A1 -0.129463
+#define HPF_Fs32000_Gain2_A1 (-0.129463)
#define HPF_Fs32000_Gain2_A2 0.000000
-#define HPF_Fs32000_Gain2_B1 -0.000000
+#define HPF_Fs32000_Gain2_B1 (-0.000000)
#define HPF_Fs32000_Gain2_B2 0.000000
/* Gain = 3.000000 dB */
#define HPF_Fs32000_Gain3_A0 1.206267
-#define HPF_Fs32000_Gain3_A1 -0.206267
+#define HPF_Fs32000_Gain3_A1 (-0.206267)
#define HPF_Fs32000_Gain3_A2 0.000000
-#define HPF_Fs32000_Gain3_B1 -0.000000
+#define HPF_Fs32000_Gain3_B1 (-0.000000)
#define HPF_Fs32000_Gain3_B2 0.000000
/* Gain = 4.000000 dB */
#define HPF_Fs32000_Gain4_A0 1.292447
-#define HPF_Fs32000_Gain4_A1 -0.292447
+#define HPF_Fs32000_Gain4_A1 (-0.292447)
#define HPF_Fs32000_Gain4_A2 0.000000
-#define HPF_Fs32000_Gain4_B1 -0.000000
+#define HPF_Fs32000_Gain4_B1 (-0.000000)
#define HPF_Fs32000_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs32000_Gain5_A0 1.389140
-#define HPF_Fs32000_Gain5_A1 -0.389140
+#define HPF_Fs32000_Gain5_A1 (-0.389140)
#define HPF_Fs32000_Gain5_A2 0.000000
-#define HPF_Fs32000_Gain5_B1 -0.000000
+#define HPF_Fs32000_Gain5_B1 (-0.000000)
#define HPF_Fs32000_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs32000_Gain6_A0 1.497631
-#define HPF_Fs32000_Gain6_A1 -0.497631
+#define HPF_Fs32000_Gain6_A1 (-0.497631)
#define HPF_Fs32000_Gain6_A2 0.000000
-#define HPF_Fs32000_Gain6_B1 -0.000000
+#define HPF_Fs32000_Gain6_B1 (-0.000000)
#define HPF_Fs32000_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs32000_Gain7_A0 1.619361
-#define HPF_Fs32000_Gain7_A1 -0.619361
+#define HPF_Fs32000_Gain7_A1 (-0.619361)
#define HPF_Fs32000_Gain7_A2 0.000000
-#define HPF_Fs32000_Gain7_B1 -0.000000
+#define HPF_Fs32000_Gain7_B1 (-0.000000)
#define HPF_Fs32000_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs32000_Gain8_A0 1.755943
-#define HPF_Fs32000_Gain8_A1 -0.755943
+#define HPF_Fs32000_Gain8_A1 (-0.755943)
#define HPF_Fs32000_Gain8_A2 0.000000
-#define HPF_Fs32000_Gain8_B1 -0.000000
+#define HPF_Fs32000_Gain8_B1 (-0.000000)
#define HPF_Fs32000_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs32000_Gain9_A0 1.909191
-#define HPF_Fs32000_Gain9_A1 -0.909191
+#define HPF_Fs32000_Gain9_A1 (-0.909191)
#define HPF_Fs32000_Gain9_A2 0.000000
-#define HPF_Fs32000_Gain9_B1 -0.000000
+#define HPF_Fs32000_Gain9_B1 (-0.000000)
#define HPF_Fs32000_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs32000_Gain10_A0 2.081139
-#define HPF_Fs32000_Gain10_A1 -1.081139
+#define HPF_Fs32000_Gain10_A1 (-1.081139)
#define HPF_Fs32000_Gain10_A2 0.000000
-#define HPF_Fs32000_Gain10_B1 -0.000000
+#define HPF_Fs32000_Gain10_B1 (-0.000000)
#define HPF_Fs32000_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs32000_Gain11_A0 2.274067
-#define HPF_Fs32000_Gain11_A1 -1.274067
+#define HPF_Fs32000_Gain11_A1 (-1.274067)
#define HPF_Fs32000_Gain11_A2 0.000000
-#define HPF_Fs32000_Gain11_B1 -0.000000
+#define HPF_Fs32000_Gain11_B1 (-0.000000)
#define HPF_Fs32000_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs32000_Gain12_A0 2.490536
-#define HPF_Fs32000_Gain12_A1 -1.490536
+#define HPF_Fs32000_Gain12_A1 (-1.490536)
#define HPF_Fs32000_Gain12_A2 0.000000
-#define HPF_Fs32000_Gain12_B1 -0.000000
+#define HPF_Fs32000_Gain12_B1 (-0.000000)
#define HPF_Fs32000_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs32000_Gain13_A0 2.733418
-#define HPF_Fs32000_Gain13_A1 -1.733418
+#define HPF_Fs32000_Gain13_A1 (-1.733418)
#define HPF_Fs32000_Gain13_A2 0.000000
-#define HPF_Fs32000_Gain13_B1 -0.000000
+#define HPF_Fs32000_Gain13_B1 (-0.000000)
#define HPF_Fs32000_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs32000_Gain14_A0 3.005936
-#define HPF_Fs32000_Gain14_A1 -2.005936
+#define HPF_Fs32000_Gain14_A1 (-2.005936)
#define HPF_Fs32000_Gain14_A2 0.000000
-#define HPF_Fs32000_Gain14_B1 -0.000000
+#define HPF_Fs32000_Gain14_B1 (-0.000000)
#define HPF_Fs32000_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs32000_Gain15_A0 3.311707
-#define HPF_Fs32000_Gain15_A1 -2.311707
+#define HPF_Fs32000_Gain15_A1 (-2.311707)
#define HPF_Fs32000_Gain15_A2 0.000000
-#define HPF_Fs32000_Gain15_B1 -0.000000
+#define HPF_Fs32000_Gain15_B1 (-0.000000)
#define HPF_Fs32000_Gain15_B2 0.000000
/* Coefficients for sample rate 44100Hz */
/* Gain = 1.000000 dB */
#define HPF_Fs44100_Gain1_A0 1.074364
-#define HPF_Fs44100_Gain1_A1 -0.293257
+#define HPF_Fs44100_Gain1_A1 (-0.293257)
#define HPF_Fs44100_Gain1_A2 0.000000
-#define HPF_Fs44100_Gain1_B1 -0.218894
+#define HPF_Fs44100_Gain1_B1 (-0.218894)
#define HPF_Fs44100_Gain1_B2 0.000000
/* Gain = 2.000000 dB */
#define HPF_Fs44100_Gain2_A0 1.157801
-#define HPF_Fs44100_Gain2_A1 -0.376695
+#define HPF_Fs44100_Gain2_A1 (-0.376695)
#define HPF_Fs44100_Gain2_A2 0.000000
-#define HPF_Fs44100_Gain2_B1 -0.218894
+#define HPF_Fs44100_Gain2_B1 (-0.218894)
#define HPF_Fs44100_Gain2_B2 0.000000
/* Gain = 3.000000 dB */
#define HPF_Fs44100_Gain3_A0 1.251420
-#define HPF_Fs44100_Gain3_A1 -0.470313
+#define HPF_Fs44100_Gain3_A1 (-0.470313)
#define HPF_Fs44100_Gain3_A2 0.000000
-#define HPF_Fs44100_Gain3_B1 -0.218894
+#define HPF_Fs44100_Gain3_B1 (-0.218894)
#define HPF_Fs44100_Gain3_B2 0.000000
/* Gain = 4.000000 dB */
#define HPF_Fs44100_Gain4_A0 1.356461
-#define HPF_Fs44100_Gain4_A1 -0.575355
+#define HPF_Fs44100_Gain4_A1 (-0.575355)
#define HPF_Fs44100_Gain4_A2 0.000000
-#define HPF_Fs44100_Gain4_B1 -0.218894
+#define HPF_Fs44100_Gain4_B1 (-0.218894)
#define HPF_Fs44100_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs44100_Gain5_A0 1.474320
-#define HPF_Fs44100_Gain5_A1 -0.693213
+#define HPF_Fs44100_Gain5_A1 (-0.693213)
#define HPF_Fs44100_Gain5_A2 0.000000
-#define HPF_Fs44100_Gain5_B1 -0.218894
+#define HPF_Fs44100_Gain5_B1 (-0.218894)
#define HPF_Fs44100_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs44100_Gain6_A0 1.606559
-#define HPF_Fs44100_Gain6_A1 -0.825453
+#define HPF_Fs44100_Gain6_A1 (-0.825453)
#define HPF_Fs44100_Gain6_A2 0.000000
-#define HPF_Fs44100_Gain6_B1 -0.218894
+#define HPF_Fs44100_Gain6_B1 (-0.218894)
#define HPF_Fs44100_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs44100_Gain7_A0 1.754935
-#define HPF_Fs44100_Gain7_A1 -0.973828
+#define HPF_Fs44100_Gain7_A1 (-0.973828)
#define HPF_Fs44100_Gain7_A2 0.000000
-#define HPF_Fs44100_Gain7_B1 -0.218894
+#define HPF_Fs44100_Gain7_B1 (-0.218894)
#define HPF_Fs44100_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs44100_Gain8_A0 1.921414
-#define HPF_Fs44100_Gain8_A1 -1.140308
+#define HPF_Fs44100_Gain8_A1 (-1.140308)
#define HPF_Fs44100_Gain8_A2 0.000000
-#define HPF_Fs44100_Gain8_B1 -0.218894
+#define HPF_Fs44100_Gain8_B1 (-0.218894)
#define HPF_Fs44100_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs44100_Gain9_A0 2.108208
-#define HPF_Fs44100_Gain9_A1 -1.327101
+#define HPF_Fs44100_Gain9_A1 (-1.327101)
#define HPF_Fs44100_Gain9_A2 0.000000
-#define HPF_Fs44100_Gain9_B1 -0.218894
+#define HPF_Fs44100_Gain9_B1 (-0.218894)
#define HPF_Fs44100_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs44100_Gain10_A0 2.317793
-#define HPF_Fs44100_Gain10_A1 -1.536687
+#define HPF_Fs44100_Gain10_A1 (-1.536687)
#define HPF_Fs44100_Gain10_A2 0.000000
-#define HPF_Fs44100_Gain10_B1 -0.218894
+#define HPF_Fs44100_Gain10_B1 (-0.218894)
#define HPF_Fs44100_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs44100_Gain11_A0 2.552952
-#define HPF_Fs44100_Gain11_A1 -1.771846
+#define HPF_Fs44100_Gain11_A1 (-1.771846)
#define HPF_Fs44100_Gain11_A2 0.000000
-#define HPF_Fs44100_Gain11_B1 -0.218894
+#define HPF_Fs44100_Gain11_B1 (-0.218894)
#define HPF_Fs44100_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs44100_Gain12_A0 2.816805
-#define HPF_Fs44100_Gain12_A1 -2.035698
+#define HPF_Fs44100_Gain12_A1 (-2.035698)
#define HPF_Fs44100_Gain12_A2 0.000000
-#define HPF_Fs44100_Gain12_B1 -0.218894
+#define HPF_Fs44100_Gain12_B1 (-0.218894)
#define HPF_Fs44100_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs44100_Gain13_A0 3.112852
-#define HPF_Fs44100_Gain13_A1 -2.331746
+#define HPF_Fs44100_Gain13_A1 (-2.331746)
#define HPF_Fs44100_Gain13_A2 0.000000
-#define HPF_Fs44100_Gain13_B1 -0.218894
+#define HPF_Fs44100_Gain13_B1 (-0.218894)
#define HPF_Fs44100_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs44100_Gain14_A0 3.445023
-#define HPF_Fs44100_Gain14_A1 -2.663916
+#define HPF_Fs44100_Gain14_A1 (-2.663916)
#define HPF_Fs44100_Gain14_A2 0.000000
-#define HPF_Fs44100_Gain14_B1 -0.218894
+#define HPF_Fs44100_Gain14_B1 (-0.218894)
#define HPF_Fs44100_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs44100_Gain15_A0 3.817724
-#define HPF_Fs44100_Gain15_A1 -3.036618
+#define HPF_Fs44100_Gain15_A1 (-3.036618)
#define HPF_Fs44100_Gain15_A2 0.000000
-#define HPF_Fs44100_Gain15_B1 -0.218894
+#define HPF_Fs44100_Gain15_B1 (-0.218894)
#define HPF_Fs44100_Gain15_B2 0.000000
/* Coefficients for sample rate 48000Hz */
/* Gain = 1.000000 dB */
#define HPF_Fs48000_Gain1_A0 1.077357
-#define HPF_Fs48000_Gain1_A1 -0.345306
+#define HPF_Fs48000_Gain1_A1 (-0.345306)
#define HPF_Fs48000_Gain1_A2 0.000000
-#define HPF_Fs48000_Gain1_B1 -0.267949
+#define HPF_Fs48000_Gain1_B1 (-0.267949)
#define HPF_Fs48000_Gain1_B2 0.000000
/* Gain = 2.000000 dB */
#define HPF_Fs48000_Gain2_A0 1.164152
-#define HPF_Fs48000_Gain2_A1 -0.432101
+#define HPF_Fs48000_Gain2_A1 (-0.432101)
#define HPF_Fs48000_Gain2_A2 0.000000
-#define HPF_Fs48000_Gain2_B1 -0.267949
+#define HPF_Fs48000_Gain2_B1 (-0.267949)
#define HPF_Fs48000_Gain2_B2 0.000000
/* Gain = 3.000000 dB */
#define HPF_Fs48000_Gain3_A0 1.261538
-#define HPF_Fs48000_Gain3_A1 -0.529488
+#define HPF_Fs48000_Gain3_A1 (-0.529488)
#define HPF_Fs48000_Gain3_A2 0.000000
-#define HPF_Fs48000_Gain3_B1 -0.267949
+#define HPF_Fs48000_Gain3_B1 (-0.267949)
#define HPF_Fs48000_Gain3_B2 0.000000
/* Gain = 4.000000 dB */
#define HPF_Fs48000_Gain4_A0 1.370807
-#define HPF_Fs48000_Gain4_A1 -0.638757
+#define HPF_Fs48000_Gain4_A1 (-0.638757)
#define HPF_Fs48000_Gain4_A2 0.000000
-#define HPF_Fs48000_Gain4_B1 -0.267949
+#define HPF_Fs48000_Gain4_B1 (-0.267949)
#define HPF_Fs48000_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs48000_Gain5_A0 1.493409
-#define HPF_Fs48000_Gain5_A1 -0.761359
+#define HPF_Fs48000_Gain5_A1 (-0.761359)
#define HPF_Fs48000_Gain5_A2 0.000000
-#define HPF_Fs48000_Gain5_B1 -0.267949
+#define HPF_Fs48000_Gain5_B1 (-0.267949)
#define HPF_Fs48000_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs48000_Gain6_A0 1.630971
-#define HPF_Fs48000_Gain6_A1 -0.898920
+#define HPF_Fs48000_Gain6_A1 (-0.898920)
#define HPF_Fs48000_Gain6_A2 0.000000
-#define HPF_Fs48000_Gain6_B1 -0.267949
+#define HPF_Fs48000_Gain6_B1 (-0.267949)
#define HPF_Fs48000_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs48000_Gain7_A0 1.785318
-#define HPF_Fs48000_Gain7_A1 -1.053267
+#define HPF_Fs48000_Gain7_A1 (-1.053267)
#define HPF_Fs48000_Gain7_A2 0.000000
-#define HPF_Fs48000_Gain7_B1 -0.267949
+#define HPF_Fs48000_Gain7_B1 (-0.267949)
#define HPF_Fs48000_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs48000_Gain8_A0 1.958498
-#define HPF_Fs48000_Gain8_A1 -1.226447
+#define HPF_Fs48000_Gain8_A1 (-1.226447)
#define HPF_Fs48000_Gain8_A2 0.000000
-#define HPF_Fs48000_Gain8_B1 -0.267949
+#define HPF_Fs48000_Gain8_B1 (-0.267949)
#define HPF_Fs48000_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs48000_Gain9_A0 2.152809
-#define HPF_Fs48000_Gain9_A1 -1.420758
+#define HPF_Fs48000_Gain9_A1 (-1.420758)
#define HPF_Fs48000_Gain9_A2 0.000000
-#define HPF_Fs48000_Gain9_B1 -0.267949
+#define HPF_Fs48000_Gain9_B1 (-0.267949)
#define HPF_Fs48000_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs48000_Gain10_A0 2.370829
-#define HPF_Fs48000_Gain10_A1 -1.638778
+#define HPF_Fs48000_Gain10_A1 (-1.638778)
#define HPF_Fs48000_Gain10_A2 0.000000
-#define HPF_Fs48000_Gain10_B1 -0.267949
+#define HPF_Fs48000_Gain10_B1 (-0.267949)
#define HPF_Fs48000_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs48000_Gain11_A0 2.615452
-#define HPF_Fs48000_Gain11_A1 -1.883401
+#define HPF_Fs48000_Gain11_A1 (-1.883401)
#define HPF_Fs48000_Gain11_A2 0.000000
-#define HPF_Fs48000_Gain11_B1 -0.267949
+#define HPF_Fs48000_Gain11_B1 (-0.267949)
#define HPF_Fs48000_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs48000_Gain12_A0 2.889924
-#define HPF_Fs48000_Gain12_A1 -2.157873
+#define HPF_Fs48000_Gain12_A1 (-2.157873)
#define HPF_Fs48000_Gain12_A2 0.000000
-#define HPF_Fs48000_Gain12_B1 -0.267949
+#define HPF_Fs48000_Gain12_B1 (-0.267949)
#define HPF_Fs48000_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs48000_Gain13_A0 3.197886
-#define HPF_Fs48000_Gain13_A1 -2.465835
+#define HPF_Fs48000_Gain13_A1 (-2.465835)
#define HPF_Fs48000_Gain13_A2 0.000000
-#define HPF_Fs48000_Gain13_B1 -0.267949
+#define HPF_Fs48000_Gain13_B1 (-0.267949)
#define HPF_Fs48000_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs48000_Gain14_A0 3.543425
-#define HPF_Fs48000_Gain14_A1 -2.811374
+#define HPF_Fs48000_Gain14_A1 (-2.811374)
#define HPF_Fs48000_Gain14_A2 0.000000
-#define HPF_Fs48000_Gain14_B1 -0.267949
+#define HPF_Fs48000_Gain14_B1 (-0.267949)
#define HPF_Fs48000_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs48000_Gain15_A0 3.931127
-#define HPF_Fs48000_Gain15_A1 -3.199076
+#define HPF_Fs48000_Gain15_A1 (-3.199076)
#define HPF_Fs48000_Gain15_A2 0.000000
-#define HPF_Fs48000_Gain15_B1 -0.267949
+#define HPF_Fs48000_Gain15_B1 (-0.267949)
#define HPF_Fs48000_Gain15_B2 0.000000
#ifdef HIGHER_FS
@@ -491,185 +491,185 @@
/* Coefficients for sample rate 96000Hz */
/* Gain = 1.000000 dB */
#define HPF_Fs96000_Gain1_A0 1.096233
-#define HPF_Fs96000_Gain1_A1 -0.673583
+#define HPF_Fs96000_Gain1_A1 (-0.673583)
#define HPF_Fs96000_Gain1_A2 0.000000
-#define HPF_Fs96000_Gain1_B1 -0.577350
+#define HPF_Fs96000_Gain1_B1 (-0.577350)
#define HPF_Fs96000_Gain1_B2 0.000000
/* Gain = 2.000000 dB */
#define HPF_Fs96000_Gain2_A0 1.204208
-#define HPF_Fs96000_Gain2_A1 -0.781558
+#define HPF_Fs96000_Gain2_A1 (-0.781558)
#define HPF_Fs96000_Gain2_A2 0.000000
-#define HPF_Fs96000_Gain2_B1 -0.577350
+#define HPF_Fs96000_Gain2_B1 (-0.577350)
#define HPF_Fs96000_Gain2_B2 0.000000
/* Gain = 3.000000 dB */
#define HPF_Fs96000_Gain3_A0 1.325358
-#define HPF_Fs96000_Gain3_A1 -0.902708
+#define HPF_Fs96000_Gain3_A1 (-0.902708)
#define HPF_Fs96000_Gain3_A2 0.000000
-#define HPF_Fs96000_Gain3_B1 -0.577350
+#define HPF_Fs96000_Gain3_B1 (-0.577350)
#define HPF_Fs96000_Gain3_B2 0.000000
/* Gain = 4.000000 dB */
#define HPF_Fs96000_Gain4_A0 1.461291
-#define HPF_Fs96000_Gain4_A1 -1.038641
+#define HPF_Fs96000_Gain4_A1 (-1.038641)
#define HPF_Fs96000_Gain4_A2 0.000000
-#define HPF_Fs96000_Gain4_B1 -0.577350
+#define HPF_Fs96000_Gain4_B1 (-0.577350)
#define HPF_Fs96000_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs96000_Gain5_A0 1.613810
-#define HPF_Fs96000_Gain5_A1 -1.191160
+#define HPF_Fs96000_Gain5_A1 (-1.191160)
#define HPF_Fs96000_Gain5_A2 0.000000
-#define HPF_Fs96000_Gain5_B1 -0.577350
+#define HPF_Fs96000_Gain5_B1 (-0.577350)
#define HPF_Fs96000_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs96000_Gain6_A0 1.784939
-#define HPF_Fs96000_Gain6_A1 -1.362289
+#define HPF_Fs96000_Gain6_A1 (-1.362289)
#define HPF_Fs96000_Gain6_A2 0.000000
-#define HPF_Fs96000_Gain6_B1 -0.577350
+#define HPF_Fs96000_Gain6_B1 (-0.577350)
#define HPF_Fs96000_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs96000_Gain7_A0 1.976949
-#define HPF_Fs96000_Gain7_A1 -1.554299
+#define HPF_Fs96000_Gain7_A1 (-1.554299)
#define HPF_Fs96000_Gain7_A2 0.000000
-#define HPF_Fs96000_Gain7_B1 -0.577350
+#define HPF_Fs96000_Gain7_B1 (-0.577350)
#define HPF_Fs96000_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs96000_Gain8_A0 2.192387
-#define HPF_Fs96000_Gain8_A1 -1.769738
+#define HPF_Fs96000_Gain8_A1 (-1.769738)
#define HPF_Fs96000_Gain8_A2 0.000000
-#define HPF_Fs96000_Gain8_B1 -0.577350
+#define HPF_Fs96000_Gain8_B1 (-0.577350)
#define HPF_Fs96000_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs96000_Gain9_A0 2.434113
-#define HPF_Fs96000_Gain9_A1 -2.011464
+#define HPF_Fs96000_Gain9_A1 (-2.011464)
#define HPF_Fs96000_Gain9_A2 0.000000
-#define HPF_Fs96000_Gain9_B1 -0.577350
+#define HPF_Fs96000_Gain9_B1 (-0.577350)
#define HPF_Fs96000_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs96000_Gain10_A0 2.705335
-#define HPF_Fs96000_Gain10_A1 -2.282685
+#define HPF_Fs96000_Gain10_A1 (-2.282685)
#define HPF_Fs96000_Gain10_A2 0.000000
-#define HPF_Fs96000_Gain10_B1 -0.577350
+#define HPF_Fs96000_Gain10_B1 (-0.577350)
#define HPF_Fs96000_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs96000_Gain11_A0 3.009650
-#define HPF_Fs96000_Gain11_A1 -2.587000
+#define HPF_Fs96000_Gain11_A1 (-2.587000)
#define HPF_Fs96000_Gain11_A2 0.000000
-#define HPF_Fs96000_Gain11_B1 -0.577350
+#define HPF_Fs96000_Gain11_B1 (-0.577350)
#define HPF_Fs96000_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs96000_Gain12_A0 3.351097
-#define HPF_Fs96000_Gain12_A1 -2.928447
+#define HPF_Fs96000_Gain12_A1 (-2.928447)
#define HPF_Fs96000_Gain12_A2 0.000000
-#define HPF_Fs96000_Gain12_B1 -0.577350
+#define HPF_Fs96000_Gain12_B1 (-0.577350)
#define HPF_Fs96000_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs96000_Gain13_A0 3.734207
-#define HPF_Fs96000_Gain13_A1 -3.311558
+#define HPF_Fs96000_Gain13_A1 (-3.311558)
#define HPF_Fs96000_Gain13_A2 0.000000
-#define HPF_Fs96000_Gain13_B1 -0.577350
+#define HPF_Fs96000_Gain13_B1 (-0.577350)
#define HPF_Fs96000_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs96000_Gain14_A0 4.164064
-#define HPF_Fs96000_Gain14_A1 -3.741414
+#define HPF_Fs96000_Gain14_A1 (-3.741414)
#define HPF_Fs96000_Gain14_A2 0.000000
-#define HPF_Fs96000_Gain14_B1 -0.577350
+#define HPF_Fs96000_Gain14_B1 (-0.577350)
#define HPF_Fs96000_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs96000_Gain15_A0 4.646371
-#define HPF_Fs96000_Gain15_A1 -4.223721
+#define HPF_Fs96000_Gain15_A1 (-4.223721)
#define HPF_Fs96000_Gain15_A2 0.000000
-#define HPF_Fs96000_Gain15_B1 -0.577350
+#define HPF_Fs96000_Gain15_B1 (-0.577350)
#define HPF_Fs96000_Gain15_B2 0.000000
/* Coefficients for sample rate 192000Hz */
/* Gain = 1.000000 dB */
#define HPF_Fs192000_Gain1_A0 1.107823
-#define HPF_Fs192000_Gain1_A1 -0.875150
+#define HPF_Fs192000_Gain1_A1 (-0.875150)
#define HPF_Fs192000_Gain1_A2 0.000000
-#define HPF_Fs192000_Gain1_B1 -0.767327
+#define HPF_Fs192000_Gain1_B1 (-0.767327)
#define HPF_Fs192000_Gain1_B2 0.000000
/* Gain = 2.000000 dB */
#define HPF_Fs192000_Gain2_A0 1.228803
-#define HPF_Fs192000_Gain2_A1 -0.996130
+#define HPF_Fs192000_Gain2_A1 (-0.996130)
#define HPF_Fs192000_Gain2_A2 0.000000
-#define HPF_Fs192000_Gain2_B1 -0.767327
+#define HPF_Fs192000_Gain2_B1 (-0.767327)
#define HPF_Fs192000_Gain2_B2 0.000000
/* Gain = 3.000000 dB */
#define HPF_Fs192000_Gain3_A0 1.364544
-#define HPF_Fs192000_Gain3_A1 -1.131871
+#define HPF_Fs192000_Gain3_A1 (-1.131871)
#define HPF_Fs192000_Gain3_A2 0.000000
-#define HPF_Fs192000_Gain3_B1 -0.767327
+#define HPF_Fs192000_Gain3_B1 (-0.767327)
#define HPF_Fs192000_Gain3_B2 0.000000
/* Gain = 4.000000 dB */
#define HPF_Fs192000_Gain4_A0 1.516849
-#define HPF_Fs192000_Gain4_A1 -1.284176
+#define HPF_Fs192000_Gain4_A1 (-1.284176)
#define HPF_Fs192000_Gain4_A2 0.000000
-#define HPF_Fs192000_Gain4_B1 -0.767327
+#define HPF_Fs192000_Gain4_B1 (-0.767327)
#define HPF_Fs192000_Gain4_B2 0.000000
/* Gain = 5.000000 dB */
#define HPF_Fs192000_Gain5_A0 1.687737
-#define HPF_Fs192000_Gain5_A1 -1.455064
+#define HPF_Fs192000_Gain5_A1 (-1.455064)
#define HPF_Fs192000_Gain5_A2 0.000000
-#define HPF_Fs192000_Gain5_B1 -0.767327
+#define HPF_Fs192000_Gain5_B1 (-0.767327)
#define HPF_Fs192000_Gain5_B2 0.000000
/* Gain = 6.000000 dB */
#define HPF_Fs192000_Gain6_A0 1.879477
-#define HPF_Fs192000_Gain6_A1 -1.646804
+#define HPF_Fs192000_Gain6_A1 (-1.646804)
#define HPF_Fs192000_Gain6_A2 0.000000
-#define HPF_Fs192000_Gain6_B1 -0.767327
+#define HPF_Fs192000_Gain6_B1 (-0.767327)
#define HPF_Fs192000_Gain6_B2 0.000000
/* Gain = 7.000000 dB */
#define HPF_Fs192000_Gain7_A0 2.094613
-#define HPF_Fs192000_Gain7_A1 -1.861940
+#define HPF_Fs192000_Gain7_A1 (-1.861940)
#define HPF_Fs192000_Gain7_A2 0.000000
-#define HPF_Fs192000_Gain7_B1 -0.767327
+#define HPF_Fs192000_Gain7_B1 (-0.767327)
#define HPF_Fs192000_Gain7_B2 0.000000
/* Gain = 8.000000 dB */
#define HPF_Fs192000_Gain8_A0 2.335999
-#define HPF_Fs192000_Gain8_A1 -2.103326
+#define HPF_Fs192000_Gain8_A1 (-2.103326)
#define HPF_Fs192000_Gain8_A2 0.000000
-#define HPF_Fs192000_Gain8_B1 -0.767327
+#define HPF_Fs192000_Gain8_B1 (-0.767327)
#define HPF_Fs192000_Gain8_B2 0.000000
/* Gain = 9.000000 dB */
#define HPF_Fs192000_Gain9_A0 2.606839
-#define HPF_Fs192000_Gain9_A1 -2.374166
+#define HPF_Fs192000_Gain9_A1 (-2.374166)
#define HPF_Fs192000_Gain9_A2 0.000000
-#define HPF_Fs192000_Gain9_B1 -0.767327
+#define HPF_Fs192000_Gain9_B1 (-0.767327)
#define HPF_Fs192000_Gain9_B2 0.000000
/* Gain = 10.000000 dB */
#define HPF_Fs192000_Gain10_A0 2.910726
-#define HPF_Fs192000_Gain10_A1 -2.678053
+#define HPF_Fs192000_Gain10_A1 (-2.678053)
#define HPF_Fs192000_Gain10_A2 0.000000
-#define HPF_Fs192000_Gain10_B1 -0.767327
+#define HPF_Fs192000_Gain10_B1 (-0.767327)
#define HPF_Fs192000_Gain10_B2 0.000000
/* Gain = 11.000000 dB */
#define HPF_Fs192000_Gain11_A0 3.251693
-#define HPF_Fs192000_Gain11_A1 -3.019020
+#define HPF_Fs192000_Gain11_A1 (-3.019020)
#define HPF_Fs192000_Gain11_A2 0.000000
-#define HPF_Fs192000_Gain11_B1 -0.767327
+#define HPF_Fs192000_Gain11_B1 (-0.767327)
#define HPF_Fs192000_Gain11_B2 0.000000
/* Gain = 12.000000 dB */
#define HPF_Fs192000_Gain12_A0 3.634264
-#define HPF_Fs192000_Gain12_A1 -3.401591
+#define HPF_Fs192000_Gain12_A1 (-3.401591)
#define HPF_Fs192000_Gain12_A2 0.000000
-#define HPF_Fs192000_Gain12_B1 -0.767327
+#define HPF_Fs192000_Gain12_B1 (-0.767327)
#define HPF_Fs192000_Gain12_B2 0.000000
/* Gain = 13.000000 dB */
#define HPF_Fs192000_Gain13_A0 4.063516
-#define HPF_Fs192000_Gain13_A1 -3.830843
+#define HPF_Fs192000_Gain13_A1 (-3.830843)
#define HPF_Fs192000_Gain13_A2 0.000000
-#define HPF_Fs192000_Gain13_B1 -0.767327
+#define HPF_Fs192000_Gain13_B1 (-0.767327)
#define HPF_Fs192000_Gain13_B2 0.000000
/* Gain = 14.000000 dB */
#define HPF_Fs192000_Gain14_A0 4.545145
-#define HPF_Fs192000_Gain14_A1 -4.312472
+#define HPF_Fs192000_Gain14_A1 (-4.312472)
#define HPF_Fs192000_Gain14_A2 0.000000
-#define HPF_Fs192000_Gain14_B1 -0.767327
+#define HPF_Fs192000_Gain14_B1 (-0.767327)
#define HPF_Fs192000_Gain14_B2 0.000000
/* Gain = 15.000000 dB */
#define HPF_Fs192000_Gain15_A0 5.085542
-#define HPF_Fs192000_Gain15_A1 -4.852868
+#define HPF_Fs192000_Gain15_A1 (-4.852868)
#define HPF_Fs192000_Gain15_A2 0.000000
-#define HPF_Fs192000_Gain15_B1 -0.767327
+#define HPF_Fs192000_Gain15_B1 (-0.767327)
#define HPF_Fs192000_Gain15_B2 0.000000
#endif
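
Note on the pattern above: wrapping each negative literal in parentheses does not change any coefficient value; for a bare literal the expansion is already unambiguous. The motivation is macro hygiene, likely to satisfy a macro-parentheses lint rule (for example clang-tidy's bugprone-macro-parentheses), which flags any unparenthesized replacement list because expression-like macros genuinely can change meaning at the use site. A minimal illustrative sketch, with made-up macro names that are not part of the LVM headers:

#include <cstdio>

// Illustrative macros only; they are not part of the LVM headers.
#define EQ_BOOST_BAD   base_gain + 0.5f     // unparenthesized replacement list
#define EQ_BOOST_GOOD  (base_gain + 0.5f)   // parenthesized, as the lint requires

int main() {
    float base_gain = 1.0f;
    float bad  = 2.0f * EQ_BOOST_BAD;   // expands to 2.0f * base_gain + 0.5f == 2.5f
    float good = 2.0f * EQ_BOOST_GOOD;  // expands to 2.0f * (base_gain + 0.5f) == 3.0f
    std::printf("bad=%f good=%f\n", bad, good);
    // A bare negative such as -0.218894 expands safely either way, but writing
    // it as (-0.218894) keeps every define in the header consistent with the rule.
    return 0;
}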
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
index f0deb6c..42ea46f 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
@@ -26,21 +26,21 @@
/* */
/************************************************************************************/
#ifdef BUILD_FLOAT
-#define LVEQNB_Gain_Neg15_dB -0.822172f
-#define LVEQNB_Gain_Neg14_dB -0.800474f
-#define LVEQNB_Gain_Neg13_dB -0.776128f
-#define LVEQNB_Gain_Neg12_dB -0.748811f
-#define LVEQNB_Gain_Neg11_dB -0.718162f
-#define LVEQNB_Gain_Neg10_dB -0.683772f
-#define LVEQNB_Gain_Neg9_dB -0.645187f
-#define LVEQNB_Gain_Neg8_dB -0.601893f
-#define LVEQNB_Gain_Neg7_dB -0.553316f
-#define LVEQNB_Gain_Neg6_dB -0.498813f
-#define LVEQNB_Gain_Neg5_dB -0.437659f
-#define LVEQNB_Gain_Neg4_dB -0.369043f
-#define LVEQNB_Gain_Neg3_dB -0.292054f
-#define LVEQNB_Gain_Neg2_dB -0.205672f
-#define LVEQNB_Gain_Neg1_dB -0.108749f
+#define LVEQNB_Gain_Neg15_dB (-0.822172f)
+#define LVEQNB_Gain_Neg14_dB (-0.800474f)
+#define LVEQNB_Gain_Neg13_dB (-0.776128f)
+#define LVEQNB_Gain_Neg12_dB (-0.748811f)
+#define LVEQNB_Gain_Neg11_dB (-0.718162f)
+#define LVEQNB_Gain_Neg10_dB (-0.683772f)
+#define LVEQNB_Gain_Neg9_dB (-0.645187f)
+#define LVEQNB_Gain_Neg8_dB (-0.601893f)
+#define LVEQNB_Gain_Neg7_dB (-0.553316f)
+#define LVEQNB_Gain_Neg6_dB (-0.498813f)
+#define LVEQNB_Gain_Neg5_dB (-0.437659f)
+#define LVEQNB_Gain_Neg4_dB (-0.369043f)
+#define LVEQNB_Gain_Neg3_dB (-0.292054f)
+#define LVEQNB_Gain_Neg2_dB (-0.205672f)
+#define LVEQNB_Gain_Neg1_dB (-0.108749f)
#define LVEQNB_Gain_0_dB 0.000000f
#define LVEQNB_Gain_1_dB 0.122018f
#define LVEQNB_Gain_2_dB 0.258925f
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index 4f5221a..0c2fe53 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -27,127 +27,127 @@
#ifdef BUILD_FLOAT
/* Stereo Enhancer coefficients for 8000 Hz sample rate, scaled with 0.161258 */
#define CS_MIDDLE_8000_A0 0.227720
-#define CS_MIDDLE_8000_A1 -0.215125
+#define CS_MIDDLE_8000_A1 (-0.215125)
#define CS_MIDDLE_8000_A2 0.000000
-#define CS_MIDDLE_8000_B1 -0.921899
+#define CS_MIDDLE_8000_B1 (-0.921899)
#define CS_MIDDLE_8000_B2 0.000000
#define CS_MIDDLE_8000_SCALE 15
#define CS_SIDE_8000_A0 0.611441
-#define CS_SIDE_8000_A1 -0.380344
-#define CS_SIDE_8000_A2 -0.231097
-#define CS_SIDE_8000_B1 -0.622470
-#define CS_SIDE_8000_B2 -0.130759
+#define CS_SIDE_8000_A1 (-0.380344)
+#define CS_SIDE_8000_A2 (-0.231097)
+#define CS_SIDE_8000_B1 (-0.622470)
+#define CS_SIDE_8000_B2 (-0.130759)
#define CS_SIDE_8000_SCALE 15
/* Stereo Enhancer coefficients for 11025Hz sample rate, scaled with 0.162943 */
#define CS_MIDDLE_11025_A0 0.230838
-#define CS_MIDDLE_11025_A1 -0.221559
+#define CS_MIDDLE_11025_A1 (-0.221559)
#define CS_MIDDLE_11025_A2 0.000000
-#define CS_MIDDLE_11025_B1 -0.943056
+#define CS_MIDDLE_11025_B1 (-0.943056)
#define CS_MIDDLE_11025_B2 0.000000
#define CS_MIDDLE_11025_SCALE 15
#define CS_SIDE_11025_A0 0.557372
-#define CS_SIDE_11025_A1 -0.391490
-#define CS_SIDE_11025_A2 -0.165881
-#define CS_SIDE_11025_B1 -0.880608
+#define CS_SIDE_11025_A1 (-0.391490)
+#define CS_SIDE_11025_A2 (-0.165881)
+#define CS_SIDE_11025_B1 (-0.880608)
#define CS_SIDE_11025_B2 0.032397
#define CS_SIDE_11025_SCALE 15
/* Stereo Enhancer coefficients for 12000Hz sample rate, scaled with 0.162191 */
#define CS_MIDDLE_12000_A0 0.229932
-#define CS_MIDDLE_12000_A1 -0.221436
+#define CS_MIDDLE_12000_A1 (-0.221436)
#define CS_MIDDLE_12000_A2 0.000000
-#define CS_MIDDLE_12000_B1 -0.947616
+#define CS_MIDDLE_12000_B1 (-0.947616)
#define CS_MIDDLE_12000_B2 0.000000
#define CS_MIDDLE_12000_SCALE 15
#define CS_SIDE_12000_A0 0.558398
-#define CS_SIDE_12000_A1 -0.392211
-#define CS_SIDE_12000_A2 -0.166187
-#define CS_SIDE_12000_B1 -0.892550
+#define CS_SIDE_12000_A1 (-0.392211)
+#define CS_SIDE_12000_A2 (-0.166187)
+#define CS_SIDE_12000_B1 (-0.892550)
#define CS_SIDE_12000_B2 0.032856
#define CS_SIDE_12000_SCALE 15
/* Stereo Enhancer coefficients for 16000Hz sample rate, scaled with 0.162371 */
#define CS_MIDDLE_16000_A0 0.230638
-#define CS_MIDDLE_16000_A1 -0.224232
+#define CS_MIDDLE_16000_A1 (-0.224232)
#define CS_MIDDLE_16000_A2 0.000000
-#define CS_MIDDLE_16000_B1 -0.960550
+#define CS_MIDDLE_16000_B1 (-0.960550)
#define CS_MIDDLE_16000_B2 0.000000
#define CS_MIDDLE_16000_SCALE 15
#define CS_SIDE_16000_A0 0.499695
-#define CS_SIDE_16000_A1 -0.355543
-#define CS_SIDE_16000_A2 -0.144152
-#define CS_SIDE_16000_B1 -1.050788
+#define CS_SIDE_16000_A1 (-0.355543)
+#define CS_SIDE_16000_A2 (-0.144152)
+#define CS_SIDE_16000_B1 (-1.050788)
#define CS_SIDE_16000_B2 0.144104
#define CS_SIDE_16000_SCALE 14
/* Stereo Enhancer coefficients for 22050Hz sample rate, scaled with 0.160781 */
#define CS_MIDDLE_22050_A0 0.228749
-#define CS_MIDDLE_22050_A1 -0.224128
+#define CS_MIDDLE_22050_A1 (-0.224128)
#define CS_MIDDLE_22050_A2 0.000000
-#define CS_MIDDLE_22050_B1 -0.971262
+#define CS_MIDDLE_22050_B1 (-0.971262)
#define CS_MIDDLE_22050_B2 0.000000
#define CS_MIDDLE_22050_SCALE 15
#define CS_SIDE_22050_A0 0.440112
-#define CS_SIDE_22050_A1 -0.261096
-#define CS_SIDE_22050_A2 -0.179016
-#define CS_SIDE_22050_B1 -1.116786
+#define CS_SIDE_22050_A1 (-0.261096)
+#define CS_SIDE_22050_A2 (-0.179016)
+#define CS_SIDE_22050_B1 (-1.116786)
#define CS_SIDE_22050_B2 0.182507
#define CS_SIDE_22050_SCALE 14
/* Stereo Enhancer coefficients for 24000Hz sample rate, scaled with 0.161882 */
#define CS_MIDDLE_24000_A0 0.230395
-#define CS_MIDDLE_24000_A1 -0.226117
+#define CS_MIDDLE_24000_A1 (-0.226117)
#define CS_MIDDLE_24000_A2 0.000000
-#define CS_MIDDLE_24000_B1 -0.973573
+#define CS_MIDDLE_24000_B1 (-0.973573)
#define CS_MIDDLE_24000_B2 0.000000
#define CS_MIDDLE_24000_SCALE 15
#define CS_SIDE_24000_A0 0.414770
-#define CS_SIDE_24000_A1 -0.287182
-#define CS_SIDE_24000_A2 -0.127588
-#define CS_SIDE_24000_B1 -1.229648
+#define CS_SIDE_24000_A1 (-0.287182)
+#define CS_SIDE_24000_A2 (-0.127588)
+#define CS_SIDE_24000_B1 (-1.229648)
#define CS_SIDE_24000_B2 0.282177
#define CS_SIDE_24000_SCALE 14
/* Stereo Enhancer coefficients for 32000Hz sample rate, scaled with 0.160322 */
#define CS_MIDDLE_32000_A0 0.228400
-#define CS_MIDDLE_32000_A1 -0.225214
+#define CS_MIDDLE_32000_A1 (-0.225214)
#define CS_MIDDLE_32000_A2 0.000000
-#define CS_MIDDLE_32000_B1 -0.980126
+#define CS_MIDDLE_32000_B1 (-0.980126)
#define CS_MIDDLE_32000_B2 0.000000
#define CS_MIDDLE_32000_SCALE 15
#define CS_SIDE_32000_A0 0.364579
-#define CS_SIDE_32000_A1 -0.207355
-#define CS_SIDE_32000_A2 -0.157224
-#define CS_SIDE_32000_B1 -1.274231
+#define CS_SIDE_32000_A1 (-0.207355)
+#define CS_SIDE_32000_A2 (-0.157224)
+#define CS_SIDE_32000_B1 (-1.274231)
#define CS_SIDE_32000_B2 0.312495
#define CS_SIDE_32000_SCALE 14
/* Stereo Enhancer coefficients for 44100Hz sample rate, scaled with 0.163834 */
#define CS_MIDDLE_44100_A0 0.233593
-#define CS_MIDDLE_44100_A1 -0.231225
+#define CS_MIDDLE_44100_A1 (-0.231225)
#define CS_MIDDLE_44100_A2 0.000000
-#define CS_MIDDLE_44100_B1 -0.985545
+#define CS_MIDDLE_44100_B1 (-0.985545)
#define CS_MIDDLE_44100_B2 0.000000
#define CS_MIDDLE_44100_SCALE 15
#define CS_SIDE_44100_A0 0.284573
-#define CS_SIDE_44100_A1 -0.258910
-#define CS_SIDE_44100_A2 -0.025662
-#define CS_SIDE_44100_B1 -1.572248
+#define CS_SIDE_44100_A1 (-0.258910)
+#define CS_SIDE_44100_A2 (-0.025662)
+#define CS_SIDE_44100_B1 (-1.572248)
#define CS_SIDE_44100_B2 0.588399
#define CS_SIDE_44100_SCALE 14
/* Stereo Enhancer coefficients for 48000Hz sample rate, scaled with 0.164402 */
#define CS_MIDDLE_48000_A0 0.234445
-#define CS_MIDDLE_48000_A1 -0.232261
+#define CS_MIDDLE_48000_A1 (-0.232261)
#define CS_MIDDLE_48000_A2 0.000000
-#define CS_MIDDLE_48000_B1 -0.986713
+#define CS_MIDDLE_48000_B1 (-0.986713)
#define CS_MIDDLE_48000_B2 0.000000
#define CS_MIDDLE_48000_SCALE 15
#define CS_SIDE_48000_A0 0.272606
-#define CS_SIDE_48000_A1 -0.266952
-#define CS_SIDE_48000_A2 -0.005654
-#define CS_SIDE_48000_B1 -1.617141
+#define CS_SIDE_48000_A1 (-0.266952)
+#define CS_SIDE_48000_A2 (-0.005654)
+#define CS_SIDE_48000_B1 (-1.617141)
#define CS_SIDE_48000_B2 0.630405
#define CS_SIDE_48000_SCALE 14
@@ -155,31 +155,31 @@
/* Stereo Enhancer coefficients for 96000Hz sample rate, scaled with 0.165*/
/* high pass filter with cutoff frequency 102.18 Hz*/
#define CS_MIDDLE_96000_A0 0.235532
-#define CS_MIDDLE_96000_A1 -0.234432
+#define CS_MIDDLE_96000_A1 (-0.234432)
#define CS_MIDDLE_96000_A2 0.000000
-#define CS_MIDDLE_96000_B1 -0.993334
+#define CS_MIDDLE_96000_B1 (-0.993334)
#define CS_MIDDLE_96000_B2 0.000000
#define CS_MIDDLE_96000_SCALE 15
/* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
#define CS_SIDE_96000_A0 0.016727
#define CS_SIDE_96000_A1 0.000000
-#define CS_SIDE_96000_A2 -0.016727
-#define CS_SIDE_96000_B1 -1.793372
+#define CS_SIDE_96000_A2 (-0.016727)
+#define CS_SIDE_96000_B1 (-1.793372)
#define CS_SIDE_96000_B2 0.797236
#define CS_SIDE_96000_SCALE 14
/* Stereo Enhancer coefficients for 192000Hz sample rate, scaled with 0.1689*/
#define CS_MIDDLE_192000_A0 0.241219
-#define CS_MIDDLE_192000_A1 -0.240656
+#define CS_MIDDLE_192000_A1 (-0.240656)
#define CS_MIDDLE_192000_A2 0.000000
-#define CS_MIDDLE_192000_B1 -0.996661
+#define CS_MIDDLE_192000_B1 (-0.996661)
#define CS_MIDDLE_192000_B2 0.000000
#define CS_MIDDLE_192000_SCALE 15
/* bandpass filter with fc1 270 and fc2 3703, designed using 2nd order butterworth */
#define CS_SIDE_192000_A0 0.008991
-#define CS_SIDE_192000_A1 -0.000000
-#define CS_SIDE_192000_A2 -0.008991
-#define CS_SIDE_192000_B1 -1.892509
+#define CS_SIDE_192000_A1 (-0.000000)
+#define CS_SIDE_192000_A2 (-0.008991)
+#define CS_SIDE_192000_B1 (-1.892509)
#define CS_SIDE_192000_B2 0.893524
#define CS_SIDE_192000_SCALE 14
#endif
@@ -203,74 +203,74 @@
/* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_8000_A0 0.667271
-#define CS_REVERB_8000_A1 -0.667271
+#define CS_REVERB_8000_A1 (-0.667271)
#define CS_REVERB_8000_A2 0.000000
-#define CS_REVERB_8000_B1 -0.668179
+#define CS_REVERB_8000_B1 (-0.668179)
#define CS_REVERB_8000_B2 0.000000
#define CS_REVERB_8000_SCALE 15
/* Reverb coefficients for 11025Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_11025_A0 0.699638
-#define CS_REVERB_11025_A1 -0.699638
+#define CS_REVERB_11025_A1 (-0.699638)
#define CS_REVERB_11025_A2 0.000000
-#define CS_REVERB_11025_B1 -0.749096
+#define CS_REVERB_11025_B1 (-0.749096)
#define CS_REVERB_11025_B2 0.000000
#define CS_REVERB_11025_SCALE 15
/* Reverb coefficients for 12000Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_12000_A0 0.706931
-#define CS_REVERB_12000_A1 -0.706931
+#define CS_REVERB_12000_A1 (-0.706931)
#define CS_REVERB_12000_A2 0.000000
-#define CS_REVERB_12000_B1 -0.767327
+#define CS_REVERB_12000_B1 (-0.767327)
#define CS_REVERB_12000_B2 0.000000
#define CS_REVERB_12000_SCALE 15
/* Reverb coefficients for 16000Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_16000_A0 0.728272
-#define CS_REVERB_16000_A1 -0.728272
+#define CS_REVERB_16000_A1 (-0.728272)
#define CS_REVERB_16000_A2 0.000000
-#define CS_REVERB_16000_B1 -0.820679
+#define CS_REVERB_16000_B1 (-0.820679)
#define CS_REVERB_16000_B2 0.000000
#define CS_REVERB_16000_SCALE 15
/* Reverb coefficients for 22050Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_22050_A0 0.516396
#define CS_REVERB_22050_A1 0.000000
-#define CS_REVERB_22050_A2 -0.516396
-#define CS_REVERB_22050_B1 -0.518512
-#define CS_REVERB_22050_B2 -0.290990
+#define CS_REVERB_22050_A2 (-0.516396)
+#define CS_REVERB_22050_B1 (-0.518512)
+#define CS_REVERB_22050_B2 (-0.290990)
#define CS_REVERB_22050_SCALE 15
/* Reverb coefficients for 24000Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_24000_A0 0.479565
#define CS_REVERB_24000_A1 0.000000
-#define CS_REVERB_24000_A2 -0.479565
-#define CS_REVERB_24000_B1 -0.637745
-#define CS_REVERB_24000_B2 -0.198912
+#define CS_REVERB_24000_A2 (-0.479565)
+#define CS_REVERB_24000_B1 (-0.637745)
+#define CS_REVERB_24000_B2 (-0.198912)
#define CS_REVERB_24000_SCALE 15
/* Reverb coefficients for 32000Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_32000_A0 0.380349
#define CS_REVERB_32000_A1 0.000000
-#define CS_REVERB_32000_A2 -0.380349
-#define CS_REVERB_32000_B1 -0.950873
+#define CS_REVERB_32000_A2 (-0.380349)
+#define CS_REVERB_32000_B1 (-0.950873)
#define CS_REVERB_32000_B2 0.049127
#define CS_REVERB_32000_SCALE 15
/* Reverb coefficients for 44100Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_44100_A0 0.297389
#define CS_REVERB_44100_A1 0.000000
-#define CS_REVERB_44100_A2 -0.297389
-#define CS_REVERB_44100_B1 -1.200423
+#define CS_REVERB_44100_A2 (-0.297389)
+#define CS_REVERB_44100_B1 (-1.200423)
#define CS_REVERB_44100_B2 0.256529
#define CS_REVERB_44100_SCALE 14
/* Reverb coefficients for 48000Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_48000_A0 0.278661
#define CS_REVERB_48000_A1 0.000000
-#define CS_REVERB_48000_A2 -0.278661
-#define CS_REVERB_48000_B1 -1.254993
+#define CS_REVERB_48000_A2 (-0.278661)
+#define CS_REVERB_48000_B1 (-1.254993)
#define CS_REVERB_48000_B2 0.303347
#define CS_REVERB_48000_SCALE 14
@@ -279,8 +279,8 @@
/* Band pass filter with fc1=500 and fc2=8000*/
#define CS_REVERB_96000_A0 0.1602488
#define CS_REVERB_96000_A1 0.000000
-#define CS_REVERB_96000_A2 -0.1602488
-#define CS_REVERB_96000_B1 -1.585413
+#define CS_REVERB_96000_A2 (-0.1602488)
+#define CS_REVERB_96000_B1 (-1.585413)
#define CS_REVERB_96000_B2 0.599377
#define CS_REVERB_96000_SCALE 14
@@ -288,8 +288,8 @@
/* Band pass filter with fc1=500 and fc2=8000*/
#define CS_REVERB_192000_A0 0.0878369
#define CS_REVERB_192000_A1 0.000000
-#define CS_REVERB_192000_A2 -0.0878369
-#define CS_REVERB_192000_B1 -1.7765764
+#define CS_REVERB_192000_A2 (-0.0878369)
+#define CS_REVERB_192000_B1 (-1.7765764)
#define CS_REVERB_192000_B2 0.7804076
#define CS_REVERB_192000_SCALE 14
@@ -312,163 +312,163 @@
/* Equaliser coefficients for 8000 Hz sample rate, \
CS scaled with 1.038497 and CSEX scaled with 0.775480 */
#define CS_EQUALISER_8000_A0 1.263312
-#define CS_EQUALISER_8000_A1 -0.601748
-#define CS_EQUALISER_8000_A2 -0.280681
-#define CS_EQUALISER_8000_B1 -0.475865
-#define CS_EQUALISER_8000_B2 -0.408154
+#define CS_EQUALISER_8000_A1 (-0.601748)
+#define CS_EQUALISER_8000_A2 (-0.280681)
+#define CS_EQUALISER_8000_B1 (-0.475865)
+#define CS_EQUALISER_8000_B2 (-0.408154)
#define CS_EQUALISER_8000_SCALE 14
#define CSEX_EQUALISER_8000_A0 0.943357
-#define CSEX_EQUALISER_8000_A1 -0.449345
-#define CSEX_EQUALISER_8000_A2 -0.209594
-#define CSEX_EQUALISER_8000_B1 -0.475865
-#define CSEX_EQUALISER_8000_B2 -0.408154
+#define CSEX_EQUALISER_8000_A1 (-0.449345)
+#define CSEX_EQUALISER_8000_A2 (-0.209594)
+#define CSEX_EQUALISER_8000_B1 (-0.475865)
+#define CSEX_EQUALISER_8000_B2 (-0.408154)
#define CSEX_EQUALISER_8000_SCALE 15
/* Equaliser coefficients for 11025Hz sample rate, \
CS scaled with 1.027761 and CSEX scaled with 0.767463 */
#define CS_EQUALISER_11025_A0 1.101145
#define CS_EQUALISER_11025_A1 0.139020
-#define CS_EQUALISER_11025_A2 -0.864423
+#define CS_EQUALISER_11025_A2 (-0.864423)
#define CS_EQUALISER_11025_B1 0.024541
-#define CS_EQUALISER_11025_B2 -0.908930
+#define CS_EQUALISER_11025_B2 (-0.908930)
#define CS_EQUALISER_11025_SCALE 14
#define CSEX_EQUALISER_11025_A0 0.976058
-#define CSEX_EQUALISER_11025_A1 -0.695326
-#define CSEX_EQUALISER_11025_A2 -0.090809
-#define CSEX_EQUALISER_11025_B1 -0.610594
-#define CSEX_EQUALISER_11025_B2 -0.311149
+#define CSEX_EQUALISER_11025_A1 (-0.695326)
+#define CSEX_EQUALISER_11025_A2 (-0.090809)
+#define CSEX_EQUALISER_11025_B1 (-0.610594)
+#define CSEX_EQUALISER_11025_B2 (-0.311149)
#define CSEX_EQUALISER_11025_SCALE 15
/* Equaliser coefficients for 12000Hz sample rate, \
CS scaled with 1.032521 and CSEX scaled with 0.771017 */
#define CS_EQUALISER_12000_A0 1.276661
-#define CS_EQUALISER_12000_A1 -1.017519
-#define CS_EQUALISER_12000_A2 -0.044128
-#define CS_EQUALISER_12000_B1 -0.729616
-#define CS_EQUALISER_12000_B2 -0.204532
+#define CS_EQUALISER_12000_A1 (-1.017519)
+#define CS_EQUALISER_12000_A2 (-0.044128)
+#define CS_EQUALISER_12000_B1 (-0.729616)
+#define CS_EQUALISER_12000_B2 (-0.204532)
#define CS_EQUALISER_12000_SCALE 14
#define CSEX_EQUALISER_12000_A0 1.007095
-#define CSEX_EQUALISER_12000_A1 -0.871912
+#define CSEX_EQUALISER_12000_A1 (-0.871912)
#define CSEX_EQUALISER_12000_A2 0.023232
-#define CSEX_EQUALISER_12000_B1 -0.745857
-#define CSEX_EQUALISER_12000_B2 -0.189171
+#define CSEX_EQUALISER_12000_B1 (-0.745857)
+#define CSEX_EQUALISER_12000_B2 (-0.189171)
#define CSEX_EQUALISER_12000_SCALE 14
/* Equaliser coefficients for 16000Hz sample rate, \
CS scaled with 1.031378 and CSEX scaled with 0.770164 */
#define CS_EQUALISER_16000_A0 1.281629
-#define CS_EQUALISER_16000_A1 -1.075872
-#define CS_EQUALISER_16000_A2 -0.041365
-#define CS_EQUALISER_16000_B1 -0.725239
-#define CS_EQUALISER_16000_B2 -0.224358
+#define CS_EQUALISER_16000_A1 (-1.075872)
+#define CS_EQUALISER_16000_A2 (-0.041365)
+#define CS_EQUALISER_16000_B1 (-0.725239)
+#define CS_EQUALISER_16000_B2 (-0.224358)
#define CS_EQUALISER_16000_SCALE 14
#define CSEX_EQUALISER_16000_A0 1.081091
-#define CSEX_EQUALISER_16000_A1 -0.867183
-#define CSEX_EQUALISER_16000_A2 -0.070247
-#define CSEX_EQUALISER_16000_B1 -0.515121
-#define CSEX_EQUALISER_16000_B2 -0.425893
+#define CSEX_EQUALISER_16000_A1 (-0.867183)
+#define CSEX_EQUALISER_16000_A2 (-0.070247)
+#define CSEX_EQUALISER_16000_B1 (-0.515121)
+#define CSEX_EQUALISER_16000_B2 (-0.425893)
#define CSEX_EQUALISER_16000_SCALE 14
/* Equaliser coefficients for 22050Hz sample rate, \
CS scaled with 1.041576 and CSEX scaled with 0.777779 */
#define CS_EQUALISER_22050_A0 1.388605
-#define CS_EQUALISER_22050_A1 -1.305799
+#define CS_EQUALISER_22050_A1 (-1.305799)
#define CS_EQUALISER_22050_A2 0.039922
-#define CS_EQUALISER_22050_B1 -0.719494
-#define CS_EQUALISER_22050_B2 -0.243245
+#define CS_EQUALISER_22050_B1 (-0.719494)
+#define CS_EQUALISER_22050_B2 (-0.243245)
#define CS_EQUALISER_22050_SCALE 14
#define CSEX_EQUALISER_22050_A0 1.272910
-#define CSEX_EQUALISER_22050_A1 -1.341014
+#define CSEX_EQUALISER_22050_A1 (-1.341014)
#define CSEX_EQUALISER_22050_A2 0.167462
-#define CSEX_EQUALISER_22050_B1 -0.614219
-#define CSEX_EQUALISER_22050_B2 -0.345384
+#define CSEX_EQUALISER_22050_B1 (-0.614219)
+#define CSEX_EQUALISER_22050_B2 (-0.345384)
#define CSEX_EQUALISER_22050_SCALE 14
/* Equaliser coefficients for 24000Hz sample rate, \
CS scaled with 1.034495 and CSEX scaled with 0.772491 */
#define CS_EQUALISER_24000_A0 1.409832
-#define CS_EQUALISER_24000_A1 -1.456506
+#define CS_EQUALISER_24000_A1 (-1.456506)
#define CS_EQUALISER_24000_A2 0.151410
-#define CS_EQUALISER_24000_B1 -0.804201
-#define CS_EQUALISER_24000_B2 -0.163783
+#define CS_EQUALISER_24000_B1 (-0.804201)
+#define CS_EQUALISER_24000_B2 (-0.163783)
#define CS_EQUALISER_24000_SCALE 14
#define CSEX_EQUALISER_24000_A0 1.299198
-#define CSEX_EQUALISER_24000_A1 -1.452447
+#define CSEX_EQUALISER_24000_A1 (-1.452447)
#define CSEX_EQUALISER_24000_A2 0.240489
-#define CSEX_EQUALISER_24000_B1 -0.669303
-#define CSEX_EQUALISER_24000_B2 -0.294984
+#define CSEX_EQUALISER_24000_B1 (-0.669303)
+#define CSEX_EQUALISER_24000_B2 (-0.294984)
#define CSEX_EQUALISER_24000_SCALE 14
/* Equaliser coefficients for 32000Hz sample rate, \
CS scaled with 1.044559 and CSEX scaled with 0.780006 */
#define CS_EQUALISER_32000_A0 1.560988
-#define CS_EQUALISER_32000_A1 -1.877724
+#define CS_EQUALISER_32000_A1 (-1.877724)
#define CS_EQUALISER_32000_A2 0.389741
-#define CS_EQUALISER_32000_B1 -0.907410
-#define CS_EQUALISER_32000_B2 -0.070489
+#define CS_EQUALISER_32000_B1 (-0.907410)
+#define CS_EQUALISER_32000_B2 (-0.070489)
#define CS_EQUALISER_32000_SCALE 14
#define CSEX_EQUALISER_32000_A0 1.785049
-#define CSEX_EQUALISER_32000_A1 -2.233497
+#define CSEX_EQUALISER_32000_A1 (-2.233497)
#define CSEX_EQUALISER_32000_A2 0.526431
-#define CSEX_EQUALISER_32000_B1 -0.445939
-#define CSEX_EQUALISER_32000_B2 -0.522446
+#define CSEX_EQUALISER_32000_B1 (-0.445939)
+#define CSEX_EQUALISER_32000_B2 (-0.522446)
#define CSEX_EQUALISER_32000_SCALE 13
/* Equaliser coefficients for 44100Hz sample rate, \
CS scaled with 1.022170 and CSEX scaled with 0.763288 */
#define CS_EQUALISER_44100_A0 1.623993
-#define CS_EQUALISER_44100_A1 -2.270743
+#define CS_EQUALISER_44100_A1 (-2.270743)
#define CS_EQUALISER_44100_A2 0.688829
-#define CS_EQUALISER_44100_B1 -1.117190
+#define CS_EQUALISER_44100_B1 (-1.117190)
#define CS_EQUALISER_44100_B2 0.130208
#define CS_EQUALISER_44100_SCALE 13
#define CSEX_EQUALISER_44100_A0 2.028315
-#define CSEX_EQUALISER_44100_A1 -2.882459
+#define CSEX_EQUALISER_44100_A1 (-2.882459)
#define CSEX_EQUALISER_44100_A2 0.904535
-#define CSEX_EQUALISER_44100_B1 -0.593308
-#define CSEX_EQUALISER_44100_B2 -0.385816
+#define CSEX_EQUALISER_44100_B1 (-0.593308)
+#define CSEX_EQUALISER_44100_B2 (-0.385816)
#define CSEX_EQUALISER_44100_SCALE 13
/* Equaliser coefficients for 48000Hz sample rate, \
CS scaled with 1.018635 and CSEX scaled with 0.760648 */
#define CS_EQUALISER_48000_A0 1.641177
-#define CS_EQUALISER_48000_A1 -2.364687
+#define CS_EQUALISER_48000_A1 (-2.364687)
#define CS_EQUALISER_48000_A2 0.759910
-#define CS_EQUALISER_48000_B1 -1.166774
+#define CS_EQUALISER_48000_B1 (-1.166774)
#define CS_EQUALISER_48000_B2 0.178074
#define CS_EQUALISER_48000_SCALE 13
#define CSEX_EQUALISER_48000_A0 2.099655
-#define CSEX_EQUALISER_48000_A1 -3.065220
+#define CSEX_EQUALISER_48000_A1 (-3.065220)
#define CSEX_EQUALISER_48000_A2 1.010417
-#define CSEX_EQUALISER_48000_B1 -0.634021
-#define CSEX_EQUALISER_48000_B2 -0.347332
+#define CSEX_EQUALISER_48000_B1 (-0.634021)
+#define CSEX_EQUALISER_48000_B2 (-0.347332)
#define CSEX_EQUALISER_48000_SCALE 13
#ifdef HIGHER_FS
#define CS_EQUALISER_96000_A0 1.784497
-#define CS_EQUALISER_96000_A1 -3.001435
+#define CS_EQUALISER_96000_A1 (-3.001435)
#define CS_EQUALISER_96000_A2 1.228422
-#define CS_EQUALISER_96000_B1 -1.477804
+#define CS_EQUALISER_96000_B1 (-1.477804)
#define CS_EQUALISER_96000_B2 0.481369
#define CS_EQUALISER_96000_SCALE 13
#define CSEX_EQUALISER_96000_A0 2.7573
-#define CSEX_EQUALISER_96000_A1 -4.6721
+#define CSEX_EQUALISER_96000_A1 (-4.6721)
#define CSEX_EQUALISER_96000_A2 1.9317
-#define CSEX_EQUALISER_96000_B1 -0.971718
-#define CSEX_EQUALISER_96000_B2 -0.021216
+#define CSEX_EQUALISER_96000_B1 (-0.971718)
+#define CSEX_EQUALISER_96000_B2 (-0.021216)
#define CSEX_EQUALISER_96000_SCALE 13
#define CS_EQUALISER_192000_A0 1.889582
-#define CS_EQUALISER_192000_A1 -3.456140
+#define CS_EQUALISER_192000_A1 (-3.456140)
#define CS_EQUALISER_192000_A2 1.569864
-#define CS_EQUALISER_192000_B1 -1.700798
+#define CS_EQUALISER_192000_B1 (-1.700798)
#define CS_EQUALISER_192000_B2 0.701824
#define CS_EQUALISER_192000_SCALE 13
#define CSEX_EQUALISER_192000_A0 3.4273
-#define CSEX_EQUALISER_192000_A1 -6.2936
+#define CSEX_EQUALISER_192000_A1 (-6.2936)
#define CSEX_EQUALISER_192000_A2 2.8720
-#define CSEX_EQUALISER_192000_B1 -1.31074
+#define CSEX_EQUALISER_192000_B1 (-1.31074)
#define CSEX_EQUALISER_192000_B2 0.31312
#define CSEX_EQUALISER_192000_SCALE 13
#endif
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index ab6b63c..1717b49 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -188,12 +188,13 @@
void Reverb_free (ReverbContext *pContext);
int Reverb_setConfig (ReverbContext *pContext, effect_config_t *pConfig);
void Reverb_getConfig (ReverbContext *pContext, effect_config_t *pConfig);
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue);
+int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize);
int Reverb_getParameter (ReverbContext *pContext,
void *pParam,
uint32_t *pValueSize,
void *pValue);
int Reverb_LoadPreset (ReverbContext *pContext);
+int Reverb_paramValueSize (int32_t param);
/* Effect Library Interface Implementation */
@@ -1870,12 +1871,13 @@
// pContext - handle to instance data
// pParam - pointer to parameter
// pValue - pointer to value
+// vsize - value size
//
// Outputs:
//
//----------------------------------------------------------------------------
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue){
+int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize){
int status = 0;
int16_t level;
int16_t ratio;
@@ -1899,6 +1901,11 @@
return 0;
}
+ if (vsize < Reverb_paramValueSize(param)) {
+ android_errorWriteLog(0x534e4554, "63526567");
+ return -EINVAL;
+ }
+
switch (param){
case REVERB_PARAM_PROPERTIES:
ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES");
@@ -1974,6 +1981,31 @@
return status;
} /* end Reverb_setParameter */
+
+/**
+ * Returns the size in bytes of the value of each environmental reverb parameter.
+ */
+int Reverb_paramValueSize(int32_t param) {
+ switch (param) {
+ case REVERB_PARAM_ROOM_LEVEL:
+ case REVERB_PARAM_ROOM_HF_LEVEL:
+ case REVERB_PARAM_REFLECTIONS_LEVEL:
+ case REVERB_PARAM_REVERB_LEVEL:
+ return sizeof(int16_t); // millibel
+ case REVERB_PARAM_DECAY_TIME:
+ case REVERB_PARAM_REFLECTIONS_DELAY:
+ case REVERB_PARAM_REVERB_DELAY:
+ return sizeof(uint32_t); // milliseconds
+ case REVERB_PARAM_DECAY_HF_RATIO:
+ case REVERB_PARAM_DIFFUSION:
+ case REVERB_PARAM_DENSITY:
+ return sizeof(int16_t); // permille
+ case REVERB_PARAM_PROPERTIES:
+ return sizeof(s_reverb_settings); // struct of all reverb properties
+ }
+ return sizeof(int32_t);
+}
+
} // namespace
} // namespace
@@ -2144,7 +2176,8 @@
*(int *)pReplyData = android::Reverb_setParameter(pContext,
(void *)p->data,
- p->data + p->psize);
+ p->data + p->psize,
+ p->vsize);
} break;
case EFFECT_CMD_ENABLE:
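
The EffectReverb change above threads the command's value size (p->vsize) down into Reverb_setParameter() and rejects values smaller than the parameter requires, so a malformed command buffer can no longer make the setter read past the end of pValue. A minimal sketch of the same validation pattern, assuming a command layout like effect_param_t (a psize-byte parameter blob followed by a vsize-byte value); the struct and parameter ids here are illustrative, not the real definitions:

#include <cerrno>
#include <cstdint>
#include <cstring>

// Illustrative stand-in for the effect command payload.
struct ParamCmd {
    uint32_t psize;      // size of the parameter blob at the start of data[]
    uint32_t vsize;      // size of the value blob that follows it
    uint8_t  data[64];
};

// Expected value size per parameter id (compare Reverb_paramValueSize()).
static size_t expectedValueSize(int32_t param) {
    constexpr int32_t kDecayTimeLike = 2;                  // illustrative id
    return (param == kDecayTimeLike) ? sizeof(uint32_t)    // milliseconds
                                     : sizeof(int16_t);    // millibel / permille
}

static int setParameter(const ParamCmd& cmd) {
    if (cmd.psize < sizeof(int32_t) || cmd.psize + cmd.vsize > sizeof(cmd.data)) {
        return -EINVAL;                                    // malformed command
    }
    int32_t param;
    std::memcpy(&param, cmd.data, sizeof(param));
    // The fix: refuse value blobs shorter than the parameter requires instead
    // of blindly reading a full-sized value out of the buffer.
    if (cmd.vsize < expectedValueSize(param)) {
        return -EINVAL;
    }
    const uint8_t* value = cmd.data + cmd.psize;  // value follows the parameter
    (void)value;                                  // ...apply the value here...
    return 0;
}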
diff --git a/media/libheif/Android.bp b/media/libheif/Android.bp
new file mode 100644
index 0000000..7d5a4eb
--- /dev/null
+++ b/media/libheif/Android.bp
@@ -0,0 +1,23 @@
+cc_library_shared {
+ name: "libheif",
+
+ srcs: [
+ "HeifDecoderImpl.cpp",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libutils",
+ "libmedia",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ include_dirs: [],
+
+ export_include_dirs: ["include"],
+}
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
new file mode 100644
index 0000000..8b846be
--- /dev/null
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HeifDecoderImpl"
+
+#include "HeifDecoderImpl.h"
+
+#include <stdio.h>
+
+#include <binder/IMemory.h>
+#include <drm/drm_framework_common.h>
+#include <media/IDataSource.h>
+#include <media/mediametadataretriever.h>
+#include <media/stagefright/MediaSource.h>
+#include <private/media/VideoFrame.h>
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+HeifDecoder* createHeifDecoder() {
+ return new android::HeifDecoderImpl();
+}
+
+namespace android {
+
+/*
+ * HeifDataSource
+ *
+ * Proxies data requests over IDataSource interface from MediaMetadataRetriever
+ * to the HeifStream interface we received from the heif decoder client.
+ */
+class HeifDataSource : public BnDataSource {
+public:
+ /*
+ * Constructs HeifDataSource; will take ownership of |stream|.
+ */
+ HeifDataSource(HeifStream* stream)
+ : mStream(stream), mReadPos(0), mEOS(false) {}
+
+ ~HeifDataSource() override {}
+
+ /*
+ * Initializes internal resources.
+ */
+ bool init();
+
+ sp<IMemory> getIMemory() override { return mMemory; }
+ ssize_t readAt(off64_t offset, size_t size) override;
+ status_t getSize(off64_t* size) override;
+ void close() {}
+ uint32_t getFlags() override { return 0; }
+ String8 toString() override { return String8("HeifDataSource"); }
+ sp<DecryptHandle> DrmInitialization(const char*) override {
+ return nullptr;
+ }
+
+private:
+ /*
+ * Buffer size for passing the read data to mediaserver. Set to 64K
+ * (which is what MediaDataSource Java API's jni implementation uses).
+ */
+ enum {
+ kBufferSize = 64 * 1024,
+ };
+ sp<IMemory> mMemory;
+ std::unique_ptr<HeifStream> mStream;
+ off64_t mReadPos;
+ bool mEOS;
+};
+
+bool HeifDataSource::init() {
+ sp<MemoryDealer> memoryDealer =
+ new MemoryDealer(kBufferSize, "HeifDataSource");
+ mMemory = memoryDealer->allocate(kBufferSize);
+ if (mMemory == nullptr) {
+ ALOGE("Failed to allocate shared memory!");
+ return false;
+ }
+ return true;
+}
+
+ssize_t HeifDataSource::readAt(off64_t offset, size_t size) {
+ ALOGV("readAt: offset=%lld, size=%zu", (long long)offset, size);
+
+ if (size == 0) {
+ return mEOS ? ERROR_END_OF_STREAM : 0;
+ }
+
+ if (offset < mReadPos) {
+ // try seek, then rewind/skip, fail if none worked
+ if (mStream->seek(offset)) {
+ ALOGV("readAt: seek to offset=%lld", (long long)offset);
+ mReadPos = offset;
+ mEOS = false;
+ } else if (mStream->rewind()) {
+ ALOGV("readAt: rewind to offset=0");
+ mReadPos = 0;
+ mEOS = false;
+ } else {
+ ALOGE("readAt: couldn't seek or rewind!");
+ mEOS = true;
+ }
+ }
+
+ if (mEOS) {
+ ALOGV("readAt: EOS");
+ return ERROR_END_OF_STREAM;
+ }
+
+ if (offset > mReadPos) {
+ // skipping
+ size_t skipSize = offset - mReadPos;
+ size_t bytesSkipped = mStream->read(nullptr, skipSize);
+ if (bytesSkipped <= skipSize) {
+ mReadPos += bytesSkipped;
+ }
+ if (bytesSkipped != skipSize) {
+ mEOS = true;
+ return ERROR_END_OF_STREAM;
+ }
+ }
+
+ if (size > kBufferSize) {
+ size = kBufferSize;
+ }
+ size_t bytesRead = mStream->read(mMemory->pointer(), size);
+ if (bytesRead > size || bytesRead == 0) {
+ // bytesRead is invalid
+ mEOS = true;
+ return ERROR_END_OF_STREAM;
+ }
+ if (bytesRead < size) {
+ // read some bytes but not all, set EOS and return ERROR_END_OF_STREAM next time
+ mEOS = true;
+ }
+ mReadPos += bytesRead;
+ return bytesRead;
+}
+
+status_t HeifDataSource::getSize(off64_t* size) {
+ if (!mStream->hasLength()) {
+ *size = -1;
+ ALOGE("getSize: not supported!");
+ return ERROR_UNSUPPORTED;
+ }
+ *size = mStream->getLength();
+ ALOGV("getSize: size=%lld", (long long)*size);
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////////
+
+HeifDecoderImpl::HeifDecoderImpl() :
+ // The output color format should always be set via setOutputColor(); in case
+ // it's not, default to HAL_PIXEL_FORMAT_RGB_565.
+ mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
+ mCurScanline(0) {
+}
+
+HeifDecoderImpl::~HeifDecoderImpl() {
+}
+
+bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
+ sp<HeifDataSource> dataSource = new HeifDataSource(stream);
+ if (!dataSource->init()) {
+ return false;
+ }
+ mDataSource = dataSource;
+
+ mRetriever = new MediaMetadataRetriever();
+ status_t err = mRetriever->setDataSource(mDataSource, "video/mp4");
+ if (err != OK) {
+ ALOGE("failed to set data source!");
+
+ mRetriever.clear();
+ mDataSource.clear();
+ return false;
+ }
+ ALOGV("successfully set data source.");
+
+ const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
+ if (!hasVideo || strcasecmp(hasVideo, "yes")) {
+ ALOGE("no video: %s", hasVideo ? hasVideo : "null");
+ return false;
+ }
+
+ mFrameMemory = mRetriever->getFrameAtTime(0,
+ IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+ mOutputColor, true /*metaOnly*/);
+ if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+ ALOGE("getFrameAtTime: videoFrame is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+
+ ALOGV("Meta dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mDisplayWidth,
+ videoFrame->mDisplayHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mIccSize);
+
+ if (frameInfo != nullptr) {
+ frameInfo->set(
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mBytesPerPixel,
+ videoFrame->mIccSize,
+ videoFrame->getFlattenedIccData());
+ }
+ return true;
+}
+
+bool HeifDecoderImpl::getEncodedColor(HeifEncodedColor* /*outColor*/) const {
+ ALOGW("getEncodedColor: not implemented!");
+ return false;
+}
+
+bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
+ switch(heifColor) {
+ case kHeifColorFormat_RGB565:
+ {
+ mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+ return true;
+ }
+ case kHeifColorFormat_RGBA_8888:
+ {
+ mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+ return true;
+ }
+ case kHeifColorFormat_BGRA_8888:
+ {
+ mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+ return true;
+ }
+ default:
+ break;
+ }
+ ALOGE("Unsupported output color format %d", heifColor);
+ return false;
+}
+
+bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
+ mFrameMemory = mRetriever->getFrameAtTime(0,
+ IMediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
+ if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+ ALOGE("getFrameAtTime: videoFrame is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+ ALOGV("Decoded dimension %dx%d, display %dx%d, angle %d, rowbytes %d, size %d",
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mDisplayWidth,
+ videoFrame->mDisplayHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mRowBytes,
+ videoFrame->mSize);
+
+ if (frameInfo != nullptr) {
+ frameInfo->set(
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mBytesPerPixel,
+ videoFrame->mIccSize,
+ videoFrame->getFlattenedIccData());
+ }
+ return true;
+}
+
+bool HeifDecoderImpl::getScanline(uint8_t* dst) {
+ if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+ return false;
+ }
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+ if (mCurScanline >= videoFrame->mHeight) {
+ return false;
+ }
+ uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
+ memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mWidth);
+ return true;
+}
+
+size_t HeifDecoderImpl::skipScanlines(size_t count) {
+ if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+ return 0;
+ }
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+
+ uint32_t oldScanline = mCurScanline;
+ mCurScanline += count;
+ if (mCurScanline >= videoFrame->mHeight) {
+ mCurScanline = videoFrame->mHeight;
+ }
+ return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
+}
+
+} // namespace android
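
HeifDataSource above only proxies to whatever HeifStream the client handed in, so the seek/rewind/skip fallback in readAt() is driven entirely by what that stream supports. A minimal sketch of a client-side stream backed by a memory buffer (the HeifStream interface is declared in HeifDecoderAPI.h later in this change); because it can seek and rewind, readAt() never needs the skip-forward path:

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <utility>
#include <vector>

#include "HeifDecoderAPI.h"  // HeifStream; added later in this change

// Illustrative in-memory HeifStream; not part of the library itself.
class MemoryHeifStream : public HeifStream {
public:
    explicit MemoryHeifStream(std::vector<uint8_t> data)
        : mData(std::move(data)), mPos(0) {}

    size_t read(void* buffer, size_t size) override {
        size_t n = std::min(size, mData.size() - mPos);
        if (buffer != nullptr && n > 0) {
            std::memcpy(buffer, mData.data() + mPos, n);
        }
        mPos += n;                                 // a null buffer means "skip n bytes"
        return n;
    }

    bool rewind() override { mPos = 0; return true; }

    bool seek(size_t position) override {
        mPos = std::min(position, mData.size());   // clamp past-the-end seeks
        return true;
    }

    bool hasLength() const override { return true; }
    size_t getLength() const override { return mData.size(); }

private:
    std::vector<uint8_t> mData;
    size_t mPos;
};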
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
new file mode 100644
index 0000000..2f8f0f8
--- /dev/null
+++ b/media/libheif/HeifDecoderImpl.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _HEIF_DECODER_IMPL_
+#define _HEIF_DECODER_IMPL_
+
+#include "include/HeifDecoderAPI.h"
+#include <system/graphics.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IDataSource;
+class IMemory;
+class MediaMetadataRetriever;
+
+/*
+ * An implementation of HeifDecoder based on Android's MediaMetadataRetriever.
+ */
+class HeifDecoderImpl : public HeifDecoder {
+public:
+
+ HeifDecoderImpl();
+ ~HeifDecoderImpl() override;
+
+ bool init(HeifStream* stream, HeifFrameInfo* frameInfo) override;
+
+ bool getEncodedColor(HeifEncodedColor* outColor) const override;
+
+ bool setOutputColor(HeifColorFormat heifColor) override;
+
+ bool decode(HeifFrameInfo* frameInfo) override;
+
+ bool getScanline(uint8_t* dst) override;
+
+ size_t skipScanlines(size_t count) override;
+
+private:
+ sp<IDataSource> mDataSource;
+ sp<MediaMetadataRetriever> mRetriever;
+ sp<IMemory> mFrameMemory;
+ android_pixel_format_t mOutputColor;
+ size_t mCurScanline;
+};
+
+} // namespace android
+
+#endif // _HEIF_DECODER_IMPL_
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
new file mode 100644
index 0000000..5183c39
--- /dev/null
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _HEIF_DECODER_API_
+#define _HEIF_DECODER_API_
+
+#include <stdint.h>
+#include <string.h>
+#include <memory>
+
+/*
+ * The output color pixel format of heif decoder.
+ */
+typedef enum {
+ kHeifColorFormat_RGB565 = 0,
+ kHeifColorFormat_RGBA_8888 = 1,
+ kHeifColorFormat_BGRA_8888 = 2,
+} HeifColorFormat;
+
+/*
+ * The color spaces encoded in the heif image.
+ */
+typedef enum {
+ kHeifEncodedColor_RGB = 0,
+ kHeifEncodedColor_YUV = 1,
+ kHeifEncodedColor_CMYK = 2,
+} HeifEncodedColor;
+
+/*
+ * Represents a color converted (RGB-based) video frame
+ */
+struct HeifFrameInfo
+{
+ HeifFrameInfo() :
+ mWidth(0), mHeight(0), mRotationAngle(0), mBytesPerPixel(0),
+ mIccSize(0), mIccData(nullptr) {}
+
+ // update the frame info, will make a copy of |iccData| internally
+ void set(uint32_t width, uint32_t height, int32_t rotation, uint32_t bpp,
+ uint32_t iccSize, uint8_t* iccData) {
+ mWidth = width;
+ mHeight = height;
+ mRotationAngle = rotation;
+ mBytesPerPixel = bpp;
+
+ if (mIccData != nullptr) {
+ mIccData.reset(nullptr);
+ }
+ mIccSize = iccSize;
+ if (iccSize > 0) {
+ mIccData.reset(new uint8_t[iccSize]);
+ if (mIccData.get() != nullptr) {
+ memcpy(mIccData.get(), iccData, iccSize);
+ } else {
+ mIccSize = 0;
+ }
+ }
+ }
+
+ // Intentional public access modifiers:
+ uint32_t mWidth;
+ uint32_t mHeight;
+ int32_t mRotationAngle; // Rotation angle, clockwise, should be multiple of 90
+ uint32_t mBytesPerPixel; // Number of bytes for one pixel
+ uint32_t mIccSize; // Number of bytes in mIccData
+ std::unique_ptr<uint8_t[]> mIccData; // Actual ICC data, memory is owned by this structure
+};
+
+/*
+ * Abstract interface to provide data to HeifDecoder.
+ */
+struct HeifStream {
+ HeifStream() {}
+
+ virtual ~HeifStream() {}
+
+ /*
+ * Reads or skips |size| bytes. Returns the number of bytes actually
+ * read or skipped.
+ * If |buffer| == NULL, skip size bytes, return how many were skipped.
+ * If |buffer| != NULL, copy size bytes into buffer, return how many were copied.
+ */
+ virtual size_t read(void* buffer, size_t size) = 0;
+
+ /*
+ * Rewinds to the beginning of the stream. Returns true if the stream is known
+ * to be at the beginning after this call returns.
+ */
+ virtual bool rewind() = 0;
+
+ /*
+ * Seeks to an absolute position in the stream. If this cannot be done, returns false.
+ * If an attempt is made to seek past the end of the stream, the position will be set
+ * to the end of the stream.
+ */
+ virtual bool seek(size_t /*position*/) = 0;
+
+ /** Returns true if this stream can report its total length. */
+ virtual bool hasLength() const = 0;
+
+ /** Returns the total length of the stream. If this cannot be done, returns 0. */
+ virtual size_t getLength() const = 0;
+
+private:
+ HeifStream(const HeifStream&) = delete;
+ HeifStream& operator=(const HeifStream&) = delete;
+};
+
+/*
+ * Abstract interface to decode heif images from a HeifStream data source.
+ */
+struct HeifDecoder {
+ HeifDecoder() {}
+
+ virtual ~HeifDecoder() {}
+
+ /*
+ * Returns true if it successfully sets outColor to the encoded color,
+ * and false otherwise.
+ */
+ virtual bool getEncodedColor(HeifEncodedColor* outColor) const = 0;
+
+ /*
+ * Returns true if it successfully sets the output color format to color,
+ * and false otherwise.
+ */
+ virtual bool setOutputColor(HeifColorFormat color) = 0;
+
+ /*
+ * Returns true if it successfully initializes the heif decoder with |stream|,
+ * and false otherwise. |frameInfo| will be filled with information of
+ * the primary picture upon success and unmodified upon failure.
+ * Takes ownership of |stream| regardless of result.
+ */
+ virtual bool init(HeifStream* stream, HeifFrameInfo* frameInfo) = 0;
+
+ /*
+ * Decodes the picture internally, returning whether it succeeded. |frameInfo|
+ * will be filled with information of the primary picture upon success and
+ * unmodified upon failure.
+ *
+ * After this succeeds, getScanline can be called to read the scanlines
+ * that were decoded.
+ */
+ virtual bool decode(HeifFrameInfo* frameInfo) = 0;
+
+ /*
+ * Reads the next scanline (in top-down order) into |dst|. Returns true upon
+ * success and false otherwise.
+ */
+ virtual bool getScanline(uint8_t* dst) = 0;
+
+ /*
+ * Skips the next |count| scanlines. Returns the number of scanlines
+ * actually skipped.
+ */
+ virtual size_t skipScanlines(size_t count) = 0;
+
+private:
+ HeifDecoder(const HeifDecoder&) = delete;
+ HeifDecoder& operator=(const HeifDecoder&) = delete;
+};
+
+/*
+ * Creates a HeifDecoder. Returns a HeifDecoder instance upon success, or NULL
+ * if the creation has failed.
+ */
+HeifDecoder* createHeifDecoder();
+
+#endif // _HEIF_DECODER_API_
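
A minimal caller-side sketch of the API declared above (illustration only, not part of the patch). BufferHeifStream and decodeToRgb565 are hypothetical names, and the sketch assumes <algorithm>, <cstring>, <memory>, and <vector> are included; it relies only on what the header declares, including init() taking ownership of the stream.

    // Hypothetical in-memory stream; any concrete HeifStream subclass would do.
    struct BufferHeifStream : public HeifStream {
        BufferHeifStream(const uint8_t* data, size_t size)
            : mData(data), mSize(size), mPos(0) {}

        size_t read(void* buffer, size_t size) override {
            size_t n = std::min(size, mSize - mPos);
            if (buffer != nullptr) {
                memcpy(buffer, mData + mPos, n);
            }
            mPos += n;  // skipping also advances the position
            return n;
        }
        bool rewind() override { mPos = 0; return true; }
        bool seek(size_t position) override {
            mPos = std::min(position, mSize);
            return true;
        }
        bool hasLength() const override { return true; }
        size_t getLength() const override { return mSize; }

    private:
        const uint8_t* mData;
        size_t mSize;
        size_t mPos;
    };

    // Decodes the primary image into packed RGB_565 rows.
    bool decodeToRgb565(const uint8_t* data, size_t size, std::vector<uint8_t>* out) {
        std::unique_ptr<HeifDecoder> decoder(createHeifDecoder());
        if (decoder == nullptr) {
            return false;
        }
        HeifFrameInfo info;
        // init() takes ownership of the stream regardless of the result.
        if (!decoder->init(new BufferHeifStream(data, size), &info) ||
                !decoder->setOutputColor(kHeifColorFormat_RGB565) ||
                !decoder->decode(&info)) {
            return false;
        }
        const size_t stride = (size_t)info.mWidth * info.mBytesPerPixel;
        out->resize(stride * info.mHeight);
        for (uint32_t row = 0; row < info.mHeight; row++) {
            if (!decoder->getScanline(out->data() + row * stride)) {
                return false;
            }
        }
        return true;
    }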
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index fb4fe4b..12242b3 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -15,6 +15,9 @@
cc_library {
name: "libmedia_helper",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
cflags: [
"-Werror",
@@ -121,6 +124,9 @@
cc_library_shared {
name: "libmedia_omx",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
defaults: ["libmedia_omx_defaults"],
}
@@ -198,6 +204,7 @@
],
export_shared_lib_headers: [
+ "libaudioclient",
"libbinder",
"libicuuc",
"libicui18n",
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 7058ee8..5ea2e8b 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -127,22 +127,32 @@
return reply.readInt32();
}
- status_t setDataSource(const sp<IDataSource>& source)
+ status_t setDataSource(const sp<IDataSource>& source, const char *mime)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(source));
+
+ if (mime != NULL) {
+ data.writeInt32(1);
+ data.writeCString(mime);
+ } else {
+ data.writeInt32(0);
+ }
remote()->transact(SET_DATA_SOURCE_CALLBACK, data, &reply);
return reply.readInt32();
}
- sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
+ sp<IMemory> getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly)
{
- ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d) metaOnly(%d)",
+ timeUs, option, colorFormat, metaOnly);
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
data.writeInt64(timeUs);
data.writeInt32(option);
+ data.writeInt32(colorFormat);
+ data.writeInt32(metaOnly);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
sendSchedPolicy(data);
#endif
@@ -258,7 +268,12 @@
if (source == NULL) {
reply->writeInt32(BAD_VALUE);
} else {
- reply->writeInt32(setDataSource(source));
+ int32_t hasMime = data.readInt32();
+ const char *mime = NULL;
+ if (hasMime) {
+ mime = data.readCString();
+ }
+ reply->writeInt32(setDataSource(source, mime));
}
return NO_ERROR;
} break;
@@ -266,11 +281,14 @@
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
int64_t timeUs = data.readInt64();
int option = data.readInt32();
- ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
+ int colorFormat = data.readInt32();
+ bool metaOnly = (data.readInt32() != 0);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
+ timeUs, option, colorFormat, metaOnly);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
setSchedPolicy(data);
#endif
- sp<IMemory> bitmap = getFrameAtTime(timeUs, option);
+ sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
if (bitmap != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
reply->writeStrongBinder(IInterface::asBinder(bitmap));
diff --git a/media/libmedia/include/media/IDataSource.h b/media/libmedia/include/media/IDataSource.h
index 655f337..3858f78 100644
--- a/media/libmedia/include/media/IDataSource.h
+++ b/media/libmedia/include/media/IDataSource.h
@@ -35,7 +35,9 @@
// Get the memory that readAt writes into.
virtual sp<IMemory> getIMemory() = 0;
// Read up to |size| bytes into the memory returned by getIMemory(). Returns
- // the number of bytes read, or -1 on error. |size| must not be larger than
+ // the number of bytes read, or a negative value on error (e.g.
+ // ERROR_END_OF_STREAM indicating EOS; this is needed by CallbackDataSource
+ // to properly handle reading of the last chunk). |size| must not be larger than
// the buffer.
virtual ssize_t readAt(off64_t offset, size_t size) = 0;
// Get the size, or -1 if the size is unknown.
diff --git a/media/libmedia/include/media/IMediaMetadataRetriever.h b/media/libmedia/include/media/IMediaMetadataRetriever.h
index c90f254..ea95161 100644
--- a/media/libmedia/include/media/IMediaMetadataRetriever.h
+++ b/media/libmedia/include/media/IMediaMetadataRetriever.h
@@ -19,13 +19,12 @@
#define ANDROID_IMEDIAMETADATARETRIEVER_H
#include <binder/IInterface.h>
-#include <binder/Parcel.h>
#include <binder/IMemory.h>
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>
namespace android {
-
+class Parcel;
class IDataSource;
struct IMediaHTTPService;
@@ -41,8 +40,10 @@
const KeyedVector<String8, String8> *headers = NULL) = 0;
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0;
- virtual status_t setDataSource(const sp<IDataSource>& dataSource) = 0;
- virtual sp<IMemory> getFrameAtTime(int64_t timeUs, int option) = 0;
+ virtual status_t setDataSource(
+ const sp<IDataSource>& dataSource, const char *mime) = 0;
+ virtual sp<IMemory> getFrameAtTime(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
virtual sp<IMemory> extractAlbumArt() = 0;
virtual const char* extractMetadata(int keyCode) = 0;
};
diff --git a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
index a5e1350..257002d 100644
--- a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
+++ b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
@@ -41,8 +41,9 @@
const KeyedVector<String8, String8> *headers = NULL) = 0;
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0;
- virtual status_t setDataSource(const sp<DataSource>& source) = 0;
- virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) = 0;
+ virtual status_t setDataSource(const sp<DataSource>& source, const char *mime) = 0;
+ virtual VideoFrame* getFrameAtTime(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly) = 0;
virtual MediaAlbumArt* extractAlbumArt() = 0;
virtual const char* extractMetadata(int keyCode) = 0;
};
@@ -54,7 +55,9 @@
MediaMetadataRetrieverInterface() {}
virtual ~MediaMetadataRetrieverInterface() {}
- virtual VideoFrame* getFrameAtTime(int64_t /*timeUs*/, int /*option*/) { return NULL; }
+ virtual VideoFrame* getFrameAtTime(
+ int64_t /*timeUs*/, int /*option*/, int /*colorFormat*/, bool /*metaOnly*/)
+ { return NULL; }
virtual MediaAlbumArt* extractAlbumArt() { return NULL; }
virtual const char* extractMetadata(int /*keyCode*/) { return NULL; }
};
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 8ed07ee..65c266b 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -76,8 +76,10 @@
const KeyedVector<String8, String8> *headers = NULL);
status_t setDataSource(int fd, int64_t offset, int64_t length);
- status_t setDataSource(const sp<IDataSource>& dataSource);
- sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
+ status_t setDataSource(
+ const sp<IDataSource>& dataSource, const char *mime = NULL);
+ sp<IMemory> getFrameAtTime(int64_t timeUs, int option,
+ int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
sp<IMemory> extractAlbumArt();
const char* extractMetadata(int keyCode);
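
A caller-side sketch of the widened retriever interface above (illustration only, not part of the patch). grabThumbnail is a hypothetical helper, the file-descriptor handling is assumed, and |option| stands for one of the retriever's existing seek options; the comments on metaOnly describe the intended use rather than guaranteed behavior.

    sp<IMemory> grabThumbnail(int fd, int64_t fileLength, int64_t timeUs, int option) {
        sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
        if (retriever->setDataSource(fd, 0 /* offset */, fileLength) != NO_ERROR) {
            return NULL;
        }
        // metaOnly = true is assumed to return only frame metadata (dimensions,
        // format) without decoding pixel data, so a caller can size buffers up front.
        sp<IMemory> meta = retriever->getFrameAtTime(
                timeUs, option, HAL_PIXEL_FORMAT_RGB_565, true /* metaOnly */);
        if (meta == NULL) {
            return NULL;
        }
        // The defaults (HAL_PIXEL_FORMAT_RGB_565, metaOnly = false) preserve the
        // behavior of the old two-argument call.
        return retriever->getFrameAtTime(timeUs, option);
    }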
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 08a9e6a..7d27d57 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -130,7 +130,7 @@
}
status_t MediaMetadataRetriever::setDataSource(
- const sp<IDataSource>& dataSource)
+ const sp<IDataSource>& dataSource, const char *mime)
{
ALOGV("setDataSource(IDataSource)");
Mutex::Autolock _l(mLock);
@@ -138,18 +138,20 @@
ALOGE("retriever is not initialized");
return INVALID_OPERATION;
}
- return mRetriever->setDataSource(dataSource);
+ return mRetriever->setDataSource(dataSource, mime);
}
-sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MediaMetadataRetriever::getFrameAtTime(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly)
{
- ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option);
+ ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d) colorFormat(%d) metaOnly(%d)",
+ timeUs, option, colorFormat, metaOnly);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
return NULL;
}
- return mRetriever->getFrameAtTime(timeUs, option);
+ return mRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
}
const char* MediaMetadataRetriever::extractMetadata(int keyCode)
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 793f476..5a468f3 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -175,7 +175,7 @@
}
status_t MetadataRetrieverClient::setDataSource(
- const sp<IDataSource>& source)
+ const sp<IDataSource>& source, const char *mime)
{
ALOGV("setDataSource(IDataSource)");
Mutex::Autolock lock(mLock);
@@ -186,16 +186,18 @@
ALOGV("player type = %d", playerType);
sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
if (p == NULL) return NO_INIT;
- status_t ret = p->setDataSource(dataSource);
+ status_t ret = p->setDataSource(dataSource, mime);
if (ret == NO_ERROR) mRetriever = p;
return ret;
}
Mutex MetadataRetrieverClient::sLock;
-sp<IMemory> MetadataRetrieverClient::getFrameAtTime(int64_t timeUs, int option)
+sp<IMemory> MetadataRetrieverClient::getFrameAtTime(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly)
{
- ALOGV("getFrameAtTime: time(%lld us) option(%d)", (long long)timeUs, option);
+ ALOGV("getFrameAtTime: time(%lld us) option(%d) colorFormat(%d), metaOnly(%d)",
+ (long long)timeUs, option, colorFormat, metaOnly);
Mutex::Autolock lock(mLock);
Mutex::Autolock glock(sLock);
mThumbnail.clear();
@@ -203,12 +205,13 @@
ALOGE("retriever is not initialized");
return NULL;
}
- VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option);
+ VideoFrame *frame = mRetriever->getFrameAtTime(
+ timeUs, option, colorFormat, metaOnly);
if (frame == NULL) {
ALOGE("failed to capture a video frame");
return NULL;
}
- size_t size = sizeof(VideoFrame) + frame->mSize;
+ size_t size = frame->getFlattenedSize();
sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
if (heap == NULL) {
ALOGE("failed to create MemoryDealer");
@@ -222,16 +225,7 @@
return NULL;
}
VideoFrame *frameCopy = static_cast<VideoFrame *>(mThumbnail->pointer());
- frameCopy->mWidth = frame->mWidth;
- frameCopy->mHeight = frame->mHeight;
- frameCopy->mDisplayWidth = frame->mDisplayWidth;
- frameCopy->mDisplayHeight = frame->mDisplayHeight;
- frameCopy->mSize = frame->mSize;
- frameCopy->mRotationAngle = frame->mRotationAngle;
- ALOGV("rotation: %d", frameCopy->mRotationAngle);
- frameCopy->mData = (uint8_t *)frameCopy + sizeof(VideoFrame);
- memcpy(frameCopy->mData, frame->mData, frame->mSize);
- frameCopy->mData = 0;
+ frameCopy->copyFlattened(*frame);
delete frame; // Fix memory leakage
return mThumbnail;
}
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index fe7547c..c78cd4b 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -49,8 +49,9 @@
const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setDataSource(const sp<IDataSource>& source);
- virtual sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
+ virtual status_t setDataSource(const sp<IDataSource>& source, const char *mime);
+ virtual sp<IMemory> getFrameAtTime(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly);
virtual sp<IMemory> extractAlbumArt();
virtual const char* extractMetadata(int keyCode);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 8fe255b..ac187cc 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -343,7 +343,7 @@
format, mSurface, crypto, 0 /* flags */);
if (err != OK) {
- ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
+ ALOGE("Failed to configure [%s] decoder (err=%d)", mComponentName.c_str(), err);
mCodec->release();
mCodec.clear();
handleError(err);
@@ -372,7 +372,7 @@
err = mCodec->start();
if (err != OK) {
- ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
+ ALOGE("Failed to start [%s] decoder (err=%d)", mComponentName.c_str(), err);
mCodec->release();
mCodec.clear();
handleError(err);
@@ -460,6 +460,12 @@
if (notifyComplete) {
mResumePending = true;
}
+
+ if (mCodec == NULL) {
+ ALOGE("[%s] onResume without a valid codec", mComponentName.c_str());
+ handleError(NO_INIT);
+ return;
+ }
mCodec->start();
}
@@ -481,7 +487,7 @@
}
if (err != OK) {
- ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+ ALOGE("failed to flush [%s] (err=%d)", mComponentName.c_str(), err);
handleError(err);
// finish with posting kWhatFlushCompleted.
// we attempt to release the buffers even if flush fails.
@@ -530,7 +536,7 @@
releaseAndResetMediaBuffers();
if (err != OK) {
- ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+ ALOGE("failed to release [%s] (err=%d)", mComponentName.c_str(), err);
handleError(err);
// finish with posting kWhatShutdownCompleted.
}
@@ -631,10 +637,17 @@
return false;
}
+ if (mCodec == NULL) {
+ ALOGE("[%s] handleAnInputBuffer without a valid codec", mComponentName.c_str());
+ handleError(NO_INIT);
+ return false;
+ }
+
sp<MediaCodecBuffer> buffer;
mCodec->getInputBuffer(index, &buffer);
if (buffer == NULL) {
+ ALOGE("[%s] handleAnInputBuffer, failed to get input buffer", mComponentName.c_str());
handleError(UNKNOWN_ERROR);
return false;
}
@@ -697,11 +710,18 @@
size_t size,
int64_t timeUs,
int32_t flags) {
+ if (mCodec == NULL) {
+ ALOGE("[%s] handleAnOutputBuffer without a valid codec", mComponentName.c_str());
+ handleError(NO_INIT);
+ return false;
+ }
+
// CHECK_LT(bufferIx, mOutputBuffers.size());
sp<MediaCodecBuffer> buffer;
mCodec->getOutputBuffer(index, &buffer);
if (buffer == NULL) {
+ ALOGE("[%s] handleAnOutputBuffer, failed to get output buffer", mComponentName.c_str());
handleError(UNKNOWN_ERROR);
return false;
}
@@ -949,6 +969,12 @@
}
bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
+ if (mCodec == NULL) {
+ ALOGE("[%s] onInputBufferFetched without a valid codec", mComponentName.c_str());
+ handleError(NO_INIT);
+ return false;
+ }
+
size_t bufferIx;
CHECK(msg->findSize("buffer-ix", &bufferIx));
CHECK_LT(bufferIx, mInputBuffers.size());
@@ -979,7 +1005,7 @@
}
if (streamErr != ERROR_END_OF_STREAM) {
- ALOGE("Stream error for %s (err=%d), EOS %s queued",
+ ALOGE("Stream error for [%s] (err=%d), EOS %s queued",
mComponentName.c_str(),
streamErr,
err == OK ? "successfully" : "unsuccessfully");
@@ -1073,7 +1099,7 @@
} // no cryptInfo
if (err != OK) {
- ALOGE("onInputBufferFetched: queue%sInputBuffer failed for %s (err=%d, %s)",
+ ALOGE("onInputBufferFetched: queue%sInputBuffer failed for [%s] (err=%d, %s)",
(cryptInfo != NULL ? "Secure" : ""),
mComponentName.c_str(), err, errorDetailMsg.c_str());
handleError(err);
@@ -1102,7 +1128,9 @@
}
}
- if (msg->findInt32("render", &render) && render) {
+ if (mCodec == NULL) {
+ err = NO_INIT;
+ } else if (msg->findInt32("render", &render) && render) {
int64_t timestampNs;
CHECK(msg->findInt64("timestampNs", &timestampNs));
err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
@@ -1111,7 +1139,7 @@
err = mCodec->releaseOutputBuffer(bufferIx);
}
if (err != OK) {
- ALOGE("failed to release output buffer for %s (err=%d)",
+ ALOGE("failed to release output buffer for [%s] (err=%d)",
mComponentName.c_str(), err);
handleError(err);
}
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp
index c73632c..ad38390 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnbaio/NBLog.cpp
@@ -12,78 +12,10 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ *
*/
-/*
-* Documentation: Workflow summary for histogram data processing:
-* For more details on FIFO, please see system/media/audio_utils; doxygen
-* TODO: add this documentation to doxygen once it is further developed
-* 1) Writing buffer period timestamp to the circular buffer
-* onWork()
-* Called every period length (e.g., 4ms)
-* Calls LOG_HIST_TS
-* LOG_HIST_TS
-* Hashes file name and line number, and writes single timestamp to buffer
-* calls NBLOG::Writer::logEventHistTS once
-* NBLOG::Writer::logEventHistTS
-* calls NBLOG::Writer::log on hash and current timestamp
-* time is in CLOCK_MONOTONIC converted to ns
-* NBLOG::Writer::log(Event, const void*, size_t)
-* Initializes Entry, a struct containing one log entry
-* Entry contains the event type (mEvent), data length (mLength),
-* and data pointer (mData)
-* TODO: why mLength (max length of buffer data) must be <= kMaxLength = 255?
-* calls NBLOG::Writer::log(Entry *, bool)
-* NBLog::Writer::log(Entry *, bool)
-* Calls copyEntryDataAt to format data as follows in temp array:
-* [type][length][data ... ][length]
-* calls audio_utils_fifo_writer.write on temp
-* audio_utils_fifo_writer.write
-* calls obtain(), memcpy (reference in doxygen)
-* returns number of frames written
-* ssize_t audio_utils_fifo_reader::obtain
-* Determines readable buffer section via pointer arithmetic on reader
-* and writer pointers
-* Similarly, LOG_AUDIO_STATE() is called by onStateChange whenever audio is
-* turned on or off, and writes this notification to the FIFO.
-*
-* 2) reading the data from shared memory
-* Thread::threadloop()
-* NBLog::MergeThread::threadLoop()
-* Waits on a mutex, called periodically
-* Calls NBLog::Merger::merge and MergeReader.getAndProcessSnapshot.
-* NBLog::Merger::merge
-* Merges snapshots sorted by timestamp
-* Calls Reader::getSnapshot on each individual thread buffer to in shared
-* memory and writes all their data to the single FIFO stored in mMerger.
-* NBLog::Reader::getSnapshot
-* copies snapshot of reader's fifo buffer into its own buffer
-* calls mFifoReader->obtain to find readable data
-* sets snapshot.begin() and .end() iterators to boundaries of valid entries
-* moves the fifo reader index to after the last entry read
-* in this case, the buffer is in shared memory. in (4), the buffer is private
-* NBLog::MergeThread::getAndProcessSnapshot
-* Iterates through the entries in the local FIFO. Processes the data in
-* specific ways depending on the entry type. If the data is a histogram
-* timestamp or an audio on/off signal, writes to a map of PerformanceAnalysis
-* class instances, where the wakeup() intervals are stored as histograms
-* and analyzed.
-*
-* 3) Dumpsys media.log call to report the data
-* MediaLogService::dump in MediaLogService.cpp
-* calls NBLog::Reader::dump, which calls ReportPerformance::dump
-* ReportPerformance::dump
-* calls PerformanceAnalysis::ReportPerformance
-* and ReportPerformance::WriteToFile
-* PerformanceAnalysis::ReportPerformance
-* for each thread/source file location instance of PerformanceAnalysis data,
-* combines all histograms into a single one and prints it to the console
-* along with outlier data
-* ReportPerformance::WriteToFile
-* writes histogram, outlier, and peak information to file separately for each
-* instance of PerformanceAnalysis data.
-*/
-
#define LOG_TAG "NBLog"
#include <algorithm>
@@ -875,14 +807,6 @@
void NBLog::MergeReader::getAndProcessSnapshot(NBLog::Reader::Snapshot &snapshot)
{
String8 timestamp, body;
- // TODO: check: is this thread safe?
- // TODO: add lost data information and notification to ReportPerformance
- size_t lost = snapshot.lost() + (snapshot.begin() - EntryIterator(snapshot.data()));
- if (lost > 0) {
- // TODO: ultimately, this will be += and reset to 0. TODO: check that this is
- // obsolete now that Merger::merge is called periodically. No data should be lost
- mLost = lost;
- }
for (auto entry = snapshot.begin(); entry != snapshot.end();) {
switch (entry->type) {
diff --git a/media/libnbaio/OWNERS b/media/libnbaio/OWNERS
index f9cb567..eece71f 100644
--- a/media/libnbaio/OWNERS
+++ b/media/libnbaio/OWNERS
@@ -1 +1,2 @@
gkasten@google.com
+hunga@google.com
diff --git a/media/libnbaio/PerformanceAnalysis.cpp b/media/libnbaio/PerformanceAnalysis.cpp
index e5e444e..9e0f84d 100644
--- a/media/libnbaio/PerformanceAnalysis.cpp
+++ b/media/libnbaio/PerformanceAnalysis.cpp
@@ -46,9 +46,8 @@
namespace ReportPerformance {
-// Given a the most recent timestamp of a series of audio processing
-// wakeup timestamps,
-// buckets the time interval into a histogram, searches for
+// Given an audio processing wakeup timestamp, buckets the time interval
+// since the previous timestamp into a histogram, searches for
// outliers, analyzes the outlier series for unexpectedly
// small or large values and stores these as peaks
void PerformanceAnalysis::logTsEntry(timestamp ts) {
@@ -75,7 +74,7 @@
// NormalMixer times vary much more than FastMixer times.
// TODO: mOutlierFactor values are set empirically based on what appears to be
// an outlier. Learn these values from the data.
- mBufferPeriod.mOutlierFactor = mBufferPeriod.mMean < kFastMixerMax ? 1.8 : 2.5;
+ mBufferPeriod.mOutlierFactor = mBufferPeriod.mMean < kFastMixerMax ? 1.8 : 2.0;
// set outlier threshold
mBufferPeriod.mOutlier = mBufferPeriod.mMean * mBufferPeriod.mOutlierFactor;
@@ -232,10 +231,15 @@
// rounds value to precision based on log-distance from mean
inline double logRound(double x, double mean) {
- // Larger values increase range of high resolution
- constexpr double kBase = 2;
+ // Larger values decrease range of high resolution and prevent overflow
+ // of a histogram on the console.
+ // The following formula adjusts kBase based on the buffer period length.
+ // Different threads have buffer periods ranging from about 2 to 40 ms. The
+ // formula below maps a mean of 2 to kBase ~0.9, 4 to ~1.8, 20 to ~3.8, and 40 to ~4.7.
+ // TODO: tighten this for higher means, the data still overflows
+ const double kBase = log(mean) / log(2.2);
const double power = floor(
- log(abs(x - mean) / mean) / log(kBase)) + 1;
+ log(abs(x - mean) / mean) / log(kBase)) + 2;
// do not round values close to the mean
if (power < 1) {
return x;
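
A quick numeric check of the formula above (illustration only, not part of the patch), using natural logarithms as in the code:

    mean = 4 ms  ->  kBase = log(4) / log(2.2) = 1.386 / 0.788 ~ 1.8
    x = 4.2 ms   ->  power = floor(log(0.2 / 4) / log(1.8)) + 2 = floor(-5.1) + 2 = -6 + 2 = -4,
                     so a value this close to the mean is returned unrounded (power < 1)
    x = 40 ms    ->  power = floor(log(36 / 4) / log(1.8)) + 2 = floor(3.7) + 2 = 3 + 2 = 5,
                     so a far outlier is rounded much more coarsely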
@@ -259,13 +263,12 @@
timestamp startingTs = mHists[0].first;
// histogram which stores .1 precision ms counts instead of Jiffy multiple counts
- // TODO: when there is more data, print many histograms, possibly separated at peaks
std::map<double, int> buckets;
for (const auto &shortHist: mHists) {
for (const auto &countPair : shortHist.second) {
const double ms = static_cast<double>(countPair.first) / kJiffyPerMs;
buckets[logRound(ms, mBufferPeriod.mMean)] += countPair.second;
- elapsedMs += ms;
+ elapsedMs += ms * countPair.second;
}
}
diff --git a/media/libnbaio/ReportPerformance.cpp b/media/libnbaio/ReportPerformance.cpp
index e64a6d3..efc1b84 100644
--- a/media/libnbaio/ReportPerformance.cpp
+++ b/media/libnbaio/ReportPerformance.cpp
@@ -37,6 +37,10 @@
namespace ReportPerformance {
+
+// TODO: use a function like this to extract logic from writeToFile
+// https://stackoverflow.com/a/9279620
+
// Writes outlier intervals, timestamps, and histograms spanning long time intervals to file.
// TODO: write data in binary format
void writeToFile(const std::deque<std::pair<timestamp, Histogram>> &hists,
@@ -80,13 +84,18 @@
// each histogram is written as a line where the first value is the timestamp and
// subsequent values are pairs of buckets and counts. Each value is separated
// by a comma, and each histogram is separated by a newline.
- for (const auto &hist : hists) {
- hfs << hist.first << ", ";
- for (const auto &bucket : hist.second) {
- hfs << bucket.first / static_cast<double>(kJiffyPerMs)
- << ", " << bucket.second << ", ";
+ for (auto hist = hists.begin(); hist != hists.end(); ++hist) {
+ hfs << hist->first << ", ";
+ for (auto bucket = hist->second.begin(); bucket != hist->second.end(); ++bucket) {
+ hfs << bucket->first / static_cast<double>(kJiffyPerMs)
+ << ", " << bucket->second;
+ if (std::next(bucket) != end(hist->second)) {
+ hfs << ", ";
+ }
}
- hfs << "\n";
+ if (std::next(hist) != end(hists)) {
+ hfs << "\n";
+ }
}
hfs.close();
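
For reference, one histogram serialized by the loop above becomes a single line of the following shape (values are illustrative only): the first field is the histogram timestamp, followed by comma-separated bucket/count pairs, with buckets in milliseconds at 0.1 ms precision.

    73016103937, 3.9, 12, 4.0, 857, 4.1, 22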
@@ -110,8 +119,11 @@
return;
}
// peaks are simply timestamps separated by commas
- for (const auto &peak : peakTimestamps) {
- pfs << peak << ", ";
+ for (auto peak = peakTimestamps.begin(); peak != peakTimestamps.end(); ++peak) {
+ pfs << *peak;
+ if (std::next(peak) != end(peakTimestamps)) {
+ pfs << ", ";
+ }
}
pfs.close();
}
diff --git a/media/libnbaio/include/media/nbaio/NBLog.h b/media/libnbaio/include/media/nbaio/NBLog.h
index 5549728..2c00386 100644
--- a/media/libnbaio/include/media/nbaio/NBLog.h
+++ b/media/libnbaio/include/media/nbaio/NBLog.h
@@ -475,12 +475,6 @@
audio_utils_fifo_reader * const mFifoReader; // used to read from FIFO,
// non-NULL unless constructor fails
- // TODO: it might be clearer, instead of a direct map from source location to vector of
- // timestamps, if we instead first mapped from source location to an object that
- // represented that location. And one_of its fields would be a vector of timestamps.
- // That would allow us to record other information about the source location beyond
- // timestamps.
-
// Searches for the last entry of type <type> in the range [front, back)
// back has to be entry-aligned. Returns nullptr if none encountered.
static const uint8_t *findLastEntryOfTypes(const uint8_t *front, const uint8_t *back,
diff --git a/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h b/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
index a673151..50367be 100644
--- a/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
+++ b/media/libnbaio/include/media/nbaio/PerformanceAnalysis.h
@@ -35,9 +35,9 @@
class PerformanceAnalysis {
// This class stores and analyzes audio processing wakeup timestamps from NBLog
- // FIXME: currently, all performance data is stored in deques. Need to add a mutex.
- // FIXME: continue this way until analysis is done in a separate thread. Then, use
- // the fifo writer utilities.
+ // FIXME: currently, all performance data is stored in deques. Turn these into circular
+ // buffers.
+ // TODO: add a mutex.
public:
PerformanceAnalysis() {};
@@ -45,20 +45,11 @@
friend void dump(int fd, int indent,
PerformanceAnalysisMap &threadPerformanceAnalysis);
- // Given a series of audio processing wakeup timestamps,
- // compresses and and analyzes the data, and flushes
- // the timestamp series from memory.
- void processAndFlushTimeStampSeries();
-
- // Called when an audio on/off event is read from the buffer,
- // e.g. EVENT_AUDIO_STATE.
- // calls flushTimeStampSeries on the data up to the event,
- // effectively discarding the idle audio time interval
+ // Called in the case of an audio on/off event, e.g., EVENT_AUDIO_STATE.
+ // Used to discard idle time intervals
void handleStateChange();
// Writes wakeup timestamp entry to log and runs analysis
- // TODO: make this thread safe. Each thread should have its own instance
- // of PerformanceAnalysis.
void logTsEntry(timestamp ts);
// FIXME: make peakdetector and storeOutlierData a single function
@@ -67,13 +58,11 @@
// writes timestamps of significant changes to mPeakTimestamps
bool detectAndStorePeak(msInterval delta, timestamp ts);
- // runs analysis on timestamp series before it is converted to a histogram
- // finds outliers
+ // stores timestamps of intervals above a threshold: these are assumed outliers.
// writes to mOutlierData <time elapsed since previous outlier, outlier timestamp>
bool detectAndStoreOutlier(const msInterval diffMs);
// Generates a string of analysis of the buffer periods and prints to console
- // TODO: WIP write more detailed analysis
// FIXME: move this data visualization to a separate class. Model/view/controller
void reportPerformance(String8 *body, int author, log_hash_t hash,
int maxHeight = 10);
@@ -102,16 +91,16 @@
} mBufferPeriod;
// capacity allocated to data structures
- // TODO: make these values longer when testing is finished
struct MaxLength {
size_t Hists; // number of histograms stored in memory
- size_t TimeStamps; // histogram size
size_t Outliers; // number of values stored in outlier array
size_t Peaks; // number of values stored in peak array
int HistTimespanMs; // maximum histogram timespan
};
- static constexpr MaxLength kMaxLength = {.Hists = 20, .TimeStamps = 1000,
- .Outliers = 100, .Peaks = 100, .HistTimespanMs = 5 * kMsPerSec };
+ // These values allow for 10 hours of data: 60 histograms spanning 10 minutes each,
+ // and up to 12000 outliers and 12000 peaks, i.e. one every 3 seconds over 10 hours.
+ static constexpr MaxLength kMaxLength = {.Hists = 60, .Outliers = 12000,
+ .Peaks = 12000, .HistTimespanMs = 10 * kSecPerMin * kMsPerSec };
// these variables ensure continuity while analyzing the timestamp
// series one sample at a time.
diff --git a/media/libnbaio/include/media/nbaio/ReportPerformance.h b/media/libnbaio/include/media/nbaio/ReportPerformance.h
index 0d4a7b9..ec0842f 100644
--- a/media/libnbaio/include/media/nbaio/ReportPerformance.h
+++ b/media/libnbaio/include/media/nbaio/ReportPerformance.h
@@ -23,18 +23,17 @@
namespace android {
-// This class is used by reportPerformance function
-// TODO move PerformanceAnalysis::reportPerformance function to ReportPerformance.cpp
+// The String8 class is used by the reportPerformance function
class String8;
namespace ReportPerformance {
constexpr int kMsPerSec = 1000;
+constexpr int kSecPerMin = 60;
constexpr int kJiffyPerMs = 10; // time unit for histogram as a multiple of milliseconds
// stores a histogram: key: observed buffer period (multiple of jiffy). value: count
-// TODO: unsigned, unsigned
using Histogram = std::map<int, int>;
using msInterval = double;
@@ -44,7 +43,6 @@
using log_hash_t = uint64_t;
-// TODO: should this return an int64_t?
static inline int deltaMs(int64_t ns1, int64_t ns2) {
return (ns2 - ns1) / (1000 * 1000);
}
@@ -58,8 +56,7 @@
return 31 - __builtin_clz(x);
}
-// Writes outlier intervals, timestamps, and histograms spanning long time
-// intervals to a file.
+// Writes outlier intervals, timestamps, peak timestamps, and histograms to a file.
void writeToFile(const std::deque<std::pair<timestamp, Histogram>> &hists,
const std::deque<std::pair<msInterval, timestamp>> &outlierData,
const std::deque<timestamp> &peakTimestamps,
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d4ec30d..63ad0e0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1866,16 +1866,15 @@
mFlags |= kFlagIsGrallocUsageProtected;
mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
}
+ }
+ if (mFlags & kFlagIsSecure) {
+ // use native_handles for secure input buffers
+ err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);
- if (mFlags & kFlagIsSecure) {
- // use native_handles for secure input buffers
- err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);
-
- if (err != OK) {
- ALOGI("falling back to non-native_handles");
- setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
- err = OK; // ignore error for now
- }
+ if (err != OK) {
+ ALOGI("falling back to non-native_handles");
+ setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
+ err = OK; // ignore error for now
}
}
if (haveNativeWindow) {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 7070bdb..130992a 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -30,6 +30,7 @@
"FrameRenderTracker.cpp",
"HTTPBase.cpp",
"HevcUtils.cpp",
+ "ItemTable.cpp",
"JPEGSource.cpp",
"MP3Extractor.cpp",
"MPEG2TSWriter.cpp",
diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp
index 4309372..6dfe2de 100644
--- a/media/libstagefright/CallbackDataSource.cpp
+++ b/media/libstagefright/CallbackDataSource.cpp
@@ -127,10 +127,6 @@
}
ssize_t TinyCacheSource::readAt(off64_t offset, void* data, size_t size) {
- if (size >= kCacheSize) {
- return mSource->readAt(offset, data, size);
- }
-
// Check if the cache satisfies the read.
if (mCachedOffset <= offset
&& offset < (off64_t) (mCachedOffset + mCachedSize)) {
@@ -154,6 +150,9 @@
}
}
+ if (size >= kCacheSize) {
+ return mSource->readAt(offset, data, size);
+ }
// Fill the cache and copy to the caller.
const ssize_t numRead = mSource->readAt(offset, mCache, kCacheSize);
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index a5760d1..c22053e 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -92,6 +92,48 @@
return true;
}
+bool DataSource::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
+ if (size == 2) {
+ return getUInt16(offset, x);
+ }
+ if (size == 1) {
+ uint8_t tmp;
+ if (readAt(offset, &tmp, 1) == 1) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool DataSource::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
+ if (size == 4) {
+ return getUInt32(offset, x);
+ }
+ if (size == 2) {
+ uint16_t tmp;
+ if (getUInt16(offset, &tmp)) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool DataSource::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
+ if (size == 8) {
+ return getUInt64(offset, x);
+ }
+ if (size == 4) {
+ uint32_t tmp;
+ if (getUInt32(offset, &tmp)) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+}
+
status_t DataSource::getSize(off64_t *size) {
*size = 0;
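
A short sketch of how the variable-width helpers above are meant to be consumed (illustration only, not part of the patch; readItemId is a hypothetical wrapper). It mirrors the version-dependent field reads performed by the HEIF ItemTable parser added below.

    // Many ISO-BMFF fields change width with the box version: version 0 boxes
    // store item_id as 16 bits, later versions as 32 bits.
    status_t readItemId(const sp<DataSource>& source, off64_t offset,
                        uint8_t boxVersion, uint32_t* itemId) {
        size_t itemIdSize = (boxVersion == 0) ? 2 : 4;
        if (!source->getUInt32Var(offset, itemId, itemIdSize)) {
            return ERROR_IO;
        }
        return OK;
    }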
diff --git a/media/libstagefright/ItemTable.cpp b/media/libstagefright/ItemTable.cpp
new file mode 100644
index 0000000..b7ff21b
--- /dev/null
+++ b/media/libstagefright/ItemTable.cpp
@@ -0,0 +1,1544 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ItemTable"
+//#define LOG_NDEBUG 0
+
+#include <include/ItemTable.h>
+#include <media/MediaDefs.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <utils/Log.h>
+
+namespace android {
+
+namespace heif {
+
+/////////////////////////////////////////////////////////////////////
+//
+// struct to keep track of one image item
+//
+
+struct ImageItem {
+ friend struct ItemReference;
+ friend struct ItemProperty;
+
+ ImageItem() : ImageItem(0) {}
+ ImageItem(uint32_t _type) : type(_type),
+ rows(0), columns(0), width(0), height(0), rotation(0),
+ offset(0), size(0), nextTileIndex(0) {}
+
+ bool isGrid() const {
+ return type == FOURCC('g', 'r', 'i', 'd');
+ }
+
+ status_t getNextTileItemId(uint32_t *nextTileItemId, bool reset) {
+ if (reset) {
+ nextTileIndex = 0;
+ }
+ if (nextTileIndex >= dimgRefs.size()) {
+ return ERROR_END_OF_STREAM;
+ }
+ *nextTileItemId = dimgRefs[nextTileIndex++];
+ return OK;
+ }
+
+ uint32_t type;
+ int32_t rows;
+ int32_t columns;
+ int32_t width;
+ int32_t height;
+ int32_t rotation;
+ off64_t offset;
+ size_t size;
+ sp<ABuffer> hvcc;
+ sp<ABuffer> icc;
+
+ Vector<uint32_t> thumbnails;
+ Vector<uint32_t> dimgRefs;
+ size_t nextTileIndex;
+};
+
+
+/////////////////////////////////////////////////////////////////////
+//
+// ISO boxes
+//
+
+struct Box {
+protected:
+ Box(const sp<DataSource> source, uint32_t type) :
+ mDataSource(source), mType(type) {}
+
+ virtual ~Box() {}
+
+ virtual status_t onChunkData(
+ uint32_t /*type*/, off64_t /*offset*/, size_t /*size*/) {
+ return OK;
+ }
+
+ inline uint32_t type() const { return mType; }
+
+ inline sp<DataSource> source() const { return mDataSource; }
+
+ status_t parseChunk(off64_t *offset);
+
+ status_t parseChunks(off64_t offset, size_t size);
+
+private:
+ sp<DataSource> mDataSource;
+ uint32_t mType;
+};
+
+status_t Box::parseChunk(off64_t *offset) {
+ if (*offset < 0) {
+ ALOGE("b/23540914");
+ return ERROR_MALFORMED;
+ }
+ uint32_t hdr[2];
+ if (mDataSource->readAt(*offset, hdr, 8) < 8) {
+ return ERROR_IO;
+ }
+ uint64_t chunk_size = ntohl(hdr[0]);
+ int32_t chunk_type = ntohl(hdr[1]);
+ off64_t data_offset = *offset + 8;
+
+ if (chunk_size == 1) {
+ if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
+ return ERROR_IO;
+ }
+ chunk_size = ntoh64(chunk_size);
+ data_offset += 8;
+
+ if (chunk_size < 16) {
+ // The smallest valid chunk is 16 bytes long in this case.
+ return ERROR_MALFORMED;
+ }
+ } else if (chunk_size == 0) {
+ // This shouldn't happen since we should never be top level
+ ALOGE("invalid chunk size 0 for non-top level box");
+ return ERROR_MALFORMED;
+ } else if (chunk_size < 8) {
+ // The smallest valid chunk is 8 bytes long.
+ ALOGE("invalid chunk size: %lld", (long long)chunk_size);
+ return ERROR_MALFORMED;
+ }
+
+ char chunk[5];
+ MakeFourCCString(chunk_type, chunk);
+ ALOGV("chunk: %s @ %lld", chunk, (long long)*offset);
+
+ off64_t chunk_data_size = chunk_size - (data_offset - *offset);
+ if (chunk_data_size < 0) {
+ ALOGE("b/23540914");
+ return ERROR_MALFORMED;
+ }
+
+ status_t err = onChunkData(chunk_type, data_offset, chunk_data_size);
+
+ if (err != OK) {
+ return err;
+ }
+ *offset += chunk_size;
+ return OK;
+}
+
+status_t Box::parseChunks(off64_t offset, size_t size) {
+ off64_t stopOffset = offset + size;
+ while (offset < stopOffset) {
+ status_t err = parseChunk(&offset);
+ if (err != OK) {
+ return err;
+ }
+ }
+ if (offset != stopOffset) {
+ return ERROR_MALFORMED;
+ }
+ return OK;
+}
+
+///////////////////////////////////////////////////////////////////////
+
+struct FullBox : public Box {
+protected:
+ FullBox(const sp<DataSource> source, uint32_t type) :
+ Box(source, type), mVersion(0), mFlags(0) {}
+
+ inline uint8_t version() const { return mVersion; }
+
+ inline uint32_t flags() const { return mFlags; }
+
+ status_t parseFullBoxHeader(off64_t *offset, size_t *size);
+
+private:
+ uint8_t mVersion;
+ uint32_t mFlags;
+};
+
+status_t FullBox::parseFullBoxHeader(off64_t *offset, size_t *size) {
+ if (*size < 4) {
+ return ERROR_MALFORMED;
+ }
+ if (!source()->readAt(*offset, &mVersion, 1)) {
+ return ERROR_IO;
+ }
+ if (!source()->getUInt24(*offset + 1, &mFlags)) {
+ return ERROR_IO;
+ }
+ *offset += 4;
+ *size -= 4;
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+// PrimaryImage box
+//
+
+struct PitmBox : public FullBox {
+ PitmBox(const sp<DataSource> source) :
+ FullBox(source, FOURCC('p', 'i', 't', 'm')) {}
+
+ status_t parse(off64_t offset, size_t size, uint32_t *primaryItemId);
+};
+
+status_t PitmBox::parse(off64_t offset, size_t size, uint32_t *primaryItemId) {
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ size_t itemIdSize = (version() == 0) ? 2 : 4;
+ if (size < itemIdSize) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t itemId;
+ if (!source()->getUInt32Var(offset, &itemId, itemIdSize)) {
+ return ERROR_IO;
+ }
+
+ ALOGV("primary id %d", itemId);
+ *primaryItemId = itemId;
+
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+// ItemLocation related boxes
+//
+
+struct ExtentEntry {
+ uint64_t extentIndex;
+ uint64_t extentOffset;
+ uint64_t extentLength;
+};
+
+struct ItemLoc {
+ ItemLoc() : ItemLoc(0, 0, 0, 0) {}
+ ItemLoc(uint32_t item_id, uint16_t construction_method,
+ uint16_t data_reference_index, uint64_t base_offset) :
+ itemId(item_id),
+ constructionMethod(construction_method),
+ dataReferenceIndex(data_reference_index),
+ baseOffset(base_offset) {}
+
+ void addExtent(const ExtentEntry& extent) {
+ extents.push_back(extent);
+ }
+
+ status_t getLoc(off64_t *offset, size_t *size,
+ off64_t idatOffset, size_t idatSize) const {
+ // TODO: fix extent handling, fix constructionMethod = 2
+ CHECK(extents.size() == 1);
+ if (constructionMethod == 0) {
+ *offset = baseOffset + extents[0].extentOffset;
+ *size = extents[0].extentLength;
+ return OK;
+ } else if (constructionMethod == 1) {
+ if (baseOffset + extents[0].extentOffset + extents[0].extentLength
+ > idatSize) {
+ return ERROR_MALFORMED;
+ }
+ *offset = baseOffset + extents[0].extentOffset + idatOffset;
+ *size = extents[0].extentLength;
+ return OK;
+ }
+ return ERROR_UNSUPPORTED;
+ }
+
+ // parsed info
+ uint32_t itemId;
+ uint16_t constructionMethod;
+ uint16_t dataReferenceIndex;
+ off64_t baseOffset;
+ Vector<ExtentEntry> extents;
+};
+
+struct IlocBox : public FullBox {
+ IlocBox(const sp<DataSource> source, KeyedVector<uint32_t, ItemLoc> *itemLocs) :
+ FullBox(source, FOURCC('i', 'l', 'o', 'c')),
+ mItemLocs(itemLocs), mHasConstructMethod1(false) {}
+
+ status_t parse(off64_t offset, size_t size);
+
+ bool hasConstructMethod1() { return mHasConstructMethod1; }
+
+private:
+ static bool isSizeFieldValid(uint32_t offset_size) {
+ return offset_size == 0 || offset_size == 4 || offset_size == 8;
+ }
+ KeyedVector<uint32_t, ItemLoc> *mItemLocs;
+ bool mHasConstructMethod1;
+};
+
+status_t IlocBox::parse(off64_t offset, size_t size) {
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+ if (version() > 2) {
+ ALOGE("%s: invalid version %d", __FUNCTION__, version());
+ return ERROR_MALFORMED;
+ }
+
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+ uint8_t offset_size;
+ if (!source()->readAt(offset++, &offset_size, 1)) {
+ return ERROR_IO;
+ }
+ uint8_t length_size = (offset_size & 0xF);
+ offset_size >>= 4;
+
+ uint8_t base_offset_size;
+ if (!source()->readAt(offset++, &base_offset_size, 1)) {
+ return ERROR_IO;
+ }
+ uint8_t index_size = 0;
+ if (version() == 1 || version() == 2) {
+ index_size = (base_offset_size & 0xF);
+ }
+ base_offset_size >>= 4;
+ size -= 2;
+
+ if (!isSizeFieldValid(offset_size)
+ || !isSizeFieldValid(length_size)
+ || !isSizeFieldValid(base_offset_size)
+ || !isSizeFieldValid((index_size))) {
+ ALOGE("%s: offset size not valid: %d, %d, %d, %d", __FUNCTION__,
+ offset_size, length_size, base_offset_size, index_size);
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t item_count;
+ size_t itemFieldSize = version() < 2 ? 2 : 4;
+ if (size < itemFieldSize) {
+ return ERROR_MALFORMED;
+ }
+ if (!source()->getUInt32Var(offset, &item_count, itemFieldSize)) {
+ return ERROR_IO;
+ }
+
+ ALOGV("item_count %lld", (long long) item_count);
+ offset += itemFieldSize;
+ size -= itemFieldSize;
+
+ for (size_t i = 0; i < item_count; i++) {
+ uint32_t item_id;
+ if (!source()->getUInt32Var(offset, &item_id, itemFieldSize)) {
+ return ERROR_IO;
+ }
+ ALOGV("item[%zu]: id %lld", i, (long long)item_id);
+ offset += itemFieldSize;
+
+ uint8_t construction_method = 0;
+ if (version() == 1 || version() == 2) {
+ uint8_t buf[2];
+ if (!source()->readAt(offset, buf, 2)) {
+ return ERROR_IO;
+ }
+ construction_method = (buf[1] & 0xF);
+ ALOGV("construction_method %d", construction_method);
+ if (construction_method == 1) {
+ mHasConstructMethod1 = true;
+ }
+
+ offset += 2;
+ }
+
+ uint16_t data_reference_index;
+ if (!source()->getUInt16(offset, &data_reference_index)) {
+ return ERROR_IO;
+ }
+ ALOGV("data_reference_index %d", data_reference_index);
+ if (data_reference_index != 0) {
+ // we don't support reference to other files
+ return ERROR_UNSUPPORTED;
+ }
+ offset += 2;
+
+ uint64_t base_offset = 0;
+ if (base_offset_size != 0) {
+ if (!source()->getUInt64Var(offset, &base_offset, base_offset_size)) {
+ return ERROR_IO;
+ }
+ offset += base_offset_size;
+ }
+ ALOGV("base_offset %lld", (long long) base_offset);
+
+ ssize_t index = mItemLocs->add(item_id, ItemLoc(
+ item_id, construction_method, data_reference_index, base_offset));
+ ItemLoc &item = mItemLocs->editValueAt(index);
+
+ uint16_t extent_count;
+ if (!source()->getUInt16(offset, &extent_count)) {
+ return ERROR_IO;
+ }
+ ALOGV("extent_count %d", extent_count);
+
+ if (extent_count > 1 && (offset_size == 0 || length_size == 0)) {
+ // if the item is divided into more than one extent, offset and
+ // length must be present.
+ return ERROR_MALFORMED;
+ }
+ offset += 2;
+
+ for (size_t j = 0; j < extent_count; j++) {
+ uint64_t extent_index = 1; // default=1
+ if ((version() == 1 || version() == 2) && (index_size > 0)) {
+ if (!source()->getUInt64Var(offset, &extent_index, index_size)) {
+ return ERROR_IO;
+ }
+ // TODO: add support for this mode
+ offset += index_size;
+ ALOGV("extent_index %lld", (long long)extent_index);
+ }
+
+ uint64_t extent_offset = 0; // default=0
+ if (offset_size > 0) {
+ if (!source()->getUInt64Var(offset, &extent_offset, offset_size)) {
+ return ERROR_IO;
+ }
+ offset += offset_size;
+ }
+ ALOGV("extent_offset %lld", (long long)extent_offset);
+
+ uint64_t extent_length = 0; // this indicates full length of file
+ if (length_size > 0) {
+ if (!source()->getUInt64Var(offset, &extent_length, length_size)) {
+ return ERROR_IO;
+ }
+ offset += length_size;
+ }
+ ALOGV("extent_length %lld", (long long)extent_length);
+
+ item.addExtent({ extent_index, extent_offset, extent_length });
+ }
+ }
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+// ItemReference related boxes
+//
+
+struct ItemReference : public Box, public RefBase {
+ ItemReference(const sp<DataSource> source, uint32_t type, uint32_t itemIdSize) :
+ Box(source, type), mItemId(0), mRefIdSize(itemIdSize) {}
+
+ status_t parse(off64_t offset, size_t size);
+
+ uint32_t itemId() { return mItemId; }
+
+ void apply(KeyedVector<uint32_t, ImageItem> &itemIdToImageMap) const {
+ ssize_t imageIndex = itemIdToImageMap.indexOfKey(mItemId);
+
+ // ignore non-image items
+ if (imageIndex < 0) {
+ return;
+ }
+
+ ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
+
+ if (type() == FOURCC('d', 'i', 'm', 'g')) {
+ ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
+ if (!image.dimgRefs.empty()) {
+ ALOGW("dimgRefs if not clean!");
+ }
+ image.dimgRefs.appendVector(mRefs);
+ } else if (type() == FOURCC('t', 'h', 'm', 'b')) {
+ for (size_t i = 0; i < mRefs.size(); i++) {
+ imageIndex = itemIdToImageMap.indexOfKey(mRefs[i]);
+
+ // ignore non-image items
+ if (imageIndex < 0) {
+ continue;
+ }
+ ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
+ ImageItem &image = itemIdToImageMap.editValueAt(imageIndex);
+ if (!image.thumbnails.empty()) {
+ ALOGW("already has thumbnails!");
+ }
+ image.thumbnails.push_back(mItemId);
+ }
+ } else {
+ ALOGW("ignoring unsupported ref type 0x%x", type());
+ }
+ }
+
+private:
+ uint32_t mItemId;
+ uint32_t mRefIdSize;
+ Vector<uint32_t> mRefs;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ItemReference);
+};
+
+status_t ItemReference::parse(off64_t offset, size_t size) {
+ if (size < mRefIdSize + 2) {
+ return ERROR_MALFORMED;
+ }
+ if (!source()->getUInt32Var(offset, &mItemId, mRefIdSize)) {
+ return ERROR_IO;
+ }
+ offset += mRefIdSize;
+
+ uint16_t count;
+ if (!source()->getUInt16(offset, &count)) {
+ return ERROR_IO;
+ }
+ offset += 2;
+ size -= (mRefIdSize + 2);
+
+ if (size < count * mRefIdSize) {
+ return ERROR_MALFORMED;
+ }
+
+ for (size_t i = 0; i < count; i++) {
+ uint32_t refItemId;
+ if (!source()->getUInt32Var(offset, &refItemId, mRefIdSize)) {
+ return ERROR_IO;
+ }
+ offset += mRefIdSize;
+ mRefs.push_back(refItemId);
+ ALOGV("item id %d: referencing item id %d", mItemId, refItemId);
+ }
+
+ return OK;
+}
+
+struct IrefBox : public FullBox {
+ IrefBox(const sp<DataSource> source, Vector<sp<ItemReference> > *itemRefs) :
+ FullBox(source, FOURCC('i', 'r', 'e', 'f')), mRefIdSize(0), mItemRefs(itemRefs) {}
+
+ status_t parse(off64_t offset, size_t size);
+
+protected:
+ status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+ uint32_t mRefIdSize;
+ Vector<sp<ItemReference> > *mItemRefs;
+};
+
+status_t IrefBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ mRefIdSize = (version() == 0) ? 2 : 4;
+ return parseChunks(offset, size);
+}
+
+status_t IrefBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+ sp<ItemReference> itemRef = new ItemReference(source(), type, mRefIdSize);
+
+ status_t err = itemRef->parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+ mItemRefs->push_back(itemRef);
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+// ItemProperty related boxes
+//
+
+struct AssociationEntry {
+ uint32_t itemId;
+ bool essential;
+ uint16_t index;
+};
+
+struct ItemProperty : public RefBase {
+ ItemProperty() {}
+
+ virtual void attachTo(ImageItem &/*image*/) const {
+ ALOGW("Unrecognized property");
+ }
+ virtual status_t parse(off64_t /*offset*/, size_t /*size*/) {
+ ALOGW("Unrecognized property");
+ return OK;
+ }
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(ItemProperty);
+};
+
+struct IspeBox : public FullBox, public ItemProperty {
+ IspeBox(const sp<DataSource> source) :
+ FullBox(source, FOURCC('i', 's', 'p', 'e')), mWidth(0), mHeight(0) {}
+
+ status_t parse(off64_t offset, size_t size) override;
+
+ void attachTo(ImageItem &image) const override {
+ image.width = mWidth;
+ image.height = mHeight;
+ }
+
+private:
+ uint32_t mWidth;
+ uint32_t mHeight;
+};
+
+status_t IspeBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ if (size < 8) {
+ return ERROR_MALFORMED;
+ }
+ if (!source()->getUInt32(offset, &mWidth)
+ || !source()->getUInt32(offset + 4, &mHeight)) {
+ return ERROR_IO;
+ }
+ ALOGV("property ispe: %dx%d", mWidth, mHeight);
+
+ return OK;
+}
+
+struct HvccBox : public Box, public ItemProperty {
+ HvccBox(const sp<DataSource> source) :
+ Box(source, FOURCC('h', 'v', 'c', 'C')) {}
+
+ status_t parse(off64_t offset, size_t size) override;
+
+ void attachTo(ImageItem &image) const override {
+ image.hvcc = mHVCC;
+ }
+
+private:
+ sp<ABuffer> mHVCC;
+};
+
+status_t HvccBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ mHVCC = new ABuffer(size);
+
+ if (mHVCC->data() == NULL) {
+ ALOGE("b/28471206");
+ return NO_MEMORY;
+ }
+
+ if (source()->readAt(offset, mHVCC->data(), size) < (ssize_t)size) {
+ return ERROR_IO;
+ }
+
+ ALOGV("property hvcC");
+
+ return OK;
+}
+
+struct IrotBox : public Box, public ItemProperty {
+ IrotBox(const sp<DataSource> source) :
+ Box(source, FOURCC('i', 'r', 'o', 't')), mAngle(0) {}
+
+ status_t parse(off64_t offset, size_t size) override;
+
+ void attachTo(ImageItem &image) const override {
+ image.rotation = mAngle * 90;
+ }
+
+private:
+ uint8_t mAngle;
+};
+
+status_t IrotBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ if (size < 1) {
+ return ERROR_MALFORMED;
+ }
+ if (source()->readAt(offset, &mAngle, 1) != 1) {
+ return ERROR_IO;
+ }
+ mAngle &= 0x3;
+ ALOGV("property irot: %d", mAngle);
+
+ return OK;
+}
+
+struct ColrBox : public Box, public ItemProperty {
+ ColrBox(const sp<DataSource> source) :
+ Box(source, FOURCC('c', 'o', 'l', 'r')) {}
+
+ status_t parse(off64_t offset, size_t size) override;
+
+ void attachTo(ImageItem &image) const override {
+ image.icc = mICCData;
+ }
+
+private:
+ sp<ABuffer> mICCData;
+};
+
+status_t ColrBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ if (size < 4) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t colour_type;
+ if (!source()->getUInt32(offset, &colour_type)) {
+ return ERROR_IO;
+ }
+ offset += 4;
+ size -= 4;
+ if (colour_type == FOURCC('n', 'c', 'l', 'x')) {
+ return OK;
+ }
+ if ((colour_type != FOURCC('r', 'I', 'C', 'C')) &&
+ (colour_type != FOURCC('p', 'r', 'o', 'f'))) {
+ return ERROR_MALFORMED;
+ }
+
+ mICCData = new ABuffer(size);
+ if (mICCData->data() == NULL) {
+ ALOGE("b/28471206");
+ return NO_MEMORY;
+ }
+
+ if (source()->readAt(offset, mICCData->data(), size) != (ssize_t)size) {
+ return ERROR_IO;
+ }
+
+ ALOGV("property Colr: size %zd", size);
+ return OK;
+}
+
+struct IpmaBox : public FullBox {
+ IpmaBox(const sp<DataSource> source, Vector<AssociationEntry> *associations) :
+ FullBox(source, FOURCC('i', 'p', 'm', 'a')), mAssociations(associations) {}
+
+ status_t parse(off64_t offset, size_t size);
+private:
+ Vector<AssociationEntry> *mAssociations;
+};
+
+status_t IpmaBox::parse(off64_t offset, size_t size) {
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ if (size < 4) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t entryCount;
+ if (!source()->getUInt32(offset, &entryCount)) {
+ return ERROR_IO;
+ }
+ offset += 4;
+ size -= 4;
+
+ for (size_t k = 0; k < entryCount; ++k) {
+ uint32_t itemId = 0;
+ size_t itemIdSize = (version() < 1) ? 2 : 4;
+
+ if (size < itemIdSize + 1) {
+ return ERROR_MALFORMED;
+ }
+
+ if (!source()->getUInt32Var(offset, &itemId, itemIdSize)) {
+ return ERROR_IO;
+ }
+ offset += itemIdSize;
+ size -= itemIdSize;
+
+ uint8_t associationCount;
+ if (!source()->readAt(offset, &associationCount, 1)) {
+ return ERROR_IO;
+ }
+ offset++;
+ size--;
+
+ for (size_t i = 0; i < associationCount; ++i) {
+ size_t propIndexSize = (flags() & 1) ? 2 : 1;
+ if (size < propIndexSize) {
+ return ERROR_MALFORMED;
+ }
+ uint16_t propIndex;
+ if (!source()->getUInt16Var(offset, &propIndex, propIndexSize)) {
+ return ERROR_IO;
+ }
+ offset += propIndexSize;
+ size -= propIndexSize;
+ uint16_t bitmask = (1 << (8 * propIndexSize - 1));
+ AssociationEntry entry = {
+ .itemId = itemId,
+ .essential = !!(propIndex & bitmask),
+ .index = (uint16_t) (propIndex & ~bitmask)
+ };
+
+ ALOGV("item id %d associated to property %d (essential %d)",
+ itemId, entry.index, entry.essential);
+
+ mAssociations->push_back(entry);
+ }
+ }
+
+ return OK;
+}
+
+struct IpcoBox : public Box {
+ IpcoBox(const sp<DataSource> source, Vector<sp<ItemProperty> > *properties) :
+ Box(source, FOURCC('i', 'p', 'c', 'o')), mItemProperties(properties) {}
+
+ status_t parse(off64_t offset, size_t size);
+protected:
+ status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+ Vector<sp<ItemProperty> > *mItemProperties;
+};
+
+status_t IpcoBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+ // push dummy as the index is 1-based
+ mItemProperties->push_back(new ItemProperty());
+ return parseChunks(offset, size);
+}
+
+status_t IpcoBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+ sp<ItemProperty> itemProperty;
+ switch(type) {
+ case FOURCC('h', 'v', 'c', 'C'):
+ {
+ itemProperty = new HvccBox(source());
+ break;
+ }
+ case FOURCC('i', 's', 'p', 'e'):
+ {
+ itemProperty = new IspeBox(source());
+ break;
+ }
+ case FOURCC('i', 'r', 'o', 't'):
+ {
+ itemProperty = new IrotBox(source());
+ break;
+ }
+ case FOURCC('c', 'o', 'l', 'r'):
+ {
+ itemProperty = new ColrBox(source());
+ break;
+ }
+ default:
+ {
+ // push dummy to maintain correct item property index
+ itemProperty = new ItemProperty();
+ break;
+ }
+ }
+ status_t err = itemProperty->parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+ mItemProperties->push_back(itemProperty);
+ return OK;
+}
+
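+// 'iprp' ("item properties"): wraps one 'ipco' box plus the 'ipma' boxes
+// that associate those properties with items.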
+struct IprpBox : public Box {
+ IprpBox(const sp<DataSource> source,
+ Vector<sp<ItemProperty> > *properties,
+ Vector<AssociationEntry> *associations) :
+ Box(source, FOURCC('i', 'p', 'r', 'p')),
+ mProperties(properties), mAssociations(associations) {}
+
+ status_t parse(off64_t offset, size_t size);
+protected:
+ status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+ Vector<sp<ItemProperty> > *mProperties;
+ Vector<AssociationEntry> *mAssociations;
+};
+
+status_t IprpBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ status_t err = parseChunks(offset, size);
+ if (err != OK) {
+ return err;
+ }
+ return OK;
+}
+
+status_t IprpBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+ switch(type) {
+ case FOURCC('i', 'p', 'c', 'o'):
+ {
+ IpcoBox ipcoBox(source(), mProperties);
+ return ipcoBox.parse(offset, size);
+ }
+ case FOURCC('i', 'p', 'm', 'a'):
+ {
+ IpmaBox ipmaBox(source(), mAssociations);
+ return ipmaBox.parse(offset, size);
+ }
+ default:
+ {
+ ALOGW("Unrecognized box.");
+ break;
+ }
+ }
+ return OK;
+}
+
+/////////////////////////////////////////////////////////////////////
+//
+// ItemInfo related boxes
+//
+struct ItemInfo {
+ uint32_t itemId;
+ uint32_t itemType;
+};
+
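+// 'infe' ("item information entry"): yields each item's ID and, for version
+// 2 and later, its item type (e.g. 'hvc1' coded image, 'grid' derived image).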
+struct InfeBox : public FullBox {
+ InfeBox(const sp<DataSource> source) :
+ FullBox(source, FOURCC('i', 'n', 'f', 'e')) {}
+
+ status_t parse(off64_t offset, size_t size, ItemInfo *itemInfo);
+
+private:
+ bool parseNullTerminatedString(off64_t *offset, size_t *size, String8 *out);
+};
+
+bool InfeBox::parseNullTerminatedString(
+ off64_t *offset, size_t *size, String8 *out) {
+ char tmp[256];
+ size_t len = 0;
+ off64_t newOffset = *offset;
+ off64_t stopOffset = *offset + *size;
+ while (newOffset < stopOffset) {
+        if (source()->readAt(newOffset++, &tmp[len], 1) != 1) {
+ return false;
+ }
+ if (tmp[len] == 0) {
+ out->append(tmp, len);
+
+ *offset = newOffset;
+ *size = stopOffset - newOffset;
+
+ return true;
+ }
+ if (++len >= sizeof(tmp)) {
+ out->append(tmp, len);
+ len = 0;
+ }
+ }
+ return false;
+}
+
+status_t InfeBox::parse(off64_t offset, size_t size, ItemInfo *itemInfo) {
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ if (version() == 0 || version() == 1) {
+ if (size < 4) {
+ return ERROR_MALFORMED;
+ }
+ uint16_t item_id;
+ if (!source()->getUInt16(offset, &item_id)) {
+ return ERROR_IO;
+ }
+ ALOGV("item_id %d", item_id);
+ uint16_t item_protection_index;
+ if (!source()->getUInt16(offset + 2, &item_protection_index)) {
+ return ERROR_IO;
+ }
+ offset += 4;
+ size -= 4;
+
+ String8 item_name;
+ if (!parseNullTerminatedString(&offset, &size, &item_name)) {
+ return ERROR_MALFORMED;
+ }
+
+ String8 content_type;
+ if (!parseNullTerminatedString(&offset, &size, &content_type)) {
+ return ERROR_MALFORMED;
+ }
+
+ String8 content_encoding;
+ if (!parseNullTerminatedString(&offset, &size, &content_encoding)) {
+ return ERROR_MALFORMED;
+ }
+
+ if (version() == 1) {
+ uint32_t extension_type;
+ if (!source()->getUInt32(offset, &extension_type)) {
+ return ERROR_IO;
+ }
+            offset += 4;
+            size -= 4;
+ // TODO: handle this case
+ }
+ } else { // version >= 2
+ uint32_t item_id;
+ size_t itemIdSize = (version() == 2) ? 2 : 4;
+ if (size < itemIdSize + 6) {
+ return ERROR_MALFORMED;
+ }
+ if (!source()->getUInt32Var(offset, &item_id, itemIdSize)) {
+ return ERROR_IO;
+ }
+ ALOGV("item_id %d", item_id);
+ offset += itemIdSize;
+ uint16_t item_protection_index;
+ if (!source()->getUInt16(offset, &item_protection_index)) {
+ return ERROR_IO;
+ }
+ ALOGV("item_protection_index %d", item_protection_index);
+ offset += 2;
+ uint32_t item_type;
+ if (!source()->getUInt32(offset, &item_type)) {
+ return ERROR_IO;
+ }
+
+ itemInfo->itemId = item_id;
+ itemInfo->itemType = item_type;
+
+ char itemTypeString[5];
+ MakeFourCCString(item_type, itemTypeString);
+ ALOGV("item_type %s", itemTypeString);
+ offset += 4;
+ size -= itemIdSize + 6;
+
+ String8 item_name;
+ if (!parseNullTerminatedString(&offset, &size, &item_name)) {
+ return ERROR_MALFORMED;
+ }
+ ALOGV("item_name %s", item_name.c_str());
+
+ if (item_type == FOURCC('m', 'i', 'm', 'e')) {
+ String8 content_type;
+ if (!parseNullTerminatedString(&offset, &size, &content_type)) {
+ return ERROR_MALFORMED;
+ }
+
+ String8 content_encoding;
+ if (!parseNullTerminatedString(&offset, &size, &content_encoding)) {
+ return ERROR_MALFORMED;
+ }
+ } else if (item_type == FOURCC('u', 'r', 'i', ' ')) {
+ String8 item_uri_type;
+ if (!parseNullTerminatedString(&offset, &size, &item_uri_type)) {
+ return ERROR_MALFORMED;
+ }
+ }
+ }
+ return OK;
+}
+
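+// 'iinf' ("item information"): iterates the 'infe' entries and records
+// whether any 'grid' (tiled) items are present, since those also require an
+// 'iref' box to resolve their tile references.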
+struct IinfBox : public FullBox {
+ IinfBox(const sp<DataSource> source, Vector<ItemInfo> *itemInfos) :
+ FullBox(source, FOURCC('i', 'i', 'n', 'f')),
+ mItemInfos(itemInfos), mHasGrids(false) {}
+
+ status_t parse(off64_t offset, size_t size);
+
+ bool hasGrids() { return mHasGrids; }
+
+protected:
+ status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
+
+private:
+ Vector<ItemInfo> *mItemInfos;
+ bool mHasGrids;
+};
+
+status_t IinfBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ status_t err = parseFullBoxHeader(&offset, &size);
+ if (err != OK) {
+ return err;
+ }
+
+ size_t entryCountSize = version() == 0 ? 2 : 4;
+ if (size < entryCountSize) {
+ return ERROR_MALFORMED;
+ }
+ uint32_t entry_count;
+ if (!source()->getUInt32Var(offset, &entry_count, entryCountSize)) {
+ return ERROR_IO;
+ }
+ ALOGV("entry_count %d", entry_count);
+
+ off64_t stopOffset = offset + size;
+ offset += entryCountSize;
+ for (size_t i = 0; i < entry_count && offset < stopOffset; i++) {
+ ALOGV("entry %zu", i);
+ status_t err = parseChunk(&offset);
+ if (err != OK) {
+ return err;
+ }
+ }
+ if (offset != stopOffset) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+}
+
+status_t IinfBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
+ if (type != FOURCC('i', 'n', 'f', 'e')) {
+ return OK;
+ }
+
+ InfeBox infeBox(source());
+ ItemInfo itemInfo;
+ status_t err = infeBox.parse(offset, size, &itemInfo);
+ if (err != OK) {
+ return err;
+ }
+ mItemInfos->push_back(itemInfo);
+ mHasGrids |= (itemInfo.itemType == FOURCC('g', 'r', 'i', 'd'));
+ return OK;
+}
+
+//////////////////////////////////////////////////////////////////
+
+ItemTable::ItemTable(const sp<DataSource> &source)
+ : mDataSource(source),
+ mPrimaryItemId(0),
+ mIdatOffset(0),
+ mIdatSize(0),
+ mImageItemsValid(false),
+ mCurrentImageIndex(0) {
+ mRequiredBoxes.insert('iprp');
+ mRequiredBoxes.insert('iloc');
+ mRequiredBoxes.insert('pitm');
+ mRequiredBoxes.insert('iinf');
+}
+
+ItemTable::~ItemTable() {}
+
+status_t ItemTable::parse(uint32_t type, off64_t data_offset, size_t chunk_data_size) {
+ switch(type) {
+ case FOURCC('i', 'l', 'o', 'c'):
+ {
+ return parseIlocBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('i', 'i', 'n', 'f'):
+ {
+ return parseIinfBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('i', 'p', 'r', 'p'):
+ {
+ return parseIprpBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('p', 'i', 't', 'm'):
+ {
+ return parsePitmBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('i', 'd', 'a', 't'):
+ {
+ return parseIdatBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('i', 'r', 'e', 'f'):
+ {
+ return parseIrefBox(data_offset, chunk_data_size);
+ }
+ case FOURCC('i', 'p', 'r', 'o'):
+ {
+ ALOGW("ipro box not supported!");
+ break;
+ }
+ default:
+ {
+ ALOGW("unrecognized box type: 0x%x", type);
+ break;
+ }
+ }
+ return ERROR_UNSUPPORTED;
+}
+
+status_t ItemTable::parseIlocBox(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ IlocBox ilocBox(mDataSource, &mItemLocs);
+ status_t err = ilocBox.parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+
+ if (ilocBox.hasConstructMethod1()) {
+ mRequiredBoxes.insert('idat');
+ }
+
+ return buildImageItemsIfPossible('iloc');
+}
+
+status_t ItemTable::parseIinfBox(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ IinfBox iinfBox(mDataSource, &mItemInfos);
+ status_t err = iinfBox.parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+
+ if (iinfBox.hasGrids()) {
+ mRequiredBoxes.insert('iref');
+ }
+
+ return buildImageItemsIfPossible('iinf');
+}
+
+status_t ItemTable::parsePitmBox(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ PitmBox pitmBox(mDataSource);
+ status_t err = pitmBox.parse(offset, size, &mPrimaryItemId);
+ if (err != OK) {
+ return err;
+ }
+
+ return buildImageItemsIfPossible('pitm');
+}
+
+status_t ItemTable::parseIprpBox(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ IprpBox iprpBox(mDataSource, &mItemProperties, &mAssociations);
+ status_t err = iprpBox.parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+
+ return buildImageItemsIfPossible('iprp');
+}
+
+status_t ItemTable::parseIdatBox(off64_t offset, size_t size) {
+ ALOGV("%s: idat offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ // only remember the offset and size of idat box for later use
+ mIdatOffset = offset;
+ mIdatSize = size;
+
+ return buildImageItemsIfPossible('idat');
+}
+
+status_t ItemTable::parseIrefBox(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ IrefBox irefBox(mDataSource, &mItemReferences);
+ status_t err = irefBox.parse(offset, size);
+ if (err != OK) {
+ return err;
+ }
+
+ return buildImageItemsIfPossible('iref');
+}
+
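+// Called after each top-level box has been parsed; once all required boxes
+// have been seen, cross-references the item infos, locations, properties and
+// references into the final image item map.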
+status_t ItemTable::buildImageItemsIfPossible(uint32_t type) {
+ if (mImageItemsValid) {
+ return OK;
+ }
+
+ mBoxesSeen.insert(type);
+
+ // need at least 'iprp', 'iloc', 'pitm', 'iinf';
+    // need 'idat' if any items used construction_method of 1 (idat offset);
+ // need 'iref' if there are grids.
+ if (!std::includes(
+ mBoxesSeen.begin(), mBoxesSeen.end(),
+ mRequiredBoxes.begin(), mRequiredBoxes.end())) {
+ return OK;
+ }
+
+ ALOGV("building image table...");
+
+ for (size_t i = 0; i < mItemInfos.size(); i++) {
+ const ItemInfo &info = mItemInfos[i];
+
+ // ignore non-image items
+ if (info.itemType != FOURCC('g', 'r', 'i', 'd') &&
+ info.itemType != FOURCC('h', 'v', 'c', '1')) {
+ continue;
+ }
+
+ ssize_t imageIndex = mItemIdToImageMap.indexOfKey(info.itemId);
+ if (imageIndex >= 0) {
+ ALOGW("ignoring duplicate image item id %d", info.itemId);
+ continue;
+ }
+
+ ssize_t ilocIndex = mItemLocs.indexOfKey(info.itemId);
+ if (ilocIndex < 0) {
+ ALOGE("iloc missing for image item id %d", info.itemId);
+ continue;
+ }
+ const ItemLoc &iloc = mItemLocs[ilocIndex];
+
+ off64_t offset;
+ size_t size;
+ if (iloc.getLoc(&offset, &size, mIdatOffset, mIdatSize) != OK) {
+ return ERROR_MALFORMED;
+ }
+
+ ImageItem image(info.itemType);
+
+ ALOGV("adding %s: itemId %d", image.isGrid() ? "grid" : "image", info.itemId);
+
+ if (image.isGrid()) {
+            if (size < 4 || size > 12) {
+ return ERROR_MALFORMED;
+ }
+ uint8_t buf[12];
+            if (mDataSource->readAt(offset, buf, size) != (ssize_t)size) {
+ return ERROR_IO;
+ }
+
+ image.rows = buf[2] + 1;
+ image.columns = buf[3] + 1;
+
+ ALOGV("rows %d, columans %d", image.rows, image.columns);
+ } else {
+ image.offset = offset;
+ image.size = size;
+ }
+ mItemIdToImageMap.add(info.itemId, image);
+ }
+
+ for (size_t i = 0; i < mAssociations.size(); i++) {
+ attachProperty(mAssociations[i]);
+ }
+
+ for (size_t i = 0; i < mItemReferences.size(); i++) {
+ mItemReferences[i]->apply(mItemIdToImageMap);
+ }
+
+ mImageItemsValid = true;
+ return OK;
+}
+
+void ItemTable::attachProperty(const AssociationEntry &association) {
+ ssize_t imageIndex = mItemIdToImageMap.indexOfKey(association.itemId);
+
+ // ignore non-image items
+ if (imageIndex < 0) {
+ return;
+ }
+
+ uint16_t propertyIndex = association.index;
+ if (propertyIndex >= mItemProperties.size()) {
+ ALOGW("Ignoring invalid property index %d", propertyIndex);
+ return;
+ }
+
+ ALOGV("attach property %d to item id %d)",
+ propertyIndex, association.itemId);
+
+ mItemProperties[propertyIndex]->attachTo(
+ mItemIdToImageMap.editValueAt(imageIndex));
+}
+
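+// Builds the MetaData for the primary image item: dimensions, rotation and
+// ICC profile come from the attached properties; for grid images, the HEVC
+// config is taken from the first tile.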
+sp<MetaData> ItemTable::getImageMeta() {
+ if (!mImageItemsValid) {
+ return NULL;
+ }
+
+ ssize_t imageIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+ if (imageIndex < 0) {
+ ALOGE("Primary item id %d not found!", mPrimaryItemId);
+ return NULL;
+ }
+
+ ALOGV("primary image index %zu", imageIndex);
+
+ const ImageItem *image = &mItemIdToImageMap[imageIndex];
+
+ sp<MetaData> meta = new MetaData;
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+ ALOGV("setting image size %dx%d", image->width, image->height);
+ meta->setInt32(kKeyWidth, image->width);
+ meta->setInt32(kKeyHeight, image->height);
+ if (image->rotation != 0) {
+ meta->setInt32(kKeyRotation, image->rotation);
+ }
+ meta->setInt32(kKeyMaxInputSize, image->width * image->height * 1.5);
+
+ if (!image->thumbnails.empty()) {
+ ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(image->thumbnails[0]);
+ if (thumbnailIndex >= 0) {
+ const ImageItem &thumbnail = mItemIdToImageMap[thumbnailIndex];
+
+ meta->setInt32(kKeyThumbnailWidth, thumbnail.width);
+ meta->setInt32(kKeyThumbnailHeight, thumbnail.height);
+ meta->setData(kKeyThumbnailHVCC, kTypeHVCC,
+ thumbnail.hvcc->data(), thumbnail.hvcc->size());
+ ALOGV("thumbnail meta: %dx%d, index %zd",
+ thumbnail.width, thumbnail.height, thumbnailIndex);
+ } else {
+ ALOGW("Referenced thumbnail does not exist!");
+ }
+ }
+
+    if (image->isGrid()) {
+        if (image->dimgRefs.empty()) {
+            return NULL;
+        }
+        ssize_t tileIndex = mItemIdToImageMap.indexOfKey(image->dimgRefs[0]);
+ if (tileIndex < 0) {
+ return NULL;
+ }
+ meta->setInt32(kKeyGridRows, image->rows);
+ meta->setInt32(kKeyGridCols, image->columns);
+
+ image = &mItemIdToImageMap.editValueAt(tileIndex);
+ }
+
+ if (image->hvcc == NULL) {
+ ALOGE("hvcc is missing!");
+ return NULL;
+ }
+ meta->setData(kKeyHVCC, kTypeHVCC, image->hvcc->data(), image->hvcc->size());
+
+ if (image->icc != NULL) {
+ meta->setData(kKeyIccProfile, 0, image->icc->data(), image->icc->size());
+ }
+ return meta;
+}
+
+uint32_t ItemTable::countImages() const {
+ return mImageItemsValid ? mItemIdToImageMap.size() : 0;
+}
+
+status_t ItemTable::findPrimaryImage(uint32_t *imageIndex) {
+ if (!mImageItemsValid) {
+ return INVALID_OPERATION;
+ }
+
+ ssize_t index = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+ if (index < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ *imageIndex = index;
+ return OK;
+}
+
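+// Returns the index of the first thumbnail referenced by the primary item,
+// falling back to the primary image itself if no thumbnail exists.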
+status_t ItemTable::findThumbnail(uint32_t *imageIndex) {
+ if (!mImageItemsValid) {
+ return INVALID_OPERATION;
+ }
+
+ ssize_t primaryIndex = mItemIdToImageMap.indexOfKey(mPrimaryItemId);
+ if (primaryIndex < 0) {
+ ALOGE("Primary item id %d not found!", mPrimaryItemId);
+ return ERROR_MALFORMED;
+ }
+
+ const ImageItem &primaryImage = mItemIdToImageMap[primaryIndex];
+ if (primaryImage.thumbnails.empty()) {
+ ALOGW("Using primary in place of thumbnail.");
+ *imageIndex = primaryIndex;
+ return OK;
+ }
+
+ ssize_t thumbnailIndex = mItemIdToImageMap.indexOfKey(
+ primaryImage.thumbnails[0]);
+ if (thumbnailIndex < 0) {
+ ALOGE("Thumbnail item id %d not found!", primaryImage.thumbnails[0]);
+ return ERROR_MALFORMED;
+ }
+
+ *imageIndex = thumbnailIndex;
+ return OK;
+}
+
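+// For grid images this returns the tiles one at a time on successive calls;
+// passing a non-NULL imageIndex restarts iteration at that image.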
+status_t ItemTable::getImageOffsetAndSize(
+ uint32_t *imageIndex, off64_t *offset, size_t *size) {
+ if (!mImageItemsValid) {
+ return INVALID_OPERATION;
+ }
+
+ if (imageIndex != NULL) {
+ if (*imageIndex >= mItemIdToImageMap.size()) {
+ ALOGE("Bad image index!");
+ return BAD_VALUE;
+ }
+ mCurrentImageIndex = *imageIndex;
+ }
+
+ ImageItem &image = mItemIdToImageMap.editValueAt(mCurrentImageIndex);
+ if (image.isGrid()) {
+ uint32_t tileItemId;
+ status_t err = image.getNextTileItemId(&tileItemId, imageIndex != NULL);
+ if (err != OK) {
+ return err;
+ }
+ ssize_t tileImageIndex = mItemIdToImageMap.indexOfKey(tileItemId);
+ if (tileImageIndex < 0) {
+ return ERROR_END_OF_STREAM;
+ }
+ *offset = mItemIdToImageMap[tileImageIndex].offset;
+ *size = mItemIdToImageMap[tileImageIndex].size;
+ } else {
+ if (imageIndex == NULL) {
+ // For single images, we only allow it to be read once, after that
+ // it's EOS. New image index must be requested each time.
+ return ERROR_END_OF_STREAM;
+ }
+ *offset = mItemIdToImageMap[mCurrentImageIndex].offset;
+ *size = mItemIdToImageMap[mCurrentImageIndex].size;
+ }
+
+ return OK;
+}
+
+} // namespace heif
+
+} // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 459a1cb..5ef0f56 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -28,6 +28,7 @@
#include "include/MPEG4Extractor.h"
#include "include/SampleTable.h"
+#include "include/ItemTable.h"
#include "include/ESDS.h"
#include <media/stagefright/foundation/ABitReader.h>
@@ -36,6 +37,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
@@ -72,7 +74,8 @@
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
const Trex *trex,
- off64_t firstMoofOffset);
+ off64_t firstMoofOffset,
+ const sp<ItemTable> &itemTable);
virtual status_t init();
virtual status_t start(MetaData *params = NULL);
@@ -134,6 +137,9 @@
uint8_t *mSrcBuffer;
+ bool mIsHEIF;
+ sp<ItemTable> mItemTable;
+
size_t parseNALSize(const uint8_t *data) const;
status_t parseChunk(off64_t *offset);
status_t parseTrackFragmentHeader(off64_t offset, off64_t size);
@@ -285,45 +291,6 @@
static const bool kUseHexDump = false;
-static void hexdump(const void *_data, size_t size) {
- const uint8_t *data = (const uint8_t *)_data;
- size_t offset = 0;
- while (offset < size) {
- printf("0x%04zx ", offset);
-
- size_t n = size - offset;
- if (n > 16) {
- n = 16;
- }
-
- for (size_t i = 0; i < 16; ++i) {
- if (i == 8) {
- printf(" ");
- }
-
- if (offset + i < size) {
- printf("%02x ", data[offset + i]);
- } else {
- printf(" ");
- }
- }
-
- printf(" ");
-
- for (size_t i = 0; i < n; ++i) {
- if (isprint(data[offset + i])) {
- printf("%c", data[offset + i]);
- } else {
- printf(".");
- }
- }
-
- printf("\n");
-
- offset += 16;
- }
-}
-
static const char *FourCC2MIME(uint32_t fourcc) {
switch (fourcc) {
case FOURCC('m', 'p', '4', 'a'):
@@ -378,6 +345,7 @@
mInitCheck(NO_INIT),
mHeaderTimescale(0),
mIsQT(false),
+ mIsHEIF(false),
mFirstTrack(NULL),
mLastTrack(NULL),
mFileMetaData(new MetaData),
@@ -512,14 +480,6 @@
return track->meta;
}
-static void MakeFourCCString(uint32_t x, char *s) {
- s[0] = x >> 24;
- s[1] = (x >> 16) & 0xff;
- s[2] = (x >> 8) & 0xff;
- s[3] = x & 0xff;
- s[4] = '\0';
-}
-
status_t MPEG4Extractor::readMetaData() {
if (mInitCheck != NO_INIT) {
return mInitCheck;
@@ -529,7 +489,8 @@
status_t err;
bool sawMoovOrSidx = false;
- while (!(sawMoovOrSidx && (mMdatFound || mMoofFound))) {
+ while (!((sawMoovOrSidx && (mMdatFound || mMoofFound)) ||
+ (mIsHEIF && (mItemTable != NULL) && mItemTable->isValid()))) {
off64_t orig_offset = offset;
err = parseChunk(&offset, 0);
@@ -581,6 +542,29 @@
mFileMetaData->setData(kKeyPssh, 'pssh', buf, psshsize);
free(buf);
}
+
+ if (mIsHEIF) {
+ sp<MetaData> meta = mItemTable->getImageMeta();
+ if (meta == NULL) {
+ return ERROR_MALFORMED;
+ }
+
+ Track *track = mLastTrack;
+ if (track != NULL) {
+ ALOGW("track is set before metadata is fully processed");
+ } else {
+ track = new Track;
+ track->next = NULL;
+ mFirstTrack = mLastTrack = track;
+ }
+
+ track->meta = meta;
+ track->meta->setInt32(kKeyTrackID, 0);
+ track->includes_expensive_metadata = false;
+ track->skipTrack = false;
+ track->timescale = 0;
+ }
+
return mInitCheck;
}
@@ -967,8 +951,9 @@
}
}
- if (mLastTrack == NULL)
+ if (mLastTrack == NULL) {
return ERROR_MALFORMED;
+ }
mLastTrack->sampleTable = new SampleTable(mDataSource);
}
@@ -1140,8 +1125,9 @@
original_fourcc = ntohl(original_fourcc);
ALOGV("read original format: %d", original_fourcc);
- if (mLastTrack == NULL)
+ if (mLastTrack == NULL) {
return ERROR_MALFORMED;
+ }
mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));
uint32_t num_channels = 0;
@@ -1581,8 +1567,9 @@
case FOURCC('s', 't', 'c', 'o'):
case FOURCC('c', 'o', '6', '4'):
{
- if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
return ERROR_MALFORMED;
+ }
status_t err =
mLastTrack->sampleTable->setChunkOffsetParams(
@@ -1618,8 +1605,9 @@
case FOURCC('s', 't', 's', 'z'):
case FOURCC('s', 't', 'z', '2'):
{
- if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
+ if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
return ERROR_MALFORMED;
+ }
status_t err =
mLastTrack->sampleTable->setSampleSizeParams(
@@ -2035,6 +2023,28 @@
break;
}
+ case FOURCC('i', 'l', 'o', 'c'):
+ case FOURCC('i', 'i', 'n', 'f'):
+ case FOURCC('i', 'p', 'r', 'p'):
+ case FOURCC('p', 'i', 't', 'm'):
+ case FOURCC('i', 'd', 'a', 't'):
+ case FOURCC('i', 'r', 'e', 'f'):
+ case FOURCC('i', 'p', 'r', 'o'):
+ {
+ if (mIsHEIF) {
+ if (mItemTable == NULL) {
+ mItemTable = new ItemTable(mDataSource);
+ }
+ status_t err = mItemTable->parse(
+ chunk_type, data_offset, chunk_data_size);
+ if (err != OK) {
+ return err;
+ }
+ }
+ *offset += chunk_size;
+ break;
+ }
+
case FOURCC('m', 'e', 'a', 'n'):
case FOURCC('n', 'a', 'm', 'e'):
case FOURCC('d', 'a', 't', 'a'):
@@ -2382,6 +2392,7 @@
off64_t stop_offset = *offset + chunk_size;
uint32_t numCompatibleBrands = (chunk_data_size - 8) / 4;
+ std::set<uint32_t> brandSet;
for (size_t i = 0; i < numCompatibleBrands + 2; ++i) {
if (i == 1) {
// Skip this index, it refers to the minorVersion,
@@ -2395,10 +2406,15 @@
}
brand = ntohl(brand);
- if (brand == FOURCC('q', 't', ' ', ' ')) {
- mIsQT = true;
- break;
- }
+ brandSet.insert(brand);
+ }
+
+ if (brandSet.count(FOURCC('q', 't', ' ', ' ')) > 0) {
+ mIsQT = true;
+ } else if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
+ && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
+ mIsHEIF = true;
+ ALOGV("identified HEIF image");
}
*offset = stop_offset;
@@ -3347,7 +3363,7 @@
sp<MPEG4Source> source = new MPEG4Source(this,
track->meta, mDataSource, track->timescale, track->sampleTable,
- mSidxEntries, trex, mMoofOffset);
+ mSidxEntries, trex, mMoofOffset, mItemTable);
if (source->init() != OK) {
return NULL;
}
@@ -3736,7 +3752,8 @@
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
const Trex *trex,
- off64_t firstMoofOffset)
+ off64_t firstMoofOffset,
+ const sp<ItemTable> &itemTable)
: mOwner(owner),
mFormat(format),
mDataSource(dataSource),
@@ -3761,7 +3778,9 @@
mGroup(NULL),
mBuffer(NULL),
mWantsNALFragments(false),
- mSrcBuffer(NULL) {
+ mSrcBuffer(NULL),
+ mIsHEIF(itemTable != NULL),
+ mItemTable(itemTable) {
memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
@@ -4536,77 +4555,93 @@
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- uint32_t findFlags = 0;
- switch (mode) {
- case ReadOptions::SEEK_PREVIOUS_SYNC:
- findFlags = SampleTable::kFlagBefore;
- break;
- case ReadOptions::SEEK_NEXT_SYNC:
- findFlags = SampleTable::kFlagAfter;
- break;
- case ReadOptions::SEEK_CLOSEST_SYNC:
- case ReadOptions::SEEK_CLOSEST:
- findFlags = SampleTable::kFlagClosest;
- break;
- default:
- CHECK(!"Should not be here.");
- break;
- }
+ if (mIsHEIF) {
+ CHECK(mSampleTable == NULL);
+ CHECK(mItemTable != NULL);
- uint32_t sampleIndex;
- status_t err = mSampleTable->findSampleAtTime(
- seekTimeUs, 1000000, mTimescale,
- &sampleIndex, findFlags);
-
- if (mode == ReadOptions::SEEK_CLOSEST) {
- // We found the closest sample already, now we want the sync
- // sample preceding it (or the sample itself of course), even
- // if the subsequent sync sample is closer.
- findFlags = SampleTable::kFlagBefore;
- }
-
- uint32_t syncSampleIndex;
- if (err == OK) {
- err = mSampleTable->findSyncSampleNear(
- sampleIndex, &syncSampleIndex, findFlags);
- }
-
- uint32_t sampleTime;
- if (err == OK) {
- err = mSampleTable->getMetaDataForSample(
- sampleIndex, NULL, NULL, &sampleTime);
- }
-
- if (err != OK) {
- if (err == ERROR_OUT_OF_RANGE) {
- // An attempt to seek past the end of the stream would
- // normally cause this ERROR_OUT_OF_RANGE error. Propagating
- // this all the way to the MediaPlayer would cause abnormal
- // termination. Legacy behaviour appears to be to behave as if
- // we had seeked to the end of stream, ending normally.
- err = ERROR_END_OF_STREAM;
+ status_t err;
+ if (seekTimeUs >= 0) {
+ err = mItemTable->findPrimaryImage(&mCurrentSampleIndex);
+ } else {
+ err = mItemTable->findThumbnail(&mCurrentSampleIndex);
}
- ALOGV("end of stream");
- return err;
- }
+ if (err != OK) {
+ return err;
+ }
+ } else {
+ uint32_t findFlags = 0;
+ switch (mode) {
+ case ReadOptions::SEEK_PREVIOUS_SYNC:
+ findFlags = SampleTable::kFlagBefore;
+ break;
+ case ReadOptions::SEEK_NEXT_SYNC:
+ findFlags = SampleTable::kFlagAfter;
+ break;
+ case ReadOptions::SEEK_CLOSEST_SYNC:
+ case ReadOptions::SEEK_CLOSEST:
+ findFlags = SampleTable::kFlagClosest;
+ break;
+ default:
+ CHECK(!"Should not be here.");
+ break;
+ }
- if (mode == ReadOptions::SEEK_CLOSEST) {
- targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
- }
+ uint32_t sampleIndex;
+ status_t err = mSampleTable->findSampleAtTime(
+ seekTimeUs, 1000000, mTimescale,
+ &sampleIndex, findFlags);
+
+ if (mode == ReadOptions::SEEK_CLOSEST) {
+ // We found the closest sample already, now we want the sync
+ // sample preceding it (or the sample itself of course), even
+ // if the subsequent sync sample is closer.
+ findFlags = SampleTable::kFlagBefore;
+ }
+
+ uint32_t syncSampleIndex;
+ if (err == OK) {
+ err = mSampleTable->findSyncSampleNear(
+ sampleIndex, &syncSampleIndex, findFlags);
+ }
+
+ uint32_t sampleTime;
+ if (err == OK) {
+ err = mSampleTable->getMetaDataForSample(
+ sampleIndex, NULL, NULL, &sampleTime);
+ }
+
+ if (err != OK) {
+ if (err == ERROR_OUT_OF_RANGE) {
+ // An attempt to seek past the end of the stream would
+ // normally cause this ERROR_OUT_OF_RANGE error. Propagating
+ // this all the way to the MediaPlayer would cause abnormal
+ // termination. Legacy behaviour appears to be to behave as if
+ // we had seeked to the end of stream, ending normally.
+ err = ERROR_END_OF_STREAM;
+ }
+ ALOGV("end of stream");
+ return err;
+ }
+
+ if (mode == ReadOptions::SEEK_CLOSEST) {
+ targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
+ }
#if 0
- uint32_t syncSampleTime;
- CHECK_EQ(OK, mSampleTable->getMetaDataForSample(
- syncSampleIndex, NULL, NULL, &syncSampleTime));
+ uint32_t syncSampleTime;
+ CHECK_EQ(OK, mSampleTable->getMetaDataForSample(
+ syncSampleIndex, NULL, NULL, &syncSampleTime));
- ALOGI("seek to time %lld us => sample at time %lld us, "
- "sync sample at time %lld us",
- seekTimeUs,
- sampleTime * 1000000ll / mTimescale,
- syncSampleTime * 1000000ll / mTimescale);
+ ALOGI("seek to time %lld us => sample at time %lld us, "
+ "sync sample at time %lld us",
+ seekTimeUs,
+ sampleTime * 1000000ll / mTimescale,
+ syncSampleTime * 1000000ll / mTimescale);
#endif
- mCurrentSampleIndex = syncSampleIndex;
+ mCurrentSampleIndex = syncSampleIndex;
+ }
+
if (mBuffer != NULL) {
mBuffer->release();
mBuffer = NULL;
@@ -4623,9 +4658,19 @@
if (mBuffer == NULL) {
newBuffer = true;
- status_t err =
- mSampleTable->getMetaDataForSample(
+ status_t err;
+ if (!mIsHEIF) {
+ err = mSampleTable->getMetaDataForSample(
mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
+ } else {
+ err = mItemTable->getImageOffsetAndSize(
+ options && options->getSeekTo(&seekTimeUs, &mode) ?
+ &mCurrentSampleIndex : NULL, &offset, &size);
+
+ cts = stts = 0;
+ isSyncSample = 0;
+ ALOGV("image offset %lld, size %zu", (long long)offset, size);
+ }
if (err != OK) {
return err;
@@ -5195,7 +5240,8 @@
|| !memcmp(header, "ftyp3ge6", 8) || !memcmp(header, "ftyp3gg6", 8)
|| !memcmp(header, "ftypisom", 8) || !memcmp(header, "ftypM4V ", 8)
|| !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
- || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)) {
+ || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
+ || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)) {
*mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4;
*confidence = 0.4;
@@ -5224,6 +5270,8 @@
FOURCC('3', 'g', '2', 'a'), // 3GPP2
FOURCC('3', 'g', '2', 'b'),
+ FOURCC('m', 'i', 'f', '1'), // HEIF image
+ FOURCC('h', 'e', 'i', 'c'), // HEIF image
};
for (size_t i = 0;
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index f0c27ac..4ff2bfe 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -85,7 +85,8 @@
status_t status;
if (fd < 0) {
// couldn't open it locally, maybe the media server can?
- status = mRetriever->setDataSource(NULL /* httpService */, path);
+ sp<IMediaHTTPService> nullService;
+ status = mRetriever->setDataSource(nullService, path);
} else {
status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
close(fd);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 57af772..f36ff97 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -121,12 +121,12 @@
}
status_t StagefrightMetadataRetriever::setDataSource(
- const sp<DataSource>& source) {
+ const sp<DataSource>& source, const char *mime) {
ALOGV("setDataSource(DataSource)");
clearMetadata();
mSource = source;
- mExtractor = MediaExtractor::Create(mSource);
+ mExtractor = MediaExtractor::Create(mSource, mime);
if (mExtractor == NULL) {
ALOGE("Failed to instantiate a MediaExtractor.");
@@ -137,17 +137,164 @@
return OK;
}
+static VideoFrame *allocVideoFrame(
+ const sp<MetaData> &trackMeta, int32_t width, int32_t height, int32_t bpp, bool metaOnly) {
+ int32_t rotationAngle;
+ if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
+ rotationAngle = 0; // By default, no rotation
+ }
+
+ uint32_t type;
+ const void *iccData;
+ size_t iccSize;
+ if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
+ iccData = NULL;
+ iccSize = 0;
+ }
+
+ int32_t sarWidth, sarHeight;
+ int32_t displayWidth, displayHeight;
+ if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
+ && trackMeta->findInt32(kKeySARHeight, &sarHeight)
+ && sarHeight != 0) {
+ displayWidth = (width * sarWidth) / sarHeight;
+ displayHeight = height;
+ } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
+ && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
+ && displayWidth > 0 && displayHeight > 0
+ && width > 0 && height > 0) {
+ ALOGV("found display size %dx%d", displayWidth, displayHeight);
+ } else {
+ displayWidth = width;
+ displayHeight = height;
+ }
+
+ return new VideoFrame(width, height, displayWidth, displayHeight,
+ rotationAngle, bpp, !metaOnly, iccData, iccSize);
+}
+
+static bool getDstColorFormat(android_pixel_format_t colorFormat,
+ OMX_COLOR_FORMATTYPE *omxColorFormat, int32_t *bpp) {
+ switch (colorFormat) {
+ case HAL_PIXEL_FORMAT_RGB_565:
+ {
+ *omxColorFormat = OMX_COLOR_Format16bitRGB565;
+ *bpp = 2;
+ return true;
+ }
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ {
+ *omxColorFormat = OMX_COLOR_Format32BitRGBA8888;
+ *bpp = 4;
+ return true;
+ }
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ {
+ *omxColorFormat = OMX_COLOR_Format32bitBGRA8888;
+ *bpp = 4;
+ return true;
+ }
+ default:
+ {
+ ALOGE("Unsupported color format: %d", colorFormat);
+ break;
+ }
+ }
+ return false;
+}
+
static VideoFrame *extractVideoFrame(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source,
int64_t frameTimeUs,
- int seekMode) {
-
+ int seekMode,
+ int colorFormat,
+ bool metaOnly) {
sp<MetaData> format = source->getFormat();
+ MediaSource::ReadOptions::SeekMode mode =
+ static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
+ if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
+ seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
+ ALOGE("Unknown seek mode: %d", seekMode);
+ return NULL;
+ }
+
+ int32_t dstBpp;
+ OMX_COLOR_FORMATTYPE dstFormat;
+ if (!getDstColorFormat(
+ (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
+ return NULL;
+ }
+
+ if (metaOnly) {
+ int32_t width, height;
+ CHECK(trackMeta->findInt32(kKeyWidth, &width));
+ CHECK(trackMeta->findInt32(kKeyHeight, &height));
+ return allocVideoFrame(trackMeta, width, height, dstBpp, true);
+ }
+
+ MediaSource::ReadOptions options;
+ sp<MetaData> overrideMeta;
+ if (frameTimeUs < 0) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ int64_t thumbNailTime;
+ int32_t thumbnailWidth, thumbnailHeight;
+
+ // if we have a stand-alone thumbnail, set up the override meta,
+ // and set seekTo time to -1.
+ if (trackMeta->findInt32(kKeyThumbnailWidth, &thumbnailWidth)
+ && trackMeta->findInt32(kKeyThumbnailHeight, &thumbnailHeight)
+ && trackMeta->findData(kKeyThumbnailHVCC, &type, &data, &size)){
+ overrideMeta = new MetaData(*trackMeta);
+ overrideMeta->setInt32(kKeyWidth, thumbnailWidth);
+ overrideMeta->setInt32(kKeyHeight, thumbnailHeight);
+ overrideMeta->setData(kKeyHVCC, type, data, size);
+ thumbNailTime = -1ll;
+ ALOGV("thumbnail: %dx%d", thumbnailWidth, thumbnailHeight);
+ } else if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
+ || thumbNailTime < 0) {
+ thumbNailTime = 0;
+ }
+
+ options.setSeekTo(thumbNailTime, mode);
+ } else {
+ options.setSeekTo(frameTimeUs, mode);
+ }
+
+ int32_t gridRows = 1, gridCols = 1;
+ int32_t numTiles = 1, tilesDecoded = 0;
+ if (overrideMeta == NULL) {
+ // check if we're dealing with a tiled heif
+ if (trackMeta->findInt32(kKeyGridRows, &gridRows) && gridRows > 0
+ && trackMeta->findInt32(kKeyGridCols, &gridCols) && gridCols > 0) {
+ int32_t width, height;
+ CHECK(trackMeta->findInt32(kKeyWidth, &width));
+ CHECK(trackMeta->findInt32(kKeyHeight, &height));
+
+ if ((width % gridCols == 0) && (height % gridRows == 0)) {
+ width /= gridCols;
+ height /= gridRows;
+ numTiles = gridCols * gridRows;
+
+ ALOGV("tile: %dx%d, numTiles %d", width, height, numTiles);
+
+ overrideMeta = new MetaData(*trackMeta);
+ overrideMeta->setInt32(kKeyWidth, width);
+ overrideMeta->setInt32(kKeyHeight, height);
+ }
+ }
+ if (overrideMeta == NULL) {
+ gridRows = gridCols = numTiles = 1;
+ overrideMeta = trackMeta;
+ }
+ }
+
sp<AMessage> videoFormat;
- if (convertMetaDataToMessage(trackMeta, &videoFormat) != OK) {
+ if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
ALOGE("b/23680780");
ALOGW("Failed to convert meta data to message");
return NULL;
@@ -160,7 +307,8 @@
// input and output ports, if seeking to a sync frame. NOTE: This request may
// fail if component requires more than that for decoding.
bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
- if (!isSeekingClosest) {
+ bool decodeSingleFrame = !isSeekingClosest && (numTiles == 1);
+ if (decodeSingleFrame) {
videoFormat->setInt32("android._num-input-buffers", 1);
videoFormat->setInt32("android._num-output-buffers", 1);
}
@@ -190,30 +338,6 @@
return NULL;
}
- MediaSource::ReadOptions options;
- if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
- seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
-
- ALOGE("Unknown seek mode: %d", seekMode);
- decoder->release();
- return NULL;
- }
-
- MediaSource::ReadOptions::SeekMode mode =
- static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
-
- int64_t thumbNailTime;
- if (frameTimeUs < 0) {
- if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
- || thumbNailTime < 0) {
- thumbNailTime = 0;
- }
- options.setSeekTo(thumbNailTime, mode);
- } else {
- thumbNailTime = -1;
- options.setSeekTo(frameTimeUs, mode);
- }
-
err = source->start();
if (err != OK) {
ALOGW("source failed to start: %d (%s)", err, asString(err));
@@ -258,6 +382,8 @@
bool firstSample = true;
int64_t targetTimeUs = -1ll;
+ VideoFrame *frame = NULL;
+
do {
size_t inputIndex = -1;
int64_t ptsUs = 0ll;
@@ -282,6 +408,9 @@
if (err != OK) {
ALOGW("Input Error or EOS");
haveMoreInputs = false;
+ if (err == ERROR_END_OF_STREAM) {
+ err = OK;
+ }
break;
}
if (firstSample && isSeekingClosest) {
@@ -293,6 +422,7 @@
if (mediaBuffer->range_length() > codecBuffer->capacity()) {
ALOGE("buffer size (%zu) too large for codec input size (%zu)",
mediaBuffer->range_length(), codecBuffer->capacity());
+ haveMoreInputs = false;
err = BAD_VALUE;
} else {
codecBuffer->setRange(0, mediaBuffer->range_length());
@@ -301,19 +431,20 @@
memcpy(codecBuffer->data(),
(const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
mediaBuffer->range_length());
- if (isAvcOrHevc && IsIDR(codecBuffer) && !isSeekingClosest) {
- // Only need to decode one IDR frame, unless we're seeking with CLOSEST
- // option, in which case we need to actually decode to targetTimeUs.
- haveMoreInputs = false;
- flags |= MediaCodec::BUFFER_FLAG_EOS;
- }
}
mediaBuffer->release();
break;
}
- if (err == OK && inputIndex < inputBuffers.size()) {
+ if (haveMoreInputs && inputIndex < inputBuffers.size()) {
+ if (isAvcOrHevc && IsIDR(codecBuffer) && decodeSingleFrame) {
+ // Only need to decode one IDR frame, unless we're seeking with CLOSEST
+ // option, in which case we need to actually decode to targetTimeUs.
+ haveMoreInputs = false;
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ }
+
ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
codecBuffer->size(), ptsUs, flags);
err = decoder->queueInputBuffer(
@@ -352,11 +483,70 @@
} else if (err == OK) {
// If we're seeking with CLOSEST option and obtained a valid targetTimeUs
// from the extractor, decode to the specified frame. Otherwise we're done.
- done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
- if (!done) {
- err = decoder->releaseOutputBuffer(index);
+ sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
+
+ int32_t width, height;
+ CHECK(outputFormat != NULL);
+ CHECK(outputFormat->findInt32("width", &width));
+ CHECK(outputFormat->findInt32("height", &height));
+
+ int32_t crop_left, crop_top, crop_right, crop_bottom;
+ if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+ crop_left = crop_top = 0;
+ crop_right = width - 1;
+ crop_bottom = height - 1;
}
+
+ if (frame == NULL) {
+ frame = allocVideoFrame(
+ overrideMeta,
+ (crop_right - crop_left + 1) * gridCols,
+ (crop_bottom - crop_top + 1) * gridRows,
+ dstBpp,
+ false /*metaOnly*/);
+ }
+
+ int32_t srcFormat;
+ CHECK(outputFormat->findInt32("color-format", &srcFormat));
+
+ ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat);
+
+ int32_t dstLeft, dstTop, dstRight, dstBottom;
+ if (numTiles == 1) {
+ dstLeft = crop_left;
+ dstTop = crop_top;
+ dstRight = crop_right;
+ dstBottom = crop_bottom;
+ } else {
+ dstLeft = tilesDecoded % gridCols * width;
+ dstTop = tilesDecoded / gridCols * height;
+ dstRight = dstLeft + width - 1;
+ dstBottom = dstTop + height - 1;
+ }
+
+ if (converter.isValid()) {
+ err = converter.convert(
+ (const uint8_t *)videoFrameBuffer->data(),
+ width, height,
+ crop_left, crop_top, crop_right, crop_bottom,
+ frame->mData,
+ frame->mWidth,
+ frame->mHeight,
+ dstLeft, dstTop, dstRight, dstBottom);
+ } else {
+ ALOGE("Unable to convert from format 0x%08x to 0x%08x",
+ srcFormat, dstFormat);
+
+ err = ERROR_UNSUPPORTED;
+ }
+
+ done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
+ if (numTiles > 1) {
+ tilesDecoded++;
+ done &= (tilesDecoded >= numTiles);
+ }
+ err = decoder->releaseOutputBuffer(index);
} else {
ALOGW("Received error %d (%s) instead of output", err, asString(err));
done = true;
@@ -366,95 +556,11 @@
}
} while (err == OK && !done);
- if (err != OK || size <= 0 || outputFormat == NULL) {
- ALOGE("Failed to decode thumbnail frame");
- source->stop();
- decoder->release();
- return NULL;
- }
-
- ALOGV("successfully decoded video frame.");
- sp<MediaCodecBuffer> videoFrameBuffer = outputBuffers.itemAt(index);
-
- if (thumbNailTime >= 0) {
- if (timeUs != thumbNailTime) {
- AString mime;
- CHECK(outputFormat->findString("mime", &mime));
-
- ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
- (long long)thumbNailTime, (long long)timeUs, mime.c_str());
- }
- }
-
- int32_t width, height;
- CHECK(outputFormat->findInt32("width", &width));
- CHECK(outputFormat->findInt32("height", &height));
-
- int32_t crop_left, crop_top, crop_right, crop_bottom;
- if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
- crop_left = crop_top = 0;
- crop_right = width - 1;
- crop_bottom = height - 1;
- }
-
- int32_t rotationAngle;
- if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
- rotationAngle = 0; // By default, no rotation
- }
-
- VideoFrame *frame = new VideoFrame;
- frame->mWidth = crop_right - crop_left + 1;
- frame->mHeight = crop_bottom - crop_top + 1;
- frame->mDisplayWidth = frame->mWidth;
- frame->mDisplayHeight = frame->mHeight;
- frame->mSize = frame->mWidth * frame->mHeight * 2;
- frame->mData = new uint8_t[frame->mSize];
- frame->mRotationAngle = rotationAngle;
-
- int32_t sarWidth, sarHeight;
- if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
- && trackMeta->findInt32(kKeySARHeight, &sarHeight)
- && sarHeight != 0) {
- frame->mDisplayWidth = (frame->mDisplayWidth * sarWidth) / sarHeight;
- } else {
- int32_t width, height;
- if (trackMeta->findInt32(kKeyDisplayWidth, &width)
- && trackMeta->findInt32(kKeyDisplayHeight, &height)
- && frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
- && width > 0 && height > 0) {
- frame->mDisplayWidth = width;
- frame->mDisplayHeight = height;
- }
- }
-
- int32_t srcFormat;
- CHECK(outputFormat->findInt32("color-format", &srcFormat));
-
- ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
-
- if (converter.isValid()) {
- err = converter.convert(
- (const uint8_t *)videoFrameBuffer->data(),
- width, height,
- crop_left, crop_top, crop_right, crop_bottom,
- frame->mData,
- frame->mWidth,
- frame->mHeight,
- 0, 0, frame->mWidth - 1, frame->mHeight - 1);
- } else {
- ALOGE("Unable to convert from format 0x%08x to RGB565", srcFormat);
-
- err = ERROR_UNSUPPORTED;
- }
-
- videoFrameBuffer.clear();
source->stop();
- decoder->releaseOutputBuffer(index);
decoder->release();
if (err != OK) {
- ALOGE("Colorconverter failed to convert frame.");
-
+ ALOGE("failed to get video frame (err %d)", err);
delete frame;
frame = NULL;
}
@@ -463,9 +569,10 @@
}
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
- int64_t timeUs, int option) {
+ int64_t timeUs, int option, int colorFormat, bool metaOnly) {
- ALOGV("getFrameAtTime: %" PRId64 " us option: %d", timeUs, option);
+ ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
+ timeUs, option, colorFormat, metaOnly);
if (mExtractor.get() == NULL) {
ALOGV("no extractor.");
@@ -533,8 +640,8 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const AString &componentName = matchingCodecs[i];
- VideoFrame *frame =
- extractVideoFrame(componentName, trackMeta, source, timeUs, option);
+ VideoFrame *frame = extractVideoFrame(
+ componentName, trackMeta, source, timeUs, option, colorFormat, metaOnly);
if (frame != NULL) {
return frame;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 0aea8e1..a3bda5d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1874,5 +1874,13 @@
return result;
}
+void MakeFourCCString(uint32_t x, char *s) {
+ s[0] = x >> 24;
+ s[1] = (x >> 16) & 0xff;
+ s[2] = (x >> 8) & 0xff;
+ s[3] = x & 0xff;
+ s[4] = '\0';
+}
+
} // namespace android
diff --git a/media/libstagefright/codec2/Android.bp b/media/libstagefright/codec2/Android.bp
index e5bc4b3..f79e058 100644
--- a/media/libstagefright/codec2/Android.bp
+++ b/media/libstagefright/codec2/Android.bp
@@ -1,6 +1,10 @@
cc_library_shared {
name: "libstagefright_codec2",
+ tags: [
+ "optional",
+ ],
+
srcs: ["C2.cpp"],
include_dirs: [
@@ -24,4 +28,5 @@
subdirs = [
"tests",
+ "vndk",
]
diff --git a/media/libstagefright/codec2/include/C2Buffer.h b/media/libstagefright/codec2/include/C2Buffer.h
index 9f6b487..88f1db3 100644
--- a/media/libstagefright/codec2/include/C2Buffer.h
+++ b/media/libstagefright/codec2/include/C2Buffer.h
@@ -223,7 +223,11 @@
*
* \return acquired object potentially invalidated if waiting for the fence failed.
*/
- T get();
+ T get() {
+ // TODO:
+ // wait();
+ return mT;
+ }
protected:
C2Acquirable(C2Error error, C2Fence fence, T t) : C2Fence(fence), mInitialError(error), mT(t) { }
@@ -268,7 +272,7 @@
: mCapacity(parent == nullptr ? 0 : parent->capacity()) { }
private:
- const uint32_t mCapacity;
+ uint32_t mCapacity;
/// @}
};
@@ -429,7 +433,7 @@
/**
* \return pointer to the start of the block or nullptr on error.
*/
- const uint8_t *data();
+ const uint8_t *data() const;
/**
* Returns a portion of this view.
@@ -447,6 +451,10 @@
*/
C2Error error();
+protected:
+ C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data);
+ explicit C2ReadView(C2Error error);
+
private:
class Impl;
std::shared_ptr<Impl> mImpl;
@@ -476,6 +484,10 @@
*/
C2Error error();
+protected:
+ C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base);
+ explicit C2WriteView(C2Error error);
+
private:
class Impl;
/// \todo should this be unique_ptr to make this movable only - to avoid inconsistent regions
@@ -516,7 +528,13 @@
*/
C2Fence fence() const { return mFence; }
+protected:
+ C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc);
+ C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size);
+
private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
C2Fence mFence;
};
@@ -544,6 +562,14 @@
* The block shall be modified only until firing the event for the fence.
*/
C2ConstLinearBlock share(size_t offset, size_t size, C2Fence fence);
+
+protected:
+ C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc);
+ C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size);
+
+private:
+ class Impl;
+ std::shared_ptr<Impl> mImpl;
};
/// @}
diff --git a/media/libstagefright/codec2/include/C2Component.h b/media/libstagefright/codec2/include/C2Component.h
index 1ee9302..f071423 100644
--- a/media/libstagefright/codec2/include/C2Component.h
+++ b/media/libstagefright/codec2/include/C2Component.h
@@ -282,7 +282,7 @@
* fields in the same list?
*/
virtual status_t getSupportedValues(
- const std::vector<const C2ParamField> fields,
+ const std::vector<const C2ParamField> &fields,
std::vector<C2FieldSupportedValues>* const values) const = 0;
virtual ~C2ComponentInterface() = default;
@@ -543,6 +543,7 @@
};
class C2ComponentStore {
+public:
/**
* Creates a component.
*
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index 30e9193..18e0a47 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -67,6 +67,7 @@
kParamIndexStructStart = 0x1,
kParamIndexVideoSize,
kParamIndexMaxVideoSizeHint,
+ kParamIndexVideoSizeTuning,
kParamIndexParamStart = 0x800,
};
@@ -230,19 +231,22 @@
int32_t mWidth; ///< video width
int32_t mHeight; ///< video height
- DEFINE_AND_DESCRIBE_C2STRUCT(VideoSize)
+ DEFINE_C2STRUCT_NO_BASE(VideoSize)
+} C2_PACK;
+
+DESCRIBE_C2STRUCT(VideoSize, {
C2FIELD(mWidth, "width")
C2FIELD(mHeight, "height")
-};
+})
// video size for video decoder [OUT]
-typedef C2StreamParam<C2Info, C2VideoSizeStruct> C2VideoSizeStreamInfo;
+typedef C2StreamParam<C2Info, C2VideoSizeStruct, kParamIndexVideoSize> C2VideoSizeStreamInfo;
// max video size for video decoder [IN]
typedef C2PortParam<C2Setting, C2VideoSizeStruct, kParamIndexMaxVideoSizeHint> C2MaxVideoSizeHintPortSetting;
// video encoder size [IN]
-typedef C2StreamParam<C2Tuning, C2VideoSizeStruct> C2VideoSizeStreamTuning;
+typedef C2StreamParam<C2Tuning, C2VideoSizeStruct, kParamIndexVideoSizeTuning> C2VideoSizeStreamTuning;
/// @}
diff --git a/media/libstagefright/codec2/include/C2Param.h b/media/libstagefright/codec2/include/C2Param.h
index fd43061..aab0474 100644
--- a/media/libstagefright/codec2/include/C2Param.h
+++ b/media/libstagefright/codec2/include/C2Param.h
@@ -362,6 +362,17 @@
return param;
}
+    /// Returns a managed clone of |orig| allocated on the heap.
+ inline static std::unique_ptr<C2Param> Copy(const C2Param &orig) {
+ if (orig.size() == 0) {
+ return nullptr;
+ }
+ void *mem = ::operator new (orig.size());
+ C2Param *param = new (mem) C2Param(orig.size(), orig._mIndex);
+ param->updateFrom(orig);
+ return std::unique_ptr<C2Param>(param);
+ }
+
#if 0
template<typename P, class=decltype(C2Param(P()))>
P *As() { return P::From(this); }
@@ -661,11 +672,11 @@
Primitive mValue;
};
-template<> const int32_t &C2Value::Primitive::ref<int32_t>() const { return i32; }
-template<> const int64_t &C2Value::Primitive::ref<int64_t>() const { return i64; }
-template<> const uint32_t &C2Value::Primitive::ref<uint32_t>() const { return u32; }
-template<> const uint64_t &C2Value::Primitive::ref<uint64_t>() const { return u64; }
-template<> const float &C2Value::Primitive::ref<float>() const { return fp; }
+template<> inline const int32_t &C2Value::Primitive::ref<int32_t>() const { return i32; }
+template<> inline const int64_t &C2Value::Primitive::ref<int64_t>() const { return i64; }
+template<> inline const uint32_t &C2Value::Primitive::ref<uint32_t>() const { return u32; }
+template<> inline const uint64_t &C2Value::Primitive::ref<uint64_t>() const { return u64; }
+template<> inline const float &C2Value::Primitive::ref<float>() const { return fp; }
template<> constexpr C2Value::Type C2Value::typeFor<int32_t>() { return INT32; }
template<> constexpr C2Value::Type C2Value::typeFor<int64_t>() { return INT64; }
diff --git a/media/libstagefright/codec2/tests/Android.bp b/media/libstagefright/codec2/tests/Android.bp
index 1dc6a58..a8a6565 100644
--- a/media/libstagefright/codec2/tests/Android.bp
+++ b/media/libstagefright/codec2/tests/Android.bp
@@ -6,6 +6,7 @@
],
srcs: [
+ "vndk/C2BufferTest.cpp",
"vndk/C2UtilTest.cpp",
"C2_test.cpp",
"C2Param_test.cpp",
@@ -21,6 +22,16 @@
"libcutils",
"liblog",
"libstagefright_codec2",
+ "libcutils",
+ "libhidlbase",
+ "libion",
+ "liblog",
+ "libstagefright_codec2",
+ "libutils",
+ ],
+
+ static_libs: [
+ "libstagefright_codec2_vndk",
],
cflags: [
diff --git a/media/libstagefright/codec2/tests/C2Param_test.cpp b/media/libstagefright/codec2/tests/C2Param_test.cpp
index ec82c84..9165aad 100644
--- a/media/libstagefright/codec2/tests/C2Param_test.cpp
+++ b/media/libstagefright/codec2/tests/C2Param_test.cpp
@@ -968,6 +968,10 @@
EXPECT_EQ(C2NumberStreamTuning::From(&tun), nullptr);
EXPECT_EQ(C2NumberStreamTuning::input::From(&tun), nullptr);
EXPECT_EQ(C2NumberStreamTuning::output::From(&tun), nullptr);
+
+ EXPECT_EQ(*(C2Param::Copy(btun)), btun);
+ btun.invalidate();
+ EXPECT_FALSE(C2Param::Copy(btun));
}
const C2NumberPortTuning outp1(true, 100), inp1(false, 100);
@@ -1171,6 +1175,11 @@
EXPECT_EQ(C2NumberStreamTuning::output::From(&inp2), nullptr);
EXPECT_EQ(C2NumberStreamTuning::output::From(&outp1), nullptr);
EXPECT_EQ(C2NumberStreamTuning::output::From(&outp2), nullptr);
+
+ EXPECT_EQ(*(C2Param::Copy(inp1)), inp1);
+ EXPECT_EQ(*(C2Param::Copy(inp2)), inp2);
+ EXPECT_EQ(*(C2Param::Copy(outp1)), outp1);
+ EXPECT_EQ(*(C2Param::Copy(outp2)), outp2);
}
const C2NumberStreamTuning outs1(true, 1u, 100), ins1(false, 1u, 100);
@@ -1383,6 +1392,10 @@
EXPECT_EQ(C2NumberStreamTuning::output::From(&outs1), (C2NumberStreamTuning::output*)&outs1);
EXPECT_EQ(C2NumberStreamTuning::output::From(&outs2), &outs2);
+ EXPECT_EQ(*(C2Param::Copy(ins1)), ins1);
+ EXPECT_EQ(*(C2Param::Copy(ins2)), ins2);
+ EXPECT_EQ(*(C2Param::Copy(outs1)), outs1);
+ EXPECT_EQ(*(C2Param::Copy(outs2)), outs2);
}
{
@@ -1518,6 +1531,8 @@
EXPECT_EQ(C2NumbersStreamTuning::From(tun.get()), nullptr);
EXPECT_EQ(C2NumbersStreamTuning::input::From(tun.get()), nullptr);
EXPECT_EQ(C2NumbersStreamTuning::output::From(tun.get()), nullptr);
+
+ EXPECT_EQ(*(C2Param::Copy(*tun)), *tun);
}
std::unique_ptr<C2NumbersPortTuning> outp1_(C2NumbersPortTuning::alloc_unique(1, true)),
@@ -1739,6 +1754,10 @@
EXPECT_EQ(C2NumbersStreamTuning::output::From(outp1.get()), nullptr);
EXPECT_EQ(C2NumbersStreamTuning::output::From(outp2.get()), nullptr);
+ EXPECT_EQ(*(C2Param::Copy(*inp1)), *inp1);
+ EXPECT_EQ(*(C2Param::Copy(*inp2)), *inp2);
+ EXPECT_EQ(*(C2Param::Copy(*outp1)), *outp1);
+ EXPECT_EQ(*(C2Param::Copy(*outp2)), *outp2);
}
std::unique_ptr<C2NumbersStreamTuning> outs1_(C2NumbersStreamTuning::alloc_unique(1, true, 1u));
@@ -1968,6 +1987,10 @@
EXPECT_EQ(C2NumbersStreamTuning::output::From(outs1.get()), (C2NumbersStreamTuning::output*)outs1.get());
EXPECT_EQ(C2NumbersStreamTuning::output::From(outs2.get()), outs2.get());
+ EXPECT_EQ(*(C2Param::Copy(*ins1)), *ins1);
+ EXPECT_EQ(*(C2Param::Copy(*ins2)), *ins2);
+ EXPECT_EQ(*(C2Param::Copy(*outs1)), *outs1);
+ EXPECT_EQ(*(C2Param::Copy(*outs2)), *outs2);
}
{
@@ -2262,7 +2285,7 @@
for (const C2Param::Index index : heapParamIndices) {
if (mMyParams.count(index)) {
C2Param & myParam = mMyParams.find(index)->second;
- std::unique_ptr<C2Param> paramCopy(C2Param::From(&myParam, myParam.size()));
+ std::unique_ptr<C2Param> paramCopy(C2Param::Copy(myParam));
heapParams->push_back(std::move(paramCopy));
}
}
@@ -2303,7 +2326,7 @@
};
virtual status_t getSupportedValues(
- const std::vector<const C2ParamField> fields,
+ const std::vector<const C2ParamField> &fields,
std::vector<C2FieldSupportedValues>* const values) const {
for (const C2ParamField &field : fields) {
if (field == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::mValue)) {
diff --git a/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
new file mode 100644
index 0000000..0ba3cad
--- /dev/null
+++ b/media/libstagefright/codec2/tests/vndk/C2BufferTest.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <C2Buffer.h>
+#include <C2BufferPriv.h>
+
+#include <system/graphics.h>
+
+namespace android {
+
+class C2BufferTest : public ::testing::Test {
+public:
+ C2BufferTest()
+ : mAllocator(std::make_shared<C2AllocatorIon>()),
+ mSize(0u),
+ mAddr(nullptr) {
+ }
+
+ ~C2BufferTest() = default;
+
+ void allocate(size_t capacity) {
+ C2Error err = mAllocator->allocateLinearBuffer(
+ capacity,
+ { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
+ &mAllocation);
+ if (err != C2_OK) {
+ mAllocation.reset();
+ FAIL() << "C2Allocator::allocateLinearBuffer() failed: " << err;
+ }
+ }
+
+ void map(size_t offset, size_t size, uint8_t **addr) {
+ ASSERT_TRUE(mAllocation);
+ C2Error err = mAllocation->map(
+ offset,
+ size,
+ { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
+ // TODO: fence
+ nullptr,
+ &mAddr);
+ if (err != C2_OK) {
+ mAddr = nullptr;
+ FAIL() << "C2LinearAllocation::map() failed: " << err;
+ }
+ ASSERT_NE(nullptr, mAddr);
+ mSize = size;
+ *addr = (uint8_t *)mAddr;
+ }
+
+ void unmap() {
+ ASSERT_TRUE(mAllocation);
+ ASSERT_NE(nullptr, mAddr);
+ ASSERT_NE(0u, mSize);
+
+ // TODO: fence
+ ASSERT_EQ(C2_OK, mAllocation->unmap(mAddr, mSize, nullptr));
+ mSize = 0u;
+ mAddr = nullptr;
+ }
+
+ std::shared_ptr<C2BlockAllocator> makeBlockAllocator() {
+ return std::make_shared<C2DefaultBlockAllocator>(mAllocator);
+ }
+
+private:
+ std::shared_ptr<C2Allocator> mAllocator;
+ std::shared_ptr<C2LinearAllocation> mAllocation;
+ size_t mSize;
+ void *mAddr;
+};
+
+TEST_F(C2BufferTest, LinearAllocationTest) {
+ constexpr size_t kCapacity = 1024u * 1024u;
+
+ allocate(kCapacity);
+
+ uint8_t *addr = nullptr;
+ map(0u, kCapacity, &addr);
+ ASSERT_NE(nullptr, addr);
+
+ for (size_t i = 0; i < kCapacity; ++i) {
+ addr[i] = i % 100u;
+ }
+
+ unmap();
+ addr = nullptr;
+
+ map(kCapacity / 3, kCapacity / 3, &addr);
+ ASSERT_NE(nullptr, addr);
+ for (size_t i = 0; i < kCapacity / 3; ++i) {
+ ASSERT_EQ((i + kCapacity / 3) % 100, addr[i]) << " at i = " << i;
+ }
+}
+
+TEST_F(C2BufferTest, BlockAllocatorTest) {
+ constexpr size_t kCapacity = 1024u * 1024u;
+
+ std::shared_ptr<C2BlockAllocator> blockAllocator(makeBlockAllocator());
+
+ std::shared_ptr<C2LinearBlock> block;
+ ASSERT_EQ(C2_OK, blockAllocator->allocateLinearBlock(
+ kCapacity,
+ { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
+ &block));
+ ASSERT_TRUE(block);
+
+ C2Acquirable<C2WriteView> writeViewHolder = block->map();
+ C2WriteView writeView = writeViewHolder.get();
+ ASSERT_EQ(C2_OK, writeView.error());
+ ASSERT_EQ(kCapacity, writeView.capacity());
+ ASSERT_EQ(0u, writeView.offset());
+ ASSERT_EQ(kCapacity, writeView.size());
+
+ uint8_t *data = writeView.data();
+ ASSERT_NE(nullptr, data);
+ for (size_t i = 0; i < writeView.size(); ++i) {
+ data[i] = i % 100u;
+ }
+
+ C2Fence fence;
+ C2ConstLinearBlock constBlock = block->share(
+ kCapacity / 3, kCapacity / 3, fence);
+
+ C2Acquirable<C2ReadView> readViewHolder = constBlock.map();
+ C2ReadView readView = readViewHolder.get();
+ ASSERT_EQ(C2_OK, readView.error());
+ ASSERT_EQ(kCapacity / 3, readView.capacity());
+
+ // TODO: fence
+ const uint8_t *constData = readView.data();
+ ASSERT_NE(nullptr, constData);
+ for (size_t i = 0; i < readView.capacity(); ++i) {
+ ASSERT_EQ((i + kCapacity / 3) % 100u, constData[i]) << " at i = " << i
+ << "; data = " << static_cast<void *>(data)
+ << "; constData = " << static_cast<const void *>(constData);
+ }
+
+ readView = readView.subView(333u, 100u);
+ ASSERT_EQ(C2_OK, readView.error());
+ ASSERT_EQ(100u, readView.capacity());
+
+ constData = readView.data();
+ ASSERT_NE(nullptr, constData);
+ for (size_t i = 0; i < readView.capacity(); ++i) {
+ ASSERT_EQ((i + 333u + kCapacity / 3) % 100u, constData[i]) << " at i = " << i;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/vndk/Android.bp b/media/libstagefright/codec2/vndk/Android.bp
new file mode 100644
index 0000000..9426b4e
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/Android.bp
@@ -0,0 +1,30 @@
+cc_library_static {
+ name: "libstagefright_codec2_vndk",
+
+ srcs: ["C2Buffer.cpp"],
+
+ include_dirs: [
+ "frameworks/av/media/libstagefright/codec2/include",
+ "frameworks/av/media/libstagefright/codec2/vndk/include",
+ "frameworks/native/include/media/hardware",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "libcutils",
+ "libdl",
+ "libhardware",
+ "libhidlbase",
+ "libion",
+ "liblog",
+ "libmedia",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ "-std=c++14",
+ ],
+}
diff --git a/media/libstagefright/codec2/vndk/C2Buffer.cpp b/media/libstagefright/codec2/vndk/C2Buffer.cpp
new file mode 100644
index 0000000..ffb6c2e
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/C2Buffer.cpp
@@ -0,0 +1,720 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2Buffer"
+#include <utils/Log.h>
+
+#include <C2BufferPriv.h>
+
+#include <ion/ion.h>
+#include <sys/mman.h>
+
+namespace android {
+
+// standard ERRNO mappings
+template<int N> constexpr C2Error _c2_errno2error_impl();
+template<> constexpr C2Error _c2_errno2error_impl<0>() { return C2_OK; }
+template<> constexpr C2Error _c2_errno2error_impl<EINVAL>() { return C2_BAD_VALUE; }
+template<> constexpr C2Error _c2_errno2error_impl<EACCES>() { return C2_NO_PERMISSION; }
+template<> constexpr C2Error _c2_errno2error_impl<EPERM>() { return C2_NO_PERMISSION; }
+template<> constexpr C2Error _c2_errno2error_impl<ENOMEM>() { return C2_NO_MEMORY; }
+
+// map standard errno-s to the equivalent C2Error
+template<int... N> struct _c2_map_errno_impl;
+template<int E, int ... N> struct _c2_map_errno_impl<E, N...> {
+ static C2Error map(int result) {
+ if (result == E) {
+ return _c2_errno2error_impl<E>();
+ } else {
+ return _c2_map_errno_impl<N...>::map(result);
+ }
+ }
+};
+template<> struct _c2_map_errno_impl<> {
+ static C2Error map(int result) {
+ return result == 0 ? C2_OK : C2_CORRUPTED;
+ }
+};
+
+template<int... N>
+C2Error c2_map_errno(int result) {
+ return _c2_map_errno_impl<N...>::map(result);
+}
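Illustrative sketch, not part of the patch: how the variadic mapper above is meant to be used. Only the errnos listed as template arguments resolve to a specific C2Error; zero always maps to C2_OK and any unlisted value falls back to C2_CORRUPTED. The helper name below is hypothetical.

    // Assumes the c2_map_errno<> template above is in scope.
    static C2Error exampleMapIonResult(int ret /* 0 or -errno, as returned by libion */) {
        // 0 -> C2_OK, -ENOMEM -> C2_NO_MEMORY, -EACCES -> C2_NO_PERMISSION,
        // -EINVAL -> C2_BAD_VALUE, anything else -> C2_CORRUPTED
        return c2_map_errno<ENOMEM, EACCES, EINVAL>(-ret);
    }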
+
+namespace {
+
+// Inherit from the parent, share with the friend.
+
+class DummyCapacityAspect : public _C2LinearCapacityAspect {
+ using _C2LinearCapacityAspect::_C2LinearCapacityAspect;
+ friend class ::android::C2ReadView;
+ friend class ::android::C2ConstLinearBlock;
+};
+
+class C2DefaultReadView : public C2ReadView {
+ using C2ReadView::C2ReadView;
+ friend class ::android::C2ConstLinearBlock;
+};
+
+class C2DefaultWriteView : public C2WriteView {
+ using C2WriteView::C2WriteView;
+ friend class ::android::C2LinearBlock;
+};
+
+class C2AcquirableReadView : public C2Acquirable<C2ReadView> {
+ using C2Acquirable::C2Acquirable;
+ friend class ::android::C2ConstLinearBlock;
+};
+
+class C2AcquirableWriteView : public C2Acquirable<C2WriteView> {
+ using C2Acquirable::C2Acquirable;
+ friend class ::android::C2LinearBlock;
+};
+
+class C2DefaultConstLinearBlock : public C2ConstLinearBlock {
+ using C2ConstLinearBlock::C2ConstLinearBlock;
+ friend class ::android::C2LinearBlock;
+};
+
+class C2DefaultLinearBlock : public C2LinearBlock {
+ using C2LinearBlock::C2LinearBlock;
+ friend class ::android::C2DefaultBlockAllocator;
+};
+
+} // namespace
+
+/* ======================================= ION ALLOCATION ====================================== */
+
+/**
+ * ION handle
+ */
+struct C2HandleIon : public C2Handle {
+ C2HandleIon(int ionFd, ion_user_handle_t buffer) : C2Handle(cHeader),
+ mFds{ ionFd, buffer },
+ mInts{ kMagic } { }
+
+ static bool isValid(const C2Handle * const o);
+
+ int ionFd() const { return mFds.mIon; }
+ ion_user_handle_t buffer() const { return mFds.mBuffer; }
+
+ void setBuffer(ion_user_handle_t bufferFd) { mFds.mBuffer = bufferFd; }
+
+protected:
+ struct {
+ int mIon;
+ int mBuffer; // ion_user_handle_t
+ } mFds;
+ struct {
+ int mMagic;
+ } mInts;
+
+private:
+ typedef C2HandleIon _type;
+ enum {
+ kMagic = 'ion1',
+ numFds = sizeof(mFds) / sizeof(int),
+ numInts = sizeof(mInts) / sizeof(int),
+ version = sizeof(C2Handle) + sizeof(mFds) + sizeof(mInts)
+ };
+ //constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+ const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleIon::cHeader = {
+ C2HandleIon::version,
+ C2HandleIon::numFds,
+ C2HandleIon::numInts,
+ {}
+};
+
+// static
+bool C2HandleIon::isValid(const C2Handle * const o) {
+ if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+ return false;
+ }
+ const C2HandleIon *other = static_cast<const C2HandleIon*>(o);
+ return other->mInts.mMagic == kMagic;
+}
+
+// TODO: is the dup of an ion fd identical to ion_share?
+
+class C2AllocationIon : public C2LinearAllocation {
+public:
+ virtual C2Error map(
+ size_t offset, size_t size, C2MemoryUsage usage, int *fence,
+ void **addr /* nonnull */);
+ virtual C2Error unmap(void *addr, size_t size, int *fenceFd);
+ virtual bool isValid() const;
+ virtual ~C2AllocationIon();
+ virtual const C2Handle *handle() const;
+ virtual bool equals(const std::shared_ptr<C2LinearAllocation> &other) const;
+
+ // internal methods
+ C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags);
+ C2AllocationIon(int ionFd, size_t size, int shareFd);
+ int dup() const;
+ C2Error status() const;
+
+protected:
+ class Impl;
+ Impl *mImpl;
+};
+
+class C2AllocationIon::Impl {
+public:
+ // NOTE: using constructor here instead of a factory method as we will need the
+ // error value and this simplifies the error handling by the wrapper.
+ Impl(int ionFd, size_t capacity, size_t align, unsigned heapMask, unsigned flags)
+ : mInit(C2_OK),
+ mHandle(ionFd, -1),
+ mMapFd(-1),
+ mCapacity(capacity) {
+ ion_user_handle_t buffer = -1;
+ int ret = ion_alloc(mHandle.ionFd(), mCapacity, align, heapMask, flags, &buffer);
+ if (ret == 0) {
+ mHandle.setBuffer(buffer);
+ } else {
+ mInit = c2_map_errno<ENOMEM, EACCES, EINVAL>(-ret);
+ }
+ }
+
+ Impl(int ionFd, size_t capacity, int shareFd)
+ : mHandle(ionFd, -1),
+ mMapFd(-1),
+ mCapacity(capacity) {
+ ion_user_handle_t buffer;
+ mInit = ion_import(mHandle.ionFd(), shareFd, &buffer);
+ if (mInit == 0) {
+ mHandle.setBuffer(buffer);
+ }
+ (void)mCapacity; // TODO
+ }
+
+ C2Error map(size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
+ (void)fenceFd; // TODO: wait for fence
+ *addr = nullptr;
+ int prot = PROT_NONE;
+ int flags = MAP_PRIVATE;
+ if (usage.mConsumer & GRALLOC_USAGE_SW_READ_MASK) {
+ prot |= PROT_READ;
+ }
+ if (usage.mProducer & GRALLOC_USAGE_SW_WRITE_MASK) {
+ prot |= PROT_WRITE;
+ flags = MAP_SHARED;
+ }
+
+ size_t alignmentBytes = offset % PAGE_SIZE;
+ size_t mapOffset = offset - alignmentBytes;
+ size_t mapSize = size + alignmentBytes;
+
+ C2Error err = C2_OK;
+ if (mMapFd == -1) {
+ int ret = ion_map(mHandle.ionFd(), mHandle.buffer(), mapSize, prot,
+ flags, mapOffset, (unsigned char**)&mMapAddr, &mMapFd);
+ if (ret) {
+ mMapFd = -1;
+ *addr = nullptr;
+ err = c2_map_errno<EINVAL>(-ret);
+ } else {
+ *addr = (uint8_t *)mMapAddr + alignmentBytes;
+ mMapAlignmentBytes = alignmentBytes;
+ mMapSize = mapSize;
+ }
+ } else {
+ mMapAddr = mmap(nullptr, mapSize, prot, flags, mMapFd, mapOffset);
+ if (mMapAddr == MAP_FAILED) {
+ mMapAddr = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(errno);
+ } else {
+ *addr = (uint8_t *)mMapAddr + alignmentBytes;
+ mMapAlignmentBytes = alignmentBytes;
+ mMapSize = mapSize;
+ }
+ }
+ return err;
+ }
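A worked example of the page-alignment math above (illustrative only, assuming PAGE_SIZE == 4096):

    // map(offset = 5000, size = 100, ...)
    // alignmentBytes = 5000 % 4096 = 904
    // mapOffset      = 5000 - 904  = 4096   // page-aligned offset handed to ion_map()/mmap()
    // mapSize        = 100 + 904   = 1004   // mapping also covers the preceding partial page
    // *addr          = mMapAddr + 904       // caller still sees exactly the requested offset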
+
+ C2Error unmap(void *addr, size_t size, int *fenceFd) {
+ if (addr != (uint8_t *)mMapAddr + mMapAlignmentBytes ||
+ size + mMapAlignmentBytes != mMapSize) {
+ return C2_BAD_VALUE;
+ }
+ int err = munmap(mMapAddr, mMapSize);
+ if (err != 0) {
+ return c2_map_errno<EINVAL>(errno);
+ }
+ if (fenceFd) {
+ *fenceFd = -1;
+ }
+ return C2_OK;
+ }
+
+ ~Impl() {
+ if (mMapFd != -1) {
+ close(mMapFd);
+ mMapFd = -1;
+ }
+
+ (void)ion_free(mHandle.ionFd(), mHandle.buffer());
+ }
+
+ C2Error status() const {
+ return mInit;
+ }
+
+ const C2Handle * handle() const {
+ return &mHandle;
+ }
+
+ int dup() const {
+ int fd = -1;
+ if (mInit != 0 || ion_share(mHandle.ionFd(), mHandle.buffer(), &fd) != 0) {
+ fd = -1;
+ }
+ return fd;
+ }
+
+private:
+ C2Error mInit;
+ C2HandleIon mHandle;
+ int mMapFd; // only one for now
+ void *mMapAddr;
+ size_t mMapAlignmentBytes;
+ size_t mMapSize;
+ size_t mCapacity;
+};
+
+C2Error C2AllocationIon::map(
+ size_t offset, size_t size, C2MemoryUsage usage, int *fenceFd, void **addr) {
+ return mImpl->map(offset, size, usage, fenceFd, addr);
+}
+
+C2Error C2AllocationIon::unmap(void *addr, size_t size, int *fenceFd) {
+ return mImpl->unmap(addr, size, fenceFd);
+}
+
+bool C2AllocationIon::isValid() const {
+ return mImpl->status() == C2_OK;
+}
+
+C2Error C2AllocationIon::status() const {
+ return mImpl->status();
+}
+
+bool C2AllocationIon::equals(const std::shared_ptr<C2LinearAllocation> &other) const {
+ return other != nullptr &&
+ other->handle(); // TODO
+}
+
+const C2Handle *C2AllocationIon::handle() const {
+ return mImpl->handle();
+}
+
+C2AllocationIon::~C2AllocationIon() {
+ delete mImpl;
+}
+
+C2AllocationIon::C2AllocationIon(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags)
+ : C2LinearAllocation(size),
+ mImpl(new Impl(ionFd, size, align, heapMask, flags)) { }
+
+C2AllocationIon::C2AllocationIon(int ionFd, size_t size, int shareFd)
+ : C2LinearAllocation(size),
+ mImpl(new Impl(ionFd, size, shareFd)) { }
+
+int C2AllocationIon::dup() const {
+ return mImpl->dup();
+}
+
+/* ======================================= ION ALLOCATOR ====================================== */
+
+C2AllocatorIon::C2AllocatorIon() : mInit(C2_OK), mIonFd(ion_open()) {
+ if (mIonFd < 0) {
+ switch (errno) {
+ case ENOENT: mInit = C2_UNSUPPORTED; break;
+ default: mInit = c2_map_errno<EACCES>(errno); break;
+ }
+ }
+}
+
+C2AllocatorIon::~C2AllocatorIon() {
+ if (mInit == C2_OK) {
+ ion_close(mIonFd);
+ }
+}
+
+/**
+ * Allocates a 1D allocation of given |capacity| and |usage|. If successful, the allocation is
+ * stored in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param capacity the size of the requested allocation (the allocation could be slightly
+ * larger, e.g. to account for any system-required alignment)
+ * \param usage the memory usage info for the requested allocation. \note that the
+ * returned allocation may be later used/mapped with different usage.
+ * The allocator should lay out the buffer to be optimized for this usage,
+ * but must support any usage. One exception: protected buffers can
+ * only be used in a protected scenario.
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete the allocation
+ * \retval C2_TIMED_OUT the allocation timed out
+ * \retval C2_NO_PERMISSION no permission to complete the allocation
+ * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 1D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+C2Error C2AllocatorIon::allocateLinearBuffer(
+ uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation> *allocation) {
+ *allocation = nullptr;
+ if (mInit != C2_OK) {
+ return C2_UNSUPPORTED;
+ }
+
+ // get align, heapMask and flags
+ //size_t align = 1;
+ size_t align = 0;
+ unsigned heapMask = ~0;
+ unsigned flags = 0;
+ //TODO
+ (void) usage;
+#if 0
+ int err = mUsageMapper(usage, capacity, &align, &heapMask, &flags);
+ if (err < 0) {
+ return c2_map_errno<EINVAL, ENOMEM, EACCES>(-err);
+ }
+#endif
+
+ std::shared_ptr<C2AllocationIon> alloc
+ = std::make_shared<C2AllocationIon>(mIonFd, capacity, align, heapMask, flags);
+ C2Error ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ }
+ return ret;
+}
+
+/**
+ * (Re)creates a 1D allocation from a native |handle|. If successful, the allocation is stored
+ * in |allocation|. Otherwise, |allocation| is set to 'nullptr'.
+ *
+ * \param handle the handle for the existing allocation
+ * \param allocation pointer to where the allocation shall be stored on success. nullptr
+ * will be stored here on failure
+ *
+ * \retval C2_OK the allocation was recreated successfully
+ * \retval C2_NO_MEMORY not enough memory to recreate the allocation
+ * \retval C2_TIMED_OUT the recreation timed out (unexpected)
+ * \retval C2_NO_PERMISSION no permission to recreate the allocation
+ * \retval C2_BAD_VALUE invalid handle (caller error)
+ * \retval C2_UNSUPPORTED this allocator does not support 1D allocations
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation (unexpected)
+ */
+C2Error C2AllocatorIon::recreateLinearBuffer(
+ const C2Handle *handle, std::shared_ptr<C2LinearAllocation> *allocation) {
+ *allocation = nullptr;
+ if (mInit != C2_OK) {
+ return C2_UNSUPPORTED;
+ }
+
+ if (!C2HandleIon::isValid(handle)) {
+ return C2_BAD_VALUE;
+ }
+
+ // TODO: get capacity and validate it
+ const C2HandleIon *h = static_cast<const C2HandleIon*>(handle);
+ std::shared_ptr<C2AllocationIon> alloc
+ = std::make_shared<C2AllocationIon>(mIonFd, 0 /* capacity */, h->buffer());
+ C2Error ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ }
+ return ret;
+}
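Illustrative usage of the two entry points above (a sketch only; error handling is elided and the 1024u capacity is arbitrary):

    C2AllocatorIon allocator;
    std::shared_ptr<C2LinearAllocation> original, recreated;
    if (allocator.allocateLinearBuffer(
            1024u,
            { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
            &original) == C2_OK) {
        // Rebuild an allocation object from the native handle, e.g. after an IPC
        // transfer. Note that capacity recovery is still a TODO above.
        (void)allocator.recreateLinearBuffer(original->handle(), &recreated);
    }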
+
+/* ========================================== 1D BLOCK ========================================= */
+
+class C2Block1D::Impl {
+public:
+ const C2Handle *handle() const {
+ return mAllocation->handle();
+ }
+
+ Impl(std::shared_ptr<C2LinearAllocation> alloc)
+ : mAllocation(alloc) {}
+
+private:
+ std::shared_ptr<C2LinearAllocation> mAllocation;
+};
+
+const C2Handle *C2Block1D::handle() const {
+ return mImpl->handle();
+}
+
+C2Block1D::C2Block1D(std::shared_ptr<C2LinearAllocation> alloc)
+ : _C2LinearRangeAspect(alloc.get()), mImpl(new Impl(alloc)) {
+}
+
+C2Block1D::C2Block1D(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+ : _C2LinearRangeAspect(alloc.get(), offset, size), mImpl(new Impl(alloc)) {
+}
+
+class C2ReadView::Impl {
+public:
+ explicit Impl(const uint8_t *data)
+ : mData(data), mError(C2_OK) {}
+
+ explicit Impl(C2Error error)
+ : mData(nullptr), mError(error) {}
+
+ const uint8_t *data() const {
+ return mData;
+ }
+
+ C2Error error() const {
+ return mError;
+ }
+
+private:
+ const uint8_t *mData;
+ C2Error mError;
+};
+
+C2ReadView::C2ReadView(const _C2LinearCapacityAspect *parent, const uint8_t *data)
+ : _C2LinearCapacityAspect(parent), mImpl(std::make_shared<Impl>(data)) {}
+
+C2ReadView::C2ReadView(C2Error error)
+ : _C2LinearCapacityAspect(0u), mImpl(std::make_shared<Impl>(error)) {}
+
+const uint8_t *C2ReadView::data() const {
+ return mImpl->data();
+}
+
+C2ReadView C2ReadView::subView(size_t offset, size_t size) const {
+ if (offset > capacity()) {
+ offset = capacity();
+ }
+ if (size > capacity() - offset) {
+ size = capacity() - offset;
+ }
+ // TRICKY: newCapacity will just be used to grab the size.
+ DummyCapacityAspect newCapacity((uint32_t)size);
+ return C2ReadView(&newCapacity, data() + offset);
+}
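Note on the clamping above (illustrative): subView() never fails, it shrinks the request instead.

    // For a view with capacity() == 100:
    //   view.subView(40, 100)  -> capacity 60   (size clamped to capacity() - offset)
    //   view.subView(200, 10)  -> capacity 0    (offset clamped to capacity())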
+
+C2Error C2ReadView::error() {
+ return mImpl->error();
+}
+
+class C2WriteView::Impl {
+public:
+ explicit Impl(uint8_t *base)
+ : mBase(base), mError(C2_OK) {}
+
+ explicit Impl(C2Error error)
+ : mBase(nullptr), mError(error) {}
+
+ uint8_t *base() const {
+ return mBase;
+ }
+
+ C2Error error() const {
+ return mError;
+ }
+
+private:
+ uint8_t *mBase;
+ C2Error mError;
+};
+
+C2WriteView::C2WriteView(const _C2LinearRangeAspect *parent, uint8_t *base)
+ : _C2EditableLinearRange(parent), mImpl(std::make_shared<Impl>(base)) {}
+
+C2WriteView::C2WriteView(C2Error error)
+ : _C2EditableLinearRange(nullptr), mImpl(std::make_shared<Impl>(error)) {}
+
+uint8_t *C2WriteView::base() { return mImpl->base(); }
+
+uint8_t *C2WriteView::data() { return mImpl->base() + offset(); }
+
+C2Error C2WriteView::error() { return mImpl->error(); }
+
+class C2ConstLinearBlock::Impl {
+public:
+ explicit Impl(std::shared_ptr<C2LinearAllocation> alloc)
+ : mAllocation(alloc), mBase(nullptr), mSize(0u), mError(C2_CORRUPTED) {}
+
+ ~Impl() {
+ if (mBase != nullptr) {
+ // TODO: fence
+ C2Error err = mAllocation->unmap(mBase, mSize, nullptr);
+ if (err != C2_OK) {
+ // TODO: Log?
+ }
+ }
+ }
+
+ C2ConstLinearBlock subBlock(size_t offset, size_t size) const {
+ return C2ConstLinearBlock(mAllocation, offset, size);
+ }
+
+ void map(size_t offset, size_t size) {
+ if (mBase == nullptr) {
+ void *base = nullptr;
+ mError = mAllocation->map(
+ offset, size, { C2MemoryUsage::kSoftwareRead, 0 }, nullptr, &base);
+ // TODO: fence
+ if (mError == C2_OK) {
+ mBase = (uint8_t *)base;
+ mSize = size;
+ }
+ }
+ }
+
+ const uint8_t *base() const { return mBase; }
+
+ C2Error error() const { return mError; }
+
+private:
+ std::shared_ptr<C2LinearAllocation> mAllocation;
+ uint8_t *mBase;
+ size_t mSize;
+ C2Error mError;
+};
+
+C2ConstLinearBlock::C2ConstLinearBlock(std::shared_ptr<C2LinearAllocation> alloc)
+ : C2Block1D(alloc), mImpl(std::make_shared<Impl>(alloc)) {}
+
+C2ConstLinearBlock::C2ConstLinearBlock(
+ std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+ : C2Block1D(alloc, offset, size), mImpl(std::make_shared<Impl>(alloc)) {}
+
+C2Acquirable<C2ReadView> C2ConstLinearBlock::map() const {
+ mImpl->map(offset(), size());
+ if (mImpl->base() == nullptr) {
+ C2DefaultReadView view(mImpl->error());
+ return C2AcquirableReadView(mImpl->error(), mFence, view);
+ }
+ DummyCapacityAspect newCapacity(size());
+ C2DefaultReadView view(&newCapacity, mImpl->base());
+ return C2AcquirableReadView(mImpl->error(), mFence, view);
+}
+
+C2ConstLinearBlock C2ConstLinearBlock::subBlock(size_t offset, size_t size) const {
+ return mImpl->subBlock(offset, size);
+}
+
+class C2LinearBlock::Impl {
+public:
+ Impl(std::shared_ptr<C2LinearAllocation> alloc)
+ : mAllocation(alloc), mBase(nullptr), mSize(0u), mError(C2_CORRUPTED) {}
+
+ ~Impl() {
+ if (mBase != nullptr) {
+ // TODO: fence
+ C2Error err = mAllocation->unmap(mBase, mSize, nullptr);
+ if (err != C2_OK) {
+ // TODO: Log?
+ }
+ }
+ }
+
+ void map(size_t capacity) {
+ if (mBase == nullptr) {
+ void *base = nullptr;
+ // TODO: fence
+ mError = mAllocation->map(
+ 0u,
+ capacity,
+ { C2MemoryUsage::kSoftwareRead, C2MemoryUsage::kSoftwareWrite },
+ nullptr,
+ &base);
+ if (mError == C2_OK) {
+ mBase = (uint8_t *)base;
+ mSize = capacity;
+ }
+ }
+ }
+
+ C2ConstLinearBlock share(size_t offset, size_t size, C2Fence &fence) {
+ // TODO
+ (void) fence;
+ return C2DefaultConstLinearBlock(mAllocation, offset, size);
+ }
+
+ uint8_t *base() const { return mBase; }
+
+ C2Error error() const { return mError; }
+
+ C2Fence fence() const { return mFence; }
+
+private:
+ std::shared_ptr<C2LinearAllocation> mAllocation;
+ uint8_t *mBase;
+ size_t mSize;
+ C2Error mError;
+ C2Fence mFence;
+};
+
+C2LinearBlock::C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc)
+ : C2Block1D(alloc),
+ mImpl(new Impl(alloc)) {}
+
+C2LinearBlock::C2LinearBlock(std::shared_ptr<C2LinearAllocation> alloc, size_t offset, size_t size)
+ : C2Block1D(alloc, offset, size),
+ mImpl(new Impl(alloc)) {}
+
+C2Acquirable<C2WriteView> C2LinearBlock::map() {
+ mImpl->map(capacity());
+ if (mImpl->base() == nullptr) {
+ C2DefaultWriteView view(mImpl->error());
+ return C2AcquirableWriteView(mImpl->error(), mImpl->fence(), view);
+ }
+ C2DefaultWriteView view(this, mImpl->base());
+ view.setOffset_be(offset());
+ view.setSize_be(size());
+ return C2AcquirableWriteView(mImpl->error(), mImpl->fence(), view);
+}
+
+C2ConstLinearBlock C2LinearBlock::share(size_t offset, size_t size, C2Fence fence) {
+ return mImpl->share(offset, size, fence);
+}
+
+C2DefaultBlockAllocator::C2DefaultBlockAllocator(
+ const std::shared_ptr<C2Allocator> &allocator)
+ : mAllocator(allocator) {}
+
+C2Error C2DefaultBlockAllocator::allocateLinearBlock(
+ uint32_t capacity,
+ C2MemoryUsage usage,
+ std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+ block->reset();
+
+ std::shared_ptr<C2LinearAllocation> alloc;
+ C2Error err = mAllocator->allocateLinearBuffer(capacity, usage, &alloc);
+ if (err != C2_OK) {
+ return err;
+ }
+
+ block->reset(new C2DefaultLinearBlock(alloc));
+
+ return C2_OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/codec2/vndk/include/C2BufferPriv.h b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
new file mode 100644
index 0000000..bfb069c
--- /dev/null
+++ b/media/libstagefright/codec2/vndk/include/C2BufferPriv.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
+#define STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
+
+#include <functional>
+
+#include <C2Buffer.h>
+
+namespace android {
+
+class C2AllocatorIon : public C2Allocator {
+public:
+ // (usage, capacity) => (align, heapMask, flags)
+ typedef std::function<int (C2MemoryUsage, size_t,
+ /* => */ size_t*, unsigned*, unsigned*)> usage_mapper_fn;
+
+ virtual C2Error allocateLinearBuffer(
+ uint32_t capacity, C2MemoryUsage usage,
+ std::shared_ptr<C2LinearAllocation> *allocation) override;
+
+ virtual C2Error recreateLinearBuffer(
+ const C2Handle *handle,
+ std::shared_ptr<C2LinearAllocation> *allocation) override;
+
+ C2AllocatorIon();
+
+ C2Error status() const { return mInit; }
+
+ virtual ~C2AllocatorIon();
+
+private:
+ C2Error mInit;
+ int mIonFd;
+ usage_mapper_fn mUsageMapper;
+};
+
+class C2DefaultBlockAllocator : public C2BlockAllocator {
+public:
+ explicit C2DefaultBlockAllocator(const std::shared_ptr<C2Allocator> &allocator);
+
+ virtual ~C2DefaultBlockAllocator() = default;
+
+ virtual C2Error allocateLinearBlock(
+ uint32_t capacity,
+ C2MemoryUsage usage,
+ std::shared_ptr<C2LinearBlock> *block /* nonnull */) override;
+
+ // TODO:
+private:
+ const std::shared_ptr<C2Allocator> mAllocator;
+};
+
+#if 0
+class C2Allocation::Impl {
+public:
+ Impl() : mMapped(false), mBase(nullptr) { }
+ uint8_t* base() { return mMapped ? mBase : nullptr; }
+
+ // TODO: call map...
+
+private:
+ bool mMapped;
+ uint8_t *mBase;
+};
+#endif
+
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
index 6e04c1e..1daaf49 100644
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ b/media/libstagefright/codecs/aacdec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_aacdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"SoftAAC2.cpp",
diff --git a/media/libstagefright/codecs/aacenc/Android.bp b/media/libstagefright/codecs/aacenc/Android.bp
index 1a7ffca..4b478ea 100644
--- a/media/libstagefright/codecs/aacenc/Android.bp
+++ b/media/libstagefright/codecs/aacenc/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_aacenc",
+ vendor_available: true,
srcs: [
"basic_op/basicop2.c",
@@ -111,6 +112,10 @@
cc_library_shared {
name: "libstagefright_soft_aacenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftAACEncoder2.cpp"],
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
index c5ac558..5177593 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ b/media/libstagefright/codecs/amrnb/common/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_amrnb_common",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"src/add.cpp",
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.bp b/media/libstagefright/codecs/amrnb/dec/Android.bp
index 996183b..a61fb57 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_amrnbdec",
+ vendor_available: true,
srcs: [
"src/a_refl.cpp",
@@ -55,13 +56,20 @@
// ],
//},
- shared_libs: ["libstagefright_amrnb_common"],
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ "liblog",
+ ],
}
//###############################################################################
cc_library_shared {
name: "libstagefright_soft_amrdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftAMR.cpp"],
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
index af0f8c2..04ed07f 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_amrnbenc",
+ vendor_available: true,
srcs: [
"src/amrencode.cpp",
@@ -83,6 +84,10 @@
cc_library_shared {
name: "libstagefright_soft_amrnbenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftAMRNBEncoder.cpp"],
diff --git a/media/libstagefright/codecs/amrwb/Android.bp b/media/libstagefright/codecs/amrwb/Android.bp
index e261c04..b932ccc 100644
--- a/media/libstagefright/codecs/amrwb/Android.bp
+++ b/media/libstagefright/codecs/amrwb/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_amrwbdec",
+ vendor_available: true,
srcs: [
"src/agc2_amr_wb.cpp",
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 5c5a122..d337cde 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_amrwbenc",
+ vendor_available: true,
srcs: [
"src/autocorr.c",
@@ -144,6 +145,10 @@
cc_library_shared {
name: "libstagefright_soft_amrwbenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftAMRWBEncoder.cpp"],
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 6b996a7..3fa8d7f 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_avcdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
static_libs: ["libavcdec"],
srcs: ["SoftAVCDec.cpp"],
@@ -12,7 +16,7 @@
],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index 49021a9..6c4311b 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_avcenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
static_libs: ["libavcenc"],
srcs: ["SoftAVCEnc.cpp"],
@@ -13,7 +17,7 @@
],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libutils",
"liblog",
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 55ae60d..fe5a17b 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -27,7 +27,6 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
#include <OMX_IndexExt.h>
#include <OMX_VideoExt.h>
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 021e6af..3726922 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,5 +1,9 @@
cc_library {
name: "libstagefright_enc_common",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["cmnMemory.c"],
diff --git a/media/libstagefright/codecs/flac/dec/Android.bp b/media/libstagefright/codecs/flac/dec/Android.bp
index 6ac264d..5652594 100644
--- a/media/libstagefright/codecs/flac/dec/Android.bp
+++ b/media/libstagefright/codecs/flac/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_flacdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"SoftFlacDecoder.cpp",
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index d1413f6..6197157 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -22,7 +22,7 @@
},
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
@@ -32,5 +32,9 @@
static_libs: ["libFLAC"],
name: "libstagefright_soft_flacenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
}
diff --git a/media/libstagefright/codecs/g711/dec/Android.bp b/media/libstagefright/codecs/g711/dec/Android.bp
index b78b689..0e6f468 100644
--- a/media/libstagefright/codecs/g711/dec/Android.bp
+++ b/media/libstagefright/codecs/g711/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_g711dec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftG711.cpp"],
@@ -9,7 +13,7 @@
],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libutils",
"liblog",
diff --git a/media/libstagefright/codecs/gsm/dec/Android.bp b/media/libstagefright/codecs/gsm/dec/Android.bp
index 8e86ad6..7be86a4 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.bp
+++ b/media/libstagefright/codecs/gsm/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_gsmdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftGSM.cpp"],
@@ -23,7 +27,7 @@
},
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libutils",
"liblog",
diff --git a/media/libstagefright/codecs/hevcdec/Android.bp b/media/libstagefright/codecs/hevcdec/Android.bp
index cd75c97..3fd1652 100644
--- a/media/libstagefright/codecs/hevcdec/Android.bp
+++ b/media/libstagefright/codecs/hevcdec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_hevcdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
static_libs: ["libhevcdec"],
srcs: ["SoftHEVC.cpp"],
@@ -22,7 +26,7 @@
},
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index 04ea075..2619131 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -1,5 +1,7 @@
cc_library_static {
name: "libstagefright_m4vh263dec",
+ vendor_available: true,
+ shared_libs: ["liblog"],
srcs: [
"src/adaptive_smooth_no_mmx.cpp",
@@ -66,6 +68,10 @@
cc_library_shared {
name: "libstagefright_soft_mpeg4dec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftMPEG4.cpp"],
@@ -87,7 +93,7 @@
static_libs: ["libstagefright_m4vh263dec"],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index da5b162..919b9d4 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_m4vh263enc",
+ vendor_available: true,
srcs: [
"src/bitstream_io.cpp",
@@ -52,6 +53,10 @@
cc_library_shared {
name: "libstagefright_soft_mpeg4enc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftMPEG4Encoder.cpp"],
@@ -75,7 +80,7 @@
static_libs: ["libstagefright_m4vh263enc"],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libutils",
"liblog",
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 6d4cb69..7b90a01 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -29,7 +29,6 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
#include "SoftMPEG4Encoder.h"
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index 0d0a2c6..b2e8f9b 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_mp3dec",
+ vendor_available: true,
srcs: [
"src/pvmp3_normalize.cpp",
@@ -77,6 +78,10 @@
cc_library_shared {
name: "libstagefright_soft_mp3dec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftMP3.cpp"],
@@ -102,7 +107,7 @@
},
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.bp b/media/libstagefright/codecs/mpeg2dec/Android.bp
index 0144581..ed51797 100644
--- a/media/libstagefright/codecs/mpeg2dec/Android.bp
+++ b/media/libstagefright/codecs/mpeg2dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_mpeg2dec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
static_libs: ["libmpeg2dec"],
srcs: ["SoftMPEG2.cpp"],
@@ -12,7 +16,7 @@
],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index c4242c2..249ab92 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_vpxdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftVPX.cpp"],
@@ -11,7 +15,7 @@
static_libs: ["libvpx"],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 114c1be..0284719 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_vpxenc",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"SoftVPXEncoder.cpp",
@@ -26,7 +30,7 @@
static_libs: ["libvpx"],
shared_libs: [
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/opus/dec/Android.bp b/media/libstagefright/codecs/opus/dec/Android.bp
index 5d9c4c8..d7569a9 100644
--- a/media/libstagefright/codecs/opus/dec/Android.bp
+++ b/media/libstagefright/codecs/opus/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_opusdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftOpus.cpp"],
@@ -10,13 +14,15 @@
shared_libs: [
"libopus",
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
"liblog",
],
+ cflags: ["-Werror"],
+
sanitize: {
misc_undefined: [
"signed-integer-overflow",
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index d1f5e59..813004b 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -62,6 +62,7 @@
mSeekPreRoll(0),
mAnchorTimeUs(0),
mNumFramesOutput(0),
+ mHaveEOS(false),
mOutputPortSettingsChange(NONE) {
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
@@ -384,7 +385,31 @@
return static_cast<double>(ns) * kRate / 1000000000;
}
-void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
+void SoftOpus::handleEOS() {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+ CHECK(!inQueue.empty() && !outQueue.empty());
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mHaveEOS = true;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ ++mInputBufferCount;
+}
+
+void SoftOpus::onQueueFilled(OMX_U32 /* portIndex */) {
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
@@ -392,104 +417,108 @@
return;
}
- if (portIndex == 0 && mInputBufferCount < 3) {
- BufferInfo *info = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *header = info->mHeader;
-
- const uint8_t *data = header->pBuffer + header->nOffset;
- size_t size = header->nFilledLen;
-
- if (mInputBufferCount == 0) {
- CHECK(mHeader == NULL);
- mHeader = new OpusHeader();
- memset(mHeader, 0, sizeof(*mHeader));
- if (!ParseOpusHeader(data, size, mHeader)) {
- ALOGV("Parsing Opus Header failed.");
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
-
- uint8_t channel_mapping[kMaxChannels] = {0};
- if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
- memcpy(&channel_mapping,
- kDefaultOpusChannelLayout,
- kMaxChannelsWithDefaultLayout);
- } else {
- memcpy(&channel_mapping,
- mHeader->stream_map,
- mHeader->channels);
- }
-
- int status = OPUS_INVALID_STATE;
- mDecoder = opus_multistream_decoder_create(kRate,
- mHeader->channels,
- mHeader->num_streams,
- mHeader->num_coupled,
- channel_mapping,
- &status);
- if (!mDecoder || status != OPUS_OK) {
- ALOGV("opus_multistream_decoder_create failed status=%s",
- opus_strerror(status));
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
- status =
- opus_multistream_decoder_ctl(mDecoder,
- OPUS_SET_GAIN(mHeader->gain_db));
- if (status != OPUS_OK) {
- ALOGV("Failed to set OPUS header gain; status=%s",
- opus_strerror(status));
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
- } else if (mInputBufferCount == 1) {
- mCodecDelay = ns_to_samples(
- *(reinterpret_cast<int64_t*>(header->pBuffer +
- header->nOffset)),
- kRate);
- mSamplesToDiscard = mCodecDelay;
- } else {
- mSeekPreRoll = ns_to_samples(
- *(reinterpret_cast<int64_t*>(header->pBuffer +
- header->nOffset)),
- kRate);
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- }
-
- inQueue.erase(inQueue.begin());
- info->mOwnedByUs = false;
- notifyEmptyBufferDone(header);
- ++mInputBufferCount;
- return;
- }
-
- while (!inQueue.empty() && !outQueue.empty()) {
+ while (!mHaveEOS && !inQueue.empty() && !outQueue.empty()) {
BufferInfo *inInfo = *inQueue.begin();
OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- // Ignore CSD re-submissions.
- if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ if (mInputBufferCount < 3) {
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+ size_t size = inHeader->nFilledLen;
+
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
+ handleEOS();
+ return;
+ }
+
+ if (mInputBufferCount == 0) {
+ CHECK(mHeader == NULL);
+ mHeader = new OpusHeader();
+ memset(mHeader, 0, sizeof(*mHeader));
+ if (!ParseOpusHeader(data, size, mHeader)) {
+ ALOGV("Parsing Opus Header failed.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ uint8_t channel_mapping[kMaxChannels] = {0};
+ if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+ } else {
+ memcpy(&channel_mapping,
+ mHeader->stream_map,
+ mHeader->channels);
+ }
+
+ int status = OPUS_INVALID_STATE;
+ mDecoder = opus_multistream_decoder_create(kRate,
+ mHeader->channels,
+ mHeader->num_streams,
+ mHeader->num_coupled,
+ channel_mapping,
+ &status);
+ if (!mDecoder || status != OPUS_OK) {
+ ALOGV("opus_multistream_decoder_create failed status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ status =
+ opus_multistream_decoder_ctl(mDecoder,
+ OPUS_SET_GAIN(mHeader->gain_db));
+ if (status != OPUS_OK) {
+ ALOGV("Failed to set OPUS header gain; status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ } else if (mInputBufferCount == 1) {
+ mCodecDelay = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
+ inHeader->nOffset)),
+ kRate);
+ mSamplesToDiscard = mCodecDelay;
+ } else {
+ mSeekPreRoll = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
+ inHeader->nOffset)),
+ kRate);
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ handleEOS();
+ return;
+ }
+
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inHeader);
- return;
+ ++mInputBufferCount;
+
+ continue;
+ }
+
+ // Ignore CSD re-submissions.
+ if (mInputBufferCount >= 3 && (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ handleEOS();
+ return;
+ }
+
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ continue;
}
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
+ handleEOS();
return;
}
@@ -539,7 +568,6 @@
}
outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
- outHeader->nFlags = 0;
outHeader->nTimeStamp = mAnchorTimeUs +
(mNumFramesOutput * 1000000ll) /
@@ -548,22 +576,20 @@
mNumFramesOutput += numFrames;
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mHaveEOS = true;
} else {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ outHeader->nFlags = 0;
}
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ notifyEmptyBufferDone(inHeader);
+ ++mInputBufferCount;
+
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
- outInfo = NULL;
notifyFillBufferDone(outHeader);
- outHeader = NULL;
-
- ++mInputBufferCount;
}
}
@@ -575,6 +601,7 @@
opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
mAnchorTimeUs = 0;
mSamplesToDiscard = mSeekPreRoll;
+ mHaveEOS = false;
}
}
@@ -591,6 +618,7 @@
}
mOutputPortSettingsChange = NONE;
+ mHaveEOS = false;
}
void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index fab925d..91cafa1 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -75,6 +75,7 @@
int64_t mSamplesToDiscard;
int64_t mAnchorTimeUs;
int64_t mNumFramesOutput;
+ bool mHaveEOS;
enum {
NONE,
@@ -85,6 +86,7 @@
void initPorts();
status_t initDecoder();
bool isConfigured() const;
+ void handleEOS();
DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
};
diff --git a/media/libstagefright/codecs/raw/Android.bp b/media/libstagefright/codecs/raw/Android.bp
index c64027b..1bd75c6 100644
--- a/media/libstagefright/codecs/raw/Android.bp
+++ b/media/libstagefright/codecs/raw/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_rawdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftRaw.cpp"],
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.bp b/media/libstagefright/codecs/vorbis/dec/Android.bp
index 1a4de60..fedfb67 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.bp
+++ b/media/libstagefright/codecs/vorbis/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_soft_vorbisdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: ["SoftVorbis.cpp"],
@@ -10,7 +14,7 @@
shared_libs: [
"libvorbisidec",
- "libmedia",
+ "libmedia_omx",
"libstagefright_omx",
"libstagefright_foundation",
"libutils",
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 96e01b6..8912f8a 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -309,7 +309,33 @@
oggpack_readinit(bits, ref);
}
-void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
+void SoftVorbis::handleEOS() {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ CHECK(!inQueue.empty() && !outQueue.empty());
+
+ mSawInputEos = true;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ mSignalledOutputEos = true;
+
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ ++mInputBufferCount;
+}
+
+void SoftVorbis::onQueueFilled(OMX_U32 /* portIndex */) {
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
@@ -317,69 +343,7 @@
return;
}
- if (portIndex == 0 && mInputBufferCount < 2) {
- BufferInfo *info = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *header = info->mHeader;
-
- const uint8_t *data = header->pBuffer + header->nOffset;
- size_t size = header->nFilledLen;
- if (size < 7) {
- ALOGE("Too small input buffer: %zu bytes", size);
- android_errorWriteLog(0x534e4554, "27833616");
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- mSignalledError = true;
- return;
- }
-
- ogg_buffer buf;
- ogg_reference ref;
- oggpack_buffer bits;
-
- makeBitReader(
- (const uint8_t *)data + 7, size - 7,
- &buf, &ref, &bits);
-
- if (mInputBufferCount == 0) {
- CHECK(mVi == NULL);
- mVi = new vorbis_info;
- vorbis_info_init(mVi);
-
- int ret = _vorbis_unpack_info(mVi, &bits);
- if (ret != 0) {
- notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
- mSignalledError = true;
- return;
- }
- } else {
- int ret = _vorbis_unpack_books(mVi, &bits);
- if (ret != 0) {
- notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
- mSignalledError = true;
- return;
- }
-
- CHECK(mState == NULL);
- mState = new vorbis_dsp_state;
- CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
-
- if (mVi->rate != kDefaultSamplingRate ||
- mVi->channels != kDefaultChannelCount) {
- ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- }
- }
-
- inQueue.erase(inQueue.begin());
- info->mOwnedByUs = false;
- notifyEmptyBufferDone(header);
-
- ++mInputBufferCount;
-
- return;
- }
-
- while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+ while (!mSignalledOutputEos && (!inQueue.empty() || mSawInputEos) && !outQueue.empty()) {
BufferInfo *inInfo = NULL;
OMX_BUFFERHEADERTYPE *inHeader = NULL;
if (!inQueue.empty()) {
@@ -393,6 +357,73 @@
int32_t numPageSamples = 0;
if (inHeader) {
+ if (mInputBufferCount < 2) {
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+ size_t size = inHeader->nFilledLen;
+
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && size == 0) {
+ handleEOS();
+ return;
+ }
+
+ if (size < 7) {
+ ALOGE("Too small input buffer: %zu bytes", size);
+ android_errorWriteLog(0x534e4554, "27833616");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ ogg_buffer buf;
+ ogg_reference ref;
+ oggpack_buffer bits;
+
+ makeBitReader((const uint8_t *)data + 7, size - 7, &buf, &ref, &bits);
+
+ if (mInputBufferCount == 0) {
+ CHECK(mVi == NULL);
+ mVi = new vorbis_info;
+ vorbis_info_init(mVi);
+
+ int ret = _vorbis_unpack_info(mVi, &bits);
+ if (ret != 0) {
+ notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
+ mSignalledError = true;
+ return;
+ }
+ } else {
+ int ret = _vorbis_unpack_books(mVi, &bits);
+ if (ret != 0) {
+ notify(OMX_EventError, OMX_ErrorUndefined, ret, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ CHECK(mState == NULL);
+ mState = new vorbis_dsp_state;
+ CHECK_EQ(0, vorbis_dsp_init(mState, mVi));
+
+ if (mVi->rate != kDefaultSamplingRate ||
+ mVi->channels != kDefaultChannelCount) {
+ ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels);
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+ }
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ handleEOS();
+ return;
+ }
+
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ ++mInputBufferCount;
+
+ continue;
+ }
+
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
mSawInputEos = true;
}
@@ -406,8 +437,7 @@
return;
}
memcpy(&numPageSamples,
- inHeader->pBuffer
- + inHeader->nOffset + inHeader->nFilledLen - 4,
+ inHeader->pBuffer + inHeader->nOffset + inHeader->nFilledLen - 4,
sizeof(numPageSamples));
if (inHeader->nOffset == 0) {
@@ -446,6 +476,14 @@
int numFrames = 0;
outHeader->nFlags = 0;
+
+ if (mState == nullptr || mVi == nullptr) {
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
+ mSignalledError = true;
+ ALOGE("onQueueFilled, input does not have CSD");
+ return;
+ }
+
int err = vorbis_dsp_synthesis(mState, &pack, 1);
if (err != 0) {
// FIXME temporary workaround for log spam
@@ -495,18 +533,13 @@
if (inHeader) {
inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
- inInfo = NULL;
notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ ++mInputBufferCount;
}
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
- outInfo = NULL;
notifyFillBufferDone(outHeader);
- outHeader = NULL;
-
- ++mInputBufferCount;
}
}
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index 52d1632..5ff8ea4 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -72,6 +72,7 @@
void initPorts();
status_t initDecoder();
bool isConfigured() const;
+ void handleEOS();
DISALLOW_EVIL_CONSTRUCTORS(SoftVorbis);
};
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 3ca7cc0..0982006 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
#include <media/stagefright/MediaErrors.h>
#include "libyuv/convert_from.h"
+#include "libyuv/video_common.h"
#define USE_LIBYUV
@@ -41,17 +42,17 @@
}
bool ColorConverter::isValid() const {
- if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
- return false;
- }
-
switch (mSrcFormat) {
case OMX_COLOR_FormatYUV420Planar:
+ return mDstFormat == OMX_COLOR_Format16bitRGB565
+ || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+ || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
+
case OMX_COLOR_FormatCbYCrY:
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
case OMX_COLOR_FormatYUV420SemiPlanar:
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
- return true;
+ return mDstFormat == OMX_COLOR_Format16bitRGB565;
default:
return false;
@@ -62,14 +63,43 @@
void *bits,
size_t width, size_t height,
size_t cropLeft, size_t cropTop,
- size_t cropRight, size_t cropBottom)
+ size_t cropRight, size_t cropBottom,
+ OMX_COLOR_FORMATTYPE colorFormat)
: mBits(bits),
+ mColorFormat(colorFormat),
mWidth(width),
mHeight(height),
mCropLeft(cropLeft),
mCropTop(cropTop),
mCropRight(cropRight),
mCropBottom(cropBottom) {
+ switch(mColorFormat) {
+ case OMX_COLOR_Format16bitRGB565:
+ mBpp = 2;
+ mStride = 2 * mWidth;
+ break;
+
+ case OMX_COLOR_Format32bitBGRA8888:
+ case OMX_COLOR_Format32BitRGBA8888:
+ mBpp = 4;
+ mStride = 4 * mWidth;
+ break;
+
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatCbYCrY:
+ case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ mBpp = 1;
+ mStride = mWidth;
+ break;
+
+ default:
+ ALOGE("Unsupported color format %d", mColorFormat);
+ mBpp = 1;
+ mStride = mWidth;
+ break;
+ }
}
size_t ColorConverter::BitmapParams::cropWidth() const {
@@ -89,19 +119,15 @@
size_t dstWidth, size_t dstHeight,
size_t dstCropLeft, size_t dstCropTop,
size_t dstCropRight, size_t dstCropBottom) {
- if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
- return ERROR_UNSUPPORTED;
- }
-
BitmapParams src(
const_cast<void *>(srcBits),
srcWidth, srcHeight,
- srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+ srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
BitmapParams dst(
dstBits,
dstWidth, dstHeight,
- dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+ dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
status_t err;
@@ -212,26 +238,104 @@
return ERROR_UNSUPPORTED;
}
- uint16_t *dst_ptr = (uint16_t *)dst.mBits
- + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+ (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
const uint8_t *src_u =
- (const uint8_t *)src_y + src.mWidth * src.mHeight
- + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+ (const uint8_t *)src.mBits + src.mStride * src.mHeight
+ + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
const uint8_t *src_v =
- src_u + (src.mWidth / 2) * (src.mHeight / 2);
+ src_u + (src.mStride / 2) * (src.mHeight / 2);
+ switch (mDstFormat) {
+ case OMX_COLOR_Format16bitRGB565:
+ libyuv::I420ToRGB565(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+ (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+ break;
- libyuv::I420ToRGB565(src_y, src.mWidth, src_u, src.mWidth / 2, src_v, src.mWidth / 2,
- (uint8 *)dst_ptr, dst.mWidth * 2, dst.mWidth, dst.mHeight);
+ case OMX_COLOR_Format32BitRGBA8888:
+ libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+ (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ABGR);
+ break;
+
+ case OMX_COLOR_Format32bitBGRA8888:
+ libyuv::ConvertFromI420(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
+ (uint8 *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight(), libyuv::FOURCC_ARGB);
+ break;
+
+ default:
+ return ERROR_UNSUPPORTED;
+ }
return OK;
}
+void ColorConverter::writeToDst(
+ void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+ signed r1, signed g1, signed b1,
+ signed r2, signed g2, signed b2) {
+ switch (mDstFormat) {
+ case OMX_COLOR_Format16bitRGB565:
+ {
+ uint32_t rgb1 =
+ ((kAdjustedClip[r1] >> 3) << 11)
+ | ((kAdjustedClip[g1] >> 2) << 5)
+ | (kAdjustedClip[b1] >> 3);
+
+ if (uncropped) {
+ uint32_t rgb2 =
+ ((kAdjustedClip[r2] >> 3) << 11)
+ | ((kAdjustedClip[g2] >> 2) << 5)
+ | (kAdjustedClip[b2] >> 3);
+
+ *(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
+ } else {
+ *(uint16_t *)dst_ptr = rgb1;
+ }
+ break;
+ }
+ case OMX_COLOR_Format32BitRGBA8888:
+ {
+ ((uint32_t *)dst_ptr)[0] =
+ (kAdjustedClip[r1])
+ | (kAdjustedClip[g1] << 8)
+ | (kAdjustedClip[b1] << 16)
+ | (0xFF << 24);
+
+ if (uncropped) {
+ ((uint32_t *)dst_ptr)[1] =
+ (kAdjustedClip[r2])
+ | (kAdjustedClip[g2] << 8)
+ | (kAdjustedClip[b2] << 16)
+ | (0xFF << 24);
+ }
+ break;
+ }
+ case OMX_COLOR_Format32bitBGRA8888:
+ {
+ ((uint32_t *)dst_ptr)[0] =
+ (kAdjustedClip[b1])
+ | (kAdjustedClip[g1] << 8)
+ | (kAdjustedClip[r1] << 16)
+ | (0xFF << 24);
+
+ if (uncropped) {
+ ((uint32_t *)dst_ptr)[1] =
+ (kAdjustedClip[b2])
+ | (kAdjustedClip[g2] << 8)
+ | (kAdjustedClip[r2] << 16)
+ | (0xFF << 24);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+}
status_t ColorConverter::convertYUV420Planar(
const BitmapParams &src, const BitmapParams &dst) {
if (!((src.mCropLeft & 1) == 0
@@ -242,18 +346,18 @@
uint8_t *kAdjustedClip = initClip();
- uint16_t *dst_ptr = (uint16_t *)dst.mBits
- + dst.mCropTop * dst.mWidth + dst.mCropLeft;
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
+ (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
const uint8_t *src_u =
- (const uint8_t *)src_y + src.mWidth * src.mHeight
- + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
+ (const uint8_t *)src.mBits + src.mStride * src.mHeight
+ + (src.mCropTop / 2) * (src.mStride / 2) + src.mCropLeft / 2;
const uint8_t *src_v =
- src_u + (src.mWidth / 2) * (src.mHeight / 2);
+ src_u + (src.mStride / 2) * (src.mHeight / 2);
for (size_t y = 0; y < src.cropHeight(); ++y) {
for (size_t x = 0; x < src.cropWidth(); x += 2) {
@@ -296,31 +400,19 @@
signed g2 = (tmp2 + v_g + u_g) / 256;
signed r2 = (tmp2 + v_r) / 256;
- uint32_t rgb1 =
- ((kAdjustedClip[r1] >> 3) << 11)
- | ((kAdjustedClip[g1] >> 2) << 5)
- | (kAdjustedClip[b1] >> 3);
-
- uint32_t rgb2 =
- ((kAdjustedClip[r2] >> 3) << 11)
- | ((kAdjustedClip[g2] >> 2) << 5)
- | (kAdjustedClip[b2] >> 3);
-
- if (x + 1 < src.cropWidth()) {
- *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
- } else {
- dst_ptr[x] = rgb1;
- }
+ bool uncropped = x + 1 < src.cropWidth();
+ (void)writeToDst(dst_ptr + x * dst.mBpp,
+ kAdjustedClip, uncropped, r1, g1, b1, r2, g2, b2);
}
- src_y += src.mWidth;
+ src_y += src.mStride;
if (y & 1) {
- src_u += src.mWidth / 2;
- src_v += src.mWidth / 2;
+ src_u += src.mStride / 2;
+ src_v += src.mStride / 2;
}
- dst_ptr += dst.mWidth;
+ dst_ptr += dst.mStride;
}
return OK;
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index ce164a2..a127843 100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -34,20 +34,21 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.decoder" type="video/avc">
- <!-- profiles and levels: ProfileHigh : Level41 -->
- <Limit name="size" min="16x16" max="1920x1088" />
+ <!-- profiles and levels: ProfileHigh : Level52 -->
+ <Limit name="size" min="2x2" max="4096x4096" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="blocks-per-second" range="1-244800" />
- <Limit name="bitrate" range="1-12000000" />
+ <Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 -->
+ <Limit name="blocks-per-second" range="1-1966080" />
+ <Limit name="bitrate" range="1-48000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="size" min="2x2" max="4096x4096" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
- <Limit name="block-count" range="1-139264" />
+ <Limit name="block-count" range="1-196608" /> <!-- max 4096x3072 -->
<Limit name="blocks-per-second" range="1-2000000" />
<Limit name="bitrate" range="1-10000000" />
<Feature name="adaptive-playback" />
@@ -56,6 +57,7 @@
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-16384" />
<Limit name="blocks-per-second" range="1-1000000" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
@@ -64,6 +66,7 @@
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-16384" />
<Limit name="blocks-per-second" range="1-500000" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
@@ -79,10 +82,11 @@
</MediaCodec>
<MediaCodec name="OMX.google.h264.encoder" type="video/avc">
<!-- profiles and levels: ProfileBaseline : Level41 -->
- <Limit name="size" min="16x16" max="1920x1088" />
+ <Limit name="size" min="16x16" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="blocks-per-second" range="1-244800" />
+ <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+ <Limit name="blocks-per-second" range="1-245760" />
<Limit name="bitrate" range="1-12000000" />
<Feature name="intra-refresh" />
</MediaCodec>
@@ -98,6 +102,9 @@
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <!-- 2016 devices can encode at about 10fps at this block count -->
+ <Limit name="block-count" range="1-16384" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
@@ -105,6 +112,9 @@
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <!-- 2016 devices can encode at about 8fps at this block count -->
+ <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
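The block-count limits added above follow from the size limits divided into 16x16 blocks; the inline "max WxH" comments spell out the target resolutions. A minimal arithmetic sketch, assuming block-count is simply ceil(width/16) * ceil(height/16) (blockCount() is an illustrative helper, not part of the patch), reproduces the numbers used in these entries:

    #include <cstdio>

    // Hypothetical helper: number of 16x16 blocks needed to cover a frame,
    // assuming block-count = ceil(width/16) * ceil(height/16).
    static constexpr int blockCount(int width, int height, int blockDim = 16) {
        return ((width + blockDim - 1) / blockDim) * ((height + blockDim - 1) / blockDim);
    }

    int main() {
        std::printf("%d\n", blockCount(4096, 2048));  // 32768 -> AVC decoder block-count limit
        std::printf("%d\n", blockCount(2048, 1024));  // 8192  -> AVC encoder block-count limit
        std::printf("%d\n", blockCount(1280, 720));   // 3600  -> "max 1280x720" encoder limit
        return 0;
    }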
diff --git a/media/libstagefright/data/media_codecs_google_video_le.xml b/media/libstagefright/data/media_codecs_google_video_le.xml
index 034a038..d7c6570 100644
--- a/media/libstagefright/data/media_codecs_google_video_le.xml
+++ b/media/libstagefright/data/media_codecs_google_video_le.xml
@@ -34,22 +34,22 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.decoder" type="video/avc">
- <!-- profiles and levels: ProfileBaseline : Level51 -->
+ <!-- profiles and levels: ProfileHigh : Level51 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="block-count" range="1-8160" />
- <Limit name="blocks-per-second" range="1-489600" />
+ <Limit name="block-count" range="1-16384" />
+ <Limit name="blocks-per-second" range="1-491520" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="2x2" max="1280x1280" />
+ <Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
- <Limit name="block-count" range="1-139264" />
- <Limit name="blocks-per-second" range="1-432000" />
+ <Limit name="block-count" range="1-65536" />
+ <Limit name="blocks-per-second" range="1-491520" />
<Limit name="bitrate" range="1-5000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
@@ -57,7 +57,7 @@
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="block-count" range="1-8160" />
+ <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
<Limit name="blocks-per-second" range="1-500000" />
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
@@ -66,7 +66,7 @@
<Limit name="size" min="2x2" max="1280x1280" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="block-count" range="1-3600" />
+ <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
<Limit name="blocks-per-second" range="1-108000" />
<Limit name="bitrate" range="1-5000000" />
<Feature name="adaptive-playback" />
@@ -81,12 +81,14 @@
<Limit name="bitrate" range="1-128000" />
</MediaCodec>
<MediaCodec name="OMX.google.h264.encoder" type="video/avc">
- <!-- profiles and levels: ProfileBaseline : Level2 -->
- <Limit name="size" min="16x16" max="896x896" />
- <Limit name="alignment" value="16x16" />
+ <!-- profiles and levels: ProfileBaseline : Level3 -->
+ <Limit name="size" min="16x16" max="1808x1808" />
+ <Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
- <Limit name="blocks-per-second" range="1-11880" />
+ <Limit name="block-count" range="1-1620" />
+ <Limit name="blocks-per-second" range="1-40500" />
<Limit name="bitrate" range="1-2000000" />
+ <Feature name="intra-refresh" />
</MediaCodec>
<MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es">
<!-- profiles and levels: ProfileCore : Level2 -->
@@ -100,7 +102,8 @@
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="1280x1280" />
<Limit name="alignment" value="2x2" />
- <Limit name="block-count" range="1-3600" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
<Limit name="bitrate" range="1-20000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 284c25f..1b9fe0f 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,5 +1,9 @@
cc_library_shared {
name: "libstagefright_flacdec",
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"FLACDecoder.cpp",
@@ -31,4 +35,5 @@
"libstagefright_foundation",
"libutils",
],
+ header_libs: ["libmedia_headers"],
}
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 9108ce1..221af1d 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -7,6 +7,9 @@
cc_library_shared {
name: "libstagefright_foundation",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
include_dirs: [
"frameworks/av/include",
"frameworks/native/include",
diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp
index 7da7db9..cc89064 100644
--- a/media/libstagefright/foundation/base64.cpp
+++ b/media/libstagefright/foundation/base64.cpp
@@ -78,8 +78,7 @@
accum = (accum << 6) | value;
if (((i + 1) % 4) == 0) {
- out[j++] = (accum >> 16);
-
+ if (j < outLen) { out[j++] = (accum >> 16); }
if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
if (j < outLen) { out[j++] = accum & 0xff; }
diff --git a/media/libstagefright/include/ItemTable.h b/media/libstagefright/include/ItemTable.h
new file mode 100644
index 0000000..5a6af5e
--- /dev/null
+++ b/media/libstagefright/include/ItemTable.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ITEM_TABLE_H_
+#define ITEM_TABLE_H_
+
+#include <set>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DataSource;
+class MetaData;
+
+namespace heif {
+
+struct AssociationEntry;
+struct ImageItem;
+struct ItemLoc;
+struct ItemInfo;
+struct ItemProperty;
+struct ItemReference;
+
+/*
+ * ItemTable keeps track of all image items (including coded images, grids and
+ * tiles) inside a HEIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
+ */
+
+class ItemTable : public RefBase {
+public:
+ explicit ItemTable(const sp<DataSource> &source);
+
+ status_t parse(uint32_t type, off64_t offset, size_t size);
+
+ bool isValid() { return mImageItemsValid; }
+ sp<MetaData> getImageMeta();
+ uint32_t countImages() const;
+ status_t findPrimaryImage(uint32_t *imageIndex);
+ status_t findThumbnail(uint32_t *thumbnailIndex);
+ status_t getImageOffsetAndSize(
+ uint32_t *imageIndex, off64_t *offset, size_t *size);
+
+protected:
+ ~ItemTable();
+
+private:
+ sp<DataSource> mDataSource;
+
+ KeyedVector<uint32_t, ItemLoc> mItemLocs;
+ Vector<ItemInfo> mItemInfos;
+ Vector<AssociationEntry> mAssociations;
+ Vector<sp<ItemProperty> > mItemProperties;
+ Vector<sp<ItemReference> > mItemReferences;
+
+ uint32_t mPrimaryItemId;
+ off64_t mIdatOffset;
+ size_t mIdatSize;
+
+ std::set<uint32_t> mRequiredBoxes;
+ std::set<uint32_t> mBoxesSeen;
+
+ bool mImageItemsValid;
+ uint32_t mCurrentImageIndex;
+ KeyedVector<uint32_t, ImageItem> mItemIdToImageMap;
+
+ status_t parseIlocBox(off64_t offset, size_t size);
+ status_t parseIinfBox(off64_t offset, size_t size);
+ status_t parsePitmBox(off64_t offset, size_t size);
+ status_t parseIprpBox(off64_t offset, size_t size);
+ status_t parseIdatBox(off64_t offset, size_t size);
+ status_t parseIrefBox(off64_t offset, size_t size);
+
+ void attachProperty(const AssociationEntry &association);
+ status_t buildImageItemsIfPossible(uint32_t type);
+
+ DISALLOW_EVIL_CONSTRUCTORS(ItemTable);
+};
+
+} // namespace heif
+} // namespace android
+
+#endif // ITEM_TABLE_H_
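The ItemTable interface above is parse-then-query: the extractor feeds it the relevant HEIF boxes via parse(), then asks for the primary image and its byte range. A hedged caller sketch, using only the methods declared in this header (readPrimaryImage(), the include paths, and the commented parse() call are illustrative assumptions, not part of the patch):

    #include <media/stagefright/DataSource.h>
    #include <media/stagefright/MediaErrors.h>
    #include "ItemTable.h"   // path depends on the build setup

    using namespace android;

    // Hypothetical caller: how an extractor might drive ItemTable once the
    // HEIF 'meta' boxes have been located.
    static status_t readPrimaryImage(const sp<DataSource> &source) {
        sp<heif::ItemTable> table = new heif::ItemTable(source);

        // The real extractor calls parse() once per box it encounters, with the
        // box type, offset and size coming from the surrounding box parser, e.g.
        //   table->parse(type, boxOffset, boxSize);

        if (!table->isValid()) {
            return ERROR_MALFORMED;   // no usable image items were found
        }

        uint32_t imageIndex = 0;
        status_t err = table->findPrimaryImage(&imageIndex);
        if (err != OK) {
            return err;
        }

        off64_t offset = 0;
        size_t size = 0;
        err = table->getImageOffsetAndSize(&imageIndex, &offset, &size);
        if (err != OK) {
            return err;
        }
        // The coded image is the `size` bytes at `offset` in `source`.
        return OK;
    }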
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index f847119..4a4c538 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -28,11 +28,14 @@
#include <utils/String8.h>
namespace android {
-
struct AMessage;
class DataSource;
class SampleTable;
class String8;
+namespace heif {
+class ItemTable;
+}
+using heif::ItemTable;
struct SidxEntry {
size_t mSize;
@@ -97,6 +100,7 @@
status_t mInitCheck;
uint32_t mHeaderTimescale;
bool mIsQT;
+ bool mIsHEIF;
Track *mFirstTrack, *mLastTrack;
@@ -134,6 +138,8 @@
SINF *mFirstSINF;
bool mIsDrm;
+ sp<ItemTable> mItemTable;
+
status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b7ac718..277eb3e 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -38,9 +38,9 @@
const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setDataSource(const sp<DataSource>& source);
+ virtual status_t setDataSource(const sp<DataSource>& source, const char *mime);
- virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option);
+ virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option, int colorFormat, bool metaOnly);
virtual MediaAlbumArt *extractAlbumArt();
virtual const char *extractMetadata(int keyCode);
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 270c809..7ac9b37 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -49,14 +49,17 @@
void *bits,
size_t width, size_t height,
size_t cropLeft, size_t cropTop,
- size_t cropRight, size_t cropBottom);
+ size_t cropRight, size_t cropBottom,
+ OMX_COLOR_FORMATTYPE colorFormat);
size_t cropWidth() const;
size_t cropHeight() const;
void *mBits;
+ OMX_COLOR_FORMATTYPE mColorFormat;
size_t mWidth, mHeight;
size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+ size_t mBpp, mStride;
};
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
@@ -82,6 +85,10 @@
status_t convertTIYUV420PackedSemiPlanar(
const BitmapParams &src, const BitmapParams &dst);
+ void writeToDst(void *dst_ptr, uint8_t *kAdjustedClip, bool uncropped,
+ signed r1, signed g1, signed b1,
+ signed r2, signed g2, signed b2);
+
ColorConverter(const ColorConverter &);
ColorConverter &operator=(const ColorConverter &);
};
diff --git a/media/libstagefright/include/media/stagefright/DataSource.h b/media/libstagefright/include/media/stagefright/DataSource.h
index 63eccea..bd863ba 100644
--- a/media/libstagefright/include/media/stagefright/DataSource.h
+++ b/media/libstagefright/include/media/stagefright/DataSource.h
@@ -73,6 +73,11 @@
bool getUInt32(off64_t offset, uint32_t *x);
bool getUInt64(off64_t offset, uint64_t *x);
+ // Read either an int<N> or an int<2N> into a uint<2N>_t; "size" is the integer size in bytes.
+ bool getUInt16Var(off64_t offset, uint16_t *x, size_t size);
+ bool getUInt32Var(off64_t offset, uint32_t *x, size_t size);
+ bool getUInt64Var(off64_t offset, uint64_t *x, size_t size);
+
// Reads in "count" entries of type T into vector *x.
// Returns true if "count" entries can be read.
// If fewer than "count" entries can be read, return false. In this case,
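The getUInt*Var() helpers added above read an integer whose on-disk width is either N or 2N bits into the wider type. A hedged usage sketch, assuming the common ISO-BMFF pattern where a box's version byte selects a 16-bit or 32-bit count field (readItemCount() and its arguments are illustrative, not part of the patch):

    #include <media/stagefright/DataSource.h>
    #include <media/stagefright/MediaErrors.h>

    using namespace android;

    // Hypothetical parser helper: a count field stored as 16 bits in version 0
    // of a box and 32 bits in version 1, read via the new getUInt32Var().
    static status_t readItemCount(
            const sp<DataSource> &source, off64_t offset, uint8_t version,
            uint32_t *itemCount) {
        const size_t fieldSize = (version == 0) ? 2 : 4;  // size on disk, in bytes
        if (!source->getUInt32Var(offset, itemCount, fieldSize)) {
            return ERROR_IO;
        }
        return OK;
    }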
diff --git a/media/libstagefright/include/media/stagefright/MetaData.h b/media/libstagefright/include/media/stagefright/MetaData.h
index 9676b97..65ad3e4 100644
--- a/media/libstagefright/include/media/stagefright/MetaData.h
+++ b/media/libstagefright/include/media/stagefright/MetaData.h
@@ -38,6 +38,8 @@
kKeyDisplayHeight = 'dHgt', // int32_t, display/presentation
kKeySARWidth = 'sarW', // int32_t, sampleAspectRatio width
kKeySARHeight = 'sarH', // int32_t, sampleAspectRatio height
+ kKeyThumbnailWidth = 'thbW', // int32_t, thumbnail width
+ kKeyThumbnailHeight = 'thbH', // int32_t, thumbnail height
// a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
kKeyCropRect = 'crop',
@@ -58,6 +60,7 @@
kKeyAACProfile = 'aacp', // int32_t
kKeyAVCC = 'avcc', // raw data
kKeyHVCC = 'hvcc', // raw data
+ kKeyThumbnailHVCC = 'thvc', // raw data
kKeyD263 = 'd263', // raw data
kKeyVorbisInfo = 'vinf', // raw data
kKeyVorbisBooks = 'vboo', // raw data
@@ -209,6 +212,10 @@
// color Matrix, value defined by ColorAspects.MatrixCoeffs.
kKeyTemporalLayerId = 'iLyr', // int32_t, temporal layer-id. 0-based (0 => base layer)
kKeyTemporalLayerCount = 'cLyr', // int32_t, number of temporal layers encoded
+
+ kKeyGridRows = 'rows', // int32_t, HEIF grid rows
+ kKeyGridCols = 'clms', // int32_t, HEIF grid columns
+ kKeyIccProfile = 'prof', // raw data
};
enum {
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 88a416a..77cbd4c 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -95,7 +95,7 @@
void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */);
AString nameForFd(int fd);
-
+void MakeFourCCString(uint32_t x, char *s);
} // namespace android
#endif // UTILS_H_
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 3027cdd..bb05740 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -1,6 +1,9 @@
cc_library_shared {
name: "libstagefright_omx",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"FrameDropper.cpp",
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 09e6d75..87c2411 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -213,6 +213,13 @@
Mutex::Autolock autoLock(mLock);
CHECK_LT(portIndex, mPorts.size());
+ PortInfo *port = &mPorts.editItemAt(portIndex);
+ if (size < port->mDef.nBufferSize) {
+ ALOGE("b/63522430, Buffer size is too small.");
+ android_errorWriteLog(0x534e4554, "63522430");
+ return OMX_ErrorBadParameter;
+ }
+
*header = new OMX_BUFFERHEADERTYPE;
(*header)->nSize = sizeof(OMX_BUFFERHEADERTYPE);
(*header)->nVersion.s.nVersionMajor = 1;
@@ -235,8 +242,6 @@
(*header)->nOutputPortIndex = portIndex;
(*header)->nInputPortIndex = portIndex;
- PortInfo *port = &mPorts.editItemAt(portIndex);
-
CHECK(mState == OMX_StateLoaded || port->mDef.bEnabled == OMX_FALSE);
CHECK_LT(port->mBuffers.size(), port->mDef.nBufferCountActual);
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 24ed981..cb811a0 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -182,11 +182,11 @@
uint32_t SoftVideoDecoderOMXComponent::outputBufferWidth() {
- return mIsAdaptive ? mAdaptiveMaxWidth : mWidth;
+ return max(mIsAdaptive ? mAdaptiveMaxWidth : 0, mWidth);
}
uint32_t SoftVideoDecoderOMXComponent::outputBufferHeight() {
- return mIsAdaptive ? mAdaptiveMaxHeight : mHeight;
+ return max(mIsAdaptive ? mAdaptiveMaxHeight : 0, mHeight);
}
void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 8e22f9e..ab893de 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -1,6 +1,9 @@
cc_library_shared {
name: "libstagefright_xmlparser",
vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
srcs: [
"MediaCodecsXmlParser.cpp",
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index c50af2f..4132fed 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -719,9 +719,22 @@
}
int MtpFfsHandle::sendEvent(mtp_event me) {
+ // Mimic the behavior of f_mtp by sending the event async.
+ // Events aren't critical to the connection, so we don't need to check the return value.
+ char *temp = new char[me.length];
+ memcpy(temp, me.data, me.length);
+ me.data = temp;
+ std::thread t([this, me]() { return this->doSendEvent(me); });
+ t.detach();
+ return 0;
+}
+
+void MtpFfsHandle::doSendEvent(mtp_event me) {
unsigned length = me.length;
- int ret = writeHandle(mIntr, me.data, length);
- return static_cast<unsigned>(ret) == length ? 0 : -1;
+ int ret = ::write(mIntr, me.data, length);
+ if (static_cast<unsigned>(ret) != length)
+ PLOG(ERROR) << "Mtp error sending event from async thread!";
+ delete[] reinterpret_cast<char*>(me.data);
}
} // namespace android
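The new sendEvent() path copies the event payload and then writes it from a detached thread, so the caller never blocks on the interrupt endpoint. A generic sketch of that copy-then-detach pattern, with a plain file descriptor and buffer standing in for mtp_event and the FFS endpoint (sendAsync() is illustrative only, not the MTP code itself):

    #include <cstring>
    #include <thread>
    #include <unistd.h>

    // Copy the payload, then fire-and-forget from a detached thread.
    void sendAsync(int fd, const void *data, size_t length) {
        char *copy = new char[length];       // caller's buffer may be reused, so copy it
        std::memcpy(copy, data, length);

        std::thread t([fd, copy, length]() {
            ::write(fd, copy, length);       // result intentionally ignored, as in the patch
            delete[] copy;
        });
        t.detach();                          // sendAsync() returns without waiting
    }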
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 98669ff..b637d65 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -33,6 +33,7 @@
bool initFunctionfs();
void closeConfig();
void closeEndpoints();
+ void doSendEvent(mtp_event me);
bool mPtp;
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 40974f3..0d48de1 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -90,3 +90,9 @@
},
},
}
+
+llndk_library {
+ name: "libmediandk",
+ symbol_file: "libmediandk.map.txt",
+ export_include_dirs: ["include"],
+}
diff --git a/media/ndk/OWNERS b/media/ndk/OWNERS
new file mode 100644
index 0000000..43e4bb3
--- /dev/null
+++ b/media/ndk/OWNERS
@@ -0,0 +1 @@
+marcone@google.com
diff --git a/media/utils/OWNERS b/media/utils/OWNERS
new file mode 100644
index 0000000..f9cb567
--- /dev/null
+++ b/media/utils/OWNERS
@@ -0,0 +1 @@
+gkasten@google.com
diff --git a/services/OWNERS b/services/OWNERS
index d500dce..d5d00da 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -1,4 +1,4 @@
elaurent@google.com
etalvala@google.com
-gkasten@android.com
+gkasten@google.com
hunga@google.com
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3a95a3b..be62e6c 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1561,7 +1561,7 @@
// ----------------------------------------------------------------------------
-sp<IAudioRecord> AudioFlinger::openRecord(
+sp<media::IAudioRecord> AudioFlinger::openRecord(
audio_io_handle_t input,
uint32_t sampleRate,
audio_format_t format,
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 9023b2d..1b7d875 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,7 +33,6 @@
#include <media/IAudioFlinger.h>
#include <media/IAudioFlingerClient.h>
#include <media/IAudioTrack.h>
-#include <media/IAudioRecord.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <media/MmapStreamInterface.h>
@@ -76,6 +75,8 @@
#include <private/media/AudioEffectShared.h>
#include <private/media/AudioTrackShared.h>
+#include "android/media/BnAudioRecord.h"
+
namespace android {
class AudioMixer;
@@ -129,7 +130,7 @@
status_t *status /*non-NULL*/,
audio_port_handle_t portId);
- virtual sp<IAudioRecord> openRecord(
+ virtual sp<media::IAudioRecord> openRecord(
audio_io_handle_t input,
uint32_t sampleRate,
audio_format_t format,
@@ -571,15 +572,13 @@
};
// server side of the client's IAudioRecord
- class RecordHandle : public android::BnAudioRecord {
+ class RecordHandle : public android::media::BnAudioRecord {
public:
explicit RecordHandle(const sp<RecordThread::RecordTrack>& recordTrack);
virtual ~RecordHandle();
- virtual status_t start(int /*AudioSystem::sync_event_t*/ event,
- audio_session_t triggerSession);
- virtual void stop();
- virtual status_t onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+ virtual binder::Status start(int /*AudioSystem::sync_event_t*/ event,
+ int /*audio_session_t*/ triggerSession);
+ virtual binder::Status stop();
private:
const sp<RecordThread::RecordTrack> mRecordTrack;
@@ -604,7 +603,7 @@
virtual status_t standby();
private:
- sp<MmapThread> mThread;
+ const sp<MmapThread> mThread;
};
ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 6378a14..2bc7bd5 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7508,34 +7508,22 @@
AudioFlinger::MmapThreadHandle::MmapThreadHandle(const sp<MmapThread>& thread)
: mThread(thread)
{
+ assert(thread != 0); // thread must start non-null and stay non-null
}
AudioFlinger::MmapThreadHandle::~MmapThreadHandle()
{
- MmapThread *thread = mThread.get();
- // clear our strong reference before disconnecting the thread: the last strong reference
- // will be removed when closeInput/closeOutput is executed upon call from audio policy manager
- // and the thread removed from mMMapThreads list causing the thread destruction.
- mThread.clear();
- if (thread != nullptr) {
- thread->disconnect();
- }
+ mThread->disconnect();
}
status_t AudioFlinger::MmapThreadHandle::createMmapBuffer(int32_t minSizeFrames,
struct audio_mmap_buffer_info *info)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->createMmapBuffer(minSizeFrames, info);
}
status_t AudioFlinger::MmapThreadHandle::getMmapPosition(struct audio_mmap_position *position)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->getMmapPosition(position);
}
@@ -7543,25 +7531,16 @@
audio_port_handle_t *handle)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->start(client, handle);
}
status_t AudioFlinger::MmapThreadHandle::stop(audio_port_handle_t handle)
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->stop(handle);
}
status_t AudioFlinger::MmapThreadHandle::standby()
{
- if (mThread == 0) {
- return NO_INIT;
- }
return mThread->standby();
}
@@ -7593,7 +7572,7 @@
for (const sp<MmapTrack> &t : mActiveTracks) {
stop(t->portId());
}
- // this will cause the destruction of this thread.
+ // This will decrement references and may cause the destruction of this thread.
if (isOutput()) {
AudioSystem::releaseOutput(mId, streamType(), mSessionId);
} else {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 0f25153..16eeccc 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1561,14 +1561,16 @@
mRecordTrack->destroy();
}
-status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
- audio_session_t triggerSession) {
+binder::Status AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
+ int /*audio_session_t*/ triggerSession) {
ALOGV("RecordHandle::start()");
- return mRecordTrack->start((AudioSystem::sync_event_t)event, triggerSession);
+ return binder::Status::fromStatusT(
+ mRecordTrack->start((AudioSystem::sync_event_t)event, (audio_session_t) triggerSession));
}
-void AudioFlinger::RecordHandle::stop() {
+binder::Status AudioFlinger::RecordHandle::stop() {
stop_nonvirtual();
+ return binder::Status::ok();
}
void AudioFlinger::RecordHandle::stop_nonvirtual() {
@@ -1576,12 +1578,6 @@
mRecordTrack->stop();
}
-status_t AudioFlinger::RecordHandle::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- return BnAudioRecord::onTransact(code, data, reply, flags);
-}
-
// ----------------------------------------------------------------------------
// RecordTrack constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1a7db26..2c88e47 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -54,6 +54,11 @@
//FIXME: workaround for truncated touch sounds
// to be removed when the problem is handled by system UI
#define TOUCH_SOUND_FIXED_DELAY_MS 100
+
+// Largest difference in dB on earpiece in call between the voice volume and another
+// media / notification / system volume.
+constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
+
// ----------------------------------------------------------------------------
// AudioPolicyInterface implementation
// ----------------------------------------------------------------------------
@@ -869,48 +874,6 @@
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
status_t status;
-#ifdef AUDIO_POLICY_TEST
- if (mCurOutput != 0) {
- ALOGV("getOutput() test output mCurOutput %d, samplingRate %d, format %d, channelMask %x, mDirectOutput %d",
- mCurOutput, mTestSamplingRate, mTestFormat, mTestChannels, mDirectOutput);
-
- if (mTestOutputs[mCurOutput] == 0) {
- ALOGV("getOutput() opening test output");
- sp<AudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(NULL,
- mpClientInterface);
- outputDesc->mDevice = mTestDevice;
- outputDesc->mLatency = mTestLatencyMs;
- outputDesc->mFlags =
- (audio_output_flags_t)(mDirectOutput ? AUDIO_OUTPUT_FLAG_DIRECT : 0);
- outputDesc->mRefCount[stream] = 0;
- audio_config_t config = AUDIO_CONFIG_INITIALIZER;
- config.sample_rate = mTestSamplingRate;
- config.channel_mask = mTestChannels;
- config.format = mTestFormat;
- if (offloadInfo != NULL) {
- config.offload_info = *offloadInfo;
- }
- status = mpClientInterface->openOutput(0,
- &mTestOutputs[mCurOutput],
- &config,
- &outputDesc->mDevice,
- String8(""),
- &outputDesc->mLatency,
- outputDesc->mFlags);
- if (status == NO_ERROR) {
- outputDesc->mSamplingRate = config.sample_rate;
- outputDesc->mFormat = config.format;
- outputDesc->mChannelMask = config.channel_mask;
- AudioParameter outputCmd = AudioParameter();
- outputCmd.addInt(String8("set_id"),mCurOutput);
- mpClientInterface->setParameters(mTestOutputs[mCurOutput],outputCmd.toString());
- addOutput(mTestOutputs[mCurOutput], outputDesc);
- }
- }
- return mTestOutputs[mCurOutput];
- }
-#endif //AUDIO_POLICY_TEST
-
// open a direct output if required by specified parameters
//force direct flag if offload flag is set: offloading implies a direct output stream
// and all common behaviors are driven by checking only the direct flag
@@ -1449,19 +1412,6 @@
return;
}
-#ifdef AUDIO_POLICY_TEST
- int testIndex = testOutputIndex(output);
- if (testIndex != 0) {
- sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index);
- if (outputDesc->isActive()) {
- mpClientInterface->closeOutput(output);
- removeOutput(output);
- mTestOutputs[testIndex] = 0;
- }
- return;
- }
-#endif //AUDIO_POLICY_TEST
-
// Routing
mOutputRoutes.removeRoute(session);
@@ -3564,9 +3514,6 @@
AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface)
:
-#ifdef AUDIO_POLICY_TEST
- Thread(false),
-#endif //AUDIO_POLICY_TEST
mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
mA2dpSuspended(false),
mAudioPortGeneration(1),
@@ -3807,37 +3754,10 @@
ALOGE_IF((mPrimaryOutput == 0), "Failed to open primary output");
updateDevicesAndOutputs();
-
-#ifdef AUDIO_POLICY_TEST
- if (mPrimaryOutput != 0) {
- AudioParameter outputCmd = AudioParameter();
- outputCmd.addInt(String8("set_id"), 0);
- mpClientInterface->setParameters(mPrimaryOutput->mIoHandle, outputCmd.toString());
-
- mTestDevice = AUDIO_DEVICE_OUT_SPEAKER;
- mTestSamplingRate = 44100;
- mTestFormat = AUDIO_FORMAT_PCM_16_BIT;
- mTestChannels = AUDIO_CHANNEL_OUT_STEREO;
- mTestLatencyMs = 0;
- mCurOutput = 0;
- mDirectOutput = false;
- for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
- mTestOutputs[i] = 0;
- }
-
- const size_t SIZE = 256;
- char buffer[SIZE];
- snprintf(buffer, SIZE, "AudioPolicyManagerTest");
- run(buffer, ANDROID_PRIORITY_AUDIO);
- }
-#endif //AUDIO_POLICY_TEST
}
AudioPolicyManager::~AudioPolicyManager()
{
-#ifdef AUDIO_POLICY_TEST
- exit();
-#endif //AUDIO_POLICY_TEST
for (size_t i = 0; i < mOutputs.size(); i++) {
mpClientInterface->closeOutput(mOutputs.keyAt(i));
}
@@ -3856,164 +3776,6 @@
return hasPrimaryOutput() ? NO_ERROR : NO_INIT;
}
-#ifdef AUDIO_POLICY_TEST
-bool AudioPolicyManager::threadLoop()
-{
- ALOGV("entering threadLoop()");
- while (!exitPending())
- {
- String8 command;
- int valueInt;
- String8 value;
-
- Mutex::Autolock _l(mLock);
- mWaitWorkCV.waitRelative(mLock, milliseconds(50));
-
- command = mpClientInterface->getParameters(0, String8("test_cmd_policy"));
- AudioParameter param = AudioParameter(command);
-
- if (param.getInt(String8("test_cmd_policy"), valueInt) == NO_ERROR &&
- valueInt != 0) {
- ALOGV("Test command %s received", command.string());
- String8 target;
- if (param.get(String8("target"), target) != NO_ERROR) {
- target = "Manager";
- }
- if (param.getInt(String8("test_cmd_policy_output"), valueInt) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_output"));
- mCurOutput = valueInt;
- }
- if (param.get(String8("test_cmd_policy_direct"), value) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_direct"));
- if (value == "false") {
- mDirectOutput = false;
- } else if (value == "true") {
- mDirectOutput = true;
- }
- }
- if (param.getInt(String8("test_cmd_policy_input"), valueInt) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_input"));
- mTestInput = valueInt;
- }
-
- if (param.get(String8("test_cmd_policy_format"), value) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_format"));
- int format = AUDIO_FORMAT_INVALID;
- if (value == "PCM 16 bits") {
- format = AUDIO_FORMAT_PCM_16_BIT;
- } else if (value == "PCM 8 bits") {
- format = AUDIO_FORMAT_PCM_8_BIT;
- } else if (value == "Compressed MP3") {
- format = AUDIO_FORMAT_MP3;
- }
- if (format != AUDIO_FORMAT_INVALID) {
- if (target == "Manager") {
- mTestFormat = format;
- } else if (mTestOutputs[mCurOutput] != 0) {
- AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8(AudioParameter::keyStreamSupportedFormats), format);
- mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
- }
- }
- }
- if (param.get(String8("test_cmd_policy_channels"), value) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_channels"));
- int channels = 0;
-
- if (value == "Channels Stereo") {
- channels = AUDIO_CHANNEL_OUT_STEREO;
- } else if (value == "Channels Mono") {
- channels = AUDIO_CHANNEL_OUT_MONO;
- }
- if (channels != 0) {
- if (target == "Manager") {
- mTestChannels = channels;
- } else if (mTestOutputs[mCurOutput] != 0) {
- AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8(AudioParameter::keyStreamSupportedChannels), channels);
- mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
- }
- }
- }
- if (param.getInt(String8("test_cmd_policy_sampleRate"), valueInt) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_sampleRate"));
- if (valueInt >= 0 && valueInt <= 96000) {
- int samplingRate = valueInt;
- if (target == "Manager") {
- mTestSamplingRate = samplingRate;
- } else if (mTestOutputs[mCurOutput] != 0) {
- AudioParameter outputParam = AudioParameter();
- outputParam.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), samplingRate);
- mpClientInterface->setParameters(mTestOutputs[mCurOutput], outputParam.toString());
- }
- }
- }
-
- if (param.get(String8("test_cmd_policy_reopen"), value) == NO_ERROR) {
- param.remove(String8("test_cmd_policy_reopen"));
-
- mpClientInterface->closeOutput(mpClientInterface->closeOutput(mPrimaryOutput););
-
- audio_module_handle_t moduleHandle = mPrimaryOutput->getModuleHandle();
-
- removeOutput(mPrimaryOutput->mIoHandle);
- sp<SwAudioOutputDescriptor> outputDesc = new AudioOutputDescriptor(NULL,
- mpClientInterface);
- outputDesc->mDevice = AUDIO_DEVICE_OUT_SPEAKER;
- audio_config_t config = AUDIO_CONFIG_INITIALIZER;
- config.sample_rate = outputDesc->mSamplingRate;
- config.channel_mask = outputDesc->mChannelMask;
- config.format = outputDesc->mFormat;
- audio_io_handle_t handle;
- status_t status = mpClientInterface->openOutput(moduleHandle,
- &handle,
- &config,
- &outputDesc->mDevice,
- String8(""),
- &outputDesc->mLatency,
- outputDesc->mFlags);
- if (status != NO_ERROR) {
- ALOGE("Failed to reopen hardware output stream, "
- "samplingRate: %d, format %d, channels %d",
- outputDesc->mSamplingRate, outputDesc->mFormat, outputDesc->mChannelMask);
- } else {
- outputDesc->mSamplingRate = config.sample_rate;
- outputDesc->mChannelMask = config.channel_mask;
- outputDesc->mFormat = config.format;
- mPrimaryOutput = outputDesc;
- AudioParameter outputCmd = AudioParameter();
- outputCmd.addInt(String8("set_id"), 0);
- mpClientInterface->setParameters(handle, outputCmd.toString());
- addOutput(handle, outputDesc);
- }
- }
-
-
- mpClientInterface->setParameters(0, String8("test_cmd_policy="));
- }
- }
- return false;
-}
-
-void AudioPolicyManager::exit()
-{
- {
- AutoMutex _l(mLock);
- requestExit();
- mWaitWorkCV.signal();
- }
- requestExitAndWait();
-}
-
-int AudioPolicyManager::testOutputIndex(audio_io_handle_t output)
-{
- for (int i = 0; i < NUM_TEST_OUTPUTS; i++) {
- if (output == mTestOutputs[i]) return i;
- }
- return 0;
-}
-#endif //AUDIO_POLICY_TEST
-
// ---
void AudioPolicyManager::addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc)
@@ -5348,6 +5110,30 @@
return ringVolumeDB - 4 > volumeDB ? ringVolumeDB - 4 : volumeDB;
}
+ // in-call: always cap earpiece volume by voice volume + some low headroom
+ if ((stream != AUDIO_STREAM_VOICE_CALL) && (device & AUDIO_DEVICE_OUT_EARPIECE) && isInCall()) {
+ switch (stream) {
+ case AUDIO_STREAM_SYSTEM:
+ case AUDIO_STREAM_RING:
+ case AUDIO_STREAM_MUSIC:
+ case AUDIO_STREAM_ALARM:
+ case AUDIO_STREAM_NOTIFICATION:
+ case AUDIO_STREAM_ENFORCED_AUDIBLE:
+ case AUDIO_STREAM_DTMF:
+ case AUDIO_STREAM_ACCESSIBILITY: {
+ const float maxVoiceVolDb = computeVolume(AUDIO_STREAM_VOICE_CALL, index, device)
+ + IN_CALL_EARPIECE_HEADROOM_DB;
+ if (volumeDB > maxVoiceVolDb) {
+ ALOGV("computeVolume() stream %d at vol=%f overriden by stream %d at vol=%f",
+ stream, volumeDB, AUDIO_STREAM_VOICE_CALL, maxVoiceVolDb);
+ volumeDB = maxVoiceVolDb;
+ }
+ } break;
+ default:
+ break;
+ }
+ }
+
// if a headset is connected, apply the following rules to ring tones and notifications
// to avoid sound level bursts in user's ears:
// - always attenuate notifications volume by 6dB
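The headroom cap added in computeVolume() means that, during a call, an earpiece media/notification/system stream can be at most IN_CALL_EARPIECE_HEADROOM_DB louder than the voice stream. A worked example with assumed volume values (the numbers are illustrative, not taken from the patch):

    #include <algorithm>
    #include <cstdio>

    constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;

    int main() {
        float voiceVolumeDB = -20.f;   // assumed computeVolume() result for VOICE_CALL
        float musicVolumeDB = -10.f;   // assumed result for MUSIC on the earpiece
        float cappedDB = std::min(musicVolumeDB,
                                  voiceVolumeDB + IN_CALL_EARPIECE_HEADROOM_DB);
        std::printf("music capped to %.1f dB\n", cappedDB);  // prints -17.0 dB
        return 0;
    }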
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 82c4c35..7ba0669 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -76,10 +76,6 @@
// ----------------------------------------------------------------------------
class AudioPolicyManager : public AudioPolicyInterface, public AudioPolicyManagerObserver
-
-#ifdef AUDIO_POLICY_TEST
- , public Thread
-#endif //AUDIO_POLICY_TEST
{
public:
@@ -419,11 +415,6 @@
{
return mEffects.getMaxEffectsMemory();
}
-#ifdef AUDIO_POLICY_TEST
- virtual bool threadLoop();
- void exit();
- int testOutputIndex(audio_io_handle_t output);
-#endif //AUDIO_POLICY_TEST
SortedVector<audio_io_handle_t> getOutputsForDevice(audio_devices_t device,
const SwAudioOutputCollection& openOutputs);
@@ -574,22 +565,6 @@
AudioPolicyMixCollection mPolicyMixes; // list of registered mixes
audio_io_handle_t mMusicEffectOutput; // output selected for music effects
-
-#ifdef AUDIO_POLICY_TEST
- Mutex mLock;
- Condition mWaitWorkCV;
-
- int mCurOutput;
- bool mDirectOutput;
- audio_io_handle_t mTestOutputs[NUM_TEST_OUTPUTS];
- int mTestInput;
- uint32_t mTestDevice;
- uint32_t mTestSamplingRate;
- uint32_t mTestFormat;
- uint32_t mTestChannels;
- uint32_t mTestLatencyMs;
-#endif //AUDIO_POLICY_TEST
-
uint32_t nextAudioPortGeneration();
// Audio Policy Engine Interface.
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
old mode 100644
new mode 100755
index d6d8dde..4981ce7
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -396,7 +396,7 @@
}
// Read JFIF segment markers, skip over segment data
- size = 0;
+ size = MARKER_LENGTH; // skip over the SOI marker
while (size <= maxSize - MARKER_LENGTH) {
segment_t *segment = (segment_t*)(jpegBuffer + size);
uint8_t type = checkJpegMarker(segment->marker);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3d81cbb..29bc21c 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -313,11 +313,13 @@
binder::Status CameraDeviceClient::beginConfigure() {
// TODO: Implement this.
+ ATRACE_CALL();
ALOGV("%s: Not implemented yet.", __FUNCTION__);
return binder::Status::ok();
}
binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+ ATRACE_CALL();
ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
__FUNCTION__, mInputStream.configured ? 1 : 0,
mStreamMap.size());
@@ -568,7 +570,7 @@
/*out*/
int* newStreamId) {
int width, height, format, surfaceType;
- int32_t consumerUsage;
+ uint64_t consumerUsage;
android_dataspace dataSpace;
status_t err;
binder::Status res;
@@ -764,24 +766,23 @@
// Query consumer usage bits to set async operation mode for
// GLConsumer using controlledByApp parameter.
bool useAsync = false;
- int32_t consumerUsage;
+ uint64_t consumerUsage = 0;
status_t err;
- if ((err = gbp->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
- &consumerUsage)) != OK) {
+ if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
}
if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
- ALOGW("%s: Camera %s with consumer usage flag: 0x%x: Forcing asynchronous mode for stream",
+ ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for stream",
__FUNCTION__, mCameraIdStr.string(), consumerUsage);
useAsync = true;
}
- int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+ uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
GRALLOC_USAGE_RENDERSCRIPT;
- int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+ uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
GraphicBuffer::USAGE_HW_TEXTURE |
GraphicBuffer::USAGE_HW_COMPOSER;
bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
@@ -874,7 +875,7 @@
//surface class type. Use usage flag to approximate the comparison.
if (consumerUsage != streamInfo.consumerUsage) {
String8 msg = String8::format(
- "Camera %s:Surface usage flag doesn't match 0x%x vs 0x%x",
+ "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
mCameraIdStr.string(), consumerUsage, streamInfo.consumerUsage);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index e8fc080..50661cb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -213,13 +213,13 @@
int height;
int format;
android_dataspace dataSpace;
- int32_t consumerUsage;
+ uint64_t consumerUsage;
bool finalized = false;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
- int32_t _consumerUsage) :
+ uint64_t _consumerUsage) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
};
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index d9059f3..54fcb0a 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -119,7 +119,7 @@
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint32_t consumerUsage = 0) = 0;
+ bool isShared = false, uint64_t consumerUsage = 0) = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -132,7 +132,7 @@
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint32_t consumerUsage = 0) = 0;
+ bool isShared = false, uint64_t consumerUsage = 0) = 0;
/**
* Create an input stream of width, height, and format.
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
index 469c86c..991b50f 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
@@ -29,11 +29,6 @@
CameraHardwareInterface::~CameraHardwareInterface()
{
ALOGI("Destroying camera %s", mName.string());
- if (mDevice) {
- int rc = mDevice->common.close(&mDevice->common);
- if (rc != OK)
- ALOGE("Could not close camera %s: %d", mName.string(), rc);
- }
if (mHidlDevice != nullptr) {
mHidlDevice->close();
mHidlDevice.clear();
@@ -42,12 +37,6 @@
}
status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
- if (mDevice) {
- ALOGE("%s: camera hardware interface has been initialized to libhardware path!",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
-
ALOGI("Opening camera %s", mName.string());
status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
@@ -372,7 +361,7 @@
ALOGE("%s: preview window is null", __FUNCTION__);
return s;
}
- mPreviewUsage = (int) usage;
+ mPreviewUsage = static_cast<uint64_t> (usage);
int rc = native_window_set_usage(a, mPreviewUsage);
if (rc == OK) {
cleanupCirculatingBuffers();
@@ -444,23 +433,6 @@
}
return CameraProviderManager::mapToStatusT(
mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
- } else if (mDevice) {
- if (mDevice->ops->set_preview_window) {
- mPreviewWindow = buf;
- if (buf != nullptr) {
- if (mPreviewScalingMode != NOT_SET) {
- setPreviewScalingMode(mPreviewScalingMode);
- }
- if (mPreviewTransform != NOT_SET) {
- setPreviewTransform(mPreviewTransform);
- }
- }
- mHalPreviewWindow.user = this;
- ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p",__FUNCTION__,
- &mHalPreviewWindow, mHalPreviewWindow.user);
- return mDevice->ops->set_preview_window(mDevice,
- buf.get() ? &mHalPreviewWindow.nw : 0);
- }
}
return INVALID_OPERATION;
}
@@ -478,15 +450,6 @@
mCbUser = user;
ALOGV("%s(%s)", __FUNCTION__, mName.string());
-
- if (mDevice && mDevice->ops->set_callbacks) {
- mDevice->ops->set_callbacks(mDevice,
- sNotifyCb,
- sDataCb,
- sDataCbTimestamp,
- sGetMemory,
- this);
- }
}
void CameraHardwareInterface::enableMsgType(int32_t msgType)
@@ -494,8 +457,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
mHidlDevice->enableMsgType(msgType);
- } else if (mDevice && mDevice->ops->enable_msg_type) {
- mDevice->ops->enable_msg_type(mDevice, msgType);
}
}
@@ -504,8 +465,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
mHidlDevice->disableMsgType(msgType);
- } else if (mDevice && mDevice->ops->disable_msg_type) {
- mDevice->ops->disable_msg_type(mDevice, msgType);
}
}
@@ -514,8 +473,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
return mHidlDevice->msgTypeEnabled(msgType);
- } else if (mDevice && mDevice->ops->msg_type_enabled) {
- return mDevice->ops->msg_type_enabled(mDevice, msgType);
}
return false;
}
@@ -526,8 +483,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->startPreview());
- } else if (mDevice && mDevice->ops->start_preview) {
- return mDevice->ops->start_preview(mDevice);
}
return INVALID_OPERATION;
}
@@ -537,8 +492,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
mHidlDevice->stopPreview();
- } else if (mDevice && mDevice->ops->stop_preview) {
- mDevice->ops->stop_preview(mDevice);
}
}
@@ -547,8 +500,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
return mHidlDevice->previewEnabled();
- } else if (mDevice && mDevice->ops->preview_enabled) {
- return mDevice->ops->preview_enabled(mDevice);
}
return false;
}
@@ -559,8 +510,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->storeMetaDataInBuffers(enable));
- } else if (mDevice && mDevice->ops->store_meta_data_in_buffers) {
- return mDevice->ops->store_meta_data_in_buffers(mDevice, enable);
}
return enable ? INVALID_OPERATION: OK;
}
@@ -571,8 +520,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->startRecording());
- } else if (mDevice && mDevice->ops->start_recording) {
- return mDevice->ops->start_recording(mDevice);
}
return INVALID_OPERATION;
}
@@ -585,8 +532,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
mHidlDevice->stopRecording();
- } else if (mDevice && mDevice->ops->stop_recording) {
- mDevice->ops->stop_recording(mDevice);
}
}
@@ -598,8 +543,6 @@
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (CC_LIKELY(mHidlDevice != nullptr)) {
return mHidlDevice->recordingEnabled();
- } else if (mDevice && mDevice->ops->recording_enabled) {
- return mDevice->ops->recording_enabled(mDevice);
}
return false;
}
@@ -624,9 +567,6 @@
} else {
mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
}
- } else if (mDevice && mDevice->ops->release_recording_frame) {
- void *data = ((uint8_t *)heap->base()) + offset;
- return mDevice->ops->release_recording_frame(mDevice, data);
}
}
@@ -653,9 +593,6 @@
ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
return;
}
- } else {
- ALOGE("Non HIDL mode do not support %s", __FUNCTION__);
- return;
}
}
@@ -674,8 +611,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->autoFocus());
- } else if (mDevice && mDevice->ops->auto_focus) {
- return mDevice->ops->auto_focus(mDevice);
}
return INVALID_OPERATION;
}
@@ -686,8 +621,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->cancelAutoFocus());
- } else if (mDevice && mDevice->ops->cancel_auto_focus) {
- return mDevice->ops->cancel_auto_focus(mDevice);
}
return INVALID_OPERATION;
}
@@ -698,8 +631,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->takePicture());
- } else if (mDevice && mDevice->ops->take_picture) {
- return mDevice->ops->take_picture(mDevice);
}
return INVALID_OPERATION;
}
@@ -710,8 +641,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->cancelPicture());
- } else if (mDevice && mDevice->ops->cancel_picture) {
- return mDevice->ops->cancel_picture(mDevice);
}
return INVALID_OPERATION;
}
@@ -722,8 +651,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->setParameters(params.flatten().string()));
- } else if (mDevice && mDevice->ops->set_parameters) {
- return mDevice->ops->set_parameters(mDevice, params.flatten().string());
}
return INVALID_OPERATION;
}
@@ -740,14 +667,6 @@
});
String8 tmp(outParam.c_str());
parms.unflatten(tmp);
- } else if (mDevice && mDevice->ops->get_parameters) {
- char *temp = mDevice->ops->get_parameters(mDevice);
- String8 str_parms(temp);
- if (mDevice->ops->put_parameters)
- mDevice->ops->put_parameters(mDevice, temp);
- else
- free(temp);
- parms.unflatten(str_parms);
}
return parms;
}
@@ -758,8 +677,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
return CameraProviderManager::mapToStatusT(
mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
- } else if (mDevice && mDevice->ops->send_command) {
- return mDevice->ops->send_command(mDevice, cmd, arg1, arg2);
}
return INVALID_OPERATION;
}
@@ -773,8 +690,6 @@
if (CC_LIKELY(mHidlDevice != nullptr)) {
mHidlDevice->close();
mHidlDevice.clear();
- } else if (mDevice && mDevice->ops->release) {
- mDevice->ops->release(mDevice);
}
}
@@ -790,15 +705,10 @@
Status s = mHidlDevice->dumpState(handle);
native_handle_delete(handle);
return CameraProviderManager::mapToStatusT(s);
- } else if (mDevice && mDevice->ops->dump) {
- return mDevice->ops->dump(mDevice, fd);
}
return OK; // It's fine if the HAL doesn't implement dump()
}
-/**
- * Methods for legacy (non-HIDL) path follows
- */
void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
int32_t ext2, void *user)
{
@@ -868,177 +778,4 @@
mem->decStrong(mem);
}
-ANativeWindow* CameraHardwareInterface::sToAnw(void *user)
-{
- CameraHardwareInterface *object =
- reinterpret_cast<CameraHardwareInterface *>(user);
- return object->mPreviewWindow.get();
-}
-#define anw(n) sToAnw(((struct camera_preview_window *)(n))->user)
-#define hwi(n) reinterpret_cast<CameraHardwareInterface *>(\
- ((struct camera_preview_window *)(n))->user)
-
-int CameraHardwareInterface::sDequeueBuffer(struct preview_stream_ops* w,
- buffer_handle_t** buffer, int *stride)
-{
- int rc;
- ANativeWindow *a = anw(w);
- ANativeWindowBuffer* anb;
- rc = native_window_dequeue_buffer_and_wait(a, &anb);
- if (rc == OK) {
- *buffer = &anb->handle;
- *stride = anb->stride;
- }
- return rc;
-}
-
-#ifndef container_of
-#define container_of(ptr, type, member) ({ \
- const __typeof__(((type *) 0)->member) *__mptr = (ptr); \
- (type *) ((char *) __mptr - (char *)(&((type *)0)->member)); })
-#endif
-
-int CameraHardwareInterface::sLockBuffer(struct preview_stream_ops* w,
- buffer_handle_t* /*buffer*/)
-{
- ANativeWindow *a = anw(w);
- (void)a;
- return 0;
-}
-
-int CameraHardwareInterface::sEnqueueBuffer(struct preview_stream_ops* w,
- buffer_handle_t* buffer)
-{
- ANativeWindow *a = anw(w);
- return a->queueBuffer(a,
- container_of(buffer, ANativeWindowBuffer, handle), -1);
-}
-
-int CameraHardwareInterface::sCancelBuffer(struct preview_stream_ops* w,
- buffer_handle_t* buffer)
-{
- ANativeWindow *a = anw(w);
- return a->cancelBuffer(a,
- container_of(buffer, ANativeWindowBuffer, handle), -1);
-}
-
-int CameraHardwareInterface::sSetBufferCount(struct preview_stream_ops* w, int count)
-{
- ANativeWindow *a = anw(w);
-
- if (a != nullptr) {
- // Workaround for b/27039775
- // Previously, setting the buffer count would reset the buffer
- // queue's flag that allows for all buffers to be dequeued on the
- // producer side, instead of just the producer's declared max count,
- // if no filled buffers have yet been queued by the producer. This
- // reset no longer happens, but some HALs depend on this behavior,
- // so it needs to be maintained for HAL backwards compatibility.
- // Simulate the prior behavior by disconnecting/reconnecting to the
- // window and setting the values again. This has the drawback of
- // actually causing memory reallocation, which may not have happened
- // in the past.
- CameraHardwareInterface *hw = hwi(w);
- native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
- native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
- if (hw->mPreviewScalingMode != NOT_SET) {
- native_window_set_scaling_mode(a, hw->mPreviewScalingMode);
- }
- if (hw->mPreviewTransform != NOT_SET) {
- native_window_set_buffers_transform(a, hw->mPreviewTransform);
- }
- if (hw->mPreviewWidth != NOT_SET) {
- native_window_set_buffers_dimensions(a,
- hw->mPreviewWidth, hw->mPreviewHeight);
- native_window_set_buffers_format(a, hw->mPreviewFormat);
- }
- if (hw->mPreviewUsage != 0) {
- native_window_set_usage(a, hw->mPreviewUsage);
- }
- if (hw->mPreviewSwapInterval != NOT_SET) {
- a->setSwapInterval(a, hw->mPreviewSwapInterval);
- }
- if (hw->mPreviewCrop.left != NOT_SET) {
- native_window_set_crop(a, &(hw->mPreviewCrop));
- }
- }
-
- return native_window_set_buffer_count(a, count);
-}
-
-int CameraHardwareInterface::sSetBuffersGeometry(struct preview_stream_ops* w,
- int width, int height, int format)
-{
- int rc;
- ANativeWindow *a = anw(w);
- CameraHardwareInterface *hw = hwi(w);
- hw->mPreviewWidth = width;
- hw->mPreviewHeight = height;
- hw->mPreviewFormat = format;
- rc = native_window_set_buffers_dimensions(a, width, height);
- if (rc == OK) {
- rc = native_window_set_buffers_format(a, format);
- }
- return rc;
-}
-
-int CameraHardwareInterface::sSetCrop(struct preview_stream_ops *w,
- int left, int top, int right, int bottom)
-{
- ANativeWindow *a = anw(w);
- CameraHardwareInterface *hw = hwi(w);
- hw->mPreviewCrop.left = left;
- hw->mPreviewCrop.top = top;
- hw->mPreviewCrop.right = right;
- hw->mPreviewCrop.bottom = bottom;
- return native_window_set_crop(a, &(hw->mPreviewCrop));
-}
-
-int CameraHardwareInterface::sSetTimestamp(struct preview_stream_ops *w,
- int64_t timestamp) {
- ANativeWindow *a = anw(w);
- return native_window_set_buffers_timestamp(a, timestamp);
-}
-
-int CameraHardwareInterface::sSetUsage(struct preview_stream_ops* w, int usage)
-{
- ANativeWindow *a = anw(w);
- CameraHardwareInterface *hw = hwi(w);
- hw->mPreviewUsage = usage;
- return native_window_set_usage(a, usage);
-}
-
-int CameraHardwareInterface::sSetSwapInterval(struct preview_stream_ops *w, int interval)
-{
- ANativeWindow *a = anw(w);
- CameraHardwareInterface *hw = hwi(w);
- hw->mPreviewSwapInterval = interval;
- return a->setSwapInterval(a, interval);
-}
-
-int CameraHardwareInterface::sGetMinUndequeuedBufferCount(
- const struct preview_stream_ops *w,
- int *count)
-{
- ANativeWindow *a = anw(w);
- return a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, count);
-}
-
-void CameraHardwareInterface::initHalPreviewWindow()
-{
- mHalPreviewWindow.nw.cancel_buffer = sCancelBuffer;
- mHalPreviewWindow.nw.lock_buffer = sLockBuffer;
- mHalPreviewWindow.nw.dequeue_buffer = sDequeueBuffer;
- mHalPreviewWindow.nw.enqueue_buffer = sEnqueueBuffer;
- mHalPreviewWindow.nw.set_buffer_count = sSetBufferCount;
- mHalPreviewWindow.nw.set_buffers_geometry = sSetBuffersGeometry;
- mHalPreviewWindow.nw.set_crop = sSetCrop;
- mHalPreviewWindow.nw.set_timestamp = sSetTimestamp;
- mHalPreviewWindow.nw.set_usage = sSetUsage;
- mHalPreviewWindow.nw.set_swap_interval = sSetSwapInterval;
-
- mHalPreviewWindow.nw.get_min_undequeued_buffer_count =
- sGetMinUndequeuedBufferCount;
-}
-
}; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 1c38d00..6a1b4fb 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -90,7 +90,6 @@
public:
explicit CameraHardwareInterface(const char *name):
- mDevice(nullptr),
mHidlDevice(nullptr),
mName(name),
mPreviewScalingMode(NOT_SET),
@@ -299,7 +298,6 @@
status_t dump(int fd, const Vector<String16>& /*args*/) const;
private:
- camera_device_t *mDevice;
sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
String8 mName;
@@ -369,41 +367,6 @@
static void sPutMemory(camera_memory_t *data);
- static ANativeWindow *sToAnw(void *user);
-
- static int sDequeueBuffer(struct preview_stream_ops* w,
- buffer_handle_t** buffer, int *stride);
-
- static int sLockBuffer(struct preview_stream_ops* w,
- buffer_handle_t* /*buffer*/);
-
- static int sEnqueueBuffer(struct preview_stream_ops* w,
- buffer_handle_t* buffer);
-
- static int sCancelBuffer(struct preview_stream_ops* w,
- buffer_handle_t* buffer);
-
- static int sSetBufferCount(struct preview_stream_ops* w, int count);
-
- static int sSetBuffersGeometry(struct preview_stream_ops* w,
- int width, int height, int format);
-
- static int sSetCrop(struct preview_stream_ops *w,
- int left, int top, int right, int bottom);
-
- static int sSetTimestamp(struct preview_stream_ops *w,
- int64_t timestamp);
-
- static int sSetUsage(struct preview_stream_ops* w, int usage);
-
- static int sSetSwapInterval(struct preview_stream_ops *w, int interval);
-
- static int sGetMinUndequeuedBufferCount(
- const struct preview_stream_ops *w,
- int *count);
-
- void initHalPreviewWindow();
-
std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
void cleanupCirculatingBuffers();
@@ -459,13 +422,6 @@
sp<ANativeWindow> mPreviewWindow;
- struct camera_preview_window {
- struct preview_stream_ops nw;
- void *user;
- };
-
- struct camera_preview_window mHalPreviewWindow;
-
notify_callback mNotifyCb;
data_callback mDataCb;
data_callback_timestamp mDataCbTimestamp;
@@ -479,7 +435,7 @@
int mPreviewWidth;
int mPreviewHeight;
int mPreviewFormat;
- int mPreviewUsage;
+ uint64_t mPreviewUsage;
int mPreviewSwapInterval;
android_native_rect_t mPreviewCrop;
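mPreviewUsage is widened from int to uint64_t, in line with the uint32_t-to-uint64_t usage changes throughout this patch (buffer usage masks can carry bits above bit 31). A minimal standalone sketch of why storing such a mask in a 32-bit field is lossy; the vendor bit used here is hypothetical:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical usage bit above bit 31, standing in for the extended
    // (64-bit) producer/consumer usage flags referenced by this patch.
    constexpr uint64_t kHypotheticalVendorUsageBit = 1ULL << 48;

    int main() {
        uint64_t usage = kHypotheticalVendorUsageBit | 0x100;  // high + low bits
        uint32_t narrowed = static_cast<uint32_t>(usage);      // old field width
        // The high bit is silently dropped when the mask is narrowed to 32 bits.
        printf("64-bit mask: %#llx, narrowed: %#x\n",
               static_cast<unsigned long long>(usage), narrowed);
        return 0;
    }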
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 69b1d7d..02f5424 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -407,7 +407,7 @@
}
BufferUsageFlags Camera3Device::mapToConsumerUsage(
- uint32_t usage) {
+ uint64_t usage) {
return usage;
}
@@ -460,12 +460,12 @@
return static_cast<uint32_t>(pixelFormat);
}
-uint32_t Camera3Device::mapConsumerToFrameworkUsage(
+uint64_t Camera3Device::mapConsumerToFrameworkUsage(
BufferUsageFlags usage) {
return usage;
}
-uint32_t Camera3Device::mapProducerToFrameworkUsage(
+uint64_t Camera3Device::mapProducerToFrameworkUsage(
BufferUsageFlags usage) {
return usage;
}
@@ -1208,7 +1208,7 @@
status_t Camera3Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
- int streamSetId, bool isShared, uint32_t consumerUsage) {
+ int streamSetId, bool isShared, uint64_t consumerUsage) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1226,13 +1226,13 @@
status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
- int streamSetId, bool isShared, uint32_t consumerUsage) {
+ int streamSetId, bool isShared, uint64_t consumerUsage) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
- " consumer usage 0x%x, isShared %d", mId.string(), mNextStreamId, width, height, format,
+ " consumer usage %" PRIu64 ", isShared %d", mId.string(), mNextStreamId, width, height, format,
dataSpace, rotation, consumerUsage, isShared);
status_t res;
@@ -1478,6 +1478,7 @@
status_t Camera3Device::getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) {
+ ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
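Several functions in this file gain ATRACE_CALL() at entry so they show up as scoped events in systrace. A short sketch of the pattern, assuming the standard Android tracing macro from utils/Trace.h (ATRACE_TAG must be defined before the include so the events land in the intended category):

    // Sketch of the ATRACE_CALL() pattern added throughout this patch.
    #define ATRACE_TAG ATRACE_TAG_CAMERA
    #include <utils/Trace.h>

    void tracedOperation() {
        ATRACE_CALL();  // records a begin/end pair covering this function
        // ... work measured in the trace ...
    }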
@@ -1691,6 +1692,7 @@
}
status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mOutputLock);
@@ -1884,6 +1886,7 @@
*/
void Camera3Device::notifyStatus(bool idle) {
+ ATRACE_CALL();
{
// Need mLock to safely update state and synchronize to current
// state of methods in flight.
@@ -2317,6 +2320,7 @@
}
void Camera3Device::setErrorState(const char *fmt, ...) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
va_list args;
va_start(args, fmt);
@@ -2327,6 +2331,7 @@
}
void Camera3Device::setErrorStateV(const char *fmt, va_list args) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
setErrorStateLockedV(fmt, args);
}
@@ -2411,6 +2416,7 @@
}
void Camera3Device::removeInFlightMapEntryLocked(int idx) {
+ ATRACE_CALL();
nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
mInFlightMap.removeItemsAt(idx, 1);
@@ -2495,6 +2501,7 @@
}
void Camera3Device::flushInflightRequests() {
+ ATRACE_CALL();
{ // First return buffers cached in mInFlightMap
Mutex::Autolock l(mInFlightLock);
for (size_t idx = 0; idx < mInFlightMap.size(); idx++) {
@@ -2621,6 +2628,7 @@
void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mOutputLock);
CaptureResult captureResult;
@@ -2636,6 +2644,7 @@
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
bool reprocess) {
+ ATRACE_CALL();
if (pendingMetadata.isEmpty())
return;
@@ -2884,7 +2893,7 @@
void Camera3Device::notifyError(const camera3_error_msg_t &msg,
sp<NotificationListener> listener) {
-
+ ATRACE_CALL();
// Map camera HAL error codes to ICameraDeviceCallback error codes
// Index into this with the HAL error code
static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
@@ -2962,6 +2971,7 @@
void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
sp<NotificationListener> listener) {
+ ATRACE_CALL();
ssize_t idx;
// Set timestamp for the request in the in-flight tracking
@@ -3048,24 +3058,20 @@
Camera3Device::HalInterface::HalInterface(
sp<ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue) :
- mHal3Device(nullptr),
mHidlSession(session),
mRequestMetadataQueue(queue) {}
-Camera3Device::HalInterface::HalInterface() :
- mHal3Device(nullptr) {}
+Camera3Device::HalInterface::HalInterface() {}
Camera3Device::HalInterface::HalInterface(const HalInterface& other) :
- mHal3Device(other.mHal3Device),
mHidlSession(other.mHidlSession),
mRequestMetadataQueue(other.mRequestMetadataQueue) {}
bool Camera3Device::HalInterface::valid() {
- return (mHal3Device != nullptr) || (mHidlSession != nullptr);
+ return (mHidlSession != nullptr);
}
void Camera3Device::HalInterface::clear() {
- mHal3Device = nullptr;
mHidlSession.clear();
}
@@ -3080,72 +3086,60 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
- if (mHal3Device != nullptr) {
- const camera_metadata *r;
- r = mHal3Device->ops->construct_default_request_settings(
- mHal3Device, templateId);
- if (r == nullptr) return BAD_VALUE;
- *requestTemplate = clone_camera_metadata(r);
- if (requestTemplate == nullptr) {
- ALOGE("%s: Unable to clone camera metadata received from HAL",
- __FUNCTION__);
- return INVALID_OPERATION;
- }
- } else {
- common::V1_0::Status status;
- RequestTemplate id;
- switch (templateId) {
- case CAMERA3_TEMPLATE_PREVIEW:
- id = RequestTemplate::PREVIEW;
- break;
- case CAMERA3_TEMPLATE_STILL_CAPTURE:
- id = RequestTemplate::STILL_CAPTURE;
- break;
- case CAMERA3_TEMPLATE_VIDEO_RECORD:
- id = RequestTemplate::VIDEO_RECORD;
- break;
- case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
- id = RequestTemplate::VIDEO_SNAPSHOT;
- break;
- case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
- id = RequestTemplate::ZERO_SHUTTER_LAG;
- break;
- case CAMERA3_TEMPLATE_MANUAL:
- id = RequestTemplate::MANUAL;
- break;
- default:
- // Unknown template ID
- return BAD_VALUE;
- }
- auto err = mHidlSession->constructDefaultRequestSettings(id,
- [&status, &requestTemplate]
- (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
- status = s;
- if (status == common::V1_0::Status::OK) {
- const camera_metadata *r =
- reinterpret_cast<const camera_metadata_t*>(request.data());
- size_t expectedSize = request.size();
- int ret = validate_camera_metadata_structure(r, &expectedSize);
- if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
- *requestTemplate = clone_camera_metadata(r);
- if (*requestTemplate == nullptr) {
- ALOGE("%s: Unable to clone camera metadata received from HAL",
- __FUNCTION__);
- status = common::V1_0::Status::INTERNAL_ERROR;
- }
- } else {
- ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+ common::V1_0::Status status;
+ RequestTemplate id;
+ switch (templateId) {
+ case CAMERA3_TEMPLATE_PREVIEW:
+ id = RequestTemplate::PREVIEW;
+ break;
+ case CAMERA3_TEMPLATE_STILL_CAPTURE:
+ id = RequestTemplate::STILL_CAPTURE;
+ break;
+ case CAMERA3_TEMPLATE_VIDEO_RECORD:
+ id = RequestTemplate::VIDEO_RECORD;
+ break;
+ case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+ id = RequestTemplate::VIDEO_SNAPSHOT;
+ break;
+ case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+ id = RequestTemplate::ZERO_SHUTTER_LAG;
+ break;
+ case CAMERA3_TEMPLATE_MANUAL:
+ id = RequestTemplate::MANUAL;
+ break;
+ default:
+ // Unknown template ID
+ return BAD_VALUE;
+ }
+ auto err = mHidlSession->constructDefaultRequestSettings(id,
+ [&status, &requestTemplate]
+ (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
+ status = s;
+ if (status == common::V1_0::Status::OK) {
+ const camera_metadata *r =
+ reinterpret_cast<const camera_metadata_t*>(request.data());
+ size_t expectedSize = request.size();
+ int ret = validate_camera_metadata_structure(r, &expectedSize);
+ if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
+ *requestTemplate = clone_camera_metadata(r);
+ if (*requestTemplate == nullptr) {
+ ALOGE("%s: Unable to clone camera metadata received from HAL",
+ __FUNCTION__);
status = common::V1_0::Status::INTERNAL_ERROR;
}
+ } else {
+ ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+ status = common::V1_0::Status::INTERNAL_ERROR;
}
- });
- if (!err.isOk()) {
- ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
- res = DEAD_OBJECT;
- } else {
- res = CameraProviderManager::mapToStatusT(status);
- }
+ }
+ });
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ res = DEAD_OBJECT;
+ } else {
+ res = CameraProviderManager::mapToStatusT(status);
}
+
return res;
}
@@ -3154,145 +3148,144 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
- if (mHal3Device != nullptr) {
- res = mHal3Device->ops->configure_streams(mHal3Device, config);
- } else {
- // Convert stream config to HIDL
- std::set<int> activeStreams;
- StreamConfiguration requestedConfiguration;
- requestedConfiguration.streams.resize(config->num_streams);
- for (size_t i = 0; i < config->num_streams; i++) {
- Stream &dst = requestedConfiguration.streams[i];
- camera3_stream_t *src = config->streams[i];
+ // Convert stream config to HIDL
+ std::set<int> activeStreams;
+ StreamConfiguration requestedConfiguration;
+ requestedConfiguration.streams.resize(config->num_streams);
+ for (size_t i = 0; i < config->num_streams; i++) {
+ Stream &dst = requestedConfiguration.streams[i];
+ camera3_stream_t *src = config->streams[i];
- Camera3Stream* cam3stream = Camera3Stream::cast(src);
- cam3stream->setBufferFreedListener(this);
- int streamId = cam3stream->getId();
- StreamType streamType;
- switch (src->stream_type) {
- case CAMERA3_STREAM_OUTPUT:
- streamType = StreamType::OUTPUT;
- break;
- case CAMERA3_STREAM_INPUT:
- streamType = StreamType::INPUT;
- break;
- default:
- ALOGE("%s: Stream %d: Unsupported stream type %d",
- __FUNCTION__, streamId, config->streams[i]->stream_type);
- return BAD_VALUE;
+ Camera3Stream* cam3stream = Camera3Stream::cast(src);
+ cam3stream->setBufferFreedListener(this);
+ int streamId = cam3stream->getId();
+ StreamType streamType;
+ switch (src->stream_type) {
+ case CAMERA3_STREAM_OUTPUT:
+ streamType = StreamType::OUTPUT;
+ break;
+ case CAMERA3_STREAM_INPUT:
+ streamType = StreamType::INPUT;
+ break;
+ default:
+ ALOGE("%s: Stream %d: Unsupported stream type %d",
+ __FUNCTION__, streamId, config->streams[i]->stream_type);
+ return BAD_VALUE;
+ }
+ dst.id = streamId;
+ dst.streamType = streamType;
+ dst.width = src->width;
+ dst.height = src->height;
+ dst.format = mapToPixelFormat(src->format);
+ dst.usage = mapToConsumerUsage(cam3stream->getUsage());
+ dst.dataSpace = mapToHidlDataspace(src->data_space);
+ dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+
+ activeStreams.insert(streamId);
+ // Create Buffer ID map if necessary
+ if (mBufferIdMaps.count(streamId) == 0) {
+ mBufferIdMaps.emplace(streamId, BufferIdMap{});
+ }
+ }
+ // remove BufferIdMap for deleted streams
+ for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+ int streamId = it->first;
+ bool active = activeStreams.count(streamId) > 0;
+ if (!active) {
+ it = mBufferIdMaps.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ res = mapToStreamConfigurationMode(
+ (camera3_stream_configuration_mode_t) config->operation_mode,
+ /*out*/ &requestedConfiguration.operationMode);
+ if (res != OK) {
+ return res;
+ }
+
+ // Invoke configureStreams
+
+ HalStreamConfiguration finalConfiguration;
+ common::V1_0::Status status;
+ auto err = mHidlSession->configureStreams(requestedConfiguration,
+ [&status, &finalConfiguration]
+ (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
+ finalConfiguration = halConfiguration;
+ status = s;
+ });
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ return DEAD_OBJECT;
+ }
+
+ if (status != common::V1_0::Status::OK ) {
+ return CameraProviderManager::mapToStatusT(status);
+ }
+
+ // And convert output stream configuration from HIDL
+
+ for (size_t i = 0; i < config->num_streams; i++) {
+ camera3_stream_t *dst = config->streams[i];
+ int streamId = Camera3Stream::cast(dst)->getId();
+
+ // Start scan at i, with the assumption that the stream order matches
+ size_t realIdx = i;
+ bool found = false;
+ for (size_t idx = 0; idx < finalConfiguration.streams.size(); idx++) {
+ if (finalConfiguration.streams[realIdx].id == streamId) {
+ found = true;
+ break;
}
- dst.id = streamId;
- dst.streamType = streamType;
- dst.width = src->width;
- dst.height = src->height;
- dst.format = mapToPixelFormat(src->format);
- dst.usage = mapToConsumerUsage(src->usage);
- dst.dataSpace = mapToHidlDataspace(src->data_space);
- dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+ realIdx = (realIdx >= finalConfiguration.streams.size()) ? 0 : realIdx + 1;
+ }
+ if (!found) {
+ ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+ __FUNCTION__, streamId);
+ return INVALID_OPERATION;
+ }
+ HalStream &src = finalConfiguration.streams[realIdx];
- activeStreams.insert(streamId);
- // Create Buffer ID map if necessary
- if (mBufferIdMaps.count(streamId) == 0) {
- mBufferIdMaps.emplace(streamId, BufferIdMap{});
+ int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
+ if (dst->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ if (dst->format != overrideFormat) {
+ ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
+ streamId, dst->format);
}
- }
- // remove BufferIdMap for deleted streams
- for(auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
- int streamId = it->first;
- bool active = activeStreams.count(streamId) > 0;
- if (!active) {
- it = mBufferIdMaps.erase(it);
- } else {
- ++it;
- }
+ } else {
+ // Override allowed with IMPLEMENTATION_DEFINED
+ dst->format = overrideFormat;
}
- res = mapToStreamConfigurationMode(
- (camera3_stream_configuration_mode_t) config->operation_mode,
- /*out*/ &requestedConfiguration.operationMode);
- if (res != OK) {
- return res;
- }
-
- // Invoke configureStreams
-
- HalStreamConfiguration finalConfiguration;
- common::V1_0::Status status;
- auto err = mHidlSession->configureStreams(requestedConfiguration,
- [&status, &finalConfiguration]
- (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
- finalConfiguration = halConfiguration;
- status = s;
- });
- if (!err.isOk()) {
- ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
- return DEAD_OBJECT;
- }
-
- if (status != common::V1_0::Status::OK ) {
- return CameraProviderManager::mapToStatusT(status);
- }
-
- // And convert output stream configuration from HIDL
-
- for (size_t i = 0; i < config->num_streams; i++) {
- camera3_stream_t *dst = config->streams[i];
- int streamId = Camera3Stream::cast(dst)->getId();
-
- // Start scan at i, with the assumption that the stream order matches
- size_t realIdx = i;
- bool found = false;
- for (size_t idx = 0; idx < finalConfiguration.streams.size(); idx++) {
- if (finalConfiguration.streams[realIdx].id == streamId) {
- found = true;
- break;
- }
- realIdx = (realIdx >= finalConfiguration.streams.size()) ? 0 : realIdx + 1;
- }
- if (!found) {
- ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+ if (dst->stream_type == CAMERA3_STREAM_INPUT) {
+ if (src.producerUsage != 0) {
+ ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
__FUNCTION__, streamId);
return INVALID_OPERATION;
}
- HalStream &src = finalConfiguration.streams[realIdx];
-
- int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
- if (dst->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
- if (dst->format != overrideFormat) {
- ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
- streamId, dst->format);
- }
- } else {
- // Override allowed with IMPLEMENTATION_DEFINED
- dst->format = overrideFormat;
+ Camera3Stream::cast(dst)->setUsage(
+ mapConsumerToFrameworkUsage(src.consumerUsage));
+ } else {
+ // OUTPUT
+ if (src.consumerUsage != 0) {
+ ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
+ __FUNCTION__, streamId);
+ return INVALID_OPERATION;
}
-
- if (dst->stream_type == CAMERA3_STREAM_INPUT) {
- if (src.producerUsage != 0) {
- ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
- __FUNCTION__, streamId);
- return INVALID_OPERATION;
- }
- dst->usage = mapConsumerToFrameworkUsage(src.consumerUsage);
- } else {
- // OUTPUT
- if (src.consumerUsage != 0) {
- ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
- __FUNCTION__, streamId);
- return INVALID_OPERATION;
- }
- dst->usage = mapProducerToFrameworkUsage(src.producerUsage);
- }
- dst->max_buffers = src.maxBuffers;
+ Camera3Stream::cast(dst)->setUsage(
+ mapProducerToFrameworkUsage(src.producerUsage));
}
+ dst->max_buffers = src.maxBuffers;
}
+
return res;
}
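The stream-matching loop above begins its scan at index i, on the assumption that the HAL returns streams in the requested order, and wraps around otherwise. An illustrative, HIDL-free sketch of that circular lookup:

    #include <cstddef>
    #include <vector>

    // Illustrative circular search: start at `hint` (where the entry is
    // expected to be) and wrap around once, returning the matching index
    // or -1 if no element has the requested id.
    static int findByIdCircular(const std::vector<int>& ids, size_t hint, int id) {
        const size_t n = ids.size();
        for (size_t step = 0; step < n; ++step) {
            const size_t idx = (hint + step) % n;
            if (ids[idx] == id) {
                return static_cast<int>(idx);
            }
        }
        return -1;
    }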
void Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
/*out*/device::V3_2::CaptureRequest* captureRequest,
/*out*/std::vector<native_handle_t*>* handlesCreated) {
-
+ ATRACE_CALL();
if (captureRequest == nullptr || handlesCreated == nullptr) {
ALOGE("%s: captureRequest (%p) and handlesCreated (%p) must not be null",
__FUNCTION__, captureRequest, handlesCreated);
@@ -3441,14 +3434,11 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
- if (mHal3Device != nullptr) {
- res = mHal3Device->ops->process_capture_request(mHal3Device, request);
- } else {
- uint32_t numRequestProcessed = 0;
- std::vector<camera3_capture_request_t*> requests(1);
- requests[0] = request;
- res = processBatchCaptureRequests(requests, &numRequestProcessed);
- }
+ uint32_t numRequestProcessed = 0;
+ std::vector<camera3_capture_request_t*> requests(1);
+ requests[0] = request;
+ res = processBatchCaptureRequests(requests, &numRequestProcessed);
+
return res;
}
@@ -3457,31 +3447,24 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
- if (mHal3Device != nullptr) {
- res = mHal3Device->ops->flush(mHal3Device);
+ auto err = mHidlSession->flush();
+ if (!err.isOk()) {
+ ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+ res = DEAD_OBJECT;
} else {
- auto err = mHidlSession->flush();
- if (!err.isOk()) {
- ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
- res = DEAD_OBJECT;
- } else {
- res = CameraProviderManager::mapToStatusT(err);
- }
+ res = CameraProviderManager::mapToStatusT(err);
}
+
return res;
}
-status_t Camera3Device::HalInterface::dump(int fd) {
+status_t Camera3Device::HalInterface::dump(int /*fd*/) {
ATRACE_NAME("CameraHal::dump");
if (!valid()) return INVALID_OPERATION;
- status_t res = OK;
- if (mHal3Device != nullptr) {
- mHal3Device->ops->dump(mHal3Device, fd);
- } else {
- // Handled by CameraProviderManager::dump
- }
- return res;
+ // Handled by CameraProviderManager::dump
+
+ return OK;
}
status_t Camera3Device::HalInterface::close() {
@@ -3489,15 +3472,12 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
- if (mHal3Device != nullptr) {
- mHal3Device->common.close(&mHal3Device->common);
- } else {
- auto err = mHidlSession->close();
- // Interface will be dead shortly anyway, so don't log errors
- if (!err.isOk()) {
- res = DEAD_OBJECT;
- }
+ auto err = mHidlSession->close();
+ // Interface will be dead shortly anyway, so don't log errors
+ if (!err.isOk()) {
+ res = DEAD_OBJECT;
}
+
return res;
}
@@ -3614,11 +3594,13 @@
void Camera3Device::RequestThread::setNotificationListener(
wp<NotificationListener> listener) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
mListener = listener;
}
void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
mReconfigured = true;
// Prepare video stream for high speed recording.
@@ -3629,6 +3611,7 @@
List<sp<CaptureRequest> > &requests,
/*out*/
int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end();
++it) {
@@ -3651,7 +3634,7 @@
status_t Camera3Device::RequestThread::queueTrigger(
RequestTrigger trigger[],
size_t count) {
-
+ ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
status_t ret;
@@ -3708,6 +3691,7 @@
const RequestList &requests,
/*out*/
int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
if (lastFrameNumber != NULL) {
*lastFrameNumber = mRepeatingLastFrameNumber;
@@ -3734,6 +3718,7 @@
}
status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
return clearRepeatingRequestsLocked(lastFrameNumber);
@@ -3750,6 +3735,7 @@
status_t Camera3Device::RequestThread::clear(
/*out*/int64_t *lastFrameNumber) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
ALOGV("RequestThread::%s:", __FUNCTION__);
@@ -3805,6 +3791,7 @@
}
void Camera3Device::RequestThread::setPaused(bool paused) {
+ ATRACE_CALL();
Mutex::Autolock l(mPauseLock);
mDoPause = paused;
mDoPauseSignal.signal();
@@ -3812,6 +3799,7 @@
status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
int32_t requestId, nsecs_t timeout) {
+ ATRACE_CALL();
Mutex::Autolock l(mLatestRequestMutex);
status_t res;
while (mLatestRequestId != requestId) {
@@ -3838,6 +3826,7 @@
}
void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
+ ATRACE_CALL();
bool surfaceAbandoned = false;
int64_t lastFrameNumber = 0;
sp<NotificationListener> listener;
@@ -3866,6 +3855,7 @@
}
bool Camera3Device::RequestThread::sendRequestsBatch() {
+ ATRACE_CALL();
status_t res;
size_t batchSize = mNextRequests.size();
std::vector<camera3_capture_request_t*> requests(batchSize);
@@ -4261,6 +4251,7 @@
}
CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+ ATRACE_CALL();
Mutex::Autolock al(mLatestRequestMutex);
ALOGV("RequestThread::%s", __FUNCTION__);
@@ -4270,6 +4261,7 @@
bool Camera3Device::RequestThread::isStreamPending(
sp<Camera3StreamInterface>& stream) {
+ ATRACE_CALL();
Mutex::Autolock l(mRequestLock);
for (const auto& nextRequest : mNextRequests) {
@@ -4299,6 +4291,7 @@
}
nsecs_t Camera3Device::getExpectedInFlightDuration() {
+ ATRACE_CALL();
Mutex::Autolock al(mInFlightLock);
return mExpectedInflightDuration > kMinInflightDuration ?
mExpectedInflightDuration : kMinInflightDuration;
@@ -4370,6 +4363,7 @@
}
void Camera3Device::RequestThread::waitForNextRequestBatch() {
+ ATRACE_CALL();
// Optimized a bit for the simple steady-state case (single repeating
// request), to avoid putting that request in the queue temporarily.
Mutex::Autolock l(mRequestLock);
@@ -4519,6 +4513,7 @@
}
bool Camera3Device::RequestThread::waitIfPaused() {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mPauseLock);
while (mDoPause) {
@@ -4543,6 +4538,7 @@
}
void Camera3Device::RequestThread::unpauseForNewRequests() {
+ ATRACE_CALL();
// With work to do, mark thread as unpaused.
// If paused by request (setPaused), don't resume, to avoid
// extra signaling/waiting overhead to waitUntilPaused
@@ -4574,7 +4570,7 @@
status_t Camera3Device::RequestThread::insertTriggers(
const sp<CaptureRequest> &request) {
-
+ ATRACE_CALL();
Mutex::Autolock al(mTriggerMutex);
sp<Camera3Device> parent = mParent.promote();
@@ -4663,6 +4659,7 @@
status_t Camera3Device::RequestThread::removeTriggers(
const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
Mutex::Autolock al(mTriggerMutex);
CameraMetadata &metadata = request->mSettings;
@@ -4779,6 +4776,7 @@
}
status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
+ ATRACE_CALL();
status_t res;
Mutex::Autolock l(mLock);
@@ -4822,6 +4820,7 @@
}
status_t Camera3Device::PreparerThread::clear() {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
for (const auto& stream : mPendingStreams) {
@@ -4834,6 +4833,7 @@
}
void Camera3Device::PreparerThread::setNotificationListener(wp<NotificationListener> listener) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
mListener = listener;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d700e03..b5f19d7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -117,12 +117,12 @@
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint32_t consumerUsage = 0) override;
+ bool isShared = false, uint64_t consumerUsage = 0) override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint32_t consumerUsage = 0) override;
+ bool isShared = false, uint64_t consumerUsage = 0) override;
status_t createInputStream(
uint32_t width, uint32_t height, int format,
@@ -271,7 +271,6 @@
void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
private:
- camera3_device_t *mHal3Device;
sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
@@ -590,7 +589,7 @@
static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
android_dataspace dataSpace);
- static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint32_t usage);
+ static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
camera3_stream_rotation_t rotation);
// Returns a negative error code if the passed-in operation mode is not valid.
@@ -598,9 +597,9 @@
/*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
static camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status);
static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
- static uint32_t mapConsumerToFrameworkUsage(
+ static uint64_t mapConsumerToFrameworkUsage(
hardware::camera::device::V3_2::BufferUsageFlags usage);
- static uint32_t mapProducerToFrameworkUsage(
+ static uint64_t mapProducerToFrameworkUsage(
hardware::camera::device::V3_2::BufferUsageFlags usage);
struct RequestTrigger {
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 9c951b7..6e2978f 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -95,7 +95,7 @@
return OK;
}
-status_t Camera3DummyStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3DummyStream::getEndpointUsage(uint64_t *usage) const {
*usage = DUMMY_USAGE;
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 35a6a18..492fb49 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -94,7 +94,7 @@
static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN;
static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0;
- static const uint32_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
+ static const uint64_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
/**
* Internal Camera3Stream interface
@@ -107,7 +107,7 @@
virtual status_t configureQueueLocked();
- virtual status_t getEndpointUsage(uint32_t *usage) const;
+ virtual status_t getEndpointUsage(uint64_t *usage) const;
}; // class Camera3DummyStream
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 7ad2300..a52422d 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -69,7 +69,7 @@
(void) args;
String8 lines;
- uint32_t consumerUsage = 0;
+ uint64_t consumerUsage = 0;
status_t res = getEndpointUsage(&consumerUsage);
if (res != OK) consumerUsage = 0;
@@ -78,8 +78,8 @@
camera3_stream::width, camera3_stream::height,
camera3_stream::format, camera3_stream::data_space);
lines.appendFormat(" Max size: %zu\n", mMaxSize);
- lines.appendFormat(" Combined usage: %d, max HAL buffers: %d\n",
- camera3_stream::usage | consumerUsage, camera3_stream::max_buffers);
+ lines.appendFormat(" Combined usage: %" PRIu64 ", max HAL buffers: %d\n",
+ mUsage | consumerUsage, camera3_stream::max_buffers);
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
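The dump format switches to the PRIu64 macro now that the combined usage is 64 bits wide; a standalone illustration using the standard <cinttypes> macros:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main() {
        uint64_t usage = 0x0000010000000100ULL;
        // PRIu64/PRIx64 expand to the right conversion specifiers for
        // uint64_t on every platform, unlike a hard-coded %d or %x.
        printf("Combined usage: %" PRIu64 " (0x%" PRIx64 ")\n", usage, usage);
        return 0;
    }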
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 35dda39..2376058 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -85,7 +85,7 @@
virtual size_t getHandoutInputBufferCountLocked();
- virtual status_t getEndpointUsage(uint32_t *usage) const = 0;
+ virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
status_t getBufferPreconditionCheckLocked() const;
status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index ff2dcef..2cb1ea7 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -258,7 +258,7 @@
camera3_stream::max_buffers : minBufs;
// TODO: somehow set the total buffer count when producer connects?
- mConsumer = new BufferItemConsumer(consumer, camera3_stream::usage,
+ mConsumer = new BufferItemConsumer(consumer, mUsage,
mTotalBufferCount);
mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
@@ -284,7 +284,7 @@
return OK;
}
-status_t Camera3InputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) const {
// Per HAL3 spec, input streams have 0 for their initial usage field.
*usage = 0;
return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 8f5b431..81226f8 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -76,7 +76,7 @@
virtual status_t configureQueueLocked();
- virtual status_t getEndpointUsage(uint32_t *usage) const;
+ virtual status_t getEndpointUsage(uint64_t *usage) const;
/**
* BufferItemConsumer::BufferFreedListener interface
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 865b44d..0a02a32 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -90,7 +90,7 @@
Camera3OutputStream::Camera3OutputStream(int id,
uint32_t width, uint32_t height, int format,
- uint32_t consumerUsage, android_dataspace dataSpace,
+ uint64_t consumerUsage, android_dataspace dataSpace,
camera3_stream_rotation_t rotation, nsecs_t timestampOffset, int setId) :
Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation, setId),
@@ -111,7 +111,8 @@
// Sanity check for the consumer usage flag.
if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
(consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
- ALOGE("%s: Deferred consumer usage flag is illegal (0x%x)!", __FUNCTION__, consumerUsage);
+ ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
+ __FUNCTION__, consumerUsage);
mState = STATE_ERROR;
}
@@ -127,7 +128,7 @@
int format,
android_dataspace dataSpace,
camera3_stream_rotation_t rotation,
- uint32_t consumerUsage, nsecs_t timestampOffset,
+ uint64_t consumerUsage, nsecs_t timestampOffset,
int setId) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
@@ -365,10 +366,10 @@
mConsumerName = mConsumer->getConsumerName();
- res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
+ res = native_window_set_usage(mConsumer.get(), mUsage);
if (res != OK) {
- ALOGE("%s: Unable to configure usage %08x for stream %d",
- __FUNCTION__, camera3_stream::usage, mId);
+ ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
+ __FUNCTION__, mUsage, mId);
return res;
}
@@ -461,11 +462,11 @@
 * HAL3.2 devices may not support the dynamic buffer registration.
*/
if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID) {
- uint32_t consumerUsage = 0;
+ uint64_t consumerUsage = 0;
getEndpointUsage(&consumerUsage);
StreamInfo streamInfo(
getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
- camera3_stream::usage | consumerUsage, mTotalBufferCount,
+ mUsage | consumerUsage, mTotalBufferCount,
/*isConfigured*/true);
wp<Camera3OutputStream> weakThis(this);
res = mBufferManager->registerStream(weakThis,
@@ -628,7 +629,7 @@
return OK;
}
-status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
status_t res;
@@ -643,14 +644,12 @@
return res;
}
-status_t Camera3OutputStream::getEndpointUsageForSurface(uint32_t *usage,
+status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
const sp<Surface>& surface) const {
status_t res;
- int32_t u = 0;
+ uint64_t u = 0;
- res = static_cast<ANativeWindow*>(surface.get())->query(surface.get(),
- NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
-
+ res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
// If an opaque output stream's endpoint is ImageReader, add
// GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
// for the ZSL use case.
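getEndpointUsageForSurface now reads the endpoint's usage through native_window_get_consumer_usage, which reports the full 64-bit mask instead of the 32-bit NATIVE_WINDOW_CONSUMER_USAGE_BITS query. A sketch of that query pattern, mirroring the call in the hunk above (the header paths and helper name are illustrative):

    #include <cstdint>
    #include <gui/Surface.h>
    #include <hardware/gralloc.h>
    #include <system/window.h>   // native_window_get_consumer_usage()

    // Illustrative helper: fetch the 64-bit consumer usage of a Surface and
    // test a well-known gralloc bit, as the stream code in this patch does.
    static bool isComposerConsumer(const android::sp<android::Surface>& surface) {
        uint64_t usage = 0;
        int err = native_window_get_consumer_usage(
                static_cast<ANativeWindow*>(surface.get()), &usage);
        if (err != 0) {
            return false;  // treat a failed query as "no composer usage"
        }
        return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
    }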
@@ -670,7 +669,7 @@
}
bool Camera3OutputStream::isVideoStream() const {
- uint32_t usage = 0;
+ uint64_t usage = 0;
status_t res = getEndpointUsage(&usage);
if (res != OK) {
ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
@@ -813,7 +812,7 @@
}
bool Camera3OutputStream::isConsumedByHWComposer() const {
- uint32_t usage = 0;
+ uint64_t usage = 0;
status_t res = getEndpointUsage(&usage);
if (res != OK) {
ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
@@ -824,7 +823,7 @@
}
bool Camera3OutputStream::isConsumedByHWTexture() const {
- uint32_t usage = 0;
+ uint64_t usage = 0;
status_t res = getEndpointUsage(&usage);
if (res != OK) {
ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 97aa7d4..7023d5d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -44,7 +44,7 @@
uint32_t height;
uint32_t format;
android_dataspace dataSpace;
- uint32_t combinedUsage;
+ uint64_t combinedUsage;
size_t totalBufferCount;
bool isConfigured;
explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
@@ -53,7 +53,7 @@
uint32_t h = 0,
uint32_t fmt = 0,
android_dataspace ds = HAL_DATASPACE_UNKNOWN,
- uint32_t usage = 0,
+ uint64_t usage = 0,
size_t bufferCount = 0,
bool configured = false) :
streamId(id),
@@ -101,7 +101,7 @@
* stream set id needs to be set to support buffer sharing between multiple streams.
*/
Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
- uint32_t consumerUsage, android_dataspace dataSpace,
+ uint64_t consumerUsage, android_dataspace dataSpace,
camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
int setId = CAMERA3_STREAM_SET_ID_INVALID);
@@ -176,7 +176,7 @@
Camera3OutputStream(int id, camera3_stream_type_t type,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation,
- uint32_t consumerUsage = 0, nsecs_t timestampOffset = 0,
+ uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
int setId = CAMERA3_STREAM_SET_ID_INVALID);
/**
@@ -191,14 +191,14 @@
virtual status_t disconnectLocked();
- status_t getEndpointUsageForSurface(uint32_t *usage,
+ status_t getEndpointUsageForSurface(uint64_t *usage,
const sp<Surface>& surface) const;
status_t configureConsumerQueueLocked();
// Consumer as the output of camera HAL
sp<Surface> mConsumer;
- uint32_t getPresetConsumerUsage() const { return mConsumerUsage; }
+ uint64_t getPresetConsumerUsage() const { return mConsumerUsage; }
static const nsecs_t kDequeueBufferTimeout = 1000000000; // 1 sec
@@ -245,7 +245,7 @@
* Consumer end point usage flag set by the constructor for the deferred
* consumer case.
*/
- uint32_t mConsumerUsage;
+ uint64_t mConsumerUsage;
/**
* Internal Camera3Stream interface
@@ -262,7 +262,7 @@
virtual status_t configureQueueLocked();
- virtual status_t getEndpointUsage(uint32_t *usage) const;
+ virtual status_t getEndpointUsage(uint64_t *usage) const;
/**
* Private methods
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index de7c869..fb7472b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -23,7 +23,7 @@
Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
const std::vector<sp<Surface>>& surfaces,
uint32_t width, uint32_t height, int format,
- uint32_t consumerUsage, android_dataspace dataSpace,
+ uint64_t consumerUsage, android_dataspace dataSpace,
camera3_stream_rotation_t rotation,
nsecs_t timestampOffset, int setId) :
Camera3OutputStream(id, CAMERA3_STREAM_OUTPUT, width, height,
@@ -41,7 +41,7 @@
mStreamSplitter = new Camera3StreamSplitter();
- uint32_t usage;
+ uint64_t usage;
getEndpointUsage(&usage);
res = mStreamSplitter->connect(mSurfaces, usage, camera3_stream::max_buffers, &mConsumer);
@@ -191,10 +191,10 @@
return res;
}
-status_t Camera3SharedOutputStream::getEndpointUsage(uint32_t *usage) const {
+status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) const {
status_t res = OK;
- uint32_t u = 0;
+ uint64_t u = 0;
if (mConsumer == nullptr) {
// Called before shared buffer queue is constructed.
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 7be0940..22bb2fc 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -34,7 +34,7 @@
*/
Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
uint32_t width, uint32_t height, int format,
- uint32_t consumerUsage, android_dataspace dataSpace,
+ uint64_t consumerUsage, android_dataspace dataSpace,
camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
int setId = CAMERA3_STREAM_SET_ID_INVALID);
@@ -74,7 +74,7 @@
virtual status_t disconnectLocked();
- virtual status_t getEndpointUsage(uint32_t *usage) const;
+ virtual status_t getEndpointUsage(uint64_t *usage) const;
}; // class Camera3SharedOutputStream
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 9e6ac79..25e44a5 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -56,6 +56,7 @@
mState(STATE_CONSTRUCTED),
mStatusId(StatusTracker::NO_STATUS_ID),
mStreamUnpreparable(true),
+ mUsage(0),
mOldUsage(0),
mOldMaxBuffers(0),
mPrepared(false),
@@ -69,7 +70,6 @@
camera3_stream::format = format;
camera3_stream::data_space = dataSpace;
camera3_stream::rotation = rotation;
- camera3_stream::usage = 0;
camera3_stream::max_buffers = 0;
camera3_stream::priv = NULL;
@@ -104,6 +104,14 @@
return camera3_stream::data_space;
}
+uint64_t Camera3Stream::getUsage() const {
+ return mUsage;
+}
+
+void Camera3Stream::setUsage(uint64_t usage) {
+ mUsage = usage;
+}
+
camera3_stream* Camera3Stream::startConfiguration() {
ATRACE_CALL();
Mutex::Autolock l(mLock);
@@ -133,10 +141,10 @@
return NULL;
}
- mOldUsage = camera3_stream::usage;
+ mOldUsage = mUsage;
mOldMaxBuffers = camera3_stream::max_buffers;
- res = getEndpointUsage(&(camera3_stream::usage));
+ res = getEndpointUsage(&mUsage);
if (res != OK) {
ALOGE("%s: Cannot query consumer endpoint usage!",
__FUNCTION__);
@@ -197,7 +205,7 @@
// Check if the stream configuration is unchanged, and skip reallocation if
// so. As documented in hardware/camera3.h:configure_streams().
if (mState == STATE_IN_RECONFIG &&
- mOldUsage == camera3_stream::usage &&
+ mOldUsage == mUsage &&
mOldMaxBuffers == camera3_stream::max_buffers) {
mState = STATE_CONFIGURED;
return OK;
@@ -243,7 +251,7 @@
return INVALID_OPERATION;
}
- camera3_stream::usage = mOldUsage;
+ mUsage = mOldUsage;
camera3_stream::max_buffers = mOldMaxBuffers;
mState = (mState == STATE_IN_RECONFIG) ? STATE_CONFIGURED : STATE_CONSTRUCTED;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 44fe6b6..9090f83 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -144,6 +144,8 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ uint64_t getUsage() const;
+ void setUsage(uint64_t usage);
camera3_stream* asHalStream() override {
return this;
@@ -459,7 +461,7 @@
// Get the usage flags for the other endpoint, or return
// INVALID_OPERATION if they cannot be obtained.
- virtual status_t getEndpointUsage(uint32_t *usage) const = 0;
+ virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
// Return whether the buffer is in the list of outstanding buffers.
bool isOutstandingBuffer(const camera3_stream_buffer& buffer) const;
@@ -473,8 +475,10 @@
// prepareNextBuffer called on it.
bool mStreamUnpreparable;
+ uint64_t mUsage;
+
private:
- uint32_t mOldUsage;
+ uint64_t mOldUsage;
uint32_t mOldMaxBuffers;
Condition mOutputBufferReturnedSignal;
Condition mInputBufferReturnedSignal;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 869e93a..a0a50c2 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -39,7 +39,7 @@
namespace android {
status_t Camera3StreamSplitter::connect(const std::vector<sp<Surface> >& surfaces,
- uint32_t consumerUsage, size_t halMaxBuffers, sp<Surface>* consumer) {
+ uint64_t consumerUsage, size_t halMaxBuffers, sp<Surface>* consumer) {
ATRACE_CALL();
if (consumer == nullptr) {
SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
@@ -195,10 +195,8 @@
// Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases because the timeout would disable the non-blocking (async) mode.
- int32_t usage = 0;
- static_cast<ANativeWindow*>(outputQueue.get())->query(
- outputQueue.get(),
- NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+ uint64_t usage = 0;
+ res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(outputQueue.get()), &usage);
if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
outputQueue->setDequeueTimeout(kDequeueBufferTimeout);
}
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index cc623e0..3b8839e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -52,7 +52,7 @@
// Connect to the stream splitter by creating buffer queue and connecting it
// with output surfaces.
status_t connect(const std::vector<sp<Surface> >& surfaces,
- uint32_t consumerUsage, size_t halMaxBuffers,
+ uint64_t consumerUsage, size_t halMaxBuffers,
sp<Surface>* consumer);
// addOutput adds an output BufferQueue to the splitter. The splitter
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 3d54460..ee018c3 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -38,7 +38,7 @@
namespace android {
RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
- uint32_t consumerUsage,
+ uint64_t consumerUsage,
int bufferCount) :
ConsumerBase(consumer),
mBufferCount(bufferCount),
@@ -368,7 +368,7 @@
return mConsumer->setDefaultBufferFormat(defaultFormat);
}
-status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
+status_t RingBufferConsumer::setConsumerUsage(uint64_t usage) {
Mutex::Autolock _l(mMutex);
return mConsumer->setConsumerUsageBits(usage);
}
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 2bafe4a..b737469 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -60,7 +60,7 @@
// the consumer usage flags passed to the graphics allocator. The
// bufferCount parameter specifies how many buffers can be pinned for user
// access at the same time.
- RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
+ RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint64_t consumerUsage,
int bufferCount);
virtual ~RingBufferConsumer();
@@ -80,7 +80,7 @@
// setConsumerUsage allows the BufferQueue consumer usage to be
// set at a later time after construction.
- status_t setConsumerUsage(uint32_t usage);
+ status_t setConsumerUsage(uint64_t usage);
// Buffer info, minus the graphics buffer/slot itself.
struct BufferInfo {
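
A hedged usage sketch for the widened RingBufferConsumer signatures above, assuming the standard BufferQueue::createBufferQueue() factory and the libcameraservice include path; the function name and the specific usage flags are illustrative only. It shows that a 64-bit usage mask now flows through the constructor and setConsumerUsage() without narrowing.

    // Sketch only: constructing a RingBufferConsumer with a 64-bit usage mask.
    #include <gui/BufferQueue.h>
    #include <hardware/gralloc.h>
    #include "gui/RingBufferConsumer.h"

    using namespace android;

    void makeRingBufferConsumer() {
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);

        // With the uint64_t parameter, extended usage bits are passed to the
        // graphics allocator without being silently truncated to 32 bits.
        uint64_t usage = GRALLOC_USAGE_HW_CAMERA_ZSL;
        sp<RingBufferConsumer> ringBuffer =
                new RingBufferConsumer(consumer, usage, /*bufferCount*/ 4);

        // The usage mask can also be widened after construction.
        ringBuffer->setConsumerUsage(usage | GRALLOC_USAGE_SW_READ_OFTEN);
    }
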
diff --git a/services/mediaanalytics/OWNERS b/services/mediaanalytics/OWNERS
new file mode 100644
index 0000000..9af258b
--- /dev/null
+++ b/services/mediaanalytics/OWNERS
@@ -0,0 +1 @@
+essick@google.com
diff --git a/services/medialog/OWNERS b/services/medialog/OWNERS
index fb8b8ee..21723ba 100644
--- a/services/medialog/OWNERS
+++ b/services/medialog/OWNERS
@@ -1,3 +1,3 @@
elaurent@google.com
-gkasten@android.com
+gkasten@google.com
hunga@google.com
diff --git a/services/minijail/OWNERS b/services/minijail/OWNERS
new file mode 100644
index 0000000..19f4f9f
--- /dev/null
+++ b/services/minijail/OWNERS
@@ -0,0 +1,2 @@
+jorgelo@google.com
+marcone@google.com
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 970d734..68dcaff 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -31,6 +31,7 @@
#include "SharedMemoryProxy.h"
#include "utility/AAudioUtilities.h"
+using android::base::unique_fd;
using namespace android;
using namespace aaudio;
@@ -69,11 +70,6 @@
AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
}
- if (mAudioDataFileDescriptor != -1) {
- ::close(mAudioDataFileDescriptor);
- mAudioDataFileDescriptor = -1;
- }
-
return AAudioServiceStreamBase::close();
}
@@ -193,7 +189,13 @@
? audio_channel_count_from_out_mask(config.channel_mask)
: audio_channel_count_from_in_mask(config.channel_mask);
- mAudioDataFileDescriptor = mMmapBufferinfo.shared_memory_fd;
+ // AAudio creates a copy of this FD and retains ownership of the copy.
+ // Assume that AudioFlinger will close the original shared_memory_fd.
+ mAudioDataFileDescriptor.reset(dup(mMmapBufferinfo.shared_memory_fd));
+ if (mAudioDataFileDescriptor.get() == -1) {
+ ALOGE("AAudioServiceStreamMMAP::open() - could not dup shared_memory_fd");
+ return AAUDIO_ERROR_INTERNAL; // TODO review
+ }
mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
mAudioFormat = AAudioConvert_androidToAAudioDataFormat(config.format);
mSampleRate = config.sample_rate;
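
The comment added in the hunk above describes an ownership hand-off: AAudio dup()s shared_memory_fd into an android::base::unique_fd and owns only the duplicate, while AudioFlinger is expected to close the original. A minimal sketch of that dup-and-own pattern follows; adoptDescriptor and sourceFd are illustrative names, not part of the patch.

    // Sketch only: take independent ownership of a borrowed file descriptor.
    #include <unistd.h>
    #include <android-base/unique_fd.h>

    bool adoptDescriptor(int sourceFd, android::base::unique_fd* outFd) {
        // dup() creates an independent descriptor; the original owner remains
        // free to close its copy whenever it likes.
        outFd->reset(dup(sourceFd));
        if (outFd->get() == -1) {
            return false;  // dup failed; errno describes why
        }
        // No explicit ::close() is needed anymore: unique_fd closes the
        // duplicate when it is reset or destroyed, which is why the manual
        // close in AAudioServiceStreamMMAP::close() could be deleted.
        return true;
    }
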
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index e6f8fad..e631fd3 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -19,6 +19,7 @@
#include <atomic>
+#include <android-base/unique_fd.h>
#include <media/audiohal/StreamHalInterface.h>
#include <media/MmapStreamCallback.h>
#include <media/MmapStreamInterface.h>
@@ -33,6 +34,7 @@
#include "TimestampScheduler.h"
#include "utility/MonotonicCounter.h"
+
namespace aaudio {
/**
@@ -133,9 +135,9 @@
MonotonicCounter mFramesWritten;
MonotonicCounter mFramesRead;
int32_t mPreviousFrameCounter = 0; // from HAL
- int mAudioDataFileDescriptor = -1;
int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
+
// Interface to the AudioFlinger MMAP support.
android::sp<android::MmapStreamInterface> mMmapStream;
struct audio_mmap_buffer_info mMmapBufferinfo;
@@ -143,6 +145,7 @@
audio_port_handle_t mDeviceId = AUDIO_PORT_HANDLE_NONE;
android::AudioClient mServiceClient;
bool mInService = false;
+ android::base::unique_fd mAudioDataFileDescriptor;
};
} // namespace aaudio
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index d648c6d..57990ce 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -131,7 +131,7 @@
aaudio_direction_t direction = request.getDirection();
AAudioEndpointManager &mEndpointManager = AAudioEndpointManager::getInstance();
- mServiceEndpoint = mEndpointManager.openEndpoint(mAudioService, configurationOutput, direction);
+ mServiceEndpoint = mEndpointManager.openEndpoint(mAudioService, configurationInput, direction);
if (mServiceEndpoint == nullptr) {
ALOGE("AAudioServiceStreamShared::open() mServiceEndPoint = %p", mServiceEndpoint);
return AAUDIO_ERROR_UNAVAILABLE;
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 6b3fb4c..83b25b3 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -35,11 +35,6 @@
munmap(mSharedMemory, mSharedMemorySizeInBytes);
mSharedMemory = nullptr;
}
- if (mFileDescriptor != -1) {
- ALOGV("SharedRingBuffer: LEAK? close(mFileDescriptor = %d)\n", mFileDescriptor);
- close(mFileDescriptor);
- mFileDescriptor = -1;
- }
}
aaudio_result_t SharedRingBuffer::allocate(fifo_frames_t bytesPerFrame,
@@ -49,17 +44,17 @@
// Create shared memory large enough to hold the data and the read and write counters.
mDataMemorySizeInBytes = bytesPerFrame * capacityInFrames;
mSharedMemorySizeInBytes = mDataMemorySizeInBytes + (2 * (sizeof(fifo_counter_t)));
- mFileDescriptor = ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes);
- ALOGV("SharedRingBuffer::allocate() LEAK? mFileDescriptor = %d\n", mFileDescriptor);
- if (mFileDescriptor < 0) {
+ mFileDescriptor.reset(ashmem_create_region("AAudioSharedRingBuffer", mSharedMemorySizeInBytes));
+ if (mFileDescriptor.get() == -1) {
ALOGE("SharedRingBuffer::allocate() ashmem_create_region() failed %d", errno);
return AAUDIO_ERROR_INTERNAL;
}
+ ALOGV("SharedRingBuffer::allocate() mFileDescriptor = %d\n", mFileDescriptor.get());
- int err = ashmem_set_prot_region(mFileDescriptor, PROT_READ|PROT_WRITE); // TODO error handling?
+ int err = ashmem_set_prot_region(mFileDescriptor.get(), PROT_READ|PROT_WRITE); // TODO error handling?
if (err < 0) {
ALOGE("SharedRingBuffer::allocate() ashmem_set_prot_region() failed %d", errno);
- close(mFileDescriptor);
+ mFileDescriptor.reset();
return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
}
@@ -67,10 +62,10 @@
mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
PROT_READ|PROT_WRITE,
MAP_SHARED,
- mFileDescriptor, 0);
+ mFileDescriptor.get(), 0);
if (mSharedMemory == MAP_FAILED) {
ALOGE("SharedRingBuffer::allocate() mmap() failed %d", errno);
- close(mFileDescriptor);
+ mFileDescriptor.reset();
return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
}
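
For reference, a condensed sketch of the allocation sequence this hunk converts to unique_fd: create an ashmem region sized for the data plus the two fifo counters, set read/write protection, then mmap it; on any failure, reset() on the unique_fd releases the region, replacing the manual close() calls the patch removes. The helper name and region name are illustrative, not taken from SharedRingBuffer.

    // Sketch only: ashmem + unique_fd allocation, error paths included.
    #include <stdint.h>
    #include <sys/mman.h>
    #include <android-base/unique_fd.h>
    #include <cutils/ashmem.h>

    static uint8_t* allocateSharedRegion(int32_t sizeInBytes,
                                         android::base::unique_fd* outFd) {
        outFd->reset(ashmem_create_region("ExampleSharedRegion", sizeInBytes));
        if (outFd->get() == -1) {
            return nullptr;  // ashmem_create_region failed
        }
        if (ashmem_set_prot_region(outFd->get(), PROT_READ | PROT_WRITE) < 0) {
            outFd->reset();  // closes the region; no manual close() needed
            return nullptr;
        }
        uint8_t* memory = static_cast<uint8_t*>(
                mmap(nullptr, sizeInBytes, PROT_READ | PROT_WRITE,
                     MAP_SHARED, outFd->get(), 0));
        if (memory == MAP_FAILED) {
            outFd->reset();
            return nullptr;
        }
        return memory;  // caller munmap()s; outFd keeps owning the descriptor
    }
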
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index a2c3766..79169bc 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -17,6 +17,7 @@
#ifndef AAUDIO_SHARED_RINGBUFFER_H
#define AAUDIO_SHARED_RINGBUFFER_H
+#include <android-base/unique_fd.h>
#include <stdint.h>
#include <cutils/ashmem.h>
#include <sys/mman.h>
@@ -51,12 +52,12 @@
}
private:
- int mFileDescriptor = -1;
- android::FifoBuffer *mFifoBuffer = nullptr;
- uint8_t *mSharedMemory = nullptr;
- int32_t mSharedMemorySizeInBytes = 0;
- int32_t mDataMemorySizeInBytes = 0;
- android::fifo_frames_t mCapacityInFrames = 0;
+ android::base::unique_fd mFileDescriptor;
+ android::FifoBuffer *mFifoBuffer = nullptr;
+ uint8_t *mSharedMemory = nullptr;
+ int32_t mSharedMemorySizeInBytes = 0;
+ int32_t mDataMemorySizeInBytes = 0;
+ android::fifo_frames_t mCapacityInFrames = 0;
};
} /* namespace aaudio */
diff --git a/tools/OWNERS b/tools/OWNERS
index 6dcb035..f9cb567 100644
--- a/tools/OWNERS
+++ b/tools/OWNERS
@@ -1 +1 @@
-gkasten@android.com
+gkasten@google.com
diff --git a/tools/resampler_tools/OWNERS b/tools/resampler_tools/OWNERS
new file mode 100644
index 0000000..b4a6798
--- /dev/null
+++ b/tools/resampler_tools/OWNERS
@@ -0,0 +1 @@
+hunga@google.com