[automerger skipped] Merge changes from topic "adtConversion" am: 9410d2ed17 am: 6a728fc42b
am: e62fdc5564 -s ours
am skip reason: change_id If4a5c5002d6d4ccb7aaf823111b371d417784b19 with SHA1 daf49955e6 is in history
Change-Id: Ia92423538d1bb18292b13c69f8155b85bd5482c9
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index f56e1b5..ec8f049 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -38,9 +38,14 @@
namespace.platform.isolated = true
namespace.platform.search.paths = /system/${LIB}
-namespace.platform.search.paths += /apex/com.android.runtime/${LIB}
namespace.platform.asan.search.paths = /data/asan/system/${LIB}
namespace.platform.asan.search.paths += /system/${LIB}
+
+# TODO(b/140790209): These directories are wrong in R and later because they
+# only contain Bionic internal library dependencies that should not be
+# accessed from the outside. However, they may be necessary for APEX builds that
+# are pushed to Q. Remove them as soon as Q compatibility is no longer required.
+namespace.platform.search.paths += /apex/com.android.runtime/${LIB}
namespace.platform.asan.search.paths += /apex/com.android.runtime/${LIB}
# /system/lib/libc.so, etc. are symlinks to /apex/com.android.runtime/lib/bionic/libc.so, etc.
diff --git a/apex/manifest.json b/apex/manifest.json
index 3011ee8..ddd642e 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,4 @@
{
"name": "com.android.media",
- "version": 290000000
+ "version": 300000000
}
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 83a5178..2320fd7 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,4 +1,4 @@
{
"name": "com.android.media.swcodec",
- "version": 290000000
+ "version": 300000000
}
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index c6c35ef..84d1d93 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -347,6 +347,20 @@
return c->setPreviewCallbackTarget(callbackProducer);
}
+status_t Camera::setAudioRestriction(int32_t mode)
+{
+ sp <::android::hardware::ICamera> c = mCamera;
+ if (c == 0) return NO_INIT;
+ return c->setAudioRestriction(mode);
+}
+
+int32_t Camera::getGlobalAudioRestriction()
+{
+ sp <::android::hardware::ICamera> c = mCamera;
+ if (c == 0) return NO_INIT;
+ return c->getGlobalAudioRestriction();
+}
+
// callback from camera service
void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
{
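
For context, the two wrappers added above follow Camera.cpp's existing pattern of null-checking the cached ICamera remote before forwarding. A minimal client-side sketch, assuming the audio-restriction mode constants defined later in ICameraDeviceUser.aidl (the helper itself is illustrative, not part of this change):

    #include <camera/Camera.h>

    using namespace android;

    // Illustrative helper: mute vibration while this camera client is active.
    status_t muteVibrationForClient(const sp<Camera>& camera) {
        // Forwards over ICamera; returns NO_INIT if the remote has died.
        status_t err = camera->setAudioRestriction(/* AUDIO_RESTRICTION_VIBRATION */ 1);
        if (err != NO_ERROR) return err;
        // Negative return values signal an error; otherwise the system-wide mode.
        int32_t mode = camera->getGlobalAudioRestriction();
        return (mode < 0) ? mode : NO_ERROR;
    }
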
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index f0945c7..b83edf7 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -56,6 +56,8 @@
SET_VIDEO_BUFFER_TARGET,
RELEASE_RECORDING_FRAME_HANDLE,
RELEASE_RECORDING_FRAME_HANDLE_BATCH,
+ SET_AUDIO_RESTRICTION,
+ GET_GLOBAL_AUDIO_RESTRICTION,
};
class BpCamera: public BpInterface<ICamera>
@@ -191,6 +193,21 @@
}
}
+ status_t setAudioRestriction(int32_t mode) {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeInt32(mode);
+ remote()->transact(SET_AUDIO_RESTRICTION, data, &reply);
+ return reply.readInt32();
+ }
+
+ int32_t getGlobalAudioRestriction() {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ remote()->transact(GET_GLOBAL_AUDIO_RESTRICTION, data, &reply);
+ return reply.readInt32();
+ }
+
status_t setVideoBufferMode(int32_t videoBufferMode)
{
ALOGV("setVideoBufferMode: %d", videoBufferMode);
@@ -494,6 +511,17 @@
reply->writeInt32(setVideoTarget(st));
return NO_ERROR;
} break;
+ case SET_AUDIO_RESTRICTION: {
+ CHECK_INTERFACE(ICamera, data, reply);
+ int32_t mode = data.readInt32();
+ reply->writeInt32(setAudioRestriction(mode));
+ return NO_ERROR;
+ } break;
+ case GET_GLOBAL_AUDIO_RESTRICTION: {
+ CHECK_INTERFACE(ICamera, data, reply);
+ reply->writeInt32(getGlobalAudioRestriction());
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 3e8992a..62dbb5e 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -87,6 +87,7 @@
ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
String cameraId,
String opPackageName,
+ @nullable String featureId,
int clientUid);
/**
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 49dfde8..93549e0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -155,4 +155,26 @@
void updateOutputConfiguration(int streamId, in OutputConfiguration outputConfiguration);
void finalizeOutputConfigurations(int streamId, in OutputConfiguration outputConfiguration);
+
+
+ // Keep in sync with public API in
+ // frameworks/base/core/java/android/hardware/camera2/CameraDevice.java
+ const int AUDIO_RESTRICTION_NONE = 0;
+ const int AUDIO_RESTRICTION_VIBRATION = 1;
+ const int AUDIO_RESTRICTION_VIBRATION_SOUND = 3;
+
+ /**
+ * Set audio restriction mode for this camera device.
+ *
+ * @param mode the audio restriction mode ID as above
+ *
+ */
+ void setCameraAudioRestriction(int mode);
+
+ /**
+ * Get global audio restriction mode for all camera clients.
+ *
+ * @return the currently applied system-wide audio restriction mode
+ */
+ int getGlobalAudioRestriction();
}
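
A sketch of driving the new methods from native code through the generated C++ binding; the binder::Status return with an out-parameter for the AIDL int result is the standard codegen shape, but treat the exact signatures as assumptions:

    #include <android/hardware/camera2/ICameraDeviceUser.h>
    #include <utils/StrongPointer.h>

    using android::binder::Status;
    using android::hardware::camera2::ICameraDeviceUser;

    // Illustrative: mask both vibration and sound for this device, then read
    // back the effective system-wide restriction.
    Status applyRestriction(const android::sp<ICameraDeviceUser>& device) {
        Status res = device->setCameraAudioRestriction(
                ICameraDeviceUser::AUDIO_RESTRICTION_VIBRATION_SOUND);
        if (!res.isOk()) return res;
        int32_t globalMode = ICameraDeviceUser::AUDIO_RESTRICTION_NONE;
        return device->getGlobalAudioRestriction(&globalMode);
    }
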
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 430aa1c..2cdb617 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -167,6 +167,9 @@
sp<ICameraRecordingProxy> getRecordingProxy();
+ status_t setAudioRestriction(int32_t mode);
+ int32_t getGlobalAudioRestriction();
+
// ICameraClient interface
virtual void notifyCallback(int32_t msgType, int32_t ext, int32_t ext2);
virtual void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
diff --git a/camera/include/camera/android/hardware/ICamera.h b/camera/include/camera/android/hardware/ICamera.h
index 80823d6..ec19e5d 100644
--- a/camera/include/camera/android/hardware/ICamera.h
+++ b/camera/include/camera/android/hardware/ICamera.h
@@ -140,6 +140,12 @@
// Set the video buffer producer for camera to use in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
virtual status_t setVideoTarget(
const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+
+ // Set the audio restriction mode
+ virtual status_t setAudioRestriction(int32_t mode) = 0;
+
+ // Get the global audio restriction mode
+ virtual int32_t getGlobalAudioRestriction() = 0;
};
// ----------------------------------------------------------------------------
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index d6f1412..68db233 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -33,7 +33,9 @@
dev->unlockDevice();
}
// Fire onClosed callback
- (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+ if (mUserSessionCallback.onClosed != nullptr) {
+ (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+ }
ALOGV("~ACameraCaptureSession: %p is deleted", this);
}
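
With the null check in place, an NDK client may leave individual session callbacks unset rather than supplying empty stubs, as AImageReaderVendorTest.cpp below now does. A minimal sketch:

    #include <camera/NdkCameraCaptureSession.h>

    // onClosed may now be nullptr; previously it was dereferenced
    // unconditionally in ~ACameraCaptureSession.
    ACameraCaptureSession_stateCallbacks sessionCb = {
            /*context*/  nullptr,
            /*onClosed*/ nullptr,
            /*onReady*/  nullptr,
            /*onActive*/ nullptr,
    };
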
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 457dea9..7a0f63b 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -540,7 +540,7 @@
// No way to get package name from native.
// Send a zero length package name and let camera service figure it out from UID
binder::Status serviceRet = cs->connectDevice(
- callbacks, String16(cameraId), String16(""),
+ callbacks, String16(cameraId), String16(""), std::unique_ptr<String16>(),
hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
if (!serviceRet.isOk()) {
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 77dcd48..7c41b5e 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -479,6 +479,7 @@
case ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE:
case ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST:
case ACAMERA_CONTROL_ENABLE_ZSL:
+ case ACAMERA_CONTROL_BOKEH_MODE:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
case ACAMERA_HOT_PIXEL_MODE:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 8dd6e00..825f308 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -140,7 +140,7 @@
* application controls how the color mapping is performed.</p>
* <p>We define the expected processing pipeline below. For consistency
* across devices, this is always the case with TRANSFORM_MATRIX.</p>
- * <p>When either FULL or HIGH_QUALITY is used, the camera device may
+ * <p>When either FAST or HIGH_QUALITY is used, the camera device may
* do additional processing but ACAMERA_COLOR_CORRECTION_GAINS and
* ACAMERA_COLOR_CORRECTION_TRANSFORM will still be provided by the
* camera device (in the results) and be roughly correct.</p>
@@ -1126,10 +1126,17 @@
* </ul>
* <p>For devices at the LIMITED level or above:</p>
* <ul>
- * <li>For YUV_420_888 burst capture use case, this list will always include (<code>min</code>, <code>max</code>)
- * and (<code>max</code>, <code>max</code>) where <code>min</code> <= 15 and <code>max</code> = the maximum output frame rate of the
+ * <li>For devices that advertise NIR color filter arrangement in
+ * ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, this list will always include
+ * (<code>max</code>, <code>max</code>) where <code>max</code> = the maximum output frame rate of the maximum YUV_420_888
+ * output size.</li>
+ * <li>For devices advertising any color filter arrangement other than NIR, or devices not
+ * advertising color filter arrangement, this list will always include (<code>min</code>, <code>max</code>) and
+ * (<code>max</code>, <code>max</code>) where <code>min</code> <= 15 and <code>max</code> = the maximum output frame rate of the
* maximum YUV_420_888 output size.</li>
* </ul>
+ *
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
*/
ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES = // int32[2*n]
ACAMERA_CONTROL_START + 20,
@@ -1727,6 +1734,77 @@
*/
ACAMERA_CONTROL_AF_SCENE_CHANGE = // byte (acamera_metadata_enum_android_control_af_scene_change_t)
ACAMERA_CONTROL_START + 42,
+ /**
+ * <p>The list of bokeh modes that are supported by this camera device, and each bokeh mode's
+ * maximum streaming (non-stall) size with bokeh effect.</p>
+ *
+ * <p>Type: int32[3*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>For OFF mode, the camera behaves normally with no bokeh effect.</p>
+ * <p>For STILL_CAPTURE mode, the maximum streaming dimension specifies the limit under which
+ * bokeh is effective when capture intent is PREVIEW. Note that when capture intent is
+ * PREVIEW, the bokeh effect may not be of as high quality as with the STILL_CAPTURE intent
+ * in order to maintain a reasonable frame rate. The maximum streaming dimension must be one
+ * of the YUV_420_888 or PRIVATE resolutions in availableStreamConfigurations, or (0, 0)
+ * if preview bokeh is not supported. If the application configures a stream larger than
+ * the maximum streaming dimension, the bokeh effect may not be applied to this stream for
+ * the PREVIEW intent.</p>
+ * <p>For CONTINUOUS mode, the maximum streaming dimension specifies the limit under which
+ * bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE resolutions
+ * in availableStreamConfigurations, and if the sensor maximum resolution is larger than or
+ * equal to 1080p, the maximum streaming dimension must be at least 1080p. If the
+ * application configures a stream with a larger dimension, the stream may not have the
+ * bokeh effect applied.</p>
+ */
+ ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES = // int32[3*n]
+ ACAMERA_CONTROL_START + 43,
+ /**
+ * <p>Whether bokeh mode is enabled for a particular capture request.</p>
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_control_bokeh_mode_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>With bokeh mode, the camera device may blur out the parts of the scene that are not in
+ * focus, creating a bokeh (or shallow depth of field) effect for people or objects.</p>
+ * <p>When set to STILL_CAPTURE bokeh mode with STILL_CAPTURE capture intent, due to the extra
+ * processing needed for a high-quality bokeh effect, the stall may be longer than when
+ * capture intent is not STILL_CAPTURE.</p>
+ * <p>When set to STILL_CAPTURE bokeh mode with PREVIEW capture intent:</p>
+ * <ul>
+ * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
+ * BURST_CAPTURE must still be met.</li>
+ * <li>All streams not larger than the maximum streaming dimension for STILL_CAPTURE mode
+ * (queried via {@link ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES })
+ * will have preview bokeh effect applied.</li>
+ * </ul>
+ * <p>When set to CONTINUOUS mode, the configured streams' dimensions should not exceed this
+ * mode's maximum streaming dimension in order to have the bokeh effect applied. The bokeh
+ * effect may not be available for streams larger than the maximum streaming dimension.</p>
+ * <p>Switching between different bokeh modes may involve reconfiguration of the camera
+ * pipeline, resulting in long latency. The application should check this key against the
+ * available session keys queried via
+ * {@link ACameraManager_getCameraCharacteristics }.</p>
+ * <p>When bokeh mode is on, the camera device may override certain control parameters, such
+ * as reducing the frame rate or using the face-priority scene mode, to achieve the best power
+ * and quality tradeoffs. When turned on, AE, AWB, and AF run in auto modes, and only the
+ * mandatory stream combinations of the LIMITED hardware level are guaranteed.</p>
+ * <p>For a logical multi-camera, bokeh may be implemented by stereo vision from sub-cameras
+ * with different fields of view. As a result, when bokeh mode is enabled, the camera device
+ * may override android.scaler.cropRegion, and the field of view will be smaller than when
+ * bokeh mode is off.</p>
+ */
+ ACAMERA_CONTROL_BOKEH_MODE = // byte (acamera_metadata_enum_android_control_bokeh_mode_t)
+ ACAMERA_CONTROL_START + 44,
ACAMERA_CONTROL_END,
/**
@@ -3536,11 +3614,19 @@
* output capture result.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
* OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p>Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied
+ * to the final processed image is the combination of ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST. In case the application uses the sensor
+ * sensitivity from the last capture result of an auto request for a manual request, in order
+ * to achieve the same brightness in the output image, the application should also
+ * set postRawSensitivityBoost.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
* @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
* @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
* @see ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY
+ * @see ACAMERA_SENSOR_SENSITIVITY
*/
ACAMERA_SENSOR_SENSITIVITY = // int32
ACAMERA_SENSOR_START + 2,
@@ -6987,6 +7073,31 @@
} acamera_metadata_enum_android_control_af_scene_change_t;
+// ACAMERA_CONTROL_BOKEH_MODE
+typedef enum acamera_metadata_enum_acamera_control_bokeh_mode {
+ /**
+ * <p>Bokeh mode is disabled.</p>
+ */
+ ACAMERA_CONTROL_BOKEH_MODE_OFF = 0,
+
+ /**
+ * <p>High quality bokeh mode is enabled for all non-raw streams (including YUV,
+ * JPEG, and IMPLEMENTATION_DEFINED) when capture intent is STILL_CAPTURE. Due to the
+ * extra image processing, this mode may introduce additional stall to non-raw streams.
+ * This mode should be used in high-quality still-capture use cases.</p>
+ */
+ ACAMERA_CONTROL_BOKEH_MODE_STILL_CAPTURE = 1,
+
+ /**
+ * <p>The bokeh effect must not slow down the capture rate relative to sensor raw output,
+ * and the effect is applied to all processed streams no larger than the maximum
+ * streaming dimension. This mode should be used if performance and power are a
+ * priority, such as for video recording.</p>
+ */
+ ACAMERA_CONTROL_BOKEH_MODE_CONTINUOUS = 2,
+
+} acamera_metadata_enum_android_control_bokeh_mode_t;
+
// ACAMERA_EDGE_MODE
@@ -7751,6 +7862,13 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA = 13,
+ /**
+ * <p>The camera device is only accessible by Android's system components and privileged
+ * applications. Processes need to hold android.permission.SYSTEM_CAMERA in
+ * addition to android.permission.CAMERA in order to connect to this camera device.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA = 14,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
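
A sketch of querying the new bokeh capability tag through the NDK; the interpretation of the int32[3*n] payload as (mode, maxStreamingWidth, maxStreamingHeight) triples is inferred from the documentation above:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Illustrative: true if the device advertises STILL_CAPTURE bokeh.
    bool supportsStillCaptureBokeh(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES, &entry) != ACAMERA_OK) {
            return false;  // tag absent: device predates bokeh support
        }
        // Entries come in (mode, width, height) triples.
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            if (entry.data.i32[i] == ACAMERA_CONTROL_BOKEH_MODE_STILL_CAPTURE) {
                return true;
            }
        }
        return false;
    }
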
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7ab0124..938b5f5 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -253,21 +253,9 @@
return true;
}
- static void onDeviceDisconnected(void* /*obj*/, ACameraDevice* /*device*/) {}
-
- static void onDeviceError(void* /*obj*/, ACameraDevice* /*device*/, int /*errorCode*/) {}
-
- static void onSessionClosed(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
- static void onSessionReady(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
- static void onSessionActive(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
private:
- ACameraDevice_StateCallbacks mDeviceCb{this, onDeviceDisconnected,
- onDeviceError};
- ACameraCaptureSession_stateCallbacks mSessionCb{
- this, onSessionClosed, onSessionReady, onSessionActive};
+ ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
+ ACameraCaptureSession_stateCallbacks mSessionCb{this, nullptr, nullptr, nullptr};
native_handle_t* mImgReaderAnw = nullptr; // not owned by us.
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 8fe029a..9a18b10 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -57,7 +57,7 @@
#include <algorithm>
using namespace android;
-using ::android::hardware::ICameraServiceDefault;
+using ::android::hardware::ICameraService;
using ::android::hardware::camera2::ICameraDeviceUser;
#define ASSERT_NOT_NULL(x) \
@@ -361,7 +361,8 @@
sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
sp<hardware::camera2::ICameraDeviceUser> device;
res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
- hardware::ICameraService::USE_CALLING_UID, /*out*/&device);
+ std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
+ /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
ASSERT_NE(nullptr, device.get());
device->disconnect();
@@ -403,7 +404,8 @@
{
SCOPED_TRACE("openNewDevice");
binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
- hardware::ICameraService::USE_CALLING_UID, /*out*/&device);
+ std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
+ /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
}
auto p = std::make_pair(callbacks, device);
@@ -507,7 +509,7 @@
bool queryStatus;
res = device->isSessionConfigurationSupported(sessionConfiguration, &queryStatus);
EXPECT_TRUE(res.isOk() ||
- (res.serviceSpecificErrorCode() == ICameraServiceDefault::ERROR_INVALID_OPERATION))
+ (res.serviceSpecificErrorCode() == ICameraService::ERROR_INVALID_OPERATION))
<< res;
if (res.isOk()) {
EXPECT_TRUE(queryStatus);
diff --git a/cmds/screenrecord/Android.bp b/cmds/screenrecord/Android.bp
index 6bdbab1..72008c2 100644
--- a/cmds/screenrecord/Android.bp
+++ b/cmds/screenrecord/Android.bp
@@ -31,6 +31,7 @@
shared_libs: [
"libstagefright",
"libmedia",
+ "libmediandk",
"libmedia_omx",
"libutils",
"libbinder",
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f2a71b3..b534f8a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -37,6 +37,7 @@
#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
+#include <utils/SystemClock.h>
#include <utils/Timers.h>
#include <utils/Trace.h>
@@ -44,13 +45,15 @@
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/NdkMediaMuxer.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/PersistentSurface.h>
#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
@@ -70,9 +73,9 @@
using android::ISurfaceComposer;
using android::MediaCodec;
using android::MediaCodecBuffer;
-using android::MediaMuxer;
using android::Overlay;
using android::PersistentSurface;
+using android::PhysicalDisplayId;
using android::ProcessState;
using android::Rect;
using android::String8;
@@ -95,6 +98,8 @@
static const uint32_t kFallbackWidth = 1280; // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";
+static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
+static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
@@ -113,7 +118,7 @@
static uint32_t gBitRate = 20000000; // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;
-
+static PhysicalDisplayId gPhysicalDisplayId;
// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;
@@ -266,14 +271,14 @@
static status_t setDisplayProjection(
SurfaceComposerClient::Transaction& t,
const sp<IBinder>& dpy,
- const DisplayInfo& mainDpyInfo) {
+ const DisplayInfo& displayInfo) {
// Set the region of the layer stack we're interested in, which in our
// case is "all of it".
- Rect layerStackRect(mainDpyInfo.viewportW, mainDpyInfo.viewportH);
+ Rect layerStackRect(displayInfo.viewportW, displayInfo.viewportH);
// We need to preserve the aspect ratio of the display.
- float displayAspect = (float) mainDpyInfo.viewportH / (float) mainDpyInfo.viewportW;
+ float displayAspect = (float) displayInfo.viewportH / (float) displayInfo.viewportW;
// Set the way we map the output onto the display surface (which will
@@ -332,16 +337,15 @@
* Configures the virtual display. When this completes, virtual display
* frames will start arriving from the buffer producer.
*/
-static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
+static status_t prepareVirtualDisplay(const DisplayInfo& displayInfo,
const sp<IGraphicBufferProducer>& bufferProducer,
sp<IBinder>* pDisplayHandle) {
sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
String8("ScreenRecorder"), false /*secure*/);
-
SurfaceComposerClient::Transaction t;
t.setDisplaySurface(dpy, bufferProducer);
- setDisplayProjection(t, dpy, mainDpyInfo);
- t.setDisplayLayerStack(dpy, 0); // default stack
+ setDisplayProjection(t, dpy, displayInfo);
+ t.setDisplayLayerStack(dpy, displayInfo.layerStack);
t.apply();
*pDisplayHandle = dpy;
@@ -350,6 +354,56 @@
}
/*
+ * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * of the platform endianness.
+ */
+template <typename UINT>
+static void writeValueLE(UINT value, uint8_t* buffer) {
+ for (int i = 0; i < sizeof(UINT); ++i) {
+ buffer[i] = static_cast<uint8_t>(value);
+ value >>= 8;
+ }
+}
+
+/*
+ * Saves each frame's presentation time, relative to the elapsed realtime clock and in
+ * microseconds, preceded by a Winscope magic string and a frame count, to a metadata track.
+ * This metadata is used by the Winscope tool to sync video with SurfaceFlinger
+ * and WindowManager traces.
+ *
+ * The metadata is written as a binary array as follows:
+ * - the Winscope magic string (kWinscopeMagicString constant), without its trailing null char,
+ * - the number of recorded frames (as a little-endian uint32),
+ * - for every frame, its presentation time relative to the elapsed realtime clock, in
+ * microseconds (as a little-endian uint64).
+ */
+static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+ const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+ ALOGV("Writing metadata");
+ int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
+ - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
+ sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
+ + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+ uint8_t* pos = buffer->data();
+ strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
+ pos += strlen(kWinscopeMagicString);
+ writeValueLE<uint32_t>(timestamps.size(), pos);
+ pos += sizeof(uint32_t);
+ for (size_t idx = 0; idx < timestamps.size(); ++idx) {
+ writeValueLE<uint64_t>(static_cast<uint64_t>(timestamps[idx]
+ + systemTimeToElapsedTimeOffsetMicros), pos);
+ pos += sizeof(uint64_t);
+ }
+ AMediaCodecBufferInfo bufferInfo = {
+ 0,
+ static_cast<int32_t>(buffer->size()),
+ timestamps[0],
+ 0
+ };
+ return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
+/*
* Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
* input frames are coming from the virtual display as fast as SurfaceFlinger
* wants to send them.
@@ -359,15 +413,17 @@
* The muxer must *not* have been started before calling.
*/
static status_t runEncoder(const sp<MediaCodec>& encoder,
- const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
+ AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
const sp<IBinder>& virtualDpy, uint8_t orientation) {
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
+ ssize_t metaTrackIdx = -1;
uint32_t debugNumFrames = 0;
int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
- DisplayInfo mainDpyInfo;
+ DisplayInfo displayInfo;
+ Vector<int64_t> timestamps;
bool firstFrame = true;
assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -423,16 +479,16 @@
//
// Polling for changes is inefficient and wrong, but the
// useful stuff is hard to get at without a Dalvik VM.
- err = SurfaceComposerClient::getDisplayInfo(mainDpy,
- &mainDpyInfo);
+ err = SurfaceComposerClient::getDisplayInfo(display,
+ &displayInfo);
if (err != NO_ERROR) {
ALOGW("getDisplayInfo(main) failed: %d", err);
- } else if (orientation != mainDpyInfo.orientation) {
- ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
+ } else if (orientation != displayInfo.orientation) {
+ ALOGD("orientation changed, now %d", displayInfo.orientation);
SurfaceComposerClient::Transaction t;
- setDisplayProjection(t, virtualDpy, mainDpyInfo);
+ setDisplayProjection(t, virtualDpy, displayInfo);
t.apply();
- orientation = mainDpyInfo.orientation;
+ orientation = displayInfo.orientation;
}
}
@@ -464,13 +520,21 @@
// TODO
sp<ABuffer> buffer = new ABuffer(
buffers[bufIndex]->data(), buffers[bufIndex]->size());
- err = muxer->writeSampleData(buffer, trackIdx,
- ptsUsec, flags);
+ AMediaCodecBufferInfo bufferInfo = {
+ 0,
+ static_cast<int32_t>(buffer->size()),
+ ptsUsec,
+ flags
+ };
+ err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
if (err != NO_ERROR) {
fprintf(stderr,
"Failed writing data to muxer (err=%d)\n", err);
return err;
}
+ if (gOutputFormat == FORMAT_MP4) {
+ timestamps.add(ptsUsec);
+ }
}
debugNumFrames++;
}
@@ -495,10 +559,18 @@
ALOGV("Encoder format changed");
sp<AMessage> newFormat;
encoder->getOutputFormat(&newFormat);
+ // TODO remove when MediaCodec has been replaced with AMediaCodec
+ AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
if (muxer != NULL) {
- trackIdx = muxer->addTrack(newFormat);
+ trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
+ if (gOutputFormat == FORMAT_MP4) {
+ AMediaFormat *metaFormat = AMediaFormat_new();
+ AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+ metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
+ AMediaFormat_delete(metaFormat);
+ }
ALOGV("Starting muxer");
- err = muxer->start();
+ err = AMediaMuxer_start(muxer);
if (err != NO_ERROR) {
fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
return err;
@@ -533,6 +605,13 @@
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
fflush(stdout);
}
+ if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
+ err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+ return err;
+ }
+ }
return NO_ERROR;
}
@@ -597,32 +676,33 @@
self->startThreadPool();
// Get main display parameters.
- const sp<IBinder> mainDpy = SurfaceComposerClient::getInternalDisplayToken();
- if (mainDpy == nullptr) {
+ sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
+ gPhysicalDisplayId);
+ if (display == nullptr) {
fprintf(stderr, "ERROR: no display\n");
return NAME_NOT_FOUND;
}
- DisplayInfo mainDpyInfo;
- err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
+ DisplayInfo displayInfo;
+ err = SurfaceComposerClient::getDisplayInfo(display, &displayInfo);
if (err != NO_ERROR) {
fprintf(stderr, "ERROR: unable to get display characteristics\n");
return err;
}
if (gVerbose) {
- printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
- mainDpyInfo.viewportW, mainDpyInfo.viewportH, mainDpyInfo.fps,
- mainDpyInfo.orientation);
+ printf("Display is %dx%d @%.2ffps (orientation=%u), layerStack=%u\n",
+ displayInfo.viewportW, displayInfo.viewportH, displayInfo.fps,
+ displayInfo.orientation, displayInfo.layerStack);
fflush(stdout);
}
// Encoder can't take odd number as config
if (gVideoWidth == 0) {
- gVideoWidth = floorToEven(mainDpyInfo.viewportW);
+ gVideoWidth = floorToEven(displayInfo.viewportW);
}
if (gVideoHeight == 0) {
- gVideoHeight = floorToEven(mainDpyInfo.viewportH);
+ gVideoHeight = floorToEven(displayInfo.viewportH);
}
// Configure and start the encoder.
@@ -630,7 +710,7 @@
sp<FrameOutput> frameOutput;
sp<IGraphicBufferProducer> encoderInputSurface;
if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
- err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);
+ err = prepareEncoder(displayInfo.fps, &encoder, &encoderInputSurface);
if (err != NO_ERROR && !gSizeSpecified) {
// fallback is defined for landscape; swap if we're in portrait
@@ -643,7 +723,7 @@
gVideoWidth, gVideoHeight, newWidth, newHeight);
gVideoWidth = newWidth;
gVideoHeight = newHeight;
- err = prepareEncoder(mainDpyInfo.fps, &encoder,
+ err = prepareEncoder(displayInfo.fps, &encoder,
&encoderInputSurface);
}
}
@@ -691,13 +771,13 @@
// Configure virtual display.
sp<IBinder> dpy;
- err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
+ err = prepareVirtualDisplay(displayInfo, bufferProducer, &dpy);
if (err != NO_ERROR) {
if (encoder != NULL) encoder->release();
return err;
}
- sp<MediaMuxer> muxer = NULL;
+ AMediaMuxer *muxer = nullptr;
FILE* rawFp = NULL;
switch (gOutputFormat) {
case FORMAT_MP4:
@@ -716,15 +796,15 @@
abort();
}
if (gOutputFormat == FORMAT_MP4) {
- muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+ muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
} else if (gOutputFormat == FORMAT_WEBM) {
- muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_WEBM);
+ muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_WEBM);
} else {
- muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_THREE_GPP);
+ muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP);
}
close(fd);
if (gRotate) {
- muxer->setOrientationHint(90); // TODO: does this do anything?
+ AMediaMuxer_setOrientationHint(muxer, 90); // TODO: does this do anything?
}
break;
}
@@ -774,8 +854,8 @@
}
} else {
// Main encoder loop.
- err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
- mainDpyInfo.orientation);
+ err = runEncoder(encoder, muxer, rawFp, display, dpy,
+ displayInfo.orientation);
if (err != NO_ERROR) {
fprintf(stderr, "Encoder failed (err=%d)\n", err);
// fall through to cleanup
@@ -795,7 +875,7 @@
if (muxer != NULL) {
// If we don't stop muxer explicitly, i.e. let the destructor run,
// it may hang (b/11050628).
- err = muxer->stop();
+ err = AMediaMuxer_stop(muxer);
} else if (rawFp != stdout) {
fclose(rawFp);
}
@@ -941,6 +1021,9 @@
" in videos captured to illustrate bugs.\n"
"--time-limit TIME\n"
" Set the maximum recording time, in seconds. Default / maximum is %d.\n"
+ "--display-id ID\n"
+ " specify the physical display ID to record. Default is the primary display.\n"
+ " see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
"--verbose\n"
" Display interesting information on stdout.\n"
"--help\n"
@@ -972,9 +1055,18 @@
{ "monotonic-time", no_argument, NULL, 'm' },
{ "persistent-surface", no_argument, NULL, 'p' },
{ "bframes", required_argument, NULL, 'B' },
+ { "display-id", required_argument, NULL, 'd' },
{ NULL, 0, NULL, 0 }
};
+ std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
+ if (!displayId) {
+ fprintf(stderr, "Failed to get token for internal display\n");
+ return 1;
+ }
+
+ gPhysicalDisplayId = *displayId;
+
while (true) {
int optionIndex = 0;
int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
@@ -1069,6 +1161,18 @@
return 2;
}
break;
+ case 'd':
+ gPhysicalDisplayId = atoll(optarg);
+ if (gPhysicalDisplayId == 0) {
+ fprintf(stderr, "Please specify a valid physical display id\n");
+ return 2;
+ } else if (SurfaceComposerClient::
+ getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
+ fprintf(stderr, "Invalid physical display id: %"
+ ANDROID_PHYSICAL_DISPLAY_ID_FORMAT "\n", gPhysicalDisplayId);
+ return 2;
+ }
+ break;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/screenrecord/screenrecord.h b/cmds/screenrecord/screenrecord.h
index 9b058c2..cec7c13 100644
--- a/cmds/screenrecord/screenrecord.h
+++ b/cmds/screenrecord/screenrecord.h
@@ -18,6 +18,6 @@
#define SCREENRECORD_SCREENRECORD_H
#define kVersionMajor 1
-#define kVersionMinor 2
+#define kVersionMinor 3
#endif /*SCREENRECORD_SCREENRECORD_H*/
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 02ade94..e8cfece 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -988,7 +988,7 @@
failed = false;
printf("getFrameAtTime(%s) => OK\n", filename);
- VideoFrame *frame = (VideoFrame *)mem->pointer();
+ VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
frame->getFlattenedData(),
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 22e2ef3..fe613a8 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -116,7 +116,7 @@
sp<IMemory> mem = mBuffers.itemAt(index);
- ssize_t n = read(mFd, mem->pointer(), mem->size());
+ ssize_t n = read(mFd, mem->unsecurePointer(), mem->size());
if (n <= 0) {
mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
} else {
@@ -238,7 +238,7 @@
copy = mem->size() - mCurrentBufferOffset;
}
- memcpy((uint8_t *)mem->pointer() + mCurrentBufferOffset, data, copy);
+ memcpy((uint8_t *)mem->unsecurePointer() + mCurrentBufferOffset, data, copy);
mCurrentBufferOffset += copy;
if (mCurrentBufferOffset == mem->size()) {
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index c25a0a1..fd71837 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -25,6 +25,7 @@
shared_libs: [
"libmedia",
+ "libmediametrics",
"libutils",
"liblog",
"libbinder",
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index afbcb39..e2ea83a 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -19,7 +19,10 @@
#include "utils/Log.h"
#include <utils/String8.h>
+
+#include <binder/IPCThreadState.h>
#include <drm/DrmInfo.h>
+
#include <drm/DrmInfoEvent.h>
#include <drm/DrmRights.h>
#include <drm/DrmConstraints.h>
@@ -28,6 +31,7 @@
#include <drm/DrmInfoRequest.h>
#include <drm/DrmSupportInfo.h>
#include <drm/DrmConvertedStatus.h>
+#include <media/MediaAnalyticsItem.h>
#include <IDrmEngine.h>
#include "DrmManager.h"
@@ -50,6 +54,39 @@
}
+void DrmManager::reportEngineMetrics(
+ const char func[], const String8& plugInId, const String8& mimeType) {
+ IDrmEngine& engine = mPlugInManager.getPlugIn(plugInId);
+
+ std::unique_ptr<MediaAnalyticsItem> item(MediaAnalyticsItem::create("drmmanager"));
+ item->setUid(IPCThreadState::self()->getCallingUid());
+ item->setCString("function_name", func);
+ item->setCString("plugin_id", plugInId.getPathLeaf().getBasePath().c_str());
+
+ std::unique_ptr<DrmSupportInfo> info(engine.getSupportInfo(0));
+ if (NULL != info) {
+ item->setCString("description", info->getDescription().c_str());
+ }
+
+ if (!mimeType.isEmpty()) {
+ item->setCString("mime_types", mimeType.c_str());
+ } else if (NULL != info) {
+ DrmSupportInfo::MimeTypeIterator mimeIter = info->getMimeTypeIterator();
+ String8 mimes;
+ while (mimeIter.hasNext()) {
+ mimes += mimeIter.next();
+ if (mimeIter.hasNext()) {
+ mimes += ",";
+ }
+ }
+ item->setCString("mime_types", mimes.c_str());
+ }
+
+ if (!item->selfrecord()) {
+ ALOGE("Failed to record metrics");
+ }
+}
+
int DrmManager::addUniqueId(bool isNative) {
Mutex::Autolock _l(mLock);
@@ -147,27 +184,36 @@
for (size_t index = 0; index < plugInIdList.size(); index++) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInIdList.itemAt(index));
rDrmEngine.terminate(uniqueId);
+ reportEngineMetrics(__func__, plugInIdList[index]);
}
}
DrmConstraints* DrmManager::getConstraints(int uniqueId, const String8* path, const int action) {
Mutex::Autolock _l(mLock);
+ DrmConstraints *constraints = NULL;
const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, *path);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.getConstraints(uniqueId, path, action);
+ constraints = rDrmEngine.getConstraints(uniqueId, path, action);
}
- return NULL;
+ if (NULL != constraints) {
+ reportEngineMetrics(__func__, plugInId);
+ }
+ return constraints;
}
DrmMetadata* DrmManager::getMetadata(int uniqueId, const String8* path) {
Mutex::Autolock _l(mLock);
+ DrmMetadata *meta = NULL;
const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, *path);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.getMetadata(uniqueId, path);
+ meta = rDrmEngine.getMetadata(uniqueId, path);
}
- return NULL;
+ if (NULL != meta) {
+ reportEngineMetrics(__func__, plugInId);
+ }
+ return meta;
}
bool DrmManager::canHandle(int uniqueId, const String8& path, const String8& mimeType) {
@@ -175,6 +221,10 @@
const String8 plugInId = getSupportedPlugInId(mimeType);
bool result = (EMPTY_STRING != plugInId) ? true : false;
+ if (result) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
+
if (0 < path.length()) {
if (result) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
@@ -191,12 +241,17 @@
DrmInfoStatus* DrmManager::processDrmInfo(int uniqueId, const DrmInfo* drmInfo) {
Mutex::Autolock _l(mLock);
- const String8 plugInId = getSupportedPlugInId(drmInfo->getMimeType());
+ DrmInfoStatus *infoStatus = NULL;
+ const String8 mimeType = drmInfo->getMimeType();
+ const String8 plugInId = getSupportedPlugInId(mimeType);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.processDrmInfo(uniqueId, drmInfo);
+ infoStatus = rDrmEngine.processDrmInfo(uniqueId, drmInfo);
}
- return NULL;
+ if (NULL != infoStatus) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
+ return infoStatus;
}
bool DrmManager::canHandle(int uniqueId, const String8& path) {
@@ -208,6 +263,7 @@
result = rDrmEngine.canHandle(uniqueId, path);
if (result) {
+ reportEngineMetrics(__func__, plugInPathList[i]);
break;
}
}
@@ -216,54 +272,75 @@
DrmInfo* DrmManager::acquireDrmInfo(int uniqueId, const DrmInfoRequest* drmInfoRequest) {
Mutex::Autolock _l(mLock);
- const String8 plugInId = getSupportedPlugInId(drmInfoRequest->getMimeType());
+ DrmInfo *info = NULL;
+ const String8 mimeType = drmInfoRequest->getMimeType();
+ const String8 plugInId = getSupportedPlugInId(mimeType);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.acquireDrmInfo(uniqueId, drmInfoRequest);
+ info = rDrmEngine.acquireDrmInfo(uniqueId, drmInfoRequest);
}
- return NULL;
+ if (NULL != info) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
+ return info;
}
status_t DrmManager::saveRights(int uniqueId, const DrmRights& drmRights,
const String8& rightsPath, const String8& contentPath) {
Mutex::Autolock _l(mLock);
- const String8 plugInId = getSupportedPlugInId(drmRights.getMimeType());
+ const String8 mimeType = drmRights.getMimeType();
+ const String8 plugInId = getSupportedPlugInId(mimeType);
status_t result = DRM_ERROR_UNKNOWN;
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
result = rDrmEngine.saveRights(uniqueId, drmRights, rightsPath, contentPath);
}
+ if (DRM_NO_ERROR == result) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
return result;
}
String8 DrmManager::getOriginalMimeType(int uniqueId, const String8& path, int fd) {
Mutex::Autolock _l(mLock);
+ String8 mimeType(EMPTY_STRING);
const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.getOriginalMimeType(uniqueId, path, fd);
+ mimeType = rDrmEngine.getOriginalMimeType(uniqueId, path, fd);
}
- return EMPTY_STRING;
+ if (!mimeType.isEmpty()) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
+ return mimeType;
}
int DrmManager::getDrmObjectType(int uniqueId, const String8& path, const String8& mimeType) {
Mutex::Autolock _l(mLock);
+ int type = DrmObjectType::UNKNOWN;
const String8 plugInId = getSupportedPlugInId(uniqueId, path, mimeType);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.getDrmObjectType(uniqueId, path, mimeType);
+ type = rDrmEngine.getDrmObjectType(uniqueId, path, mimeType);
}
- return DrmObjectType::UNKNOWN;
+ if (DrmObjectType::UNKNOWN != type) {
+ reportEngineMetrics(__func__, plugInId, mimeType);
+ }
+ return type;
}
int DrmManager::checkRightsStatus(int uniqueId, const String8& path, int action) {
Mutex::Autolock _l(mLock);
+ int rightsStatus = RightsStatus::RIGHTS_INVALID;
const String8 plugInId = getSupportedPlugInIdFromPath(uniqueId, path);
if (EMPTY_STRING != plugInId) {
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
- return rDrmEngine.checkRightsStatus(uniqueId, path, action);
+ rightsStatus = rDrmEngine.checkRightsStatus(uniqueId, path, action);
}
- return RightsStatus::RIGHTS_INVALID;
+ if (RightsStatus::RIGHTS_INVALID != rightsStatus) {
+ reportEngineMetrics(__func__, plugInId);
+ }
+ return rightsStatus;
}
status_t DrmManager::consumeRights(
@@ -307,6 +384,9 @@
IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(plugInId);
result = rDrmEngine.removeRights(uniqueId, path);
}
+ if (DRM_NO_ERROR == result) {
+ reportEngineMetrics(__func__, plugInId);
+ }
return result;
}
@@ -319,6 +399,7 @@
if (DRM_NO_ERROR != result) {
break;
}
+ reportEngineMetrics(__func__, plugInIdList[index]);
}
return result;
}
@@ -335,6 +416,7 @@
++mConvertId;
convertId = mConvertId;
mConvertSessionMap.add(convertId, &rDrmEngine);
+ reportEngineMetrics(__func__, plugInId, mimeType);
}
}
return convertId;
@@ -415,6 +497,7 @@
if (DRM_NO_ERROR == result) {
++mDecryptSessionId;
mDecryptSessionMap.add(mDecryptSessionId, &rDrmEngine);
+ reportEngineMetrics(__func__, plugInId, String8(mime));
break;
}
}
@@ -443,6 +526,7 @@
if (DRM_NO_ERROR == result) {
++mDecryptSessionId;
mDecryptSessionMap.add(mDecryptSessionId, &rDrmEngine);
+ reportEngineMetrics(__func__, plugInId, String8(mime));
break;
}
}
@@ -472,6 +556,7 @@
if (DRM_NO_ERROR == result) {
++mDecryptSessionId;
mDecryptSessionMap.add(mDecryptSessionId, &rDrmEngine);
+ reportEngineMetrics(__func__, plugInId, mimeType);
break;
}
}