Merge "CCodec: read usage from component and apply to input buffers"
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..a7cf3e5
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,62 @@
+aidl_interface {
+ name: "av-types-aidl",
+ unstable: true,
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/android/media/InterpolatorConfig.aidl",
+ "aidl/android/media/InterpolatorType.aidl",
+ "aidl/android/media/MicrophoneInfoData.aidl",
+ "aidl/android/media/VolumeShaperConfiguration.aidl",
+ "aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl",
+ "aidl/android/media/VolumeShaperConfigurationType.aidl",
+ "aidl/android/media/VolumeShaperOperation.aidl",
+ "aidl/android/media/VolumeShaperOperationFlag.aidl",
+ "aidl/android/media/VolumeShaperState.aidl",
+ ],
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth.updatable",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ },
+ },
+}
+
+cc_library_headers {
+ name: "av-headers",
+ export_include_dirs: ["include"],
+ static_libs: [
+ "av-types-aidl-unstable-cpp",
+ ],
+ export_static_lib_headers: [
+ "av-types-aidl-unstable-cpp",
+ ],
+ header_libs: [
+ "libaudioclient_aidl_conversion_util",
+ ],
+ export_header_lib_headers: [
+ "libaudioclient_aidl_conversion_util",
+ ],
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth.updatable",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
new file mode 100644
index 0000000..37d714c
--- /dev/null
+++ b/MainlineFiles.cfg
@@ -0,0 +1,34 @@
+#
+# mainline files for frameworks/av
+#
+# ignore comment (#) lines and blank lines
+# rest are path prefixes starting at root of the project
+# (so OWNERS, not frameworks/av/OWNERS)
+#
+# path
+# INCLUDE path
+# EXCLUDE path
+#
+# 'path' and 'INCLUDE path' are identical -- they both indicate that this path
+# is part of mainline
+# EXCLUDE indicates that this is not part of mainline,
+# so 'foo/' and 'EXCLUDE foo/nope'
+# means everything under foo/ is part of mainline EXCEPT foo/nope.
+# INCLUDE/EXCLUDE/INCLUDE nested structuring is not supported
+#
+# matching is purely prefix
+# so 'foo' will match 'foo', 'foo.c', 'foo/bar/baz'
+# if you want to exclude a directory, best to use a pattern like "foo/"
+#
+
+media/codec2/components/
+media/codecs/
+media/extractors/
+media/libstagefright/codecs/amrnb/
+media/libstagefright/codecs/amrwb/
+media/libstagefright/codecs/amrwbenc/
+media/libstagefright/codecs/common/
+media/libstagefright/codecs/flac/
+media/libstagefright/codecs/m4v_h263/
+media/libstagefright/codecs/mp3dec/
+media/libstagefright/mpeg2ts
diff --git a/OWNERS b/OWNERS
index 8f405e9..7f523a2 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,4 +1,10 @@
+chz@google.com
elaurent@google.com
etalvala@google.com
+hkuang@google.com
lajos@google.com
marcone@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index bfd907e..8fe48c2 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,5 +1,5 @@
[Hook Scripts]
-mainline_hook = tools/mainline_hook.sh ${PREUPLOAD_COMMIT} "."
+mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
[Builtin Hooks]
clang_format = true
@@ -8,3 +8,4 @@
# Only turn on clang-format check for the following subfolders.
clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
media/libmediatranscoding/
+ services/mediatranscoding/
diff --git a/aidl/android/media/InterpolatorConfig.aidl b/aidl/android/media/InterpolatorConfig.aidl
new file mode 100644
index 0000000..ef7486e
--- /dev/null
+++ b/aidl/android/media/InterpolatorConfig.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorType;
+
+/**
+ * {@hide}
+ */
+parcelable InterpolatorConfig {
+ InterpolatorType type;
+ /** For cubic interpolation, the boundary conditions in slope. */
+ float firstSlope;
+ float lastSlope;
+ /** A flattened list of <x, y> pairs, monotonically increasing in x. */
+ float[] xy;
+}
diff --git a/aidl/android/media/InterpolatorType.aidl b/aidl/android/media/InterpolatorType.aidl
new file mode 100644
index 0000000..b722cad
--- /dev/null
+++ b/aidl/android/media/InterpolatorType.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * Polynomial spline interpolators.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum InterpolatorType {
+ /** Not continuous. */
+ STEP,
+ /** C0. */
+ LINEAR,
+ /** C1. */
+ CUBIC,
+ /** C1 (to provide locally monotonic curves). */
+ CUBIC_MONOTONIC,
+ // CUBIC_C2, // TODO - requires global computation / cache
+}
diff --git a/aidl/android/media/MicrophoneInfoData.aidl b/aidl/android/media/MicrophoneInfoData.aidl
new file mode 100644
index 0000000..747bfa5
--- /dev/null
+++ b/aidl/android/media/MicrophoneInfoData.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable MicrophoneInfoData {
+ @utf8InCpp String deviceId;
+ int portId;
+ int type;
+ @utf8InCpp String address;
+ int deviceLocation;
+ int deviceGroup;
+ int indexInTheGroup;
+ float[] geometricLocation;
+ float[] orientation;
+ float[] frequencies;
+ float[] frequencyResponses;
+ int[] channelMapping;
+ float sensitivity;
+ float maxSpl;
+ float minSpl;
+ int directionality;
+}
diff --git a/aidl/android/media/VolumeShaperConfiguration.aidl b/aidl/android/media/VolumeShaperConfiguration.aidl
new file mode 100644
index 0000000..6361851
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfiguration.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorConfig;
+import android.media.VolumeShaperConfigurationOptionFlag;
+import android.media.VolumeShaperConfigurationType;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperConfiguration {
+ VolumeShaperConfigurationType type;
+ int id;
+ /** Bitmask, indexed by VolumeShaperConfigurationOptionFlag. */
+ int optionFlags;
+ double durationMs;
+ InterpolatorConfig interpolatorConfig;
+}
diff --git a/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
new file mode 100644
index 0000000..f583cee
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationOptionFlag {
+ VOLUME_IN_DBFS,
+ CLOCK_TIME,
+}
diff --git a/aidl/android/media/VolumeShaperConfigurationType.aidl b/aidl/android/media/VolumeShaperConfigurationType.aidl
new file mode 100644
index 0000000..aa6334e
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationType.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationType {
+ ID,
+ SCALE,
+}
diff --git a/aidl/android/media/VolumeShaperOperation.aidl b/aidl/android/media/VolumeShaperOperation.aidl
new file mode 100644
index 0000000..dd9a0e7
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperation.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperOperation {
+ /** Operations to do. Bitmask of VolumeShaperOperationFlag. */
+ int flags;
+ /** If >= 0 the id to remove in a replace operation. */
+ int replaceId;
+ /** Position in the curve to set if a valid number (not nan). */
+ float xOffset;
+}
diff --git a/aidl/android/media/VolumeShaperOperationFlag.aidl b/aidl/android/media/VolumeShaperOperationFlag.aidl
new file mode 100644
index 0000000..8fe5275
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperationFlag.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperOperationFlag {
+ /** The absence of this flag indicates "play". */
+ REVERSE,
+ TERMINATE,
+ JOIN,
+ DELAY,
+ CREATE_IF_NECESSARY,
+}
diff --git a/aidl/android/media/VolumeShaperState.aidl b/aidl/android/media/VolumeShaperState.aidl
new file mode 100644
index 0000000..4085e2b
--- /dev/null
+++ b/aidl/android/media/VolumeShaperState.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperState {
+ /** Linear volume in the range MIN_LINEAR_VOLUME to MAX_LINEAR_VOLUME. */
+ float volume;
+ /** Position on curve expressed from MIN_CURVE_TIME to MAX_CURVE_TIME. */
+ float xOffset;
+}
diff --git a/apex/Android.bp b/apex/Android.bp
index c1ef3d8..b314e5d 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -34,6 +34,9 @@
],
},
},
+ // JNI
+ native_shared_libs: ["libmediaparser-jni"],
+ compile_multilib: "both",
prebuilts: [
"mediaextractor.policy",
"code_coverage.policy",
@@ -59,6 +62,15 @@
name: "com.android.media",
manifest: "manifest.json",
defaults: ["com.android.media-defaults"],
+ prebuilts: [
+ "media-linker-config",
+ ],
+}
+
+linker_config {
+ name: "media-linker-config",
+ src: "linker.config.json",
+ installable: false,
}
filegroup {
@@ -77,6 +89,9 @@
binaries: [
"mediaswcodec",
],
+ native_shared_libs: [
+ "libstagefright_foundation",
+ ],
prebuilts: [
"com.android.media.swcodec-mediaswcodec.rc",
"com.android.media.swcodec-ld.config.txt",
@@ -85,7 +100,6 @@
"crash_dump.policy",
"mediaswcodec.xml",
],
- use_vendor: true,
key: "com.android.media.swcodec.key",
certificate: ":com.android.media.swcodec.certificate",
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index a2e98cc..09c46d6 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -3,5 +3,22 @@
{
"path": "system/apex/tests"
}
+ ],
+ "presubmit": [
+ // The following tests validate codec and drm path.
+ {
+ "name": "GtsMediaTestCases",
+ "options" : [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ },
+ {
+ "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ },
+ {
+ "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
+ }
+ ]
+ }
]
}
diff --git a/apex/linker.config.json b/apex/linker.config.json
new file mode 100644
index 0000000..67c076e
--- /dev/null
+++ b/apex/linker.config.json
@@ -0,0 +1,3 @@
+{
+ "visible": true
+}
diff --git a/apex/manifest.json b/apex/manifest.json
index ddd642e..f1f69f4 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,4 @@
{
"name": "com.android.media",
- "version": 300000000
+ "version": 309999900
}
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 2320fd7..e20d867 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,4 +1,7 @@
{
"name": "com.android.media.swcodec",
- "version": 300000000
+ "version": 309999900,
+ "requireNativeLibs": [
+ ":sphal"
+ ]
}
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index d17481b..0c9b8c8 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -2,6 +2,5 @@
class main
user mediacodec
group camera drmrpc mediadrm
- override
ioprio rt 4
writepid /dev/cpuset/foreground/tasks
diff --git a/apex/testing/Android.bp b/apex/testing/Android.bp
index 376d3e4..d86094e 100644
--- a/apex/testing/Android.bp
+++ b/apex/testing/Android.bp
@@ -17,6 +17,10 @@
manifest: "test_manifest.json",
file_contexts: ":com.android.media-file_contexts",
defaults: ["com.android.media-defaults"],
+ prebuilts: [
+ "sdkinfo_45",
+ "media-linker-config",
+ ],
installable: false,
}
diff --git a/apex/testing/test_manifest.json b/apex/testing/test_manifest.json
index ddd642e..e1295a2 100644
--- a/apex/testing/test_manifest.json
+++ b/apex/testing/test_manifest.json
@@ -1,4 +1,4 @@
{
"name": "com.android.media",
- "version": 300000000
+ "version": 2147483647
}
diff --git a/camera/Android.bp b/camera/Android.bp
index fa36bb3..a9e00d0 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -35,10 +35,10 @@
"CameraParameters.cpp",
"CaptureResult.cpp",
"CameraParameters2.cpp",
+ "CameraSessionStats.cpp",
"ICamera.cpp",
"ICameraClient.cpp",
"ICameraRecordingProxy.cpp",
- "ICameraRecordingProxyListener.cpp",
"camera2/CaptureRequest.cpp",
"camera2/ConcurrentCamera.cpp",
"camera2/OutputConfiguration.cpp",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 84d1d93..f7d194e 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -25,7 +25,6 @@
#include <binder/IMemory.h>
#include <Camera.h>
-#include <ICameraRecordingProxyListener.h>
#include <android/hardware/ICameraService.h>
#include <android/hardware/ICamera.h>
@@ -77,63 +76,6 @@
return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);
}
-status_t Camera::connectLegacy(int cameraId, int halVersion,
- const String16& clientPackageName,
- int clientUid,
- sp<Camera>& camera)
-{
- ALOGV("%s: connect legacy camera device", __FUNCTION__);
- sp<Camera> c = new Camera(cameraId);
- sp<::android::hardware::ICameraClient> cl = c;
- status_t status = NO_ERROR;
- const sp<::android::hardware::ICameraService>& cs = CameraBaseT::getCameraService();
-
- binder::Status ret;
- if (cs != nullptr) {
- ret = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
- clientUid, /*out*/&(c->mCamera));
- }
- if (ret.isOk() && c->mCamera != nullptr) {
- IInterface::asBinder(c->mCamera)->linkToDeath(c);
- c->mStatus = NO_ERROR;
- camera = c;
- } else {
- switch(ret.serviceSpecificErrorCode()) {
- case hardware::ICameraService::ERROR_DISCONNECTED:
- status = -ENODEV;
- break;
- case hardware::ICameraService::ERROR_CAMERA_IN_USE:
- status = -EBUSY;
- break;
- case hardware::ICameraService::ERROR_INVALID_OPERATION:
- status = -EINVAL;
- break;
- case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
- status = -EUSERS;
- break;
- case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
- status = BAD_VALUE;
- break;
- case hardware::ICameraService::ERROR_DEPRECATED_HAL:
- status = -EOPNOTSUPP;
- break;
- case hardware::ICameraService::ERROR_DISABLED:
- status = -EACCES;
- break;
- case hardware::ICameraService::ERROR_PERMISSION_DENIED:
- status = PERMISSION_DENIED;
- break;
- default:
- status = -EINVAL;
- ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
- (cs != nullptr) ? "Service not available" : ret.toString8().string());
- break;
- }
- c.clear();
- }
- return status;
-}
-
status_t Camera::reconnect()
{
ALOGV("reconnect");
@@ -214,10 +156,6 @@
void Camera::stopRecording()
{
ALOGV("stopRecording");
- {
- Mutex::Autolock _l(mLock);
- mRecordingProxyListener.clear();
- }
sp <::android::hardware::ICamera> c = mCamera;
if (c == 0) return;
c->stopRecording();
@@ -325,12 +263,6 @@
mListener = listener;
}
-void Camera::setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener)
-{
- Mutex::Autolock _l(mLock);
- mRecordingProxyListener = listener;
-}
-
void Camera::setPreviewCallbackFlags(int flag)
{
ALOGV("setPreviewCallbackFlags");
@@ -384,19 +316,6 @@
// callback from camera service when timestamped frame is ready
void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
{
- // If recording proxy listener is registered, forward the frame and return.
- // The other listener (mListener) is ignored because the receiver needs to
- // call releaseRecordingFrame.
- sp<ICameraRecordingProxyListener> proxylistener;
- {
- Mutex::Autolock _l(mLock);
- proxylistener = mRecordingProxyListener;
- }
- if (proxylistener != NULL) {
- proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
- return;
- }
-
sp<CameraListener> listener;
{
Mutex::Autolock _l(mLock);
@@ -413,19 +332,6 @@
void Camera::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle)
{
- // If recording proxy listener is registered, forward the frame and return.
- // The other listener (mListener) is ignored because the receiver needs to
- // call releaseRecordingFrameHandle.
- sp<ICameraRecordingProxyListener> proxylistener;
- {
- Mutex::Autolock _l(mLock);
- proxylistener = mRecordingProxyListener;
- }
- if (proxylistener != NULL) {
- proxylistener->recordingFrameHandleCallbackTimestamp(timestamp, handle);
- return;
- }
-
sp<CameraListener> listener;
{
Mutex::Autolock _l(mLock);
@@ -444,19 +350,6 @@
const std::vector<nsecs_t>& timestamps,
const std::vector<native_handle_t*>& handles)
{
- // If recording proxy listener is registered, forward the frame and return.
- // The other listener (mListener) is ignored because the receiver needs to
- // call releaseRecordingFrameHandle.
- sp<ICameraRecordingProxyListener> proxylistener;
- {
- Mutex::Autolock _l(mLock);
- proxylistener = mRecordingProxyListener;
- }
- if (proxylistener != NULL) {
- proxylistener->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
- return;
- }
-
sp<CameraListener> listener;
{
Mutex::Autolock _l(mLock);
@@ -476,10 +369,9 @@
return new RecordingProxy(this);
}
-status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener)
+status_t Camera::RecordingProxy::startRecording()
{
ALOGV("RecordingProxy::startRecording");
- mCamera->setRecordingProxyListener(listener);
mCamera->reconnect();
return mCamera->startRecording();
}
@@ -490,23 +382,6 @@
mCamera->stopRecording();
}
-void Camera::RecordingProxy::releaseRecordingFrame(const sp<IMemory>& mem)
-{
- ALOGV("RecordingProxy::releaseRecordingFrame");
- mCamera->releaseRecordingFrame(mem);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandle(native_handle_t* handle) {
- ALOGV("RecordingProxy::releaseRecordingFrameHandle");
- mCamera->releaseRecordingFrameHandle(handle);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles) {
- ALOGV("RecordingProxy::releaseRecordingFrameHandleBatch");
- mCamera->releaseRecordingFrameHandleBatch(handles);
-}
-
Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
{
mCamera = camera;
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index aecb70a..0b0f584 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -29,6 +29,7 @@
#include <binder/IMemory.h>
#include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
// needed to instantiate
#include <camera/Camera.h>
@@ -124,9 +125,7 @@
{
Mutex::Autolock _l(gLock);
if (gCameraService.get() == 0) {
- char value[PROPERTY_VALUE_MAX];
- property_get("config.disable_cameraservice", value, "0");
- if (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0) {
+ if (CameraUtils::isCameraServiceDisabled()) {
return gCameraService;
}
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 135384a..96ea5f2 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -22,6 +22,7 @@
#include <binder/Parcel.h>
#include <camera/CameraMetadata.h>
+#include <camera_metadata_hidden.h>
namespace android {
@@ -169,6 +170,11 @@
return entryCount() == 0;
}
+size_t CameraMetadata::bufferSize() const {
+ return (mBuffer == NULL) ? 0 :
+ get_camera_metadata_size(mBuffer);
+}
+
status_t CameraMetadata::sort() {
if (mLocked) {
ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
@@ -872,5 +878,8 @@
return OK;
}
+metadata_vendor_id_t CameraMetadata::getVendorId() {
+ return get_camera_metadata_vendor_id(mBuffer);
+}
}; // namespace android
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
new file mode 100644
index 0000000..818b4d0
--- /dev/null
+++ b/camera/CameraSessionStats.cpp
@@ -0,0 +1,392 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "CameraSessionStats"
+#include <utils/Log.h>
+#include <utils/String16.h>
+
+#include <camera/CameraSessionStats.h>
+
+#include <binder/Parcel.h>
+
+namespace android {
+namespace hardware {
+
+status_t CameraStreamStats::readFromParcel(const android::Parcel* parcel) {
+ if (parcel == NULL) {
+ ALOGE("%s: Null parcel", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ status_t err = OK;
+
+ int width = 0;
+ if ((err = parcel->readInt32(&width)) != OK) {
+ ALOGE("%s: Failed to read width from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int height = 0;
+ if ((err = parcel->readInt32(&height)) != OK) {
+ ALOGE("%s: Failed to read height from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int format = 0;
+ if ((err = parcel->readInt32(&format)) != OK) {
+ ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int dataSpace = 0;
+ if ((err = parcel->readInt32(&dataSpace)) != OK) {
+ ALOGE("%s: Failed to read dataSpace from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t usage = 0;
+ if ((err = parcel->readInt64(&usage)) != OK) {
+ ALOGE("%s: Failed to read usage from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t requestCount = 0;
+ if ((err = parcel->readInt64(&requestCount)) != OK) {
+ ALOGE("%s: Failed to read request count from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t errorCount = 0;
+ if ((err = parcel->readInt64(&errorCount)) != OK) {
+ ALOGE("%s: Failed to read error count from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int startLatencyMs = 0;
+ if ((err = parcel->readInt32(&startLatencyMs)) != OK) {
+ ALOGE("%s: Failed to read start latency from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int maxHalBuffers = 0;
+ if ((err = parcel->readInt32(&maxHalBuffers)) != OK) {
+ ALOGE("%s: Failed to read max Hal buffers from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int maxAppBuffers = 0;
+ if ((err = parcel->readInt32(&maxAppBuffers)) != OK) {
+ ALOGE("%s: Failed to read max app buffers from parcel", __FUNCTION__);
+ return err;
+ }
+
+ mWidth = width;
+ mHeight = height;
+ mFormat = format;
+ mDataSpace = dataSpace;
+ mUsage = usage;
+ mRequestCount = requestCount;
+ mErrorCount = errorCount;
+ mStartLatencyMs = startLatencyMs;
+ mMaxHalBuffers = maxHalBuffers;
+ mMaxAppBuffers = maxAppBuffers;
+
+ return OK;
+}
+
+status_t CameraStreamStats::writeToParcel(android::Parcel* parcel) const {
+ if (parcel == NULL) {
+ ALOGE("%s: Null parcel", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ status_t err = OK;
+
+ if ((err = parcel->writeInt32(mWidth)) != OK) {
+ ALOGE("%s: Failed to write stream width!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mHeight)) != OK) {
+ ALOGE("%s: Failed to write stream height!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mFormat)) != OK) {
+ ALOGE("%s: Failed to write stream format!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mDataSpace)) != OK) {
+ ALOGE("%s: Failed to write stream dataSpace!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mUsage)) != OK) {
+ ALOGE("%s: Failed to write stream usage!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mRequestCount)) != OK) {
+ ALOGE("%s: Failed to write stream request count!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mErrorCount)) != OK) {
+ ALOGE("%s: Failed to write stream error count!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mStartLatencyMs)) != OK) {
+ ALOGE("%s: Failed to write stream start latency!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mMaxHalBuffers)) != OK) {
+ ALOGE("%s: Failed to write max hal buffers", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mMaxAppBuffers)) != OK) {
+ ALOGE("%s: Failed to write max app buffers", __FUNCTION__);
+ return err;
+ }
+
+ return OK;
+}
+
+const int CameraSessionStats::CAMERA_STATE_OPEN = 0;
+const int CameraSessionStats::CAMERA_STATE_ACTIVE = 1;
+const int CameraSessionStats::CAMERA_STATE_IDLE = 2;
+const int CameraSessionStats::CAMERA_STATE_CLOSED = 3;
+
+const int CameraSessionStats::CAMERA_FACING_BACK = 0;
+const int CameraSessionStats::CAMERA_FACING_FRONT = 1;
+const int CameraSessionStats::CAMERA_FACING_EXTERNAL = 2;
+
+const int CameraSessionStats::CAMERA_API_LEVEL_1 = 1;
+const int CameraSessionStats::CAMERA_API_LEVEL_2 = 2;
+
+CameraSessionStats::CameraSessionStats() :
+ mFacing(CAMERA_FACING_BACK),
+ mNewCameraState(CAMERA_STATE_CLOSED),
+ mApiLevel(0),
+ mIsNdk(false),
+ mLatencyMs(-1),
+ mSessionType(0),
+ mInternalReconfigure(0),
+ mRequestCount(0),
+ mResultErrorCount(0),
+ mDeviceError(false) {}
+
+CameraSessionStats::CameraSessionStats(const String16& cameraId,
+ int facing, int newCameraState, const String16& clientName,
+ int apiLevel, bool isNdk, int32_t latencyMs) :
+ mCameraId(cameraId),
+ mFacing(facing),
+ mNewCameraState(newCameraState),
+ mClientName(clientName),
+ mApiLevel(apiLevel),
+ mIsNdk(isNdk),
+ mLatencyMs(latencyMs),
+ mSessionType(0),
+ mInternalReconfigure(0),
+ mRequestCount(0),
+ mResultErrorCount(0),
+ mDeviceError(0) {}
+
+status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
+ if (parcel == NULL) {
+ ALOGE("%s: Null parcel", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ status_t err = OK;
+
+ String16 id;
+ if ((err = parcel->readString16(&id)) != OK) {
+ ALOGE("%s: Failed to read camera id!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ int facing = 0;
+ if ((err = parcel->readInt32(&facing)) != OK) {
+ ALOGE("%s: Failed to read camera facing from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int32_t newCameraState;
+ if ((err = parcel->readInt32(&newCameraState)) != OK) {
+ ALOGE("%s: Failed to read new camera state from parcel", __FUNCTION__);
+ return err;
+ }
+
+ String16 clientName;
+ if ((err = parcel->readString16(&clientName)) != OK) {
+ ALOGE("%s: Failed to read client name!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ int32_t apiLevel;
+ if ((err = parcel->readInt32(&apiLevel)) != OK) {
+ ALOGE("%s: Failed to read api level from parcel", __FUNCTION__);
+ return err;
+ }
+
+ bool isNdk;
+ if ((err = parcel->readBool(&isNdk)) != OK) {
+ ALOGE("%s: Failed to read isNdk flag from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int32_t latencyMs;
+ if ((err = parcel->readInt32(&latencyMs)) != OK) {
+ ALOGE("%s: Failed to read latencyMs from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int32_t sessionType;
+ if ((err = parcel->readInt32(&sessionType)) != OK) {
+ ALOGE("%s: Failed to read session type from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int32_t internalReconfigure;
+ if ((err = parcel->readInt32(&internalReconfigure)) != OK) {
+ ALOGE("%s: Failed to read internal reconfigure count from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t requestCount;
+ if ((err = parcel->readInt64(&requestCount)) != OK) {
+ ALOGE("%s: Failed to read request count from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t resultErrorCount;
+ if ((err = parcel->readInt64(&resultErrorCount)) != OK) {
+ ALOGE("%s: Failed to read result error count from parcel", __FUNCTION__);
+ return err;
+ }
+
+ bool deviceError;
+ if ((err = parcel->readBool(&deviceError)) != OK) {
+ ALOGE("%s: Failed to read device error flag from parcel", __FUNCTION__);
+ return err;
+ }
+
+ std::vector<CameraStreamStats> streamStats;
+ if ((err = parcel->readParcelableVector(&streamStats)) != OK) {
+ ALOGE("%s: Failed to read stream state from parcel", __FUNCTION__);
+ return err;
+ }
+
+ mCameraId = id;
+ mFacing = facing;
+ mNewCameraState = newCameraState;
+ mClientName = clientName;
+ mApiLevel = apiLevel;
+ mIsNdk = isNdk;
+ mLatencyMs = latencyMs;
+ mSessionType = sessionType;
+ mInternalReconfigure = internalReconfigure;
+ mRequestCount = requestCount;
+ mResultErrorCount = resultErrorCount;
+ mDeviceError = deviceError;
+ mStreamStats = std::move(streamStats);
+
+ return OK;
+}
+
+status_t CameraSessionStats::writeToParcel(android::Parcel* parcel) const {
+ if (parcel == NULL) {
+ ALOGE("%s: Null parcel", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ status_t err = OK;
+
+ if ((err = parcel->writeString16(mCameraId)) != OK) {
+ ALOGE("%s: Failed to write camera id!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mFacing)) != OK) {
+ ALOGE("%s: Failed to write camera facing!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mNewCameraState)) != OK) {
+ ALOGE("%s: Failed to write new camera state!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeString16(mClientName)) != OK) {
+ ALOGE("%s: Failed to write client name!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mApiLevel)) != OK) {
+ ALOGE("%s: Failed to write api level!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeBool(mIsNdk)) != OK) {
+ ALOGE("%s: Failed to write isNdk flag!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mLatencyMs)) != OK) {
+ ALOGE("%s: Failed to write latency in Ms!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mSessionType)) != OK) {
+ ALOGE("%s: Failed to write session type!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mInternalReconfigure)) != OK) {
+ ALOGE("%s: Failed to write internal reconfigure count!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mRequestCount)) != OK) {
+ ALOGE("%s: Failed to write request count!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mResultErrorCount)) != OK) {
+ ALOGE("%s: Failed to write result error count!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeBool(mDeviceError)) != OK) {
+ ALOGE("%s: Failed to write device error flag!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeParcelableVector(mStreamStats)) != OK) {
+ ALOGE("%s: Failed to write stream states!", __FUNCTION__);
+ return err;
+ }
+
+ return OK;
+}
+
+} // namespace hardware
+} // namespace android
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 67fc116..f9b1b37 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -23,6 +23,7 @@
#include <system/window.h>
#include <system/graphics.h>
+#include <cutils/properties.h>
#include <utils/Log.h>
namespace android {
@@ -122,4 +123,10 @@
return OK;
}
+bool CameraUtils::isCameraServiceDisabled() {
+ char value[PROPERTY_VALUE_MAX];
+ property_get("config.disable_cameraservice", value, "0");
+ return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
+}
+
} /* namespace android */
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index 9cbfdb0..755051c 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -49,6 +49,9 @@
}
errorPhysicalCameraId = cameraId;
}
+ parcel->readInt64(&lastCompletedRegularFrameNumber);
+ parcel->readInt64(&lastCompletedReprocessFrameNumber);
+ parcel->readInt64(&lastCompletedZslFrameNumber);
return OK;
}
@@ -76,6 +79,9 @@
} else {
parcel->writeBool(false);
}
+ parcel->writeInt64(lastCompletedRegularFrameNumber);
+ parcel->writeInt64(lastCompletedReprocessFrameNumber);
+ parcel->writeInt64(lastCompletedZslFrameNumber);
return OK;
}
diff --git a/camera/ICameraClient.cpp b/camera/ICameraClient.cpp
index c02c81b..bef2ea0 100644
--- a/camera/ICameraClient.cpp
+++ b/camera/ICameraClient.cpp
@@ -142,7 +142,8 @@
camera_frame_metadata_t metadata;
if (data.dataAvail() > 0) {
metadata.number_of_faces = data.readInt32();
- if (metadata.number_of_faces <= 0 ||
+ // Zero faces is a valid case, to notify clients that no faces are now visible
+ if (metadata.number_of_faces < 0 ||
metadata.number_of_faces > (int32_t)(INT32_MAX / sizeof(camera_face_t))) {
ALOGE("%s: Too large face count: %d", __FUNCTION__, metadata.number_of_faces);
return BAD_VALUE;
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index bd6af75..97523a5 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "ICameraRecordingProxy"
#include <camera/CameraUtils.h>
#include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
#include <binder/IMemory.h>
#include <binder/Parcel.h>
#include <media/hardware/HardwareAPI.h>
@@ -29,10 +28,7 @@
enum {
START_RECORDING = IBinder::FIRST_CALL_TRANSACTION,
- STOP_RECORDING,
- RELEASE_RECORDING_FRAME,
- RELEASE_RECORDING_FRAME_HANDLE,
- RELEASE_RECORDING_FRAME_HANDLE_BATCH,
+ STOP_RECORDING
};
@@ -44,12 +40,11 @@
{
}
- status_t startRecording(const sp<ICameraRecordingProxyListener>& listener)
+ status_t startRecording()
{
ALOGV("startRecording");
Parcel data, reply;
data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(listener));
remote()->transact(START_RECORDING, data, &reply);
return reply.readInt32();
}
@@ -61,46 +56,6 @@
data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
remote()->transact(STOP_RECORDING, data, &reply);
}
-
- void releaseRecordingFrame(const sp<IMemory>& mem)
- {
- ALOGV("releaseRecordingFrame");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(mem));
- remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
- }
-
- void releaseRecordingFrameHandle(native_handle_t *handle) {
- ALOGV("releaseRecordingFrameHandle");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
- data.writeNativeHandle(handle);
-
- remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
-
- // Close the native handle because camera received a dup copy.
- native_handle_close(handle);
- native_handle_delete(handle);
- }
-
- void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
- ALOGV("releaseRecordingFrameHandleBatch");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
- uint32_t n = handles.size();
- data.writeUint32(n);
- for (auto& handle : handles) {
- data.writeNativeHandle(handle);
- }
- remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
-
- // Close the native handle because camera received a dup copy.
- for (auto& handle : handles) {
- native_handle_close(handle);
- native_handle_delete(handle);
- }
- }
};
IMPLEMENT_META_INTERFACE(CameraRecordingProxy, "android.hardware.ICameraRecordingProxy");
@@ -114,9 +69,7 @@
case START_RECORDING: {
ALOGV("START_RECORDING");
CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
- sp<ICameraRecordingProxyListener> listener =
- interface_cast<ICameraRecordingProxyListener>(data.readStrongBinder());
- reply->writeInt32(startRecording(listener));
+ reply->writeInt32(startRecording());
return NO_ERROR;
} break;
case STOP_RECORDING: {
@@ -125,46 +78,6 @@
stopRecording();
return NO_ERROR;
} break;
- case RELEASE_RECORDING_FRAME: {
- ALOGV("RELEASE_RECORDING_FRAME");
- CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
- sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
- releaseRecordingFrame(mem);
- return NO_ERROR;
- } break;
- case RELEASE_RECORDING_FRAME_HANDLE: {
- ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
- CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-
- // releaseRecordingFrameHandle will be responsble to close the native handle.
- releaseRecordingFrameHandle(data.readNativeHandle());
- return NO_ERROR;
- } break;
- case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
- ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
- CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
- uint32_t n = 0;
- status_t res = data.readUint32(&n);
- if (res != OK) {
- ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
- return BAD_VALUE;
- }
- std::vector<native_handle_t*> handles;
- handles.reserve(n);
- for (uint32_t i = 0; i < n; i++) {
- native_handle_t* handle = data.readNativeHandle();
- if (handle == nullptr) {
- ALOGE("%s: Received a null native handle at handles[%d]",
- __FUNCTION__, i);
- return BAD_VALUE;
- }
- handles.push_back(handle);
- }
-
- // releaseRecordingFrameHandleBatch will be responsble to close the native handle.
- releaseRecordingFrameHandleBatch(handles);
- return NO_ERROR;
- } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
@@ -173,4 +86,3 @@
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
deleted file mode 100644
index 66faf8f..0000000
--- a/camera/ICameraRecordingProxyListener.cpp
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ICameraRecordingProxyListener"
-#include <camera/CameraUtils.h>
-#include <camera/ICameraRecordingProxyListener.h>
-#include <binder/IMemory.h>
-#include <binder/Parcel.h>
-#include <media/hardware/HardwareAPI.h>
-#include <utils/Log.h>
-
-namespace android {
-
-enum {
- DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
- RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
- RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH
-};
-
-class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
-{
-public:
- explicit BpCameraRecordingProxyListener(const sp<IBinder>& impl)
- : BpInterface<ICameraRecordingProxyListener>(impl)
- {
- }
-
- void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& imageData)
- {
- ALOGV("dataCallback");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
- data.writeInt64(timestamp);
- data.writeInt32(msgType);
- data.writeStrongBinder(IInterface::asBinder(imageData));
- remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
- ALOGV("recordingFrameHandleCallbackTimestamp");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
- data.writeInt64(timestamp);
- data.writeNativeHandle(handle);
- remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
- IBinder::FLAG_ONEWAY);
-
- // The native handle is dupped in ICameraClient so we need to free it here.
- native_handle_close(handle);
- native_handle_delete(handle);
- }
-
- void recordingFrameHandleCallbackTimestampBatch(
- const std::vector<nsecs_t>& timestamps,
- const std::vector<native_handle_t*>& handles) {
- ALOGV("recordingFrameHandleCallbackTimestampBatch");
- Parcel data, reply;
- data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-
- uint32_t n = timestamps.size();
- if (n != handles.size()) {
- ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
- __FUNCTION__, timestamps.size(), handles.size());
- return;
- }
- data.writeUint32(n);
- for (auto ts : timestamps) {
- data.writeInt64(ts);
- }
- for (auto& handle : handles) {
- data.writeNativeHandle(handle);
- }
- remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
- IBinder::FLAG_ONEWAY);
-
- // The native handle is dupped in ICameraClient so we need to free it here.
- for (auto& handle : handles) {
- native_handle_close(handle);
- native_handle_delete(handle);
- }
- }
-};
-
-IMPLEMENT_META_INTERFACE(CameraRecordingProxyListener, "android.hardware.ICameraRecordingProxyListener");
-
-// ----------------------------------------------------------------------
-
-status_t BnCameraRecordingProxyListener::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch(code) {
- case DATA_CALLBACK_TIMESTAMP: {
- ALOGV("DATA_CALLBACK_TIMESTAMP");
- CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
- nsecs_t timestamp = data.readInt64();
- int32_t msgType = data.readInt32();
- sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
- dataCallbackTimestamp(timestamp, msgType, imageData);
- return NO_ERROR;
- } break;
- case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
- ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
- CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
- nsecs_t timestamp;
-            status_t res = data.readInt64(&timestamp);
- if (res != OK) {
- ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
- return BAD_VALUE;
- }
-
- native_handle_t* handle = data.readNativeHandle();
- if (handle == nullptr) {
- ALOGE("%s: Received a null native handle", __FUNCTION__);
- return BAD_VALUE;
- }
- // The native handle will be freed in
- // BpCameraRecordingProxy::releaseRecordingFrameHandle.
- recordingFrameHandleCallbackTimestamp(timestamp, handle);
- return NO_ERROR;
- } break;
- case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
- ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
- CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
- uint32_t n = 0;
- status_t res = data.readUint32(&n);
- if (res != OK) {
- ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
- return BAD_VALUE;
- }
- std::vector<nsecs_t> timestamps;
- std::vector<native_handle_t*> handles;
- timestamps.reserve(n);
- handles.reserve(n);
- for (uint32_t i = 0; i < n; i++) {
- nsecs_t t;
- res = data.readInt64(&t);
- if (res != OK) {
- ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
- __FUNCTION__, i, strerror(-res), res);
- return BAD_VALUE;
- }
- timestamps.push_back(t);
- }
- for (uint32_t i = 0; i < n; i++) {
- native_handle_t* handle = data.readNativeHandle();
- if (handle == nullptr) {
- ALOGE("%s: Received a null native handle at handles[%d]",
- __FUNCTION__, i);
- return BAD_VALUE;
- }
- handles.push_back(handle);
- }
- // The native handle will be freed in
- // BpCameraRecordingProxy::releaseRecordingFrameHandleBatch.
- recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/TEST_MAPPING b/camera/TEST_MAPPING
new file mode 100644
index 0000000..683e183
--- /dev/null
+++ b/camera/TEST_MAPPING
@@ -0,0 +1,11 @@
+{
+ "postsubmit": [
+ {
+ "name": "CtsCameraTestCases"
+ },
+ {
+ "name": "CtsCameraTestCases",
+ "keywords": ["primary-device"]
+ }
+ ]
+}
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index d713d2d..24fa912 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -660,6 +660,16 @@
return sGlobalVendorTagDescriptorCache;
}
+bool VendorTagDescriptorCache::isVendorCachePresent(metadata_vendor_id_t vendorId) {
+ Mutex::Autolock al(sLock);
+ if ((sGlobalVendorTagDescriptorCache.get() != nullptr) &&
+ (sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().find(vendorId) !=
+ sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().end())) {
+ return true;
+ }
+ return false;
+}
+
extern "C" {
int vendor_tag_descriptor_get_tag_count(const vendor_tag_ops_t* /*v*/) {
diff --git a/camera/aidl/android/hardware/CameraSessionStats.aidl b/camera/aidl/android/hardware/CameraSessionStats.aidl
new file mode 100644
index 0000000..a8e6774
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraSessionStats.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/** @hide */
+parcelable CameraSessionStats cpp_header "camera/CameraSessionStats.h";
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ac7a35b..8af704d 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -69,7 +69,7 @@
/**
* Default UID/PID values for non-privileged callers of
- * connect(), connectDevice(), and connectLegacy()
+ * connect() and connectDevice()
*/
const int USE_CALLING_UID = -1;
const int USE_CALLING_PID = -1;
@@ -93,20 +93,6 @@
int clientUid);
/**
- * halVersion constant for connectLegacy
- */
- const int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;
-
- /**
- * Open a camera device in legacy mode, if supported by the camera module HAL.
- */
- ICamera connectLegacy(ICameraClient client,
- int cameraId,
- int halVersion,
- String opPackageName,
- int clientUid);
-
- /**
* Add listener for changes to camera device and flashlight state.
*
* Also returns the set of currently-known camera IDs and state of each device.
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 7575948..d428b4e 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -16,11 +16,11 @@
package android.hardware;
+import android.hardware.CameraSessionStats;
+
/**
* Binder interface for the camera service proxy running in system_server.
*
- * Keep in sync with frameworks/av/include/camera/ICameraServiceProxy.h
- *
* @hide
*/
interface ICameraServiceProxy
@@ -30,30 +30,9 @@
*/
oneway void pingForUserUpdate();
- /**
- * Values for notifyCameraState newCameraState
- */
- const int CAMERA_STATE_OPEN = 0;
- const int CAMERA_STATE_ACTIVE = 1;
- const int CAMERA_STATE_IDLE = 2;
- const int CAMERA_STATE_CLOSED = 3;
-
- /**
- * Values for notifyCameraState facing
- */
- const int CAMERA_FACING_BACK = 0;
- const int CAMERA_FACING_FRONT = 1;
- const int CAMERA_FACING_EXTERNAL = 2;
-
- /**
- * Values for notifyCameraState api level
- */
- const int CAMERA_API_LEVEL_1 = 1;
- const int CAMERA_API_LEVEL_2 = 2;
/**
* Update the status of a camera device.
*/
- oneway void notifyCameraState(String cameraId, int facing, int newCameraState,
- String clientName, int apiLevel);
+ oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index b183ccc..28a57bd 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -83,9 +83,11 @@
* @param operatingMode The kind of session to create; either NORMAL_MODE or
* CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
* @param sessionParams Session wide camera parameters
+ * @param startTimeMs The timestamp of session creation start, measured by
+ * SystemClock.uptimeMillis.
* @return a list of stream ids that can be used in offline mode via "switchToOffline"
*/
- int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
+ int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams, long startTimeMs);
/**
* Check whether a particular session configuration has camera device
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 1843ec4..ebc09d7 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -94,12 +94,12 @@
// Do not distinguish null arrays from 0-sized arrays.
for (int32_t i = 0; i < size; ++i) {
// Parcel.writeParcelableArray
- size_t len;
- const char16_t* className = parcel->readString16Inplace(&len);
+ std::optional<std::string> className;
+ parcel->readUtf8FromUtf16(&className);
ALOGV("%s: Read surface class = %s", __FUNCTION__,
- className != NULL ? String8(className).string() : "<null>");
+ className.value_or("<null>").c_str());
- if (className == NULL) {
+ if (className == std::nullopt) {
continue;
}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index dc7f88a..5c3e3b0 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -37,7 +37,7 @@
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
],
- compile_multilib: "32",
+ compile_multilib: "first",
cflags: [
"-Wall",
"-Wextra",
@@ -48,6 +48,6 @@
init_rc: ["cameraserver.rc"],
vintf_fragments: [
- "manifest_android.frameworks.cameraservice.service@2.1.xml",
+ "manifest_android.frameworks.cameraservice.service@2.2.xml",
],
}
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml
deleted file mode 100644
index 5a15b35..0000000
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="framework">
- <hal>
- <name>android.frameworks.cameraservice.service</name>
- <transport>hwbinder</transport>
- <version>2.1</version>
- <interface>
- <name>ICameraService</name>
- <instance>default</instance>
- </interface>
- </hal>
-</manifest>
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
new file mode 100644
index 0000000..eeafc91
--- /dev/null
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
@@ -0,0 +1,11 @@
+<manifest version="1.0" type="framework">
+ <hal>
+ <name>android.frameworks.cameraservice.service</name>
+ <transport>hwbinder</transport>
+ <version>2.2</version>
+ <interface>
+ <name>ICameraService</name>
+ <instance>default</instance>
+ </interface>
+ </hal>
+</manifest>
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 2cdb617..5579183 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -24,7 +24,6 @@
#include <gui/IGraphicBufferProducer.h>
#include <system/camera.h>
#include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
#include <camera/android/hardware/ICamera.h>
#include <camera/android/hardware/ICameraClient.h>
#include <camera/CameraBase.h>
@@ -84,10 +83,6 @@
const String16& clientPackageName,
int clientUid, int clientPid);
- static status_t connectLegacy(int cameraId, int halVersion,
- const String16& clientPackageName,
- int clientUid, sp<Camera>& camera);
-
virtual ~Camera();
status_t reconnect();
@@ -154,7 +149,6 @@
status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
void setListener(const sp<CameraListener>& listener);
- void setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
// Configure preview callbacks to app. Only one of the older
// callbacks or the callback surface can be active at the same time;
@@ -187,12 +181,8 @@
explicit RecordingProxy(const sp<Camera>& camera);
// ICameraRecordingProxy interface
- virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
+ virtual status_t startRecording();
virtual void stopRecording();
- virtual void releaseRecordingFrame(const sp<IMemory>& mem);
- virtual void releaseRecordingFrameHandle(native_handle_t* handle);
- virtual void releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles);
private:
sp<Camera> mCamera;
@@ -203,8 +193,6 @@
Camera(const Camera&);
Camera& operator=(const Camera);
- sp<ICameraRecordingProxyListener> mRecordingProxyListener;
-
friend class CameraBase;
};
diff --git a/camera/include/camera/CameraMetadata.h b/camera/include/camera/CameraMetadata.h
index 9d1b5c7..c56ee6d 100644
--- a/camera/include/camera/CameraMetadata.h
+++ b/camera/include/camera/CameraMetadata.h
@@ -128,6 +128,11 @@
bool isEmpty() const;
/**
+ * Return the allocated camera metadata buffer size in bytes.
+ */
+ size_t bufferSize() const;
+
+ /**
* Sort metadata buffer for faster find
*/
status_t sort();
@@ -237,6 +242,11 @@
static status_t getTagFromName(const char *name,
const VendorTagDescriptor* vTags, uint32_t *tag);
+ /**
+ * Return the current vendor tag id associated with this metadata.
+ */
+ metadata_vendor_id_t getVendorId();
+
private:
camera_metadata_t *mBuffer;
mutable bool mLocked;
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
new file mode 100644
index 0000000..27a756f
--- /dev/null
+++ b/camera/include/camera/CameraSessionStats.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
+#define ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
+
+#include <binder/Parcelable.h>
+
+namespace android {
+namespace hardware {
+
+/**
+ * Camera stream info and statistics
+ */
+class CameraStreamStats : public android::Parcelable {
+public:
+ int mWidth;
+ int mHeight;
+ int mFormat;
+ int mDataSpace;
+ int64_t mUsage;
+
+ // The number of requested buffers
+ int64_t mRequestCount;
+ // The number of buffer errors
+ int64_t mErrorCount;
+
+ // The capture latency of 1st request for this stream
+ int32_t mStartLatencyMs;
+
+ // Buffer count info
+ int mMaxHalBuffers;
+ int mMaxAppBuffers;
+
+ CameraStreamStats() :
+ mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
+ mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
+ mMaxHalBuffers(0), mMaxAppBuffers(0) {}
+ CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
+ int maxHalBuffers, int maxAppBuffers)
+ : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
+ mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
+ mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers) {}
+
+ virtual status_t readFromParcel(const android::Parcel* parcel) override;
+ virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
+/**
+ * Camera session statistics
+ *
+ * This includes session wide info and stream statistics.
+ */
+class CameraSessionStats : public android::Parcelable {
+public:
+ /**
+ * Values for notifyCameraState newCameraState
+ */
+ static const int CAMERA_STATE_OPEN;
+ static const int CAMERA_STATE_ACTIVE;
+ static const int CAMERA_STATE_IDLE;
+ static const int CAMERA_STATE_CLOSED;
+
+ /**
+ * Values for notifyCameraState facing
+ */
+ static const int CAMERA_FACING_BACK;
+ static const int CAMERA_FACING_FRONT;
+ static const int CAMERA_FACING_EXTERNAL;
+
+ /**
+ * Values for notifyCameraState api level
+ */
+ static const int CAMERA_API_LEVEL_1;
+ static const int CAMERA_API_LEVEL_2;
+
+ String16 mCameraId;
+ int mFacing;
+ int mNewCameraState;
+ String16 mClientName;
+ int mApiLevel;
+ bool mIsNdk;
+ // latency in ms for camera open, close, or session creation.
+ int mLatencyMs;
+
+ // Session info and statistics
+ int mSessionType;
+ int mInternalReconfigure;
+ // The number of capture requests
+ int64_t mRequestCount;
+ // The number of result error
+ int64_t mResultErrorCount;
+ // Whether the device runs into an error state
+ bool mDeviceError;
+ std::vector<CameraStreamStats> mStreamStats;
+
+ // Constructors
+ CameraSessionStats();
+ CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
+ const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs);
+
+ virtual status_t readFromParcel(const android::Parcel* parcel) override;
+ virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
+}; // namespace hardware
+}; // namespace android
+
+#endif // ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index f596f80..a397ccd 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -47,6 +47,11 @@
*/
static bool isNativeHandleMetadata(const sp<IMemory>& imageData);
+ /**
+ * Check if camera service is disabled on this device
+ */
+ static bool isCameraServiceDisabled();
+
private:
CameraUtils();
};
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index dc3d282..f163c1e 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -76,6 +76,34 @@
*/
String16 errorPhysicalCameraId;
+ // The last completed frame numbers shouldn't be checked in onResultReceived() and notifyError()
+ // because the output buffers could be arriving after onResultReceived() and
+ // notifyError(). Given this constraint, we check it for each
+ // onCaptureStarted, and if there is no further onCaptureStarted(),
+ // check for onDeviceIdle() to clear out all pending frame numbers.
+
+ /**
+ * The latest regular request frameNumber for which all buffers and capture result have been
+ * returned or notified as an BUFFER_ERROR/RESULT_ERROR/REQUEST_ERROR. -1 if
+ * none has completed.
+ */
+ int64_t lastCompletedRegularFrameNumber;
+
+ /**
+ * The latest reprocess request frameNumber for which all buffers and capture result have been
+ * returned or notified as an BUFFER_ERROR/RESULT_ERROR/REQUEST_ERROR. -1 if
+ * none has completed.
+ */
+ int64_t lastCompletedReprocessFrameNumber;
+
+ /**
+ * The latest Zsl request frameNumber for which all buffers and capture result have been
+ * returned or notified as an BUFFER_ERROR/RESULT_ERROR/REQUEST_ERROR. -1 if
+ * none has completed.
+ */
+ int64_t lastCompletedZslFrameNumber;
+
+
/**
* Constructor initializes object as invalid by setting requestId to be -1.
*/
@@ -87,7 +115,10 @@
frameNumber(0),
partialResultCount(0),
errorStreamId(-1),
- errorPhysicalCameraId() {
+ errorPhysicalCameraId(),
+ lastCompletedRegularFrameNumber(-1),
+ lastCompletedReprocessFrameNumber(-1),
+ lastCompletedZslFrameNumber(-1) {
}
/**
diff --git a/camera/include/camera/ICameraRecordingProxy.h b/camera/include/camera/ICameraRecordingProxy.h
index 02af2f3..4306dc1 100644
--- a/camera/include/camera/ICameraRecordingProxy.h
+++ b/camera/include/camera/ICameraRecordingProxy.h
@@ -24,13 +24,11 @@
namespace android {
-class ICameraRecordingProxyListener;
-class IMemory;
class Parcel;
/*
- * The purpose of ICameraRecordingProxy and ICameraRecordingProxyListener is to
- * allow applications using the camera during recording.
+ * The purpose of ICameraRecordingProxy is to
+ * allow applications to use the camera during recording with the old camera API.
*
* Camera service allows only one client at a time. Since camcorder application
* needs to own the camera to do things like zoom, the media recorder cannot
@@ -42,35 +40,29 @@
* ICameraRecordingProxy
* startRecording()
* stopRecording()
- * releaseRecordingFrame()
*
- * ICameraRecordingProxyListener
- * dataCallbackTimestamp()
-
* The camcorder app opens the camera and starts the preview. The app passes
* ICamera and ICameraRecordingProxy to the media recorder by
* MediaRecorder::setCamera(). The recorder uses ICamera to setup the camera in
* MediaRecorder::start(). After setup, the recorder disconnects from camera
- * service. The recorder calls ICameraRecordingProxy::startRecording() and
- * passes a ICameraRecordingProxyListener to the app. The app connects back to
- * camera service and starts the recording. The app owns the camera and can do
- * things like zoom. The media recorder receives the video frames from the
- * listener and releases them by ICameraRecordingProxy::releaseRecordingFrame.
- * The recorder calls ICameraRecordingProxy::stopRecording() to stop the
- * recording.
+ * service. The recorder calls ICameraRecordingProxy::startRecording() and The
+ * app owns the camera and can do things like zoom. The media recorder receives
+ * the video frames via a buffer queue. The recorder calls
+ * ICameraRecordingProxy::stopRecording() to stop the recording.
*
* The call sequences are as follows:
* 1. The app: Camera.unlock().
* 2. The app: MediaRecorder.setCamera().
* 3. Start recording
* (1) The app: MediaRecorder.start().
- * (2) The recorder: ICamera.unlock() and ICamera.disconnect().
- * (3) The recorder: ICameraRecordingProxy.startRecording().
- * (4) The app: ICamera.reconnect().
- * (5) The app: ICamera.startRecording().
+ * (2) The recorder: ICamera.setVideoTarget(buffer queue).
+ * (3) The recorder: ICamera.unlock() and ICamera.disconnect().
+ * (4) The recorder: ICameraRecordingProxy.startRecording().
+ * (5) The app: ICamera.reconnect().
+ * (6) The app: ICamera.startRecording().
* 4. During recording
- * (1) The recorder: receive frames from ICameraRecordingProxyListener.dataCallbackTimestamp()
- * (2) The recorder: release frames by ICameraRecordingProxy.releaseRecordingFrame().
+ * (1) The recorder: receive frames via a buffer queue
+ * (2) The recorder: release frames via a buffer queue
* 5. Stop recording
* (1) The app: MediaRecorder.stop()
* (2) The recorder: ICameraRecordingProxy.stopRecording().
@@ -82,12 +74,8 @@
public:
DECLARE_META_INTERFACE(CameraRecordingProxy);
- virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener) = 0;
+ virtual status_t startRecording() = 0;
virtual void stopRecording() = 0;
- virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0;
- virtual void releaseRecordingFrameHandle(native_handle_t *handle) = 0;
- virtual void releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/camera/include/camera/ICameraRecordingProxyListener.h b/camera/include/camera/ICameraRecordingProxyListener.h
deleted file mode 100644
index da03c56..0000000
--- a/camera/include/camera/ICameraRecordingProxyListener.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-
-#include <vector>
-#include <binder/IInterface.h>
-#include <cutils/native_handle.h>
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-class Parcel;
-class IMemory;
-
-class ICameraRecordingProxyListener: public IInterface
-{
-public:
- DECLARE_META_INTERFACE(CameraRecordingProxyListener);
-
- virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType,
- const sp<IMemory>& data) = 0;
-
- virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
- native_handle_t* handle) = 0;
-
- virtual void recordingFrameHandleCallbackTimestampBatch(
- const std::vector<nsecs_t>& timestamps,
- const std::vector<native_handle_t*>& handles) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnCameraRecordingProxyListener: public BnInterface<ICameraRecordingProxyListener>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/camera/include/camera/VendorTagDescriptor.h b/camera/include/camera/VendorTagDescriptor.h
index b2fbf3a..b3440d5 100644
--- a/camera/include/camera/VendorTagDescriptor.h
+++ b/camera/include/camera/VendorTagDescriptor.h
@@ -249,6 +249,12 @@
*/
static void clearGlobalVendorTagCache();
+ /**
+ * Return true if given vendor id is present in the vendor tag caches, return
+ * false otherwise.
+ */
+ static bool isVendorCachePresent(metadata_vendor_id_t vendorId);
+
};
} /* namespace android */
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 7ba82c1..3cf94d0 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -121,9 +121,11 @@
"libcamera_metadata",
"libmediandk",
"android.frameworks.cameraservice.device@2.0",
+ "android.frameworks.cameraservice.device@2.1",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
"android.frameworks.cameraservice.service@2.1",
+ "android.frameworks.cameraservice.service@2.2",
],
static_libs: [
"android.hardware.camera.common@1.0-helper",
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 0d7180a..08c88ce 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -186,6 +186,7 @@
const ACaptureRequest* sessionParameters,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session) {
+ nsecs_t startTimeNs = systemTime();
sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
Mutex::Autolock _l(mDeviceLock);
camera_status_t ret = checkCameraClosedOrErrorLocked();
@@ -199,7 +200,7 @@
}
// Create new session
- ret = configureStreamsLocked(outputs, sessionParameters);
+ ret = configureStreamsLocked(outputs, sessionParameters, startTimeNs);
if (ret != ACAMERA_OK) {
ALOGE("Fail to create new session. cannot configure streams");
return ret;
@@ -450,7 +451,11 @@
}
// No new session, unconfigure now
- camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
+ // Note: The unconfiguration of the session won't be counted toward
+ // session latency because a stream configuration with 0 streams won't
+ // ever become active.
+ nsecs_t startTimeNs = systemTime();
+ camera_status_t ret = configureStreamsLocked(nullptr, nullptr, startTimeNs);
if (ret != ACAMERA_OK) {
ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
}
@@ -609,7 +614,7 @@
camera_status_t
CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
- const ACaptureRequest* sessionParameters) {
+ const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
ACaptureSessionOutputContainer emptyOutput;
if (outputs == nullptr) {
outputs = &emptyOutput;
@@ -711,7 +716,8 @@
params.append(sessionParameters->settings->getInternalData());
}
std::vector<int> offlineStreamIds;
- remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params, &offlineStreamIds);
+ remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params,
+ ns2ms(startTimeNs), &offlineStreamIds);
if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
remoteRet.toString8().string());
@@ -1336,56 +1342,97 @@
void
CameraDevice::checkAndFireSequenceCompleteLocked() {
int64_t completedFrameNumber = mFrameNumberTracker.getCompletedFrameNumber();
- //std::map<int, int64_t> mSequenceLastFrameNumberMap;
auto it = mSequenceLastFrameNumberMap.begin();
while (it != mSequenceLastFrameNumberMap.end()) {
int sequenceId = it->first;
- int64_t lastFrameNumber = it->second;
- bool seqCompleted = false;
- bool hasCallback = true;
+ int64_t lastFrameNumber = it->second.lastFrameNumber;
+ bool hasCallback = true;
+
+ if (mRemote == nullptr) {
+ ALOGW("Camera %s closed while checking sequence complete", getId());
+ return;
+ }
+ ALOGV("%s: seq %d's last frame number %" PRId64 ", completed %" PRId64,
+ __FUNCTION__, sequenceId, lastFrameNumber, completedFrameNumber);
+ if (!it->second.isSequenceCompleted) {
+ // Check if there is a callback registered for this sequence.
+ // This should not happen because we always register a callback (with nullptr inside)
+ if (mSequenceCallbackMap.count(sequenceId) == 0) {
+ ALOGW("No callback found for sequenceId %d", sequenceId);
+ hasCallback = false;
+ }
+
+ if (lastFrameNumber <= completedFrameNumber) {
+ ALOGV("Mark sequenceId %d as sequence completed", sequenceId);
+ it->second.isSequenceCompleted = true;
+ }
+
+ if (it->second.isSequenceCompleted && hasCallback) {
+ auto cbIt = mSequenceCallbackMap.find(sequenceId);
+ CallbackHolder cbh = cbIt->second;
+
+ // send seq complete callback
+ sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
+ msg->setPointer(kContextKey, cbh.mContext);
+ msg->setObject(kSessionSpKey, cbh.mSession);
+ msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
+ msg->setInt32(kSequenceIdKey, sequenceId);
+ msg->setInt64(kFrameNumberKey, lastFrameNumber);
+
+ // Clear the session sp before we send out the message
+ // This guards against the rare case where the message is processed
+ // before cbh goes out of scope, which would cause us to call the
+ // session destructor while holding the device lock
+ cbh.mSession.clear();
+ postSessionMsgAndCleanup(msg);
+ }
+ }
+
+ if (it->second.isSequenceCompleted && it->second.isInflightCompleted) {
+ if (mSequenceCallbackMap.find(sequenceId) != mSequenceCallbackMap.end()) {
+ mSequenceCallbackMap.erase(sequenceId);
+ }
+ it = mSequenceLastFrameNumberMap.erase(it);
+ ALOGV("%s: Remove holder for sequenceId %d", __FUNCTION__, sequenceId);
+ } else {
+ ++it;
+ }
+ }
+}
+
+void
+CameraDevice::removeCompletedCallbackHolderLocked(int64_t lastCompletedRegularFrameNumber) {
+ auto it = mSequenceLastFrameNumberMap.begin();
+ while (it != mSequenceLastFrameNumberMap.end()) {
+ int sequenceId = it->first;
+ int64_t lastFrameNumber = it->second.lastFrameNumber;
if (mRemote == nullptr) {
ALOGW("Camera %s closed while checking sequence complete", getId());
return;
}
- // Check if there is callback for this sequence
- // This should not happen because we always register callback (with nullptr inside)
- if (mSequenceCallbackMap.count(sequenceId) == 0) {
- ALOGW("No callback found for sequenceId %d", sequenceId);
- hasCallback = false;
- }
+ ALOGV("%s: seq %d's last frame number %" PRId64
+ ", completed inflight frame number %" PRId64,
+ __FUNCTION__, sequenceId, lastFrameNumber,
+ lastCompletedRegularFrameNumber);
+ if (lastFrameNumber <= lastCompletedRegularFrameNumber) {
+ if (it->second.isSequenceCompleted) {
+ // Check if there is a callback registered for this sequence.
+ // This should not happen because we always register a callback (with nullptr inside)
+ if (mSequenceCallbackMap.count(sequenceId) == 0) {
+ ALOGW("No callback found for sequenceId %d", sequenceId);
+ } else {
+ mSequenceCallbackMap.erase(sequenceId);
+ }
- if (lastFrameNumber <= completedFrameNumber) {
- ALOGV("seq %d reached last frame %" PRId64 ", completed %" PRId64,
- sequenceId, lastFrameNumber, completedFrameNumber);
- seqCompleted = true;
- }
-
- if (seqCompleted && hasCallback) {
- // remove callback holder from callback map
- auto cbIt = mSequenceCallbackMap.find(sequenceId);
- CallbackHolder cbh = cbIt->second;
- mSequenceCallbackMap.erase(cbIt);
- // send seq complete callback
- sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
- msg->setPointer(kContextKey, cbh.mContext);
- msg->setObject(kSessionSpKey, cbh.mSession);
- msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
- msg->setInt32(kSequenceIdKey, sequenceId);
- msg->setInt64(kFrameNumberKey, lastFrameNumber);
-
- // Clear the session sp before we send out the message
- // This will guarantee the rare case where the message is processed
- // before cbh goes out of scope and causing we call the session
- // destructor while holding device lock
- cbh.mSession.clear();
- postSessionMsgAndCleanup(msg);
- }
-
- // No need to track sequence complete if there is no callback registered
- if (seqCompleted || !hasCallback) {
- it = mSequenceLastFrameNumberMap.erase(it);
+ it = mSequenceLastFrameNumberMap.erase(it);
+ ALOGV("%s: Remove holder for sequenceId %d", __FUNCTION__, sequenceId);
+ } else {
+ ALOGV("Mark sequenceId %d as inflight completed", sequenceId);
+ it->second.isInflightCompleted = true;
+ ++it;
+ }
} else {
++it;
}
@@ -1480,6 +1527,9 @@
return ret;
}
+ dev->removeCompletedCallbackHolderLocked(
+ std::numeric_limits<int64_t>::max()/*lastCompletedRegularFrameNumber*/);
+
if (dev->mIdle) {
// Already in idle state. Possibly other thread did waitUntilIdle
return ret;
@@ -1522,6 +1572,9 @@
return ret;
}
+ dev->removeCompletedCallbackHolderLocked(
+ resultExtras.lastCompletedRegularFrameNumber);
+
int sequenceId = resultExtras.requestId;
int32_t burstId = resultExtras.burstId;
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 6c2ceb3..125e6e3 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -167,7 +167,7 @@
void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
- const ACaptureRequest* sessionParameters);
+ const ACaptureRequest* sessionParameters, nsecs_t startTimeNs);
// Input message will be posted and cleared after this returns
void postSessionMsgAndCleanup(sp<AMessage>& msg);
@@ -267,8 +267,23 @@
static const int REQUEST_ID_NONE = -1;
int mRepeatingSequenceId = REQUEST_ID_NONE;
- // sequence id -> last frame number map
- std::map<int, int64_t> mSequenceLastFrameNumberMap;
+ // sequence id -> last frame number holder map
+ struct RequestLastFrameNumberHolder {
+ int64_t lastFrameNumber;
+ // Whether the current sequence is completed (capture results are
+ // generated). May be set to true, but
+ // not removed from the map if not all inflight requests in the sequence
+ // have been completed.
+ bool isSequenceCompleted = false;
+ // Whether all inflight requests in the sequence are completed
+ // (capture results and buffers are generated). May be
+ // set to true, but not removed from the map yet if the capture results
+ // haven't been delivered to the app yet.
+ bool isInflightCompleted = false;
+ RequestLastFrameNumberHolder(int64_t lastFN) :
+ lastFrameNumber(lastFN) {}
+ };
+ std::map<int, RequestLastFrameNumberHolder> mSequenceLastFrameNumberMap;
struct CallbackHolder {
CallbackHolder(sp<ACameraCaptureSession> session,
@@ -338,6 +353,7 @@
void checkRepeatingSequenceCompleteLocked(const int sequenceId, const int64_t lastFrameNumber);
void checkAndFireSequenceCompleteLocked();
+ void removeCompletedCallbackHolderLocked(int64_t lastCompletedRegularFrameNumber);
// Misc variables
int32_t mShadingMapSize[2]; // const after constructor
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 419250c..73cabbf 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -24,6 +24,7 @@
#include <utils/Vector.h>
#include <cutils/properties.h>
#include <stdlib.h>
+#include <camera/CameraUtils.h>
#include <camera/VendorTagDescriptor.h>
using namespace android::acam;
@@ -70,12 +71,6 @@
mCameraService.clear();
}
-static bool isCameraServiceDisabled() {
- char value[PROPERTY_VALUE_MAX];
- property_get("config.disable_cameraservice", value, "0");
- return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
-}
-
sp<hardware::ICameraService> CameraManagerGlobal::getCameraService() {
Mutex::Autolock _l(mLock);
return getCameraServiceLocked();
@@ -83,7 +78,7 @@
sp<hardware::ICameraService> CameraManagerGlobal::getCameraServiceLocked() {
if (mCameraService.get() == nullptr) {
- if (isCameraServiceDisabled()) {
+ if (CameraUtils::isCameraServiceDisabled()) {
return mCameraService;
}
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 631f6cd..895514e 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -426,6 +426,7 @@
camera_metadata_ro_entry_t entry;
int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
if (ret != 0) {
+ mData->unlock(rawMetadata);
ALOGE("%s: error reading metadata index %zu", __FUNCTION__, i);
return ACAMERA_ERROR_UNKNOWN;
}
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 072bb02..a840bd1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -36,6 +36,7 @@
#ifndef _NDK_CAMERA_METADATA_H
#define _NDK_CAMERA_METADATA_H
+#include <stdbool.h>
#include <stdint.h>
#include <sys/cdefs.h>
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 9efbd1b..6b912f1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1890,10 +1890,8 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
- * <p>Instead of using ACAMERA_SCALER_CROP_REGION with dual purposes of crop and zoom, the
- * application can now choose to use this tag to specify the desired zoom level. The
- * ACAMERA_SCALER_CROP_REGION can still be used to specify the horizontal or vertical
- * crop to achieve aspect ratios different than the native camera sensor.</p>
+ * <p>Instead of using ACAMERA_SCALER_CROP_REGION for zoom, the application can now choose to
+ * use this tag to specify the desired zoom level.</p>
* <p>By using this control, the application gains a simpler way to control zoom, which can
* be a combination of optical and digital zoom. For example, a multi-camera system may
* contain more than one lens with different focal lengths, and the user can use optical
@@ -1959,7 +1957,10 @@
* explicitly set ACAMERA_CONTROL_ZOOM_RATIO, its value defaults to 1.0.</p>
* <p>One limitation of controlling zoom using zoomRatio is that the ACAMERA_SCALER_CROP_REGION
* must only be used for letterboxing or pillarboxing of the sensor active array, and no
- * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0.</p>
+ * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0. If
+ * ACAMERA_CONTROL_ZOOM_RATIO is not 1.0, and ACAMERA_SCALER_CROP_REGION is set to be
+ * windowboxing, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be
+ * the active array.</p>
*
* @see ACAMERA_CONTROL_AE_REGIONS
* @see ACAMERA_CONTROL_ZOOM_RATIO
@@ -3413,16 +3414,24 @@
* respectively.</p>
* <p>The camera device may adjust the crop region to account for rounding and other hardware
* requirements; the final crop region used will be included in the output capture result.</p>
+ * <p>The camera sensor output aspect ratio depends on factors such as output stream
+ * combination and ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, and shouldn't be adjusted by using
+ * this control. And the camera device will treat different camera sensor output sizes
+ * (potentially with in-sensor crop) as the same crop of
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE. As a result, the application shouldn't assume the
+ * maximum crop region always maps to the same aspect ratio or field of view for the
+ * sensor output.</p>
* <p>Starting from API level 30, it's strongly recommended to use ACAMERA_CONTROL_ZOOM_RATIO
* to take advantage of better support for zoom with logical multi-camera. The benefits
* include better precision with optical-digital zoom combination, and ability to do
* zoom-out from 1.0x. When using ACAMERA_CONTROL_ZOOM_RATIO for zoom, the crop region in
- * the capture request must be either letterboxing or pillarboxing (but not both). The
+ * the capture request should be left as the default activeArray size. The
* coordinate system is post-zoom, meaning that the activeArraySize or
* preCorrectionActiveArraySize covers the camera device's field of view "after" zoom. See
* ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
* <p>The data representation is int[4], which maps to (left, top, width, height).</p>
*
+ * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
* @see ACAMERA_CONTROL_ZOOM_RATIO
* @see ACAMERA_DISTORTION_CORRECTION_MODE
* @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
@@ -3645,7 +3654,9 @@
* </ol>
* </li>
* <li>Setting ACAMERA_CONTROL_ZOOM_RATIO to values different than 1.0 and
- * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time is undefined behavior.</li>
+ * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time are not supported. In this
+ * case, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be the active
+ * array.</li>
* </ul>
* <p>LEGACY capability devices will only support CENTER_ONLY cropping.</p>
*
@@ -4485,8 +4496,8 @@
ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES = // int32[n]
ACAMERA_SENSOR_START + 25,
/**
- * <p>Duration between the start of first row exposure
- * and the start of last row exposure.</p>
+ * <p>Duration between the start of exposure for the first row of the image sensor,
+ * and the start of exposure for one past the last row of the image sensor.</p>
*
* <p>Type: int64</p>
*
@@ -4495,12 +4506,22 @@
* <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
* </ul></p>
*
- * <p>This is the exposure time skew between the first and last
- * row exposure start times. The first row and the last row are
- * the first and last rows inside of the
+ * <p>This is the exposure time skew between the first and <code>(last+1)</code> row exposure start times. The
+ * first row and the last row are the first and last rows inside of the
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
- * <p>For typical camera sensors that use rolling shutters, this is also equivalent
- * to the frame readout time.</p>
+ * <p>For typical camera sensors that use rolling shutters, this is also equivalent to the frame
+ * readout time.</p>
+ * <p>If the image sensor is operating in a binned or cropped mode due to the current output
+ * target resolutions, it's possible this skew is reported to be larger than the exposure
+ * time, for example, since it is based on the full array even if a partial array is read
+ * out. Be sure to scale the number to cover the section of the sensor actually being used
+ * for the outputs you care about. So if your output covers N rows of the active array of
+ * height H, scale this value by N/H to get the total skew for that viewport.</p>
+ * <p><em>Note:</em> Prior to Android 11, this field was described as measuring duration from
+ * first to last row of the image sensor, which is not equal to the frame readout time for a
+ * rolling shutter sensor. Implementations generally reported the latter value, so to resolve
+ * the inconsistency, the description has been updated to range from (first, last+1) row
+ * exposure start, instead.</p>
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
*/
@@ -4865,7 +4886,7 @@
* rectangle, and cropping to the rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
* <p>E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
* dimensions in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE given the position of a pixel,
- * (x', y'), in the raw pixel array with dimensions give in
+ * (x', y'), in the raw pixel array with dimensions given in
* ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE:</p>
* <ol>
* <li>Choose a pixel (x', y') within the active array region of the raw buffer given in
@@ -6146,10 +6167,11 @@
* </ul></p>
*
* <p>The accuracy of the frame timestamp synchronization determines the physical cameras'
- * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED,
- * the physical camera sensors usually run in master-slave mode so that their shutter
- * time is synchronized. For APPROXIMATE sensorSyncType, the camera sensors usually run in
- * master-master mode, and there could be offset between their start of exposure.</p>
+ * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED, the
+ * physical camera sensors usually run in leader/follower mode where one sensor generates a
+ * timing signal for the other, so that their shutter time is synchronized. For APPROXIMATE
+ * sensorSyncType, the camera sensors usually run in leader/leader mode, where both sensors
+ * use their own timing generator, and there could be offset between their start of exposure.</p>
* <p>In both cases, all images generated for a particular capture request still carry the same
* timestamps, so that they can be used to look up the matching frame number and
* onCaptureStarted callback.</p>
@@ -8180,19 +8202,35 @@
* <li>ACAMERA_LENS_POSE_REFERENCE</li>
* <li>ACAMERA_LENS_DISTORTION</li>
* </ul>
- * <p>The field of view of all non-RAW physical streams must be the same or as close as
- * possible to that of non-RAW logical streams. If the requested FOV is outside of the
- * range supported by the physical camera, the physical stream for that physical camera
- * will use either the maximum or minimum scaler crop region, depending on which one is
- * closer to the requested FOV. For example, for a logical camera with wide-tele lens
- * configuration where the wide lens is the default, if the logical camera's crop region
- * is set to maximum, the physical stream for the tele lens will be configured to its
- * maximum crop region. On the other hand, if the logical camera has a normal-wide lens
- * configuration where the normal lens is the default, when the logical camera's crop
- * region is set to maximum, the FOV of the logical streams will be that of the normal
- * lens. The FOV of the physical streams for the wide lens will be the same as the
- * logical stream, by making the crop region smaller than its active array size to
- * compensate for the smaller focal length.</p>
+ * <p>The field of view of non-RAW physical streams must not be smaller than that of the
+ * non-RAW logical streams, or the maximum field-of-view of the physical camera,
+ * whichever is smaller. The application should check the physical capture result
+ * metadata for how the physical streams are cropped or zoomed. More specifically, given
+ * the physical camera result metadata, the effective horizontal field-of-view of the
+ * physical camera is:</p>
+ * <pre><code>fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)
+ * </code></pre>
+ * <p>where the equation parameters are the physical camera's crop region width, physical
+ * sensor width, zoom ratio, active array width, and focal length respectively. Typically
+ * the physical stream of active physical camera has the same field-of-view as the
+ * logical streams. However, the same may not be true for physical streams from
+ * non-active physical cameras. For example, if the logical camera has a wide-ultrawide
+ * configuration where the wide lens is the default, when the crop region is set to the
+ * logical camera's active array size, (and the zoom ratio set to 1.0 starting from
+ * Android 11), a physical stream for the ultrawide camera may prefer outputting images
+ * with larger field-of-view than that of the wide camera for better stereo matching
+ * margin or more robust motion tracking. At the same time, the physical non-RAW streams'
+ * field of view must not be smaller than the requested crop region and zoom ratio, as
+ * long as it's within the physical lens' capability. For example, for a logical camera
+ * with wide-tele lens configuration where the wide lens is the default, if the logical
+ * camera's crop region is set to maximum size, and zoom ratio set to 1.0, the physical
+ * stream for the tele lens will be configured to its maximum size crop region (no zoom).</p>
+ * <p><em>Deprecated:</em> Prior to Android 11, the field of view of all non-RAW physical streams
+ * cannot be larger than that of non-RAW logical streams. If the logical camera has a
+ * wide-ultrawide lens configuration where the wide lens is the default, when the logical
+ * camera's crop region is set to maximum size, the FOV of the physical streams for the
+ * ultrawide lens will be the same as the logical stream, by making the crop region
+ * smaller than its active array size to compensate for the smaller focal length.</p>
* <p>Even if the underlying physical cameras have different RAW characteristics (such as
* size or CFA pattern), a logical camera can still advertise RAW capability. In this
* case, when the application configures a RAW stream, the camera device will make sure
@@ -8484,10 +8522,10 @@
* respective color channel provided in
* ACAMERA_SENSOR_TEST_PATTERN_DATA.</p>
* <p>For example:</p>
- * <pre><code>android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+ * <pre><code>android.control.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
* </code></pre>
* <p>All green pixels are 100% green. All red/blue pixels are black.</p>
- * <pre><code>android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+ * <pre><code>android.control.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
* </code></pre>
* <p>All red pixels are 100% red. Only the odd green pixels
* are 100% green. All blue pixels are 100% black.</p>
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 0fcb700..9f63099 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -180,6 +180,7 @@
const ACaptureRequest* sessionParameters,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session) {
+ nsecs_t startTimeNs = systemTime();
sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
Mutex::Autolock _l(mDeviceLock);
camera_status_t ret = checkCameraClosedOrErrorLocked();
@@ -193,7 +194,7 @@
}
// Create new session
- ret = configureStreamsLocked(outputs, sessionParameters);
+ ret = configureStreamsLocked(outputs, sessionParameters, startTimeNs);
if (ret != ACAMERA_OK) {
ALOGE("Fail to create new session. cannot configure streams");
return ret;
@@ -472,7 +473,11 @@
}
// No new session, unconfigure now
- camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
+ // Note: The unconfiguration of the session won't be counted toward
+ // session latency because a stream configuration with 0 streams won't
+ // ever become active.
+ nsecs_t startTimeNs = systemTime();
+ camera_status_t ret = configureStreamsLocked(nullptr, nullptr, startTimeNs);
if (ret != ACAMERA_OK) {
ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
}
@@ -598,7 +603,7 @@
camera_status_t
CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
- const ACaptureRequest* sessionParameters) {
+ const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
ACaptureSessionOutputContainer emptyOutput;
if (outputs == nullptr) {
outputs = &emptyOutput;
@@ -697,7 +702,8 @@
utils::convertToHidl(params_metadata, &hidlParams);
params.unlock(params_metadata);
}
- remoteRet = mRemote->endConfigure(StreamConfigurationMode::NORMAL_MODE, hidlParams);
+ remoteRet = mRemote->endConfigure_2_1(StreamConfigurationMode::NORMAL_MODE,
+ hidlParams, startTimeNs);
CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "endConfigure()")
return ACAMERA_OK;
}
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 7fc699e..0b6c7c8 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -26,7 +26,7 @@
#include <utils/Mutex.h>
#include <utils/List.h>
#include <utils/Vector.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
+#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceCallback.h>
#include <android/frameworks/cameraservice/device/2.0/types.h>
#include <fmq/MessageQueue.h>
@@ -44,7 +44,8 @@
namespace acam {
using ICameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
-using ICameraDeviceUser = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
+using ICameraDeviceUser_2_0 = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
+using ICameraDeviceUser = frameworks::cameraservice::device::V2_1::ICameraDeviceUser;
using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
using PhysicalCaptureResultInfo = frameworks::cameraservice::device::V2_0::PhysicalCaptureResultInfo;
using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
@@ -201,7 +202,7 @@
void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
- const ACaptureRequest* sessionParameters);
+ const ACaptureRequest* sessionParameters, nsecs_t startTimeNs);
// Input message will be posted and cleared after this returns
void postSessionMsgAndCleanup(sp<AMessage>& msg);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 5aa9c46..77c934a 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -764,15 +764,15 @@
}
sp<ICameraDeviceCallback> callbacks = device->getServiceCallback();
- sp<ICameraDeviceUser> deviceRemote;
+ sp<ICameraDeviceUser_2_0> deviceRemote_2_0;
// No way to get package name from native.
// Send a zero length package name and let camera service figure it out from UID
Status status = Status::NO_ERROR;
auto serviceRet = cs->connectDevice(
- callbacks, cameraId, [&status, &deviceRemote](auto s, auto &device) {
+ callbacks, cameraId, [&status, &deviceRemote_2_0](auto s, auto &device) {
status = s;
- deviceRemote = device;
+ deviceRemote_2_0 = device;
});
if (!serviceRet.isOk() || status != Status::NO_ERROR) {
@@ -780,11 +780,18 @@
delete device;
return utils::convertFromHidl(status);
}
- if (deviceRemote == nullptr) {
+ if (deviceRemote_2_0 == nullptr) {
ALOGE("%s: connect camera device failed! remote device is null", __FUNCTION__);
delete device;
return ACAMERA_ERROR_CAMERA_DISCONNECTED;
}
+ auto castResult = ICameraDeviceUser::castFrom(deviceRemote_2_0);
+ if (!castResult.isOk()) {
+ ALOGE("%s: failed to cast remote device to version 2.1", __FUNCTION__);
+ delete device;
+ return ACAMERA_ERROR_CAMERA_DISCONNECTED;
+ }
+ sp<ICameraDeviceUser> deviceRemote = castResult;
device->setRemoteDevice(deviceRemote);
device->setDeviceMetadataQueues();
*outDevice = device;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85da3e9..8359bb1 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -22,6 +22,7 @@
#include <android-base/parseint.h>
#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
#include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
+#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
#include <android/frameworks/cameraservice/service/2.1/ICameraServiceListener.h>
#include <CameraMetadata.h>
@@ -38,7 +39,7 @@
namespace android {
namespace acam {
-using ICameraService = frameworks::cameraservice::service::V2_1::ICameraService;
+using ICameraService = frameworks::cameraservice::service::V2_2::ICameraService;
using CameraDeviceStatus = frameworks::cameraservice::service::V2_0::CameraDeviceStatus;
using ICameraServiceListener = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
using PhysicalCameraStatusAndId = frameworks::cameraservice::service::V2_1::PhysicalCameraStatusAndId;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index eee05ff..0cf390f 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -517,7 +517,7 @@
CameraMetadata sessionParams;
std::vector<int> offlineStreamIds;
res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
- &offlineStreamIds);
+ ns2ms(systemTime()), &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(callbacks->hadError());
@@ -629,7 +629,7 @@
res = device->deleteStream(streamId);
EXPECT_TRUE(res.isOk()) << res;
res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
- &offlineStreamIds);
+ ns2ms(systemTime()), &offlineStreamIds);
EXPECT_TRUE(res.isOk()) << res;
sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f4fb626..b31a58b 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -273,14 +273,11 @@
SurfaceComposerClient::Transaction& t,
const sp<IBinder>& dpy,
const ui::DisplayState& displayState) {
- const ui::Size& viewport = displayState.viewport;
-
- // Set the region of the layer stack we're interested in, which in our
- // case is "all of it".
- Rect layerStackRect(viewport);
+ // Set the region of the layer stack we're interested in, which in our case is "all of it".
+ Rect layerStackRect(displayState.layerStackSpaceRect);
// We need to preserve the aspect ratio of the display.
- float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());
+ float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());
// Set the way we map the output onto the display surface (which will
@@ -699,20 +696,21 @@
return err;
}
- const ui::Size& viewport = displayState.viewport;
+ const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
if (gVerbose) {
printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
- viewport.getWidth(), viewport.getHeight(), displayConfig.refreshRate,
- toCString(displayState.orientation), displayState.layerStack);
+ layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
+ displayConfig.refreshRate, toCString(displayState.orientation),
+ displayState.layerStack);
fflush(stdout);
}
// Encoder can't take odd number as config
if (gVideoWidth == 0) {
- gVideoWidth = floorToEven(viewport.getWidth());
+ gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
}
if (gVideoHeight == 0) {
- gVideoHeight = floorToEven(viewport.getHeight());
+ gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
}
// Configure and start the encoder.
@@ -1170,14 +1168,14 @@
}
break;
case 'd':
- gPhysicalDisplayId = atoll(optarg);
- if (gPhysicalDisplayId == 0) {
+ gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
+ if (gPhysicalDisplayId.value == 0) {
fprintf(stderr, "Please specify a valid physical display id\n");
return 2;
} else if (SurfaceComposerClient::
getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
- fprintf(stderr, "Invalid physical display id: %"
- ANDROID_PHYSICAL_DISPLAY_ID_FORMAT "\n", gPhysicalDisplayId);
+ fprintf(stderr, "Invalid physical display id: %s\n",
+ to_string(gPhysicalDisplayId).c_str());
return 2;
}
break;
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index eb76953..55427ca 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -134,15 +134,18 @@
success = format->findInt32(kKeySampleRate, &mSampleRate);
CHECK(success);
- int32_t numChannels, channelMask;
+ int32_t numChannels;
success = format->findInt32(kKeyChannelCount, &numChannels);
CHECK(success);
- if(!format->findInt32(kKeyChannelMask, &channelMask)) {
+ audio_channel_mask_t channelMask;
+ if (int32_t rawChannelMask; !format->findInt32(kKeyChannelMask, &rawChannelMask)) {
// log only when there's a risk of ambiguity of channel mask selection
ALOGI_IF(numChannels > 2,
"source format didn't specify channel mask, using (%d) channel order", numChannels);
channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+ } else {
+ channelMask = static_cast<audio_channel_mask_t>(rawChannelMask);
}
audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index f4b8164..e000633 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -272,7 +272,7 @@
status_t SimplePlayer::onPrepare() {
CHECK_EQ(mState, UNPREPARED);
- mExtractor = new NuMediaExtractor;
+ mExtractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
status_t err = mExtractor->setDataSource(
NULL /* httpService */, mPath.c_str());
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index c26e0b9..33c4663 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -79,7 +79,7 @@
static int64_t kTimeout = 500ll;
- sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
fprintf(stderr, "unable to instantiate extractor.\n");
return 1;
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index b894545..ca058ab 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -319,7 +319,8 @@
static int64_t kTimeout = 500ll;
- sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
+
if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
fprintf(stderr, "unable to instantiate extractor.\n");
return 1;
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 4a83a4a..bc7e41e 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -62,7 +62,7 @@
int trimEndTimeMs,
int rotationDegrees,
MediaMuxer::OutputFormat container = MediaMuxer::OUTPUT_FORMAT_MPEG_4) {
- sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
fprintf(stderr, "unable to instantiate extractor. %s\n", path);
return 1;
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 37091c4..098c278 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -259,31 +259,6 @@
printf("$\n");
#endif
-#if 0
- CameraSource *source = CameraSource::Create(
- String16(argv[0], strlen(argv[0])));
- source->start();
-
- printf("source = %p\n", source);
-
- for (int i = 0; i < 100; ++i) {
- MediaBuffer *buffer;
- status_t err = source->read(&buffer);
- CHECK_EQ(err, (status_t)OK);
-
- printf("got a frame, data=%p, size=%d\n",
- buffer->data(), buffer->range_length());
-
- buffer->release();
- buffer = NULL;
- }
-
- err = source->stop();
-
- delete source;
- source = NULL;
-#endif
-
if (err != OK && err != ERROR_END_OF_STREAM) {
fprintf(stderr, "record failed: %d\n", err);
return 1;
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 2595e3e..aa8a7d8 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -1,5 +1,5 @@
{
- "presubmit": [
+ "presubmit-large": [
// The following tests validate codec and drm path.
{
"name": "GtsMediaTestCases",
@@ -9,17 +9,9 @@
},
{
"include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
- }
- ]
- },
- {
- "name": "GtsExoPlayerTestCases",
- "options" : [
- {
- "include-annotation": "android.platform.test.annotations.SocPresubmit"
},
{
- "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+ "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
}
]
}
diff --git a/drm/common/Android.bp b/drm/common/Android.bp
index 272684c..248570e 100644
--- a/drm/common/Android.bp
+++ b/drm/common/Android.bp
@@ -14,7 +14,7 @@
// limitations under the License.
//
-cc_library_static {
+cc_library {
name: "libdrmframeworkcommon",
srcs: [
@@ -35,7 +35,11 @@
cflags: ["-Wall", "-Werror"],
- shared_libs: ["libbinder"],
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libutils"
+ ],
export_include_dirs: ["include"],
}
diff --git a/drm/common/include/DrmEngineBase.h b/drm/common/include/DrmEngineBase.h
index 73f11a4..c0a5e3b 100644
--- a/drm/common/include/DrmEngineBase.h
+++ b/drm/common/include/DrmEngineBase.h
@@ -309,7 +309,7 @@
/**
* Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset
+ * DRM framework.
*
* @param[in] uniqueId Unique identifier for a session
* @return status_t
diff --git a/drm/common/include/IDrmEngine.h b/drm/common/include/IDrmEngine.h
index 1837a11..a545941 100644
--- a/drm/common/include/IDrmEngine.h
+++ b/drm/common/include/IDrmEngine.h
@@ -250,7 +250,7 @@
/**
* Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset
+ * DRM framework.
*
* @param[in] uniqueId Unique identifier for a session
* @return status_t
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index b68e6c2..8b7c551 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -31,19 +31,18 @@
"liblog",
"libbinder",
"libdl",
+ "libdrmframeworkcommon",
"libselinux",
"libstagefright_foundation",
],
- static_libs: ["libdrmframeworkcommon"],
-
cflags: [
"-Wall",
"-Wextra",
"-Werror",
],
- compile_multilib: "32",
+ compile_multilib: "prefer32",
init_rc: ["drmserver.rc"],
}
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 9a32cc5..74e3223 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -99,13 +99,13 @@
}
default:
{
- ALOGW("Unrecognized message type: %zd", msg->what());
+ ALOGW("Unrecognized message type: %u", msg->what());
}
}
}
int64_t DrmManager::getMetricsFlushPeriodUs() {
- return 1000 * 1000 * std::max(1ll, property_get_int64("drmmanager.metrics.period", 86400));
+ return 1000 * 1000 * std::max(1ll, (long long)property_get_int64("drmmanager.metrics.period", 86400));
}
void DrmManager::recordEngineMetrics(
diff --git a/drm/libdrmframework/Android.bp b/drm/libdrmframework/Android.bp
index 940c17d..b4a7b25 100644
--- a/drm/libdrmframework/Android.bp
+++ b/drm/libdrmframework/Android.bp
@@ -29,12 +29,11 @@
"liblog",
"libbinder",
"libdl",
+ "libdrmframeworkcommon",
],
- static_libs: ["libdrmframeworkcommon"],
-
export_include_dirs: ["include"],
- export_static_lib_headers: ["libdrmframeworkcommon"],
+ export_shared_lib_headers: ["libdrmframeworkcommon"],
cflags: ["-Werror"],
}
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index 3858675..8c8783b 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -230,7 +230,7 @@
/**
* Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset
+ * DRM framework.
*
* @param[in] uniqueId Unique identifier for a session
* @return status_t
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index bb9d7ec..9f52f7a 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -36,11 +36,11 @@
"libcrypto",
"libssl",
"libdrmframework",
+ "libdrmframeworkcommon",
],
static_libs: [
"libdrmutility",
- "libdrmframeworkcommon",
"libfwdlock-common",
"libfwdlock-converter",
"libfwdlock-decoder",
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
index b62ddb9..eb5b0f6 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
@@ -252,8 +252,7 @@
/**
* Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset but does nothing for
- * Forward Lock Engine.
+ * DRM framework. Does nothing for Forward Lock Engine.
*
* @param uniqueId Unique identifier for a session
* @return status_t
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
index 8f95cd2..c1d5b3d 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
@@ -488,7 +488,7 @@
<p class=MsoBodyText><b>Note:</b> The key-encryption key must be unique to each
device; this is what makes the files forward lock–protected. Ideally, it should
be derived from secret hardware parameters, but at the very least it should be
-persistent from one master reset to the next.</p>
+persistent from one factory reset to the next.</p>
<div style='margin-bottom:24.0pt;border:solid windowtext 1.0pt;padding:1.0pt 4.0pt 1.0pt 4.0pt;
background:#F2F2F2'>
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 05b6440..8045586 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -19,12 +19,11 @@
srcs: ["src/DrmPassthruPlugIn.cpp"],
- static_libs: ["libdrmframeworkcommon"],
-
shared_libs: [
"libutils",
"liblog",
"libdl",
+ "libdrmframeworkcommon",
],
local_include_dirs: ["include"],
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index b47b4ff..5f0b26e 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -37,8 +37,8 @@
template <> std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
static const char *type_names[] = {"USABLE", "EXPIRED",
"OUTPUT_NOT_ALLOWED", "STATUS_PENDING",
- "INTERNAL_ERROR"};
- if (((size_t)type) > arraysize(type_names)) {
+ "INTERNAL_ERROR", "USABLE_IN_FUTURE"};
+ if (((size_t)type) >= arraysize(type_names)) {
return "UNKNOWN_TYPE";
}
return type_names[(size_t)type];
@@ -48,7 +48,7 @@
static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
"KEY_EXPIRED", "VENDOR_DEFINED",
"SESSION_RECLAIMED"};
- if (((size_t)type) > arraysize(type_names)) {
+ if (((size_t)type) >= arraysize(type_names)) {
return "UNKNOWN_TYPE";
}
return type_names[(size_t)type];
diff --git a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
index 9e43504..c56bf01 100644
--- a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
+++ b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
@@ -62,7 +62,7 @@
void removeSession(const Vector<uint8_t>& sessionId);
bool reclaimSession(int callingPid);
- // sanity check APIs
+ // inspection APIs
size_t getSessionCount() const;
bool containsSession(const Vector<uint8_t>& sessionId) const;
diff --git a/drm/libmediadrm/protos/Android.bp b/drm/libmediadrm/protos/Android.bp
new file mode 100644
index 0000000..b26cda4
--- /dev/null
+++ b/drm/libmediadrm/protos/Android.bp
@@ -0,0 +1,38 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This is the version of the drm metrics configured for protobuf full on host.
+// It is used by the metrics_dump tool.
+
+cc_library_host_shared {
+ name: "libdrm_metrics_protos_full_host",
+ vendor_available: true,
+
+ include_dirs: ["external/protobuf/src"],
+
+ srcs: [
+ "metrics.proto",
+ ],
+
+ proto: {
+ export_proto_headers: true,
+ type: "full",
+ },
+
+ cflags: [
+ // Suppress unused parameter error. This error occurs
+ // when using the map type in a proto definition.
+ "-Wno-unused-parameter",
+ ],
+}
diff --git a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
index cb69f91..466e571 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
+++ b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
@@ -62,8 +62,8 @@
}
ALOGV("descriptor_size=%zu", container.descriptor_size());
- // Sanity check to verify that the BroadcastEncryptor is sending a properly
- // formed EcmContainer. If it contains two Ecms, the ids should have different
+ // Validate that the BroadcastEncryptor is sending a properly formed
+ // EcmContainer. If it contains two Ecms, the ids should have different
// parity (one odd, one even). This does not necessarily affect decryption
// but indicates a problem with Ecm generation.
if (container.descriptor_size() == 2) {
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 3ecf6d5..1495703 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -148,14 +148,17 @@
// Calculate the output buffer size and determine if any subsamples are
// encrypted.
size_t destSize = 0;
+ size_t srcSize = 0;
bool haveEncryptedSubsamples = false;
for (size_t i = 0; i < subSamples.size(); i++) {
const SubSample &subSample = subSamples[i];
- if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize)) {
+ if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize) ||
+ __builtin_add_overflow(srcSize, subSample.numBytesOfClearData, &srcSize)) {
_hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample clear size overflow");
return Void();
}
- if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize)) {
+ if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize) ||
+ __builtin_add_overflow(srcSize, subSample.numBytesOfEncryptedData, &srcSize)) {
_hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample encrypted size overflow");
return Void();
}
@@ -164,7 +167,7 @@
}
}
- if (destSize > destBuffer.size) {
+ if (destSize > destBuffer.size || srcSize > source.size) {
_hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample sum too large");
return Void();
}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 2dcd00f..051a968 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -15,7 +15,7 @@
namespace clearkey {
std::string MemoryFileSystem::GetFileName(const std::string& path) {
- size_t index = path.find_last_of("/");
+ size_t index = path.find_last_of('/');
if (index != std::string::npos) {
return path.substr(index+1);
} else {
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 866edac..a38aa9b 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -318,7 +318,7 @@
/**
* Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset
+ * DRM framework.
*
* @return status_t
* Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 28e432e..512e844 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -8,17 +8,9 @@
},
{
"include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
- }
- ]
- },
- {
- "name": "GtsExoPlayerTestCases",
- "options" : [
- {
- "include-annotation": "android.platform.test.annotations.SocPresubmit"
},
{
- "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+ "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
}
]
}
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 703cf77..2004acb 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -21,6 +21,7 @@
#include <sstream>
#include <unordered_map>
+#include <android/media/InterpolatorConfig.h>
#include <binder/Parcel.h>
#include <utils/RefBase.h>
@@ -39,17 +40,10 @@
class Interpolator : public std::map<S, T> {
public:
// Polynomial spline interpolators
- // Extend only at the end of enum, as this must match order in VolumeShapers.java.
- enum InterpolatorType : int32_t {
- INTERPOLATOR_TYPE_STEP, // Not continuous
- INTERPOLATOR_TYPE_LINEAR, // C0
- INTERPOLATOR_TYPE_CUBIC, // C1
- INTERPOLATOR_TYPE_CUBIC_MONOTONIC, // C1 (to provide locally monotonic curves)
- // INTERPOLATOR_TYPE_CUBIC_C2, // TODO - requires global computation / cache
- };
+ using InterpolatorType = media::InterpolatorType;
explicit Interpolator(
- InterpolatorType interpolatorType = INTERPOLATOR_TYPE_LINEAR,
+ InterpolatorType interpolatorType = InterpolatorType::LINEAR,
bool cache = true)
: mCache(cache)
, mFirstSlope(0)
@@ -82,13 +76,13 @@
// now that we have two adjacent points:
switch (mInterpolatorType) {
- case INTERPOLATOR_TYPE_STEP:
+ case InterpolatorType::STEP:
return high->first == x ? high->second : low->second;
- case INTERPOLATOR_TYPE_LINEAR:
+ case InterpolatorType::LINEAR:
return ((high->first - x) * low->second + (x - low->first) * high->second)
/ (high->first - low->first);
- case INTERPOLATOR_TYPE_CUBIC:
- case INTERPOLATOR_TYPE_CUBIC_MONOTONIC:
+ case InterpolatorType::CUBIC:
+ case InterpolatorType::CUBIC_MONOTONIC:
default: {
// See https://en.wikipedia.org/wiki/Cubic_Hermite_spline
@@ -116,7 +110,7 @@
// non catmullRom (finite difference) with regular cubic;
// the choices here minimize computation.
bool monotonic, catmullRom;
- if (mInterpolatorType == INTERPOLATOR_TYPE_CUBIC_MONOTONIC) {
+ if (mInterpolatorType == InterpolatorType::CUBIC_MONOTONIC) {
monotonic = true;
catmullRom = false;
} else {
@@ -202,11 +196,11 @@
status_t setInterpolatorType(InterpolatorType interpolatorType) {
switch (interpolatorType) {
- case INTERPOLATOR_TYPE_STEP: // Not continuous
- case INTERPOLATOR_TYPE_LINEAR: // C0
- case INTERPOLATOR_TYPE_CUBIC: // C1
- case INTERPOLATOR_TYPE_CUBIC_MONOTONIC: // C1 + other constraints
- // case INTERPOLATOR_TYPE_CUBIC_C2:
+ case InterpolatorType::STEP: // Not continuous
+ case InterpolatorType::LINEAR: // C0
+ case InterpolatorType::CUBIC: // C1
+ case InterpolatorType::CUBIC_MONOTONIC: // C1 + other constraints
+ // case InterpolatorType::CUBIC_C2:
mInterpolatorType = interpolatorType;
return NO_ERROR;
default:
@@ -235,49 +229,50 @@
mMemo.clear();
}
+ // TODO(ytai): remove this method once it is not used.
status_t writeToParcel(Parcel *parcel) const {
- if (parcel == nullptr) {
- return BAD_VALUE;
- }
- status_t res = parcel->writeInt32(mInterpolatorType)
- ?: parcel->writeFloat(mFirstSlope)
- ?: parcel->writeFloat(mLastSlope)
- ?: parcel->writeUint32((uint32_t)this->size()); // silent truncation
- if (res != NO_ERROR) {
- return res;
- }
- for (const auto &pt : *this) {
- res = parcel->writeFloat(pt.first)
- ?: parcel->writeFloat(pt.second);
- if (res != NO_ERROR) {
- return res;
- }
- }
- return NO_ERROR;
+ media::InterpolatorConfig config;
+ writeToConfig(&config);
+ return config.writeToParcel(parcel);
}
+ void writeToConfig(media::InterpolatorConfig *config) const {
+ config->type = mInterpolatorType;
+ config->firstSlope = mFirstSlope;
+ config->lastSlope = mLastSlope;
+ for (const auto &pt : *this) {
+ config->xy.push_back(pt.first);
+ config->xy.push_back(pt.second);
+ }
+ }
+
+ // TODO(ytai): remove this method once it is not used.
status_t readFromParcel(const Parcel &parcel) {
- this->clear();
- int32_t type;
- uint32_t size;
- status_t res = parcel.readInt32(&type)
- ?: parcel.readFloat(&mFirstSlope)
- ?: parcel.readFloat(&mLastSlope)
- ?: parcel.readUint32(&size)
- ?: setInterpolatorType((InterpolatorType)type);
+ media::InterpolatorConfig config;
+ status_t res = config.readFromParcel(&parcel);
if (res != NO_ERROR) {
return res;
}
+ return readFromConfig(config);
+ }
+
+ status_t readFromConfig(const media::InterpolatorConfig &config) {
+ this->clear();
+ setInterpolatorType(config.type);
+ if ((config.xy.size() & 1) != 0) {
+ // xy size must be even.
+ return BAD_VALUE;
+ }
+ uint32_t size = config.xy.size() / 2;
+ mFirstSlope = config.firstSlope;
+ mLastSlope = config.lastSlope;
+
// Note: We don't need to check size is within some bounds as
// the Parcel read will fail if size is incorrectly specified too large.
float lastx;
for (uint32_t i = 0; i < size; ++i) {
- float x, y;
- res = parcel.readFloat(&x)
- ?: parcel.readFloat(&y);
- if (res != NO_ERROR) {
- return res;
- }
+ float x = config.xy[i * 2];
+ float y = config.xy[i * 2 + 1];
if ((i > 0 && !(x > lastx)) /* handle nan */
|| y != y /* handle nan */) {
// This is a std::map object which imposes sorted order
diff --git a/include/media/MicrophoneInfo.h b/include/media/MicrophoneInfo.h
index 2287aca..a5045b9 100644
--- a/include/media/MicrophoneInfo.h
+++ b/include/media/MicrophoneInfo.h
@@ -17,33 +17,24 @@
#ifndef ANDROID_MICROPHONE_INFO_H
#define ANDROID_MICROPHONE_INFO_H
+#include <android/media/MicrophoneInfoData.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
+#include <media/AidlConversionUtil.h>
#include <system/audio.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
namespace android {
namespace media {
-#define RETURN_IF_FAILED(calledOnce) \
- { \
- status_t returnStatus = calledOnce; \
- if (returnStatus) { \
- ALOGE("Failed at %s:%d (%s)", __FILE__, __LINE__, __func__); \
- return returnStatus; \
- } \
- }
-
class MicrophoneInfo : public Parcelable {
public:
MicrophoneInfo() = default;
MicrophoneInfo(const MicrophoneInfo& microphoneInfo) = default;
MicrophoneInfo(audio_microphone_characteristic_t& characteristic) {
- mDeviceId = String16(&characteristic.device_id[0]);
+ mDeviceId = std::string(&characteristic.device_id[0]);
mPortId = characteristic.id;
mType = characteristic.device;
- mAddress = String16(&characteristic.address[0]);
+ mAddress = std::string(&characteristic.address[0]);
mDeviceLocation = characteristic.location;
mDeviceGroup = characteristic.group;
mIndexInTheGroup = characteristic.index_in_the_group;
@@ -53,8 +44,8 @@
mOrientation.push_back(characteristic.orientation.x);
mOrientation.push_back(characteristic.orientation.y);
mOrientation.push_back(characteristic.orientation.z);
- Vector<float> frequencies;
- Vector<float> responses;
+ std::vector<float> frequencies;
+ std::vector<float> responses;
for (size_t i = 0; i < characteristic.num_frequency_responses; i++) {
frequencies.push_back(characteristic.frequency_responses[0][i]);
responses.push_back(characteristic.frequency_responses[1][i]);
@@ -73,76 +64,73 @@
virtual ~MicrophoneInfo() = default;
virtual status_t writeToParcel(Parcel* parcel) const {
- RETURN_IF_FAILED(parcel->writeString16(mDeviceId));
- RETURN_IF_FAILED(parcel->writeInt32(mPortId));
- RETURN_IF_FAILED(parcel->writeUint32(mType));
- RETURN_IF_FAILED(parcel->writeString16(mAddress));
- RETURN_IF_FAILED(parcel->writeInt32(mDeviceLocation));
- RETURN_IF_FAILED(parcel->writeInt32(mDeviceGroup));
- RETURN_IF_FAILED(parcel->writeInt32(mIndexInTheGroup));
- RETURN_IF_FAILED(writeFloatVector(parcel, mGeometricLocation));
- RETURN_IF_FAILED(writeFloatVector(parcel, mOrientation));
+ MicrophoneInfoData parcelable;
+ return writeToParcelable(&parcelable)
+ ?: parcelable.writeToParcel(parcel);
+ }
+
+ virtual status_t writeToParcelable(MicrophoneInfoData* parcelable) const {
+ parcelable->deviceId = mDeviceId;
+ parcelable->portId = mPortId;
+ parcelable->type = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(mType));
+ parcelable->address = mAddress;
+ parcelable->deviceGroup = mDeviceGroup;
+ parcelable->indexInTheGroup = mIndexInTheGroup;
+ parcelable->geometricLocation = mGeometricLocation;
+ parcelable->orientation = mOrientation;
if (mFrequencyResponses.size() != 2) {
return BAD_VALUE;
}
- for (size_t i = 0; i < mFrequencyResponses.size(); i++) {
- RETURN_IF_FAILED(parcel->writeInt32(mFrequencyResponses[i].size()));
- RETURN_IF_FAILED(writeFloatVector(parcel, mFrequencyResponses[i]));
- }
- std::vector<int> channelMapping;
- for (size_t i = 0; i < mChannelMapping.size(); ++i) {
- channelMapping.push_back(mChannelMapping[i]);
- }
- RETURN_IF_FAILED(parcel->writeInt32Vector(channelMapping));
- RETURN_IF_FAILED(parcel->writeFloat(mSensitivity));
- RETURN_IF_FAILED(parcel->writeFloat(mMaxSpl));
- RETURN_IF_FAILED(parcel->writeFloat(mMinSpl));
- RETURN_IF_FAILED(parcel->writeInt32(mDirectionality));
+ parcelable->frequencies = mFrequencyResponses[0];
+ parcelable->frequencyResponses = mFrequencyResponses[1];
+ parcelable->channelMapping = mChannelMapping;
+ parcelable->sensitivity = mSensitivity;
+ parcelable->maxSpl = mMaxSpl;
+ parcelable->minSpl = mMinSpl;
+ parcelable->directionality = mDirectionality;
return OK;
}
virtual status_t readFromParcel(const Parcel* parcel) {
- RETURN_IF_FAILED(parcel->readString16(&mDeviceId));
- RETURN_IF_FAILED(parcel->readInt32(&mPortId));
- RETURN_IF_FAILED(parcel->readUint32(&mType));
- RETURN_IF_FAILED(parcel->readString16(&mAddress));
- RETURN_IF_FAILED(parcel->readInt32(&mDeviceLocation));
- RETURN_IF_FAILED(parcel->readInt32(&mDeviceGroup));
- RETURN_IF_FAILED(parcel->readInt32(&mIndexInTheGroup));
- RETURN_IF_FAILED(readFloatVector(parcel, &mGeometricLocation, 3));
- RETURN_IF_FAILED(readFloatVector(parcel, &mOrientation, 3));
- int32_t frequenciesNum;
- RETURN_IF_FAILED(parcel->readInt32(&frequenciesNum));
- Vector<float> frequencies;
- RETURN_IF_FAILED(readFloatVector(parcel, &frequencies, frequenciesNum));
- int32_t responsesNum;
- RETURN_IF_FAILED(parcel->readInt32(&responsesNum));
- Vector<float> responses;
- RETURN_IF_FAILED(readFloatVector(parcel, &responses, responsesNum));
- if (frequencies.size() != responses.size()) {
+ MicrophoneInfoData data;
+ return data.readFromParcel(parcel)
+ ?: readFromParcelable(data);
+ }
+
+ virtual status_t readFromParcelable(const MicrophoneInfoData& parcelable) {
+ mDeviceId = parcelable.deviceId;
+ mPortId = parcelable.portId;
+ mType = VALUE_OR_RETURN_STATUS(convertReinterpret<uint32_t>(parcelable.type));
+ mAddress = parcelable.address;
+ mDeviceLocation = parcelable.deviceLocation;
+ mDeviceGroup = parcelable.deviceGroup;
+ mIndexInTheGroup = parcelable.indexInTheGroup;
+ if (parcelable.geometricLocation.size() != 3) {
return BAD_VALUE;
}
- mFrequencyResponses.push_back(frequencies);
- mFrequencyResponses.push_back(responses);
- std::vector<int> channelMapping;
- status_t result = parcel->readInt32Vector(&channelMapping);
- if (result != OK) {
- return result;
- }
- if (channelMapping.size() != AUDIO_CHANNEL_COUNT_MAX) {
+ mGeometricLocation = parcelable.geometricLocation;
+ if (parcelable.orientation.size() != 3) {
return BAD_VALUE;
}
- for (size_t i = 0; i < channelMapping.size(); i++) {
- mChannelMapping.push_back(channelMapping[i]);
+ mOrientation = parcelable.orientation;
+ if (parcelable.frequencies.size() != parcelable.frequencyResponses.size()) {
+ return BAD_VALUE;
}
- RETURN_IF_FAILED(parcel->readFloat(&mSensitivity));
- RETURN_IF_FAILED(parcel->readFloat(&mMaxSpl));
- RETURN_IF_FAILED(parcel->readFloat(&mMinSpl));
- RETURN_IF_FAILED(parcel->readInt32(&mDirectionality));
+
+ mFrequencyResponses.push_back(parcelable.frequencies);
+ mFrequencyResponses.push_back(parcelable.frequencyResponses);
+ if (parcelable.channelMapping.size() != AUDIO_CHANNEL_COUNT_MAX) {
+ return BAD_VALUE;
+ }
+ mChannelMapping = parcelable.channelMapping;
+ mSensitivity = parcelable.sensitivity;
+ mMaxSpl = parcelable.maxSpl;
+ mMinSpl = parcelable.minSpl;
+ mDirectionality = parcelable.directionality;
return OK;
}
- String16 getDeviceId() const {
+ std::string getDeviceId() const {
return mDeviceId;
}
@@ -154,7 +142,7 @@
return mType;
}
- String16 getAddress() const {
+ std::string getAddress() const {
return mAddress;
}
@@ -170,19 +158,19 @@
return mIndexInTheGroup;
}
- const Vector<float>& getGeometricLocation() const {
+ const std::vector<float>& getGeometricLocation() const {
return mGeometricLocation;
}
- const Vector<float>& getOrientation() const {
+ const std::vector<float>& getOrientation() const {
return mOrientation;
}
- const Vector<Vector<float>>& getFrequencyResponses() const {
+ const std::vector<std::vector<float>>& getFrequencyResponses() const {
return mFrequencyResponses;
}
- const Vector<int>& getChannelMapping() const {
+ const std::vector<int>& getChannelMapping() const {
return mChannelMapping;
}
@@ -203,46 +191,38 @@
}
private:
- status_t readFloatVector(
- const Parcel* parcel, Vector<float> *vectorPtr, size_t defaultLength) {
- std::unique_ptr<std::vector<float>> v;
- status_t result = parcel->readFloatVector(&v);
- if (result != OK) return result;
- vectorPtr->clear();
- if (v.get() != nullptr) {
- for (const auto& iter : *v) {
- vectorPtr->push_back(iter);
- }
- } else {
- vectorPtr->resize(defaultLength);
- }
- return OK;
- }
- status_t writeFloatVector(Parcel* parcel, const Vector<float>& vector) const {
- std::vector<float> v;
- for (size_t i = 0; i < vector.size(); i++) {
- v.push_back(vector[i]);
- }
- return parcel->writeFloatVector(v);
- }
-
- String16 mDeviceId;
+ std::string mDeviceId;
int32_t mPortId;
uint32_t mType;
- String16 mAddress;
+ std::string mAddress;
int32_t mDeviceLocation;
int32_t mDeviceGroup;
int32_t mIndexInTheGroup;
- Vector<float> mGeometricLocation;
- Vector<float> mOrientation;
- Vector<Vector<float>> mFrequencyResponses;
- Vector<int> mChannelMapping;
+ std::vector<float> mGeometricLocation;
+ std::vector<float> mOrientation;
+ std::vector<std::vector<float>> mFrequencyResponses;
+ std::vector<int> mChannelMapping;
float mSensitivity;
float mMaxSpl;
float mMinSpl;
int32_t mDirectionality;
};
+// Conversion routines, according to AidlConversion.h conventions.
+inline ConversionResult<MicrophoneInfo>
+aidl2legacy_MicrophoneInfo(const media::MicrophoneInfoData& aidl) {
+ MicrophoneInfo legacy;
+ RETURN_IF_ERROR(legacy.readFromParcelable(aidl));
+ return legacy;
+}
+
+inline ConversionResult<media::MicrophoneInfoData>
+legacy2aidl_MicrophoneInfo(const MicrophoneInfo& legacy) {
+ media::MicrophoneInfoData aidl;
+ RETURN_IF_ERROR(legacy.writeToParcelable(&aidl));
+ return aidl;
+}
+
} // namespace media
} // namespace android
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index b3bf16d..61de987 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -22,6 +22,8 @@
#include <utils/Errors.h>
#include <utils/RefBase.h>
+#include <time.h>
+
namespace android {
class MmapStreamCallback;
@@ -103,6 +105,19 @@
virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
/**
+ * Get a recent count of the number of audio frames presented/received to/from an
+ * external observer.
+ *
+ * \param[out] position count of presented audio frames
+ * \param[out] timeNanos associated clock time
+ *
+ * \return OK if the external position is set correctly.
+ * NO_INIT in case of initialization error
+ * INVALID_OPERATION if the interface is not implemented
+ */
+ virtual status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) = 0;
+
+ /**
* Start a stream operating in mmap mode.
* createMmapBuffer() must be called before calling start()
*
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index fe519bb..f8ead2f 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -22,6 +22,11 @@
#include <math.h>
#include <sstream>
+#include <android/media/VolumeShaperConfiguration.h>
+#include <android/media/VolumeShaperConfigurationOptionFlag.h>
+#include <android/media/VolumeShaperOperation.h>
+#include <android/media/VolumeShaperOperationFlag.h>
+#include <android/media/VolumeShaperState.h>
#include <binder/Parcel.h>
#include <media/Interpolator.h>
#include <utils/Mutex.h>
@@ -284,30 +289,38 @@
clampVolume();
}
- // The parcel layout must match VolumeShaper.java
status_t writeToParcel(Parcel *parcel) const override {
- if (parcel == nullptr) return BAD_VALUE;
- return parcel->writeInt32((int32_t)mType)
- ?: parcel->writeInt32(mId)
- ?: mType == TYPE_ID
- ? NO_ERROR
- : parcel->writeInt32((int32_t)mOptionFlags)
- ?: parcel->writeDouble(mDurationMs)
- ?: Interpolator<S, T>::writeToParcel(parcel);
+ VolumeShaperConfiguration parcelable;
+ writeToParcelable(&parcelable);
+ return parcelable.writeToParcel(parcel);
}
- status_t readFromParcel(const Parcel *parcel) override {
- int32_t type, optionFlags;
- return parcel->readInt32(&type)
- ?: setType((Type)type)
- ?: parcel->readInt32(&mId)
- ?: mType == TYPE_ID
- ? NO_ERROR
- : parcel->readInt32(&optionFlags)
- ?: setOptionFlags((OptionFlag)optionFlags)
- ?: parcel->readDouble(&mDurationMs)
- ?: Interpolator<S, T>::readFromParcel(*parcel)
- ?: checkCurve();
+ void writeToParcelable(VolumeShaperConfiguration *parcelable) const {
+ parcelable->id = getId();
+ parcelable->type = getTypeAsAidl();
+ parcelable->optionFlags = 0;
+ if (mType != TYPE_ID) {
+ parcelable->optionFlags = getOptionFlagsAsAidl();
+ parcelable->durationMs = getDurationMs();
+ Interpolator<S, T>::writeToConfig(&parcelable->interpolatorConfig);
+ }
+ }
+
+ status_t readFromParcel(const Parcel* parcel) override {
+ VolumeShaperConfiguration data;
+ return data.readFromParcel(parcel)
+ ?: readFromParcelable(data);
+ }
+
+ status_t readFromParcelable(const VolumeShaperConfiguration& parcelable) {
+ setId(parcelable.id);
+ return setTypeFromAidl(parcelable.type)
+ ?: mType == TYPE_ID
+ ? NO_ERROR
+ : setOptionFlagsFromAidl(parcelable.optionFlags)
+ ?: setDurationMs(parcelable.durationMs)
+ ?: Interpolator<S, T>::readFromConfig(parcelable.interpolatorConfig)
+ ?: checkCurve();
}
// Returns a string for debug printing.
@@ -329,6 +342,51 @@
int32_t mId; // A valid id is >= 0.
OptionFlag mOptionFlags; // option flags for the configuration.
double mDurationMs; // duration, must be > 0; default is 1000 ms.
+
+ int32_t getOptionFlagsAsAidl() const {
+ int32_t result = 0;
+ if (getOptionFlags() & OPTION_FLAG_VOLUME_IN_DBFS) {
+ result |=
+ 1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS);
+ }
+ if (getOptionFlags() & OPTION_FLAG_CLOCK_TIME) {
+ result |= 1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME);
+ }
+ return result;
+ }
+
+ status_t setOptionFlagsFromAidl(int32_t aidl) {
+ std::underlying_type_t<OptionFlag> options = 0;
+ if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS))) {
+ options |= OPTION_FLAG_VOLUME_IN_DBFS;
+ }
+ if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME))) {
+ options |= OPTION_FLAG_CLOCK_TIME;
+ }
+ return setOptionFlags(static_cast<OptionFlag>(options));
+ }
+
+ status_t setTypeFromAidl(VolumeShaperConfigurationType aidl) {
+ switch (aidl) {
+ case VolumeShaperConfigurationType::ID:
+ return setType(TYPE_ID);
+ case VolumeShaperConfigurationType::SCALE:
+ return setType(TYPE_SCALE);
+ default:
+ return BAD_VALUE;
+ }
+ }
+
+ VolumeShaperConfigurationType getTypeAsAidl() const {
+ switch (getType()) {
+ case TYPE_ID:
+ return VolumeShaperConfigurationType::ID;
+ case TYPE_SCALE:
+ return VolumeShaperConfigurationType::SCALE;
+ default:
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ }
+ }
}; // Configuration
/* VolumeShaper::Operation expresses an operation to perform on the
@@ -420,19 +478,29 @@
return NO_ERROR;
}
- status_t writeToParcel(Parcel *parcel) const override {
+ status_t writeToParcel(Parcel* parcel) const override {
if (parcel == nullptr) return BAD_VALUE;
- return parcel->writeInt32((int32_t)mFlags)
- ?: parcel->writeInt32(mReplaceId)
- ?: parcel->writeFloat(mXOffset);
+ VolumeShaperOperation op;
+ writeToParcelable(&op);
+ return op.writeToParcel(parcel);
}
- status_t readFromParcel(const Parcel *parcel) override {
- int32_t flags;
- return parcel->readInt32(&flags)
- ?: parcel->readInt32(&mReplaceId)
- ?: parcel->readFloat(&mXOffset)
- ?: setFlags((Flag)flags);
+ void writeToParcelable(VolumeShaperOperation* op) const {
+ op->flags = getFlagsAsAidl();
+ op->replaceId = mReplaceId;
+ op->xOffset = mXOffset;
+ }
+
+ status_t readFromParcel(const Parcel* parcel) override {
+ VolumeShaperOperation op;
+ return op.readFromParcel(parcel)
+ ?: readFromParcelable(op);
+ }
+
+ status_t readFromParcelable(const VolumeShaperOperation& op) {
+ mReplaceId = op.replaceId;
+ mXOffset = op.xOffset;
+ return setFlagsFromAidl(op.flags);
}
std::string toString() const {
@@ -445,6 +513,48 @@
}
private:
+ status_t setFlagsFromAidl(int32_t aidl) {
+ std::underlying_type_t<Flag> flags = 0;
+ if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE))) {
+ flags |= FLAG_REVERSE;
+ }
+ if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE))) {
+ flags |= FLAG_TERMINATE;
+ }
+ if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN))) {
+ flags |= FLAG_JOIN;
+ }
+ if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY))) {
+ flags |= FLAG_DELAY;
+ }
+ if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY))) {
+ flags |= FLAG_CREATE_IF_NECESSARY;
+ }
+ return setFlags(static_cast<Flag>(flags));
+ }
+
+ int32_t getFlagsAsAidl() const {
+ int32_t aidl = 0;
+ std::underlying_type_t<Flag> flags = getFlags();
+ if (flags & FLAG_REVERSE) {
+ aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE));
+ }
+ if (flags & FLAG_TERMINATE) {
+ aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE));
+ }
+ if (flags & FLAG_JOIN) {
+ aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN));
+ }
+ if (flags & FLAG_DELAY) {
+ aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY));
+ }
+ if (flags & FLAG_CREATE_IF_NECESSARY) {
+ aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY));
+ }
+ return aidl;
+ }
+
+ private:
Flag mFlags; // operation to do
int32_t mReplaceId; // if >= 0 the id to remove in a replace operation.
S mXOffset; // position in the curve to set if a valid number (not nan)
@@ -483,15 +593,28 @@
mXOffset = xOffset;
}
- status_t writeToParcel(Parcel *parcel) const override {
+ status_t writeToParcel(Parcel* parcel) const override {
if (parcel == nullptr) return BAD_VALUE;
- return parcel->writeFloat(mVolume)
- ?: parcel->writeFloat(mXOffset);
+ VolumeShaperState state;
+ writeToParcelable(&state);
+ return state.writeToParcel(parcel);
}
- status_t readFromParcel(const Parcel *parcel) override {
- return parcel->readFloat(&mVolume)
- ?: parcel->readFloat(&mXOffset);
+ void writeToParcelable(VolumeShaperState* parcelable) const {
+ parcelable->volume = mVolume;
+ parcelable->xOffset = mXOffset;
+ }
+
+ status_t readFromParcel(const Parcel* parcel) override {
+ VolumeShaperState state;
+ return state.readFromParcel(parcel)
+ ?: readFromParcelable(state);
+ }
+
+ status_t readFromParcelable(const VolumeShaperState& parcelable) {
+ mVolume = parcelable.volume;
+ mXOffset = parcelable.xOffset;
+ return OK;
}
std::string toString() const {
diff --git a/media/OWNERS b/media/OWNERS
index 1afc253..3e194f0 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -8,13 +8,14 @@
hunga@google.com
jiabin@google.com
jmtrivi@google.com
-krocard@google.com
lajos@google.com
marcone@google.com
mnaganov@google.com
+nchalko@google.com
pawin@google.com
philburk@google.com
pmclean@google.com
+quxiangfang@google.com
rachad@google.com
rago@google.com
robertshih@google.com
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 206f87f..80e0924 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,6 +1,6 @@
// for frameworks/av/media
{
- "presubmit": [
+ "presubmit-large": [
// runs whenever we change something in this tree
{
"name": "CtsMediaTestCases",
@@ -17,7 +17,9 @@
"include-filter": "android.media.cts.DecodeEditEncodeTest"
}
]
- },
+ }
+ ],
+ "presubmit": [
{
"name": "GtsMediaTestCases",
"options" : [
@@ -26,17 +28,9 @@
},
{
"include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
- }
- ]
- },
- {
- "name": "GtsExoPlayerTestCases",
- "options" : [
- {
- "include-annotation": "android.platform.test.annotations.SocPresubmit"
},
{
- "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+ "include-filter": "com.google.android.media.gts.WidevineYouTubePerformanceTests"
}
]
}
@@ -48,7 +42,7 @@
}
],
- "staged-platinum-postsubmit": [
+ "platinum-postsubmit": [
// runs regularly, independent of changes in this tree.
// signals if changes elsewhere break media functionality
{
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index f05c2d2..c4a6601 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -6,8 +6,12 @@
capabilities BLOCK_SUSPEND
ioprio rt 4
task_profiles ProcessCapacityHigh HighPerformance
-
- onrestart setprop sys.audio.restart.hal 1
+ onrestart restart vendor.audio-hal
+ onrestart restart vendor.audio-hal-4-0-msd
+ onrestart restart audio_proxy_service
+ # Keep the original service names for backward compatibility
+ onrestart restart vendor.audio-hal-2-0
+ onrestart restart audio-hal-2-0
on property:vts.native_server.on=1
stop audioserver
@@ -17,6 +21,7 @@
on property:init.svc.audioserver=stopped
stop vendor.audio-hal
stop vendor.audio-hal-4-0-msd
+ stop audio_proxy_service
# Keep the original service names for backward compatibility
stop vendor.audio-hal-2-0
stop audio-hal-2-0
@@ -25,6 +30,7 @@
# audioserver bringing it back into running state.
start vendor.audio-hal
start vendor.audio-hal-4-0-msd
+ start audio_proxy_service
# Keep the original service names for backward compatibility
start vendor.audio-hal-2-0
start audio-hal-2-0
@@ -32,16 +38,24 @@
on property:init.svc.audioserver=running
start vendor.audio-hal
start vendor.audio-hal-4-0-msd
+ start audio_proxy_service
# Keep the original service names for backward compatibility
start vendor.audio-hal-2-0
start audio-hal-2-0
on property:sys.audio.restart.hal=1
- restart vendor.audio-hal
- restart vendor.audio-hal-4-0-msd
+ # See b/159966243. Avoid restart loop between audioserver and HAL.
# Keep the original service names for backward compatibility
- restart vendor.audio-hal-2-0
- restart audio-hal-2-0
+ stop vendor.audio-hal
+ stop vendor.audio-hal-4-0-msd
+ stop audio_proxy_service
+ stop vendor.audio-hal-2-0
+ stop audio-hal-2-0
+ start vendor.audio-hal
+ start vendor.audio-hal-4-0-msd
+ start audio_proxy_service
+ start vendor.audio-hal-2-0
+ start audio-hal-2-0
# reset the property
setprop sys.audio.restart.hal 0
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 533d330..8ee1efb 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -49,7 +49,12 @@
signal(SIGPIPE, SIG_IGN);
+#if 1
+ // FIXME See bug 165702394 and bug 168511485
+ const bool doLog = false;
+#else
bool doLog = (bool) property_get_bool("ro.test_harness", 0);
+#endif
pid_t childPid;
// FIXME The advantage of making the process containing media.log service the parent process of
diff --git a/media/bufferpool/1.0/vts/multi.cpp b/media/bufferpool/1.0/vts/multi.cpp
index 1796819..d8cc285 100644
--- a/media/bufferpool/1.0/vts/multi.cpp
+++ b/media/bufferpool/1.0/vts/multi.cpp
@@ -215,7 +215,7 @@
} // anonymous namespace
int main(int argc, char** argv) {
- setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+ android::hardware::details::setTrebleTestingOverride(true);
::testing::InitGoogleTest(&argc, argv);
int status = RUN_ALL_TESTS();
LOG(INFO) << "Test result = " << status;
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/bufferpool/2.0/AccessorImpl.cpp
index 6111fea..1d2562e 100644
--- a/media/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/bufferpool/2.0/AccessorImpl.cpp
@@ -39,6 +39,8 @@
static constexpr size_t kMinAllocBytesForEviction = 1024*1024*15;
static constexpr size_t kMinBufferCountForEviction = 25;
+ static constexpr size_t kMaxUnusedBufferCount = 64;
+ static constexpr size_t kUnusedBufferCountTarget = kMaxUnusedBufferCount - 16;
static constexpr nsecs_t kEvictGranularityNs = 1000000000; // 1 sec
static constexpr nsecs_t kEvictDurationNs = 5000000000; // 5 secs
@@ -724,9 +726,11 @@
}
void Accessor::Impl::BufferPool::cleanUp(bool clearCache) {
- if (clearCache || mTimestampUs > mLastCleanUpUs + kCleanUpDurationUs) {
+ if (clearCache || mTimestampUs > mLastCleanUpUs + kCleanUpDurationUs ||
+ mStats.buffersNotInUse() > kMaxUnusedBufferCount) {
mLastCleanUpUs = mTimestampUs;
- if (mTimestampUs > mLastLogUs + kLogDurationUs) {
+ if (mTimestampUs > mLastLogUs + kLogDurationUs ||
+ mStats.buffersNotInUse() > kMaxUnusedBufferCount) {
mLastLogUs = mTimestampUs;
ALOGD("bufferpool2 %p : %zu(%zu size) total buffers - "
"%zu(%zu size) used buffers - %zu/%zu (recycle/alloc) - "
@@ -737,8 +741,9 @@
mStats.mTotalFetches, mStats.mTotalTransfers);
}
for (auto freeIt = mFreeBuffers.begin(); freeIt != mFreeBuffers.end();) {
- if (!clearCache && (mStats.mSizeCached < kMinAllocBytesForEviction
- || mBuffers.size() < kMinBufferCountForEviction)) {
+ if (!clearCache && mStats.buffersNotInUse() <= kUnusedBufferCountTarget &&
+ (mStats.mSizeCached < kMinAllocBytesForEviction ||
+ mBuffers.size() < kMinBufferCountForEviction)) {
break;
}
auto it = mBuffers.find(*freeIt);
diff --git a/media/bufferpool/2.0/AccessorImpl.h b/media/bufferpool/2.0/AccessorImpl.h
index cd1b4d0..3d39941 100644
--- a/media/bufferpool/2.0/AccessorImpl.h
+++ b/media/bufferpool/2.0/AccessorImpl.h
@@ -193,6 +193,12 @@
: mSizeCached(0), mBuffersCached(0), mSizeInUse(0), mBuffersInUse(0),
mTotalAllocations(0), mTotalRecycles(0), mTotalTransfers(0), mTotalFetches(0) {}
+ /// # of currently unused buffers
+ size_t buffersNotInUse() const {
+ ALOG_ASSERT(mBuffersCached >= mBuffersInUse);
+ return mBuffersCached - mBuffersInUse;
+ }
+
/// A new buffer is allocated on an allocation request.
void onBufferAllocated(size_t allocSize) {
mSizeCached += allocSize;
diff --git a/media/bufferpool/2.0/Android.bp b/media/bufferpool/2.0/Android.bp
index 557b7ef..536f75e 100644
--- a/media/bufferpool/2.0/Android.bp
+++ b/media/bufferpool/2.0/Android.bp
@@ -30,6 +30,7 @@
name: "libstagefright_bufferpool@2.0.1",
defaults: ["libstagefright_bufferpool@2.0-default"],
vendor_available: true,
+ min_sdk_version: "29",
// TODO: b/147147992
double_loadable: true,
cflags: [
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/bufferpool/2.0/BufferPoolClient.cpp
index 342fef6..9308b81 100644
--- a/media/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/bufferpool/2.0/BufferPoolClient.cpp
@@ -32,6 +32,8 @@
static constexpr int64_t kReceiveTimeoutUs = 1000000; // 100ms
static constexpr int kPostMaxRetry = 3;
static constexpr int kCacheTtlUs = 1000000; // TODO: tune
+static constexpr size_t kMaxCachedBufferCount = 64;
+static constexpr size_t kCachedBufferCountTarget = kMaxCachedBufferCount - 16;
class BufferPoolClient::Impl
: public std::enable_shared_from_this<BufferPoolClient::Impl> {
@@ -136,6 +138,10 @@
--mActive;
mLastChangeUs = getTimestampNow();
}
+
+ int cachedBufferCount() const {
+ return mBuffers.size() - mActive;
+ }
} mCache;
// FMQ - release notifier
@@ -668,10 +674,12 @@
// should have mCache.mLock
void BufferPoolClient::Impl::evictCaches(bool clearCache) {
int64_t now = getTimestampNow();
- if (now >= mLastEvictCacheUs + kCacheTtlUs || clearCache) {
+ if (now >= mLastEvictCacheUs + kCacheTtlUs ||
+ clearCache || mCache.cachedBufferCount() > kMaxCachedBufferCount) {
size_t evicted = 0;
for (auto it = mCache.mBuffers.begin(); it != mCache.mBuffers.end();) {
- if (!it->second->hasCache() && (it->second->expire() || clearCache)) {
+ if (!it->second->hasCache() && (it->second->expire() ||
+ clearCache || mCache.cachedBufferCount() > kCachedBufferCountTarget)) {
it = mCache.mBuffers.erase(it);
++evicted;
} else {
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 8492939..56bda89 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -25,7 +25,7 @@
static_libs: [
"android.hardware.media.bufferpool@2.0",
"libcutils",
- "libstagefright_bufferpool@2.0",
+ "libstagefright_bufferpool@2.0.1",
],
shared_libs: [
"libfmq",
@@ -44,10 +44,30 @@
static_libs: [
"android.hardware.media.bufferpool@2.0",
"libcutils",
- "libstagefright_bufferpool@2.0",
+ "libstagefright_bufferpool@2.0.1",
],
shared_libs: [
"libfmq",
],
compile_multilib: "both",
}
+
+cc_test {
+ name: "VtsVndkHidlBufferpoolV2_0TargetCondTest",
+ test_suites: ["device-tests"],
+ defaults: ["VtsHalTargetTestDefaults"],
+ srcs: [
+ "allocator.cpp",
+ "cond.cpp",
+ ],
+ static_libs: [
+ "android.hardware.media.bufferpool@2.0",
+ "libcutils",
+ "libstagefright_bufferpool@2.0.1",
+ ],
+ shared_libs: [
+ "libhidlbase",
+ "libfmq",
+ ],
+ compile_multilib: "both",
+}
diff --git a/media/bufferpool/2.0/tests/allocator.cpp b/media/bufferpool/2.0/tests/allocator.cpp
index 843f7ea..25b08ef 100644
--- a/media/bufferpool/2.0/tests/allocator.cpp
+++ b/media/bufferpool/2.0/tests/allocator.cpp
@@ -120,6 +120,24 @@
}
+void IpcMutex::init() {
+ pthread_mutexattr_t mattr;
+ pthread_mutexattr_init(&mattr);
+ pthread_mutexattr_setpshared(&mattr, PTHREAD_PROCESS_SHARED);
+ pthread_mutex_init(&lock, &mattr);
+ pthread_mutexattr_destroy(&mattr);
+
+ pthread_condattr_t cattr;
+ pthread_condattr_init(&cattr);
+ pthread_condattr_setpshared(&cattr, PTHREAD_PROCESS_SHARED);
+ pthread_cond_init(&cond, &cattr);
+ pthread_condattr_destroy(&cattr);
+}
+
+IpcMutex *IpcMutex::Import(void *pMutex) {
+ return reinterpret_cast<IpcMutex *>(pMutex);
+}
+
ResultStatus TestBufferPoolAllocator::allocate(
const std::vector<uint8_t> ¶ms,
@@ -201,9 +219,33 @@
return false;
}
+bool TestBufferPoolAllocator::MapMemoryForMutex(const native_handle_t *handle, void **mem) {
+ if (!HandleAshmem::isValid(handle)) {
+ return false;
+ }
+ const HandleAshmem *o = static_cast<const HandleAshmem*>(handle);
+ *mem = mmap(
+ NULL, o->size(), PROT_READ|PROT_WRITE, MAP_SHARED, o->ashmemFd(), 0);
+ if (*mem == MAP_FAILED || *mem == nullptr) {
+ return false;
+ }
+ return true;
+}
+
+bool TestBufferPoolAllocator::UnmapMemoryForMutex(void *mem) {
+ munmap(mem, sizeof(IpcMutex));
+ return true;
+}
+
void getTestAllocatorParams(std::vector<uint8_t> *params) {
constexpr static int kAllocationSize = 1024 * 10;
Params ashmemParams(kAllocationSize);
params->assign(ashmemParams.array, ashmemParams.array + sizeof(ashmemParams));
}
+
+void getIpcMutexParams(std::vector<uint8_t> *params) {
+ Params ashmemParams(sizeof(IpcMutex));
+
+ params->assign(ashmemParams.array, ashmemParams.array + sizeof(ashmemParams));
+}
diff --git a/media/bufferpool/2.0/tests/allocator.h b/media/bufferpool/2.0/tests/allocator.h
index 5281dc3..862d1a5 100644
--- a/media/bufferpool/2.0/tests/allocator.h
+++ b/media/bufferpool/2.0/tests/allocator.h
@@ -17,6 +17,7 @@
#ifndef VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
#define VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
+#include <pthread.h>
#include <bufferpool/BufferPoolTypes.h>
using android::hardware::media::bufferpool::V2_0::ResultStatus;
@@ -25,6 +26,17 @@
using android::hardware::media::bufferpool::V2_0::implementation::
BufferPoolAllocator;
+struct IpcMutex {
+ pthread_mutex_t lock;
+ pthread_cond_t cond;
+ int counter = 0;
+ bool signalled = false;
+
+ void init();
+
+ static IpcMutex *Import(void *mem);
+};
+
// buffer allocator for the tests
class TestBufferPoolAllocator : public BufferPoolAllocator {
public:
@@ -43,9 +55,14 @@
static bool Verify(const native_handle_t *handle, const unsigned char val);
+ static bool MapMemoryForMutex(const native_handle_t *handle, void **mem);
+
+ static bool UnmapMemoryForMutex(void *mem);
};
// retrieve buffer allocator paramters
void getTestAllocatorParams(std::vector<uint8_t> *params);
+void getIpcMutexParams(std::vector<uint8_t> *params);
+
#endif // VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
diff --git a/media/bufferpool/2.0/tests/cond.cpp b/media/bufferpool/2.0/tests/cond.cpp
new file mode 100644
index 0000000..21beea8
--- /dev/null
+++ b/media/bufferpool/2.0/tests/cond.cpp
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "buffferpool_unit_test"
+
+#include <gtest/gtest.h>
+
+#include <android-base/logging.h>
+#include <binder/ProcessState.h>
+#include <bufferpool/ClientManager.h>
+#include <errno.h>
+#include <hidl/HidlSupport.h>
+#include <hidl/HidlTransportSupport.h>
+#include <hidl/LegacySupport.h>
+#include <hidl/Status.h>
+#include <signal.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+#include <iostream>
+#include <memory>
+#include <vector>
+#include "allocator.h"
+
+using android::hardware::configureRpcThreadpool;
+using android::hardware::hidl_handle;
+using android::hardware::media::bufferpool::V2_0::IClientManager;
+using android::hardware::media::bufferpool::V2_0::ResultStatus;
+using android::hardware::media::bufferpool::V2_0::implementation::BufferId;
+using android::hardware::media::bufferpool::V2_0::implementation::ClientManager;
+using android::hardware::media::bufferpool::V2_0::implementation::ConnectionId;
+using android::hardware::media::bufferpool::V2_0::implementation::TransactionId;
+using android::hardware::media::bufferpool::BufferPoolData;
+
+namespace {
+
+// communication message types between processes.
+enum PipeCommand : int32_t {
+ INIT_OK = 0,
+ INIT_ERROR,
+ SEND,
+ RECEIVE_OK,
+ RECEIVE_ERROR,
+};
+
+// communication message between processes.
+union PipeMessage {
+ struct {
+ int32_t command;
+ BufferId bufferId;
+ ConnectionId connectionId;
+ TransactionId transactionId;
+ int64_t timestampUs;
+ } data;
+ char array[0];
+};
+
+constexpr int kSignalInt = 200;
+
+// media.bufferpool test setup
+class BufferpoolMultiTest : public ::testing::Test {
+ public:
+ virtual void SetUp() override {
+ ResultStatus status;
+ mReceiverPid = -1;
+ mConnectionValid = false;
+
+ ASSERT_TRUE(pipe(mCommandPipeFds) == 0);
+ ASSERT_TRUE(pipe(mResultPipeFds) == 0);
+
+ mReceiverPid = fork();
+ ASSERT_TRUE(mReceiverPid >= 0);
+
+ if (mReceiverPid == 0) {
+ doReceiver();
+ // The forked child must not run gtest teardown logic itself; block
+ // here until the parent kills this process from TearDown().
+ pause();
+ }
+
+ mManager = ClientManager::getInstance();
+ ASSERT_NE(mManager, nullptr);
+
+ mAllocator = std::make_shared<TestBufferPoolAllocator>();
+ ASSERT_TRUE((bool)mAllocator);
+
+ status = mManager->create(mAllocator, &mConnectionId);
+ ASSERT_TRUE(status == ResultStatus::OK);
+ mConnectionValid = true;
+ }
+
+ virtual void TearDown() override {
+ if (mReceiverPid > 0) {
+ kill(mReceiverPid, SIGKILL);
+ int wstatus;
+ wait(&wstatus);
+ }
+
+ if (mConnectionValid) {
+ mManager->close(mConnectionId);
+ }
+ }
+
+ protected:
+ static void description(const std::string& description) {
+ RecordProperty("description", description);
+ }
+
+ android::sp<ClientManager> mManager;
+ std::shared_ptr<BufferPoolAllocator> mAllocator;
+ bool mConnectionValid;
+ ConnectionId mConnectionId;
+ pid_t mReceiverPid;
+ int mCommandPipeFds[2];
+ int mResultPipeFds[2];
+
+ bool sendMessage(int *pipes, const PipeMessage &message) {
+ int ret = write(pipes[1], message.array, sizeof(PipeMessage));
+ return ret == sizeof(PipeMessage);
+ }
+
+ bool receiveMessage(int *pipes, PipeMessage *message) {
+ int ret = read(pipes[0], message->array, sizeof(PipeMessage));
+ return ret == sizeof(PipeMessage);
+ }
+
+ void doReceiver() {
+ configureRpcThreadpool(1, false);
+ PipeMessage message;
+ mManager = ClientManager::getInstance();
+ if (!mManager) {
+ message.data.command = PipeCommand::INIT_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ android::status_t status = mManager->registerAsService();
+ if (status != android::OK) {
+ message.data.command = PipeCommand::INIT_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ message.data.command = PipeCommand::INIT_OK;
+ sendMessage(mResultPipeFds, message);
+
+ int val = 0;
+ receiveMessage(mCommandPipeFds, &message);
+ {
+ native_handle_t *rhandle = nullptr;
+ std::shared_ptr<BufferPoolData> rbuffer;
+ void *mem = nullptr;
+ IpcMutex *mutex = nullptr;
+ ResultStatus status = mManager->receive(
+ message.data.connectionId, message.data.transactionId,
+ message.data.bufferId, message.data.timestampUs, &rhandle, &rbuffer);
+ mManager->close(message.data.connectionId);
+ if (status != ResultStatus::OK) {
+ message.data.command = PipeCommand::RECEIVE_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ if (!TestBufferPoolAllocator::MapMemoryForMutex(rhandle, &mem)) {
+ message.data.command = PipeCommand::RECEIVE_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ mutex = IpcMutex::Import(mem);
+ pthread_mutex_lock(&(mutex->lock));
+ while (mutex->signalled != true) {
+ pthread_cond_wait(&(mutex->cond), &(mutex->lock));
+ }
+ val = mutex->counter;
+ pthread_mutex_unlock(&(mutex->lock));
+
+ (void)TestBufferPoolAllocator::UnmapMemoryForMutex(mem);
+ if (rhandle) {
+ native_handle_close(rhandle);
+ native_handle_delete(rhandle);
+ }
+ }
+ if (val == kSignalInt) {
+ message.data.command = PipeCommand::RECEIVE_OK;
+ } else {
+ message.data.command = PipeCommand::RECEIVE_ERROR;
+ }
+ sendMessage(mResultPipeFds, message);
+ }
+};
+
+// Buffer transfer test between processes.
+TEST_F(BufferpoolMultiTest, TransferBuffer) {
+ ResultStatus status;
+ PipeMessage message;
+
+ ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
+
+ android::sp<IClientManager> receiver = IClientManager::getService();
+ ConnectionId receiverId;
+ ASSERT_TRUE((bool)receiver);
+
+ status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+ ASSERT_TRUE(status == ResultStatus::OK);
+ {
+ native_handle_t *shandle = nullptr;
+ std::shared_ptr<BufferPoolData> sbuffer;
+ TransactionId transactionId;
+ int64_t postUs;
+ std::vector<uint8_t> vecParams;
+ void *mem = nullptr;
+ IpcMutex *mutex = nullptr;
+
+ getIpcMutexParams(&vecParams);
+ status = mManager->allocate(mConnectionId, vecParams, &shandle, &sbuffer);
+ ASSERT_TRUE(status == ResultStatus::OK);
+
+ ASSERT_TRUE(TestBufferPoolAllocator::MapMemoryForMutex(shandle, &mem));
+
+ mutex = new(mem) IpcMutex();
+ mutex->init();
+
+ status = mManager->postSend(receiverId, sbuffer, &transactionId, &postUs);
+ ASSERT_TRUE(status == ResultStatus::OK);
+
+ message.data.command = PipeCommand::SEND;
+ message.data.bufferId = sbuffer->mId;
+ message.data.connectionId = receiverId;
+ message.data.transactionId = transactionId;
+ message.data.timestampUs = postUs;
+ sendMessage(mCommandPipeFds, message);
+ for (int i=0; i < 200000000; ++i) {
+ // busy-wait delay so the receiver process reaches pthread_cond_wait
+ // before this process calls pthread_cond_signal
+ }
+ pthread_mutex_lock(&(mutex->lock));
+ mutex->counter = kSignalInt;
+ mutex->signalled = true;
+ pthread_cond_signal(&(mutex->cond));
+ pthread_mutex_unlock(&(mutex->lock));
+ (void)TestBufferPoolAllocator::UnmapMemoryForMutex(mem);
+ if (shandle) {
+ native_handle_close(shandle);
+ native_handle_delete(shandle);
+ }
+ }
+ EXPECT_TRUE(receiveMessage(mResultPipeFds, &message));
+ EXPECT_TRUE(message.data.command == PipeCommand::RECEIVE_OK);
+}
+
+} // anonymous namespace
+
+int main(int argc, char** argv) {
+ android::hardware::details::setTrebleTestingOverride(true);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ LOG(INFO) << "Test result = " << status;
+ return status;
+}
diff --git a/media/bufferpool/2.0/tests/multi.cpp b/media/bufferpool/2.0/tests/multi.cpp
index 68b6992..43b0a8c 100644
--- a/media/bufferpool/2.0/tests/multi.cpp
+++ b/media/bufferpool/2.0/tests/multi.cpp
@@ -161,11 +161,18 @@
message.data.bufferId, message.data.timestampUs, &rhandle, &rbuffer);
mManager->close(message.data.connectionId);
if (status != ResultStatus::OK) {
- if (!TestBufferPoolAllocator::Verify(rhandle, 0x77)) {
- message.data.command = PipeCommand::RECEIVE_ERROR;
- sendMessage(mResultPipeFds, message);
- return;
- }
+ message.data.command = PipeCommand::RECEIVE_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ if (!TestBufferPoolAllocator::Verify(rhandle, 0x77)) {
+ message.data.command = PipeCommand::RECEIVE_ERROR;
+ sendMessage(mResultPipeFds, message);
+ return;
+ }
+ if (rhandle) {
+ native_handle_close(rhandle);
+ native_handle_delete(rhandle);
}
}
message.data.command = PipeCommand::RECEIVE_OK;
@@ -198,6 +205,10 @@
ASSERT_TRUE(status == ResultStatus::OK);
ASSERT_TRUE(TestBufferPoolAllocator::Fill(shandle, 0x77));
+ if (shandle) {
+ native_handle_close(shandle);
+ native_handle_delete(shandle);
+ }
status = mManager->postSend(receiverId, sbuffer, &transactionId, &postUs);
ASSERT_TRUE(status == ResultStatus::OK);
@@ -210,12 +221,13 @@
sendMessage(mCommandPipeFds, message);
}
EXPECT_TRUE(receiveMessage(mResultPipeFds, &message));
+ EXPECT_TRUE(message.data.command == PipeCommand::RECEIVE_OK);
}
} // anonymous namespace
int main(int argc, char** argv) {
- setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+ android::hardware::details::setTrebleTestingOverride(true);
::testing::InitGoogleTest(&argc, argv);
int status = RUN_ALL_TESTS();
LOG(INFO) << "Test result = " << status;
diff --git a/media/bufferpool/2.0/tests/single.cpp b/media/bufferpool/2.0/tests/single.cpp
index 777edcf..1e9027b 100644
--- a/media/bufferpool/2.0/tests/single.cpp
+++ b/media/bufferpool/2.0/tests/single.cpp
@@ -102,6 +102,10 @@
for (int i = 0; i < kNumAllocationTest; ++i) {
status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &buffer[i]);
ASSERT_TRUE(status == ResultStatus::OK);
+ if (allocHandle) {
+ native_handle_close(allocHandle);
+ native_handle_delete(allocHandle);
+ }
}
for (int i = 0; i < kNumAllocationTest; ++i) {
for (int j = i + 1; j < kNumAllocationTest; ++j) {
@@ -125,6 +129,10 @@
status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &buffer);
ASSERT_TRUE(status == ResultStatus::OK);
bid[i] = buffer->mId;
+ if (allocHandle) {
+ native_handle_close(allocHandle);
+ native_handle_delete(allocHandle);
+ }
}
for (int i = 1; i < kNumRecycleTest; ++i) {
ASSERT_TRUE(bid[i - 1] == bid[i]);
@@ -154,6 +162,15 @@
&recvHandle, &rbuffer);
EXPECT_TRUE(status == ResultStatus::OK);
ASSERT_TRUE(TestBufferPoolAllocator::Verify(recvHandle, 0x77));
+
+ if (allocHandle) {
+ native_handle_close(allocHandle);
+ native_handle_delete(allocHandle);
+ }
+ if (recvHandle) {
+ native_handle_close(recvHandle);
+ native_handle_delete(recvHandle);
+ }
}
} // anonymous namespace
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index fca3477..6ac4210 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -4,7 +4,9 @@
// { "name": "codec2_core_param_test"},
// TODO(b/155516524)
// { "name": "codec2_vndk_interface_test"},
- { "name": "codec2_vndk_test"},
+ { "name": "codec2_vndk_test"}
+ ],
+ "presubmit-large": [
{
"name": "CtsMediaTestCases",
"options": [
diff --git a/media/codec2/components/aac/Android.bp b/media/codec2/components/aac/Android.bp
index 9eca585..50495a9 100644
--- a/media/codec2/components/aac/Android.bp
+++ b/media/codec2/components/aac/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_aacdec",
defaults: [
"libcodec2_soft-defaults",
@@ -15,7 +15,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_aacenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index f39620e..677f316 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -89,11 +89,18 @@
addParameter(
DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
- .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withFields({C2F(mChannelCount, value).inRange(1, MAX_CHANNEL_COUNT)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
+ DefineParam(mMaxChannelCount, C2_PARAMKEY_MAX_CHANNEL_COUNT)
+ .withDefault(new C2StreamMaxChannelCountInfo::input(0u, MAX_CHANNEL_COUNT))
+ .withFields({C2F(mMaxChannelCount, value).inRange(1, MAX_CHANNEL_COUNT)})
+ .withSetter(Setter<decltype(*mMaxChannelCount)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
.withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(8000, 960000)})
@@ -225,6 +232,7 @@
int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. + 0.5; }
int32_t getDrcEffectType() const { return mDrcEffectType->value; }
int32_t getDrcAlbumMode() const { return mDrcAlbumMode->value; }
+ u_int32_t getMaxChannelCount() const { return mMaxChannelCount->value; }
int32_t getDrcOutputLoudness() const { return (mDrcOutputLoudness->value <= 0 ? -mDrcOutputLoudness->value * 4. + 0.5 : -1); }
private:
@@ -241,6 +249,7 @@
std::shared_ptr<C2StreamDrcAttenuationFactorTuning::input> mDrcAttenuationFactor;
std::shared_ptr<C2StreamDrcEffectTypeTuning::input> mDrcEffectType;
std::shared_ptr<C2StreamDrcAlbumModeTuning::input> mDrcAlbumMode;
+ std::shared_ptr<C2StreamMaxChannelCountInfo::input> mMaxChannelCount;
std::shared_ptr<C2StreamDrcOutputLoudnessTuning::output> mDrcOutputLoudness;
// TODO Add : C2StreamAacSbrModeTuning
};
@@ -366,9 +375,10 @@
ALOGV("AAC decoder using MPEG-D DRC album mode %d", albumMode);
aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE, albumMode);
- // By default, the decoder creates a 5.1 channel downmix signal.
- // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
- aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+ // AAC_PCM_MAX_OUTPUT_CHANNELS
+ u_int32_t maxChannelCount = mIntf->getMaxChannelCount();
+ ALOGV("AAC decoder using maximum output channel count %d", maxChannelCount);
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, maxChannelCount);
return status;
}
@@ -707,6 +717,11 @@
ALOGV("AAC decoder using MPEG-D DRC album mode %d", albumMode);
aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE, albumMode);
+ // AAC_PCM_MAX_OUTPUT_CHANNELS
+ int32_t maxChannelCount = mIntf->getMaxChannelCount();
+ ALOGV("AAC decoder using maximum output channel count %d", maxChannelCount);
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, maxChannelCount);
+
mDrcWrap.update();
UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
@@ -776,7 +791,6 @@
// After an error, replace bufferSize with the sum of the
// decodedSizes to resynchronize the in/out lists.
- inInfo.decodedSizes.pop_back();
inInfo.bufferSize = std::accumulate(
inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0);
@@ -847,6 +861,51 @@
ALOGE("Getting output loudness failed");
}
}
+
+ // Update the config with the values used for decoding: album mode, target
+ // reference level, DRC effect type, DRC attenuation and boost factors,
+ // DRC compression mode, encoder target level and max channel count.
+ // These are reported with the input values, as the decoder does not modify them.
+
+ C2StreamDrcAttenuationFactorTuning::input currentAttenuationFactor(0u,
+ (C2FloatValue) (attenuationFactor/127.));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentAttenuationFactor));
+
+ C2StreamDrcBoostFactorTuning::input currentBoostFactor(0u,
+ (C2FloatValue) (boostFactor/127.));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentBoostFactor));
+
+ C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
+ (C2Config::drc_compression_mode_t) compressMode);
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentCompressMode));
+
+ C2StreamDrcEncodedTargetLevelTuning::input currentEncodedTargetLevel(0u,
+ (C2FloatValue) (encTargetLevel*-0.25));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentEncodedTargetLevel));
+
+ C2StreamDrcAlbumModeTuning::input currentAlbumMode(0u,
+ (C2Config::drc_album_mode_t) albumMode);
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentAlbumMode));
+
+ C2StreamDrcTargetReferenceLevelTuning::input currentTargetRefLevel(0u,
+ (float) (targetRefLevel*-0.25));
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentTargetRefLevel));
+
+ C2StreamDrcEffectTypeTuning::input currentEffectype(0u,
+ (C2Config::drc_effect_type_t) effectType);
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentEffectype));
+
+ C2StreamMaxChannelCountInfo::input currentMaxChannelCnt(0u, maxChannelCount);
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(currentMaxChannelCnt));
+
} while (decoderErr == AAC_DEC_OK);
}
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 4db94f5..2e85915 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -293,6 +293,30 @@
return OK;
}
+static void MaybeLogTimestampWarning(
+ long long lastFrameEndTimestampUs, long long inputTimestampUs) {
+ using Clock = std::chrono::steady_clock;
+ thread_local Clock::time_point sLastLogTimestamp{};
+ thread_local int32_t sOverlapCount = -1;
+ if (Clock::now() - sLastLogTimestamp > std::chrono::minutes(1) || sOverlapCount < 0) {
+ AString countMessage = "";
+ if (sOverlapCount > 0) {
+ countMessage = AStringPrintf(
+ "(%d overlapping timestamp detected since last log)", sOverlapCount);
+ }
+ ALOGI("Correcting overlapping timestamp: last frame ended at %lldus but "
+ "current frame is starting at %lldus. Using the last frame's end timestamp %s",
+ lastFrameEndTimestampUs, inputTimestampUs, countMessage.c_str());
+ sLastLogTimestamp = Clock::now();
+ sOverlapCount = 0;
+ } else {
+ ALOGV("Correcting overlapping timestamp: last frame ended at %lldus but "
+ "current frame is starting at %lldus. Using the last frame's end timestamp",
+ lastFrameEndTimestampUs, inputTimestampUs);
+ ++sOverlapCount;
+ }
+}
+
void C2SoftAacEnc::process(
const std::unique_ptr<C2Work> &work,
const std::shared_ptr<C2BlockPool> &pool) {
@@ -366,9 +390,7 @@
}
c2_cntr64_t inputTimestampUs = work->input.ordinal.timestamp;
if (inputTimestampUs < mLastFrameEndTimestampUs.value_or(inputTimestampUs)) {
- ALOGW("Correcting overlapping timestamp: last frame ended at %lldus but "
- "current frame is starting at %lldus. Using the last frame's end timestamp",
- mLastFrameEndTimestampUs->peekll(), inputTimestampUs.peekll());
+ MaybeLogTimestampWarning(mLastFrameEndTimestampUs->peekll(), inputTimestampUs.peekll());
inputTimestampUs = *mLastFrameEndTimestampUs;
}
if (capacity > 0) {
diff --git a/media/codec2/components/aac/DrcPresModeWrap.cpp b/media/codec2/components/aac/DrcPresModeWrap.cpp
index bee969b..7ce5c9d 100644
--- a/media/codec2/components/aac/DrcPresModeWrap.cpp
+++ b/media/codec2/components/aac/DrcPresModeWrap.cpp
@@ -161,7 +161,7 @@
int newHeavy = mDesHeavy;
if (mDataUpdate) {
- // sanity check
+ // Validation check
if ((mDesTarget < MAX_TARGET_LEVEL) && (mDesTarget != -1)){
mDesTarget = MAX_TARGET_LEVEL; // limit target level to -10 dB or below
newTarget = MAX_TARGET_LEVEL;
@@ -217,7 +217,7 @@
}
else { // handle other used encoder target levels
- // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ // Validation check: DRC presentation mode is only specified for max. 5.1 channels
if (mStreamNrAACChan > 6) {
drcPresMode = 0;
}
@@ -308,7 +308,7 @@
} // switch()
} // if (mEncoderTarget == GPM_ENCODER_TARGET_LEVEL)
- // sanity again
+ // Validation check again
if (newHeavy == 1) {
newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
newAttFactor = 127;
diff --git a/media/codec2/components/amr_nb_wb/Android.bp b/media/codec2/components/amr_nb_wb/Android.bp
index ce25bc9..b09a505 100644
--- a/media/codec2/components/amr_nb_wb/Android.bp
+++ b/media/codec2/components/amr_nb_wb/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_amrnbdec",
defaults: [
"libcodec2_soft-defaults",
@@ -21,7 +21,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_amrwbdec",
defaults: [
"libcodec2_soft-defaults",
@@ -40,7 +40,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_amrnbenc",
defaults: [
"libcodec2_soft-defaults",
@@ -58,7 +58,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_amrwbenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index 61dbd4c..fcc4552 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_av1dec_aom",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7046cb..9ba3b697 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -506,30 +506,28 @@
}
static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- uint8_t* dstStart = dst;
for (size_t i = 0; i < height; ++i) {
- memcpy(dst, srcY, width);
+ memcpy(dstY, srcY, width);
srcY += srcYStride;
- dst += dstYStride;
+ dstY += dstYStride;
}
- dst = dstStart + dstYStride * height;
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcV, width / 2);
+ memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
- dst += dstUVStride;
+ dstV += dstUVStride;
}
- dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcU, width / 2);
+ memcpy(dstU, srcU, width / 2);
srcU += srcUStride;
- dst += dstUVStride;
+ dstU += dstUVStride;
}
}
@@ -596,16 +594,12 @@
return;
}
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
- uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstYStride * height;
- size_t dstUVSize = dstUVStride * height / 2;
- uint8_t *dstV = dstY + dstYSize;
- uint8_t *dstU = dstV + dstUVSize;
+ size_t dstYStride, size_t dstUVStride,
+ size_t width, size_t height) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -696,7 +690,9 @@
block->width(), block->height(), mWidth, mHeight,
(int)*(int64_t*)img->user_priv);
- uint8_t* dst = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
size_t srcYStride = img->stride[AOM_PLANE_Y];
size_t srcUStride = img->stride[AOM_PLANE_U];
size_t srcVStride = img->stride[AOM_PLANE_V];
@@ -710,13 +706,14 @@
const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2,
dstYStride / sizeof(uint32_t),
mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
+ convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+ srcY, srcU, srcV,
+ srcYStride / 2, srcUStride / 2, srcVStride / 2,
dstYStride, dstUVStride,
mWidth, mHeight);
}
@@ -725,7 +722,7 @@
const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
copyOutputBufferToYuvPlanarFrame(
- dst, srcY, srcU, srcV,
+ dstY, dstU, dstV, srcY, srcU, srcV,
srcYStride, srcUStride, srcVStride,
dstYStride, dstUVStride,
mWidth, mHeight);
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index 4021444..6b0e363 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_avcdec",
defaults: [
"libcodec2_soft-defaults",
@@ -15,7 +15,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_avcenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index d7b9e12..3afd670 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -34,7 +34,11 @@
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
constexpr char COMPONENT_NAME[] = "c2.android.avc.decoder";
constexpr uint32_t kDefaultOutputDelay = 8;
-constexpr uint32_t kMaxOutputDelay = 16;
+/* The AVC specification allows a maximum reorder delay of 16 frames.
+ Since the soft AVC decoder supports interlaced content, this delay
+ becomes 32 fields. The decoder implementation adds a further delay of
+ 2 decode calls, so the total maximum output delay is 34. */
+constexpr uint32_t kMaxOutputDelay = 34;
constexpr uint32_t kMinInputBytes = 4;
} // namespace
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index f10835f..3712564 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -1,6 +1,6 @@
// DO NOT DEPEND ON THIS DIRECTLY
// use libcodec2_soft-defaults instead
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_common",
defaults: ["libcodec2-impl-defaults"],
vendor_available: true,
@@ -96,7 +96,7 @@
}
// TEMP: used by cheets2 project - remove when no longer used
-cc_library_shared {
+cc_library {
name: "libcodec2_simple_component",
vendor_available: true,
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
index 5c019f3..29740d1 100644
--- a/media/codec2/components/base/SimpleC2Interface.cpp
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -39,6 +39,16 @@
setDerivedInstance(this);
addParameter(
+ DefineParam(mApiFeatures, C2_PARAMKEY_API_FEATURES)
+ .withConstValue(new C2ApiFeaturesSetting(C2Config::api_feature_t(
+ API_REFLECTION |
+ API_VALUES |
+ API_CURRENT_VALUES |
+ API_DEPENDENCY |
+ API_SAME_INPUT_BUFFER)))
+ .build());
+
+ addParameter(
DefineParam(mName, C2_PARAMKEY_COMPONENT_NAME)
.withConstValue(AllocSharedString<C2ComponentNameSetting>(name.c_str()))
.build());
@@ -305,7 +315,6 @@
Clients need to handle the following base params due to custom dependency.
std::shared_ptr<C2ApiLevelSetting> mApiLevel;
- std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
std::shared_ptr<C2ComponentAttributesSetting> mAttrib;
std::shared_ptr<C2PortSuggestedBufferCountTuning::input> mSuggestedInputBufferCount;
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index d6025de..a17b04e 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -138,7 +138,7 @@
SimplePlayer::SimplePlayer()
: mListener(new Listener(this)),
- mProducerListener(new DummyProducerListener),
+ mProducerListener(new StubProducerListener),
mLinearPoolId(C2BlockPool::PLATFORM_START),
mComposerClient(new SurfaceComposerClient) {
CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp
index 48cc51b..603c412 100644
--- a/media/codec2/components/flac/Android.bp
+++ b/media/codec2/components/flac/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_flacdec",
defaults: [
"libcodec2_soft-defaults",
@@ -14,7 +14,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_flacenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 408db7e..72910c5 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -262,9 +262,10 @@
work->result = C2_NO_MEMORY;
return;
}
- C2WriteView wView = mOutputBlock->map().get();
- if (wView.error()) {
- ALOGE("write view map failed %d", wView.error());
+
+ err = mOutputBlock->map().get().error();
+ if (err) {
+ ALOGE("write view map failed %d", err);
work->result = C2_CORRUPTED;
return;
}
diff --git a/media/codec2/components/g711/Android.bp b/media/codec2/components/g711/Android.bp
index 0101b1a..c39df7b 100644
--- a/media/codec2/components/g711/Android.bp
+++ b/media/codec2/components/g711/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_g711alawdec",
defaults: [
"libcodec2_soft-defaults",
@@ -14,7 +14,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_g711mlawdec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 5c4abb7..32aa98d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_av1dec_gav1",
defaults: [
"libcodec2_soft-defaults",
@@ -13,8 +13,4 @@
srcs: ["C2SoftGav1Dec.cpp"],
static_libs: ["libgav1"],
-
- include_dirs: [
- "external/libgav1/libgav1/",
- ],
}
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index ec5f549..a1929e7 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -288,9 +288,7 @@
void C2SoftGav1Dec::onRelease() { destroyDecoder(); }
c2_status_t C2SoftGav1Dec::onFlush_sm() {
- Libgav1StatusCode status =
- mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
- /*user_private_data=*/0);
+ Libgav1StatusCode status = mCodecCtx->SignalEOS();
if (status != kLibgav1StatusOk) {
ALOGE("Failed to flush av1 decoder. status: %d.", status);
return C2_CORRUPTED;
@@ -299,7 +297,7 @@
// Dequeue frame (if any) that was enqueued previously.
const libgav1::DecoderBuffer *buffer;
status = mCodecCtx->DequeueFrame(&buffer);
- if (status != kLibgav1StatusOk) {
+ if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
status);
return C2_CORRUPTED;
@@ -433,7 +431,8 @@
TIME_DIFF(mTimeEnd, mTimeStart, delay);
const Libgav1StatusCode status =
- mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex);
+ mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
+ /*buffer_private_data=*/nullptr);
GETTIME(&mTimeEnd, nullptr);
TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
@@ -447,17 +446,6 @@
return;
}
- } else {
- const Libgav1StatusCode status =
- mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
- /*user_private_data=*/0);
- if (status != kLibgav1StatusOk) {
- ALOGE("Failed to flush av1 decoder. status: %d.", status);
- work->result = C2_CORRUPTED;
- work->workletsProcessed = 1u;
- mSignalledError = true;
- return;
- }
}
(void)outputBuffer(pool, work);
@@ -470,33 +458,28 @@
}
}
-static void copyOutputBufferToYV12Frame(uint8_t *dst, const uint8_t *srcY,
- const uint8_t *srcU,
- const uint8_t *srcV, size_t srcYStride,
- size_t srcUStride, size_t srcVStride,
+static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- const size_t dstYStride = align(width, 16);
- const size_t dstUVStride = align(dstYStride / 2, 16);
- uint8_t *const dstStart = dst;
for (size_t i = 0; i < height; ++i) {
- memcpy(dst, srcY, width);
+ memcpy(dstY, srcY, width);
srcY += srcYStride;
- dst += dstYStride;
+ dstY += dstYStride;
}
- dst = dstStart + dstYStride * height;
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcV, width / 2);
+ memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
- dst += dstUVStride;
+ dstV += dstUVStride;
}
- dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcU, width / 2);
+ memcpy(dstU, srcU, width / 2);
srcU += srcUStride;
- dst += dstUVStride;
+ dstU += dstUVStride;
}
}
@@ -568,15 +551,11 @@
}
static void convertYUV420Planar16ToYUV420Planar(
- uint8_t *dst, const uint16_t *srcY, const uint16_t *srcU,
- const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
- size_t srcVStride, size_t dstStride, size_t width, size_t height) {
- uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstStride * height;
- size_t dstUVStride = align(dstStride / 2, 16);
- size_t dstUVSize = dstUVStride * height / 2;
- uint8_t *dstV = dstY + dstYSize;
- uint8_t *dstU = dstV + dstUVSize;
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+ const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
+ size_t width, size_t height) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -584,7 +563,7 @@
}
srcY += srcYStride;
- dstY += dstStride;
+ dstY += dstYStride;
}
for (size_t y = 0; y < (height + 1) / 2; ++y) {
@@ -607,13 +586,14 @@
const libgav1::DecoderBuffer *buffer;
const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);
- if (status != kLibgav1StatusOk) {
+ if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
return false;
}
- // |buffer| can be NULL if status was equal to kLibgav1StatusOk. This is not
- // an error. This could mean one of two things:
+ // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
+ // kLibgav1StatusNothingToDequeue. This is not an error. This could mean one
+ // of two things:
// - The EnqueueFrame() call was either a flush (called with nullptr).
// - The enqueued frame did not have any displayable frames.
if (!buffer) {
@@ -679,11 +659,17 @@
ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
block->height(), mWidth, mHeight, (int)buffer->user_private_data);
- uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
size_t srcYStride = buffer->stride[0];
size_t srcUStride = buffer->stride[1];
size_t srcVStride = buffer->stride[2];
+ C2PlanarLayout layout = wView.layout();
+ size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+
if (buffer->bitdepth == 10) {
const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
@@ -691,19 +677,24 @@
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
convertYUV420Planar16ToY410(
- (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
- srcVStride / 2, align(mWidth, 16), mWidth, mHeight);
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- align(mWidth, 16), mWidth, mHeight);
+ convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+ srcY, srcU, srcV,
+ srcYStride / 2, srcUStride / 2, srcVStride / 2,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
- copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV, srcYStride, srcUStride,
- srcVStride, mWidth, mHeight);
+ copyOutputBufferToYV12Frame(dstY, dstU, dstV,
+ srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
finishWork(buffer->user_private_data, work, std::move(block));
block = nullptr;
@@ -722,9 +713,7 @@
return C2_OMITTED;
}
- Libgav1StatusCode status =
- mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
- /*user_private_data=*/0);
+ const Libgav1StatusCode status = mCodecCtx->SignalEOS();
if (status != kLibgav1StatusOk) {
ALOGE("Failed to flush av1 decoder. status: %d.", status);
return C2_CORRUPTED;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index a7c08bb..555adc9 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -18,8 +18,8 @@
#define ANDROID_C2_SOFT_GAV1_DEC_H_
#include <SimpleC2Component.h>
-#include "libgav1/src/decoder.h"
-#include "libgav1/src/decoder_settings.h"
+#include "libgav1/src/gav1/decoder.h"
+#include "libgav1/src/gav1/decoder_settings.h"
#define GETTIME(a, b) gettimeofday(a, b);
#define TIME_DIFF(start, end, diff) \
diff --git a/media/codec2/components/gsm/Android.bp b/media/codec2/components/gsm/Android.bp
index 9330c01..7f54af8 100644
--- a/media/codec2/components/gsm/Android.bp
+++ b/media/codec2/components/gsm/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_gsmdec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index 369bd78..2858212 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_hevcdec",
defaults: [
"libcodec2_soft-defaults",
@@ -11,7 +11,7 @@
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_hevcenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/mp3/Android.bp b/media/codec2/components/mp3/Android.bp
index 66665ed..b4fb1b0 100644
--- a/media/codec2/components/mp3/Android.bp
+++ b/media/codec2/components/mp3/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_mp3dec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index 841f0a9..666e697 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_mpeg2dec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/mpeg4_h263/Android.bp b/media/codec2/components/mpeg4_h263/Android.bp
index 41e4f44..0673709 100644
--- a/media/codec2/components/mpeg4_h263/Android.bp
+++ b/media/codec2/components/mpeg4_h263/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_mpeg4dec",
defaults: [
"libcodec2_soft-defaults",
@@ -15,7 +15,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_h263dec",
defaults: [
"libcodec2_soft-defaults",
@@ -31,7 +31,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_mpeg4enc",
defaults: [
"libcodec2_soft-defaults",
@@ -49,7 +49,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_h263enc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 61b286c..13cc0ec 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -464,34 +464,34 @@
/* TODO: can remove temporary copy after library supports writing to display
* buffer Y, U and V plane pointers using stride info. */
static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dst, uint8_t *src,
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
size_t dstYStride, size_t dstUVStride,
size_t srcYStride, uint32_t width,
uint32_t height) {
size_t srcUVStride = srcYStride / 2;
uint8_t *srcStart = src;
- uint8_t *dstStart = dst;
+
size_t vStride = align(height, 16);
for (size_t i = 0; i < height; ++i) {
- memcpy(dst, src, width);
+ memcpy(dstY, src, width);
src += srcYStride;
- dst += dstYStride;
+ dstY += dstYStride;
}
+
/* U buffer */
src = srcStart + vStride * srcYStride;
- dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, src, width / 2);
+ memcpy(dstU, src, width / 2);
src += srcUVStride;
- dst += dstUVStride;
+ dstU += dstUVStride;
}
+
/* V buffer */
src = srcStart + vStride * srcYStride * 5 / 4;
- dst = dstStart + (dstYStride * height);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, src, width / 2);
+ memcpy(dstV, src, width / 2);
src += srcUVStride;
- dst += dstUVStride;
+ dstV += dstUVStride;
}
}
@@ -672,11 +672,14 @@
}
uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
+ uint8_t *outputBufferU = wView.data()[C2PlanarLayout::PLANE_U];
+ uint8_t *outputBufferV = wView.data()[C2PlanarLayout::PLANE_V];
+
C2PlanarLayout layout = wView.layout();
size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
(void)copyOutputBufferToYuvPlanarFrame(
- outputBufferY,
+ outputBufferY, outputBufferU, outputBufferV,
mOutputBuffer[mNumSamplesOutput & 1],
dstYStride, dstUVStride,
align(mWidth, 16), mWidth, mHeight);
diff --git a/media/codec2/components/opus/Android.bp b/media/codec2/components/opus/Android.bp
index 0ed141b..32e2bf8 100644
--- a/media/codec2/components/opus/Android.bp
+++ b/media/codec2/components/opus/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_opusdec",
defaults: [
"libcodec2_soft-defaults",
@@ -9,7 +9,7 @@
shared_libs: ["libopus"],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_opusenc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/raw/Android.bp b/media/codec2/components/raw/Android.bp
index dc944da..d4fb8f8 100644
--- a/media/codec2/components/raw/Android.bp
+++ b/media/codec2/components/raw/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_rawdec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/vorbis/Android.bp b/media/codec2/components/vorbis/Android.bp
index bc1c380..ff1183f 100644
--- a/media/codec2/components/vorbis/Android.bp
+++ b/media/codec2/components/vorbis/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_vorbisdec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index 15564d9..d3b6e31 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -279,6 +279,8 @@
// skip 7 <type + "vorbis"> bytes
makeBitReader((const uint8_t *)data + 7, inSize - 7, &buf, &ref, &bits);
if (data[0] == 1) {
+ // release any memory that vorbis_info_init will blindly overwrite
+ vorbis_info_clear(mVi);
vorbis_info_init(mVi);
if (0 != _vorbis_unpack_info(mVi, &bits)) {
ALOGE("Encountered error while unpacking info");
@@ -323,6 +325,8 @@
work->result = C2_CORRUPTED;
return;
}
+ // release any memory that vorbis_dsp_init will blindly overwrite
+ vorbis_dsp_clear(mState);
if (0 != vorbis_dsp_init(mState, mVi)) {
ALOGE("Encountered error while dsp init");
mSignalledError = true;
@@ -355,6 +359,10 @@
}
memcpy(&numPageFrames, data + inSize - sizeof(numPageFrames), sizeof(numPageFrames));
inSize -= sizeof(numPageFrames);
+ if (inSize == 0) {
+ // empty buffer, ignore
+ return;
+ }
if (numPageFrames >= 0) {
mNumFramesLeftOnPage = numPageFrames;
}
@@ -405,7 +413,7 @@
mState, reinterpret_cast<int16_t *> (wView.data()),
kMaxNumSamplesPerChannel);
if (numFrames < 0) {
- ALOGD("vorbis_dsp_pcmout returned %d", numFrames);
+ ALOGD("vorbis_dsp_pcmout returned %d frames", numFrames);
numFrames = 0;
}
}
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
index 34f5753..72178aa 100644
--- a/media/codec2/components/vpx/Android.bp
+++ b/media/codec2/components/vpx/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_vp9dec",
defaults: [
"libcodec2_soft-defaults",
@@ -14,7 +14,7 @@
],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_vp8dec",
defaults: [
"libcodec2_soft-defaults",
@@ -26,7 +26,7 @@
shared_libs: ["libvpx"],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_vp9enc",
defaults: [
"libcodec2_soft-defaults",
@@ -43,7 +43,7 @@
cflags: ["-DVP9"],
}
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_vp8enc",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3eef1e3..91238e8 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -631,31 +631,30 @@
}
static void copyOutputBufferToYuvPlanarFrame(
- uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height) {
- uint8_t *dstStart = dst;
for (size_t i = 0; i < height; ++i) {
- memcpy(dst, srcY, width);
+ memcpy(dstY, srcY, width);
srcY += srcYStride;
- dst += dstYStride;
+ dstY += dstYStride;
}
- dst = dstStart + dstYStride * height;
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcV, width / 2);
+ memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
- dst += dstUVStride;
+ dstV += dstUVStride;
}
- dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
for (size_t i = 0; i < height / 2; ++i) {
- memcpy(dst, srcU, width / 2);
+ memcpy(dstU, srcU, width / 2);
srcU += srcUStride;
- dst += dstUVStride;
+ dstU += dstUVStride;
}
+
}
static void convertYUV420Planar16ToY410(uint32_t *dst,
@@ -721,16 +720,12 @@
return;
}
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+ uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
- size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
- uint8_t *dstY = (uint8_t *)dst;
- size_t dstYSize = dstYStride * height;
- size_t dstUVSize = dstUVStride * height / 2;
- uint8_t *dstV = dstY + dstYSize;
- uint8_t *dstU = dstV + dstUVSize;
+ size_t dstYStride, size_t dstUVStride,
+ size_t width, size_t height) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -823,7 +818,10 @@
block->width(), block->height(), mWidth, mHeight,
((c2_cntr64_t *)img->user_priv)->peekll());
- uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
+
size_t srcYStride = img->stride[VPX_PLANE_Y];
size_t srcUStride = img->stride[VPX_PLANE_U];
size_t srcVStride = img->stride[VPX_PLANE_V];
@@ -842,18 +840,18 @@
constexpr size_t kHeight = 64;
for (; i < mHeight; i += kHeight) {
queue->entries.push_back(
- [dst, srcY, srcU, srcV,
+ [dstY, srcY, srcU, srcV,
srcYStride, srcUStride, srcVStride, dstYStride,
width = mWidth, height = std::min(mHeight - i, kHeight)] {
convertYUV420Planar16ToY410(
- (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
width, height);
});
srcY += srcYStride / 2 * kHeight;
srcU += srcUStride / 2 * (kHeight / 2);
srcV += srcVStride / 2 * (kHeight / 2);
- dst += dstYStride * kHeight;
+ dstY += dstYStride * kHeight;
}
CHECK_EQ(0u, queue->numPending);
queue->numPending = queue->entries.size();
@@ -862,8 +860,9 @@
queue.waitForCondition(queue->cond);
}
} else {
- convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
+ convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+ srcY, srcU, srcV,
+ srcYStride / 2, srcUStride / 2, srcVStride / 2,
dstYStride, dstUVStride,
mWidth, mHeight);
}
@@ -871,8 +870,10 @@
const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
+
copyOutputBufferToYuvPlanarFrame(
- dst, srcY, srcU, srcV,
+ dstY, dstU, dstV,
+ srcY, srcU, srcV,
srcYStride, srcUStride, srcVStride,
dstYStride, dstUVStride,
mWidth, mHeight);
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 74e105e..7e9090f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -460,8 +460,8 @@
const C2ConstGraphicBlock inBuffer =
inputBuffer->data().graphicBlocks().front();
- if (inBuffer.width() != mSize->width ||
- inBuffer.height() != mSize->height) {
+ if (inBuffer.width() < mSize->width ||
+ inBuffer.height() < mSize->height) {
ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
inBuffer.width(), mSize->width, inBuffer.height(),
mSize->height);
@@ -472,8 +472,8 @@
bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
vpx_image_t raw_frame;
const C2PlanarLayout &layout = rView->layout();
- uint32_t width = rView->width();
- uint32_t height = rView->height();
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
if (width > 0x8000 || height > 0x8000) {
ALOGE("Image too big: %u x %u", width, height);
work->result = C2_BAD_VALUE;
diff --git a/media/codec2/components/xaac/Android.bp b/media/codec2/components/xaac/Android.bp
index 7795cc1..4889d78 100644
--- a/media/codec2/components/xaac/Android.bp
+++ b/media/codec2/components/xaac/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libcodec2_soft_xaacdec",
defaults: [
"libcodec2_soft-defaults",
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index c8c5148..beeadb8 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -1,12 +1,14 @@
cc_library_headers {
name: "libcodec2_headers",
vendor_available: true,
+ min_sdk_version: "29",
export_include_dirs: ["include"],
}
-cc_library_shared {
+cc_library {
name: "libcodec2",
vendor_available: true,
+ min_sdk_version: "29",
vndk: {
enabled: true,
},
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index 3d3587c..fe37b05 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -734,6 +734,22 @@
}
virtual ~C2Allocator() = default;
+
+ /**
+ * Returns true if the handle looks valid for this allocator.
+ *
+ * It does not actually validate that the handle represents a valid allocation (by this
+ * allocator), only that the handle could have been returned by this allocator. As such,
+ * multiple allocators may return true for checkHandle for the same handle.
+ *
+ * This method MUST be "non-blocking", MUST not access kernel and/or device drivers, and
+ * return within 1us.
+ *
+ * \param handle the handle for an existing allocation (possibly from another
+ * allocator)
+ */
+ virtual bool checkHandle(const C2Handle *const handle) const = 0;
+
protected:
C2Allocator() = default;
};
@@ -2156,9 +2172,12 @@
};
/**
- * An extension of C2Info objects that can contain arbitrary buffer data.
+ * A const metadata object that can contain arbitrary buffer data.
*
- * \note This object is not describable and contains opaque data.
+ * This object is not an actual C2Info and is not attached to buffers (C2Buffer), but rather to
+ * frames (C2FrameData). It is not describable via C2ParamDescriptor.
+ *
+ * C2InfoBuffer is a const object that can be allocated on stack and is copiable.
*/
class C2InfoBuffer {
public:
@@ -2167,14 +2186,65 @@
*
* \return the parameter index.
*/
- const C2Param::Index index() const;
+ const C2Param::Index index() const { return mIndex; }
/**
* Gets the buffer's data.
*
* \return the buffer's data.
*/
- const C2BufferData data() const;
+ const C2BufferData data() const { return mData; }
+
+ /// Returns a clone of this as a global info buffer.
+ C2InfoBuffer asGlobal() const {
+ C2Param::Index index = mIndex;
+ index.convertToGlobal();
+ return C2InfoBuffer(index, mData);
+ }
+
+ /// Returns a clone of this as a port info buffer.
+ C2InfoBuffer asPort(bool output) const {
+ C2Param::Index index = mIndex;
+ index.convertToPort(output);
+ return C2InfoBuffer(index, mData);
+ }
+
+ /// Returns a clone of this as a stream info buffer.
+ C2InfoBuffer asStream(bool output, unsigned stream) const {
+ C2Param::Index index = mIndex;
+ index.convertToStream(output, stream);
+ return C2InfoBuffer(index, mData);
+ }
+
+ /**
+ * Creates a global info buffer containing a single linear block.
+ *
+ * \param index the core parameter index of this info buffer.
+ * \param block the content of the info buffer.
+ *
+ * \return shared pointer to the created info buffer.
+ */
+ static C2InfoBuffer CreateLinearBuffer(C2Param::CoreIndex index, const C2ConstLinearBlock &block);
+
+ /**
+ * Creates a global info buffer containing a single graphic block.
+ *
+ * \param index the core parameter index of this info buffer.
+ * \param block the content of the info buffer.
+ *
+ * \return shared pointer to the created info buffer.
+ */
+ static C2InfoBuffer CreateGraphicBuffer(C2Param::CoreIndex index, const C2ConstGraphicBlock &block);
+
+protected:
+ // no public constructor
+ explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks);
+ explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks);
+
+private:
+ C2Param::Index mIndex;
+ C2BufferData mData;
+ explicit C2InfoBuffer(C2Param::Index index, const C2BufferData &data);
};
/// @}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9fc0e17..38f7389 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -249,6 +249,13 @@
// low latency mode
kParamIndexLowLatencyMode, // bool
+
+ // tunneled codec
+ kParamIndexTunneledMode, // struct
+ kParamIndexTunnelHandle, // int32[]
+ kParamIndexTunnelSystemTime, // int64
+
+ kParamIndexStoreDmaBufUsage, // store, struct
};
}
@@ -278,16 +285,19 @@
C2ApiLevelSetting;
constexpr char C2_PARAMKEY_API_LEVEL[] = "api.level";
-enum C2Config::api_feature_t : uint64_t {
+C2ENUM(C2Config::api_feature_t, uint64_t,
API_REFLECTION = (1U << 0), ///< ability to list supported parameters
API_VALUES = (1U << 1), ///< ability to list supported values for each parameter
API_CURRENT_VALUES = (1U << 2), ///< ability to list currently supported values for each parameter
API_DEPENDENCY = (1U << 3), ///< have a defined parameter dependency
+ API_SAME_INPUT_BUFFER = (1U << 16), ///< supporting multiple input buffers
+ ///< backed by the same allocation
+
API_STREAMS = (1ULL << 32), ///< supporting variable number of streams
- API_TUNNELING = (1ULL << 48), ///< tunneling API
-};
+ API_TUNNELING = (1ULL << 48) ///< tunneling API
+)
// read-only
typedef C2GlobalParam<C2Setting, C2SimpleValueStruct<C2Config::api_feature_t>, kParamIndexApiFeatures>
@@ -2033,6 +2043,33 @@
C2StoreIonUsageInfo;
/**
+ * This structure describes the preferred DMA-Buf allocation parameters for a given memory usage.
+ */
+struct C2StoreDmaBufUsageStruct {
+ inline C2StoreDmaBufUsageStruct() { memset(this, 0, sizeof(*this)); }
+
+ inline C2StoreDmaBufUsageStruct(size_t flexCount, uint64_t usage_, uint32_t capacity_)
+ : usage(usage_), capacity(capacity_), allocFlags(0) {
+ memset(heapName, 0, flexCount);
+ }
+
+ uint64_t usage; ///< C2MemoryUsage
+ uint32_t capacity; ///< capacity
+ int32_t allocFlags; ///< ion allocation flags
+ char heapName[]; ///< dmabuf heap name
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(StoreDmaBufUsage, heapName)
+ C2FIELD(usage, "usage")
+ C2FIELD(capacity, "capacity")
+ C2FIELD(allocFlags, "alloc-flags")
+ C2FIELD(heapName, "heap-name")
+};
+
+// store, private
+typedef C2GlobalParam<C2Info, C2StoreDmaBufUsageStruct, kParamIndexStoreDmaBufUsage>
+ C2StoreDmaBufUsageInfo;
+
+/**
* Flexible pixel format descriptors
*/
struct C2FlexiblePixelFormatDescriptorStruct {
@@ -2179,6 +2216,79 @@
typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct> C2PortTimestampGapTuning;
constexpr char C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT[] = "input-surface.timestamp-adjustment";
+/* ===================================== TUNNELED CODEC ==================================== */
+
+/**
+ * Tunneled codec control.
+ */
+struct C2TunneledModeStruct {
+ /// mode
+ enum mode_t : uint32_t;
+ /// sync type
+ enum sync_type_t : uint32_t;
+
+ inline C2TunneledModeStruct() = default;
+
+ inline C2TunneledModeStruct(
+ size_t flexCount, mode_t mode_, sync_type_t type, std::vector<int32_t> id)
+ : mode(mode_), syncType(type) {
+ memcpy(&syncId, &id[0], c2_min(id.size(), flexCount) * FLEX_SIZE);
+ }
+
+ inline C2TunneledModeStruct(size_t flexCount, mode_t mode_, sync_type_t type, int32_t id)
+ : mode(mode_), syncType(type) {
+ if (flexCount >= 1) {
+ syncId[0] = id;
+ }
+ }
+
+ mode_t mode; ///< tunneled mode
+ sync_type_t syncType; ///< type of sync used for tunneled mode
+ int32_t syncId[]; ///< sync id
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TunneledMode, syncId)
+ C2FIELD(mode, "mode")
+ C2FIELD(syncType, "sync-type")
+ C2FIELD(syncId, "sync-id")
+
+};
+
+C2ENUM(C2TunneledModeStruct::mode_t, uint32_t,
+ NONE,
+ SIDEBAND,
+);
+
+
+C2ENUM(C2TunneledModeStruct::sync_type_t, uint32_t,
+ REALTIME,
+ AUDIO_HW_SYNC,
+ HW_AV_SYNC,
+);
+
+/**
+ * Configure tunneled mode
+ */
+typedef C2PortParam<C2Tuning, C2TunneledModeStruct, kParamIndexTunneledMode>
+ C2PortTunneledModeTuning;
+constexpr char C2_PARAMKEY_TUNNELED_RENDER[] = "output.tunneled-render";
+
+/**
+ * Tunneled mode handle. The meaning of this depends on the
+ * tunneled mode. If the tunneled mode is SIDEBAND, this is the
+ * sideband handle.
+ */
+typedef C2PortParam<C2Tuning, C2Int32Array, kParamIndexTunnelHandle> C2PortTunnelHandleTuning;
+constexpr char C2_PARAMKEY_OUTPUT_TUNNEL_HANDLE[] = "output.tunnel-handle";
+
+/**
+ * The system time using CLOCK_MONOTONIC in nanoseconds at the tunnel endpoint.
+ * For decoders this is the render time for the output frame and
+ * this corresponds to the media timestamp of the output frame.
+ */
+typedef C2PortParam<C2Info, C2SimpleValueStruct<int64_t>, kParamIndexTunnelSystemTime>
+ C2PortTunnelSystemTime;
+constexpr char C2_PARAMKEY_OUTPUT_RENDER_TIME[] = "output.render-time";
+
/// @}
#endif // C2CONFIG_H_
diff --git a/media/codec2/core/include/C2Enum.h b/media/codec2/core/include/C2Enum.h
index b0fad8f..da1f43b 100644
--- a/media/codec2/core/include/C2Enum.h
+++ b/media/codec2/core/include/C2Enum.h
@@ -54,7 +54,7 @@
/// \note this will contain any initialization, which we will remove when converting to lower-case
#define _C2_GET_ENUM_NAME(x, y) #x
/// mapper to get value of enum
-#define _C2_GET_ENUM_VALUE(x, type) (_C2EnumConst<type>)x
+#define _C2_GET_ENUM_VALUE(x, type_) (_C2EnumConst<typename std::underlying_type<type_>::type>)type_::x
/// \endcond
@@ -106,7 +106,7 @@
template<> \
C2FieldDescriptor::NamedValuesType C2FieldDescriptor::namedValuesFor(const name &r __unused) { \
return _C2EnumUtils::sanitizeEnumValues( \
- std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, type, __VA_ARGS__) }, \
+ std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, name, __VA_ARGS__) }, \
{ _C2_MAP(_C2_GET_ENUM_NAME, type, __VA_ARGS__) }, \
prefix); \
}
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index 51d417a..e938f96 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -317,7 +317,8 @@
DEFINE_FIELD_BASED_COMPARISON_OPERATORS(Index, mIndex)
private:
- friend struct C2Param; // for setStream, MakeStreamId, isValid
+ friend class C2InfoBuffer; // for convertTo*
+ friend struct C2Param; // for setStream, MakeStreamId, isValid, convertTo*
friend struct _C2ParamInspector; // for testing
/**
@@ -508,6 +509,14 @@
return _mIndex.setPort(output);
}
+ /// sets the size of this parameter.
+ inline void setSize(size_t size) {
+ if (size < sizeof(C2Param)) {
+ size = 0;
+ }
+ _mSize = c2_min(size, _mSize);
+ }
+
public:
/// invalidate this parameter. There is no recovery from this call; e.g. parameter
/// cannot be 'corrected' to be valid.
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 0a33283..d578820 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -97,6 +97,9 @@
PARAM_TYPE = CoreIndex | TypeFlags
};
+ // the underlying param struct type
+ typedef S Struct;
+
protected:
enum : uint32_t {
FLEX_SIZE = 0,
@@ -270,6 +273,11 @@
} \
return 0; \
} \
+ inline void setFlexCount(size_t count) { \
+ if (count < flexCount()) { \
+ this->setSize(sizeof(_Type) + _Type::FLEX_SIZE * count); \
+ } \
+ } \
/// Mark flexible member variable and make structure flexible.
#define FLEX(cls, m) \
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 6923f3e..67084cc 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -161,7 +161,7 @@
//< for initial work item, these may also come from the parser - if provided
//< for output buffers, these are the responses to requestedInfos
std::vector<std::unique_ptr<C2Param>> configUpdate;
- std::vector<std::shared_ptr<C2InfoBuffer>> infoBuffers;
+ std::vector<C2InfoBuffer> infoBuffers;
};
struct C2Worklet {
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 75c9424..3b73350 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -48,6 +48,7 @@
cc_library {
name: "libcodec2_hidl@1.0",
vendor_available: true,
+ min_sdk_version: "29",
defaults: ["hidl_defaults"],
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index c73cb52..1f0c856 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -943,14 +943,9 @@
d->infoBuffers.resize(s.infoBuffers.size());
i = 0;
- for (const std::shared_ptr<C2InfoBuffer>& sInfoBuffer : s.infoBuffers) {
+ for (const C2InfoBuffer& sInfoBuffer : s.infoBuffers) {
InfoBuffer& dInfoBuffer = d->infoBuffers[i++];
- if (!sInfoBuffer) {
- LOG(ERROR) << "Null C2FrameData::infoBuffers["
- << i - 1 << "].";
- return false;
- }
- if (!objcpy(&dInfoBuffer, *sInfoBuffer,
+ if (!objcpy(&dInfoBuffer, sInfoBuffer,
bufferPoolSender, baseBlocks, baseBlockIndices)) {
LOG(ERROR) << "Invalid C2FrameData::infoBuffers["
<< i - 1 << "].";
diff --git a/media/codec2/hidl/1.0/vts/functional/Android.bp b/media/codec2/hidl/1.0/vts/functional/Android.bp
index cd3be81..5ea4825 100644
--- a/media/codec2/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/Android.bp
@@ -91,6 +91,14 @@
"res/bbb_av1_176_144.info",
"res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
"res/bbb_vp9_704x480_280kbps_24fps_altref_2.info",
+ "res/bbb_avc_176x144_300kbps_60fps_chksum.md5",
+ "res/bbb_avc_640x360_768kbps_30fps_chksum.md5",
+ "res/bbb_hevc_176x144_176kbps_60fps_chksum.md5",
+ "res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5",
+ "res/bbb_vp8_640x360_2mbps_30fps_chksm.md5",
+ "res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5",
+ "res/bbb_av1_640_360_chksum.md5",
+ "res/bbb_av1_176_144_chksm.md5",
],
}
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 264abba..3a47ae9 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -22,7 +22,6 @@
#include <hidl/GtestPrinter.h>
#include <stdio.h>
#include <algorithm>
-#include <fstream>
#include <C2AllocatorIon.h>
#include <C2Buffer.h>
@@ -35,15 +34,11 @@
#include "media_c2_hidl_test_common.h"
-struct FrameInfo {
- int bytesCount;
- uint32_t flags;
- int64_t timestamp;
-};
-
static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
kDecodeTestParameters;
+static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+
// Resource directory
static std::string sResourceDir = "";
@@ -105,6 +100,7 @@
mEos = false;
mFramesReceived = 0;
mTimestampUs = 0u;
+ mWorkResult = C2_OK;
mTimestampDevTest = false;
if (mCompName == unknown_comp) mDisableTest = true;
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
@@ -121,6 +117,8 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
+ virtual void validateTimestampList(int32_t* bitStreamInfo);
+
struct outputMetaData {
uint64_t timestampUs;
uint32_t rangeLength;
@@ -131,6 +129,7 @@
if (!work->worklets.empty()) {
// For decoder components current timestamp always exceeds
// previous timestamp
+ mWorkResult |= work->result;
bool codecConfig = ((work->worklets.front()->output.flags &
C2FrameData::FLAG_CODEC_CONFIG) != 0);
if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
@@ -182,6 +181,8 @@
bool mDisableTest;
bool mTimestampDevTest;
standardComp mCompName;
+
+ int32_t mWorkResult;
uint64_t mTimestampUs;
uint32_t mFramesReceived;
std::list<uint64_t> mFlushedIndices;
@@ -457,6 +458,31 @@
}
}
+void Codec2AudioDecHidlTestBase::validateTimestampList(int32_t* bitStreamInfo) {
+ uint32_t samplesReceived = 0;
+ // Update SampleRate and ChannelCount
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ int32_t nSampleRate = bitStreamInfo[0];
+ int32_t nChannels = bitStreamInfo[1];
+ std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
+ auto itOut = oBufferMetaData.begin();
+ EXPECT_EQ(*itIn, itOut->timestampUs);
+ uint64_t expectedTimeStamp = *itIn;
+ while (itOut != oBufferMetaData.end()) {
+ EXPECT_EQ(expectedTimeStamp, itOut->timestampUs);
+ if (expectedTimeStamp != itOut->timestampUs) break;
+ // buffer samples = (total bytes) / (nChannels * (bits per sample / 8))
+ samplesReceived += ((itOut->rangeLength) / (nChannels * 2));
+ expectedTimeStamp = samplesReceived * 1000000ll / nSampleRate;
+ itOut++;
+ }
+ itIn = mTimestampUslist.end();
+ --itIn;
+ EXPECT_GT(expectedTimeStamp, *itIn);
+ oBufferMetaData.clear();
+ mTimestampUslist.clear();
+}
+
TEST_P(Codec2AudioDecHidlTest, validateCompName) {
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ALOGV("Checks if the given component is a valid audio component");
@@ -493,7 +519,7 @@
bool signalEOS = !std::get<3>(GetParam()).compare("true");
mTimestampDevTest = true;
char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
@@ -503,21 +529,9 @@
return;
}
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true);
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- bool codecConfig = ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0;
- if (mTimestampDevTest && !codecConfig) mTimestampUslist.push_back(timestamp);
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+
// Reset total no of frames received
mFramesReceived = 0;
mTimestampUs = 0;
@@ -534,6 +548,7 @@
}
ASSERT_EQ(mComponent->start(), C2_OK);
ALOGV("mURL : %s", mURL);
+ std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -550,7 +565,7 @@
}
// blocking call to ensures application to Wait till all the inputs are
// consumed
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
eleStream.close();
if (mFramesReceived != infoSize) {
ALOGE("Input buffer count and Output buffer count mismatch");
@@ -558,32 +573,12 @@
ASSERT_TRUE(false);
}
ASSERT_EQ(mEos, true);
+
if (mTimestampDevTest) {
- uint64_t expTs;
- uint32_t samplesReceived = 0;
- // Update SampleRate and ChannelCount
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
- int nSampleRate = bitStreamInfo[0];
- int nChannels = bitStreamInfo[1];
- std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
- auto itOut = oBufferMetaData.begin();
- EXPECT_EQ(*itIn, itOut->timestampUs);
- expTs = *itIn;
- while (itOut != oBufferMetaData.end()) {
- EXPECT_EQ(expTs, itOut->timestampUs);
- if (expTs != itOut->timestampUs) break;
- // buffer samples = ((total bytes) / (ac * (bits per sample / 8))
- samplesReceived += ((itOut->rangeLength) / (nChannels * 2));
- expTs = samplesReceived * 1000000ll / nSampleRate;
- itOut++;
- }
- itIn = mTimestampUslist.end();
- --itIn;
- EXPECT_GT(expTs, *itIn);
- oBufferMetaData.clear();
- mTimestampUslist.clear();
+ validateTimestampList(bitStreamInfo);
}
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
// thumbnail test
@@ -592,25 +587,15 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
GetURLForComponent(mCompName, mURL, info);
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true);
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+
int32_t bitStreamInfo[2] = {0};
if (mCompName == raw) {
bitStreamInfo[0] = 8000;
@@ -628,22 +613,25 @@
// request EOS for thumbnail
// signal EOS flag with last frame
size_t i = -1;
+ uint32_t flags;
do {
i++;
flags = 0;
if (Info[i].flags) flags = 1u << (Info[i].flags - 1);
} while (!(flags & SYNC_FRAME));
+ std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
i + 1));
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
eleStream.close();
EXPECT_GE(mFramesReceived, 1U);
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2AudioDecHidlTest, EOSTest) {
@@ -684,33 +672,22 @@
ASSERT_EQ(mEos, true);
ASSERT_EQ(mWorkQueue.size(), (size_t)MAX_INPUT_BUFFERS);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2AudioDecHidlTest, FlushTest) {
description("Tests Flush calls");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- typedef std::unique_lock<std::mutex> ULock;
char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
GetURLForComponent(mCompName, mURL, info);
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true);
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- mFlushedIndices.clear();
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+
int32_t bitStreamInfo[2] = {0};
if (mCompName == raw) {
bitStreamInfo[0] = 8000;
@@ -723,44 +700,37 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true);
- // Decode 128 frames and flush. here 128 is chosen to ensure there is a key
- // frame after this so that the below section can be covered for all
- // components
- uint32_t numFramesFlushed = 128;
- ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
- mFlushedIndices, mLinearPool, eleStream, &Info, 0,
- numFramesFlushed, false));
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- uint64_t frameIndex;
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+
+ ALOGV("mURL : %s", mURL);
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true);
+ // Decode 30 frames and flush.
+ uint32_t numFramesFlushed = FLUSH_INTERVAL;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, &Info, 0,
+ numFramesFlushed, false));
+ // flush
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+
// Seek to next key frame and start decoding till the end
mFlushedIndices.clear();
int index = numFramesFlushed;
bool keyFrame = false;
- flags = 0;
+ uint32_t flags = 0;
while (index < (int)Info.size()) {
if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
if ((flags & SYNC_FRAME) == SYNC_FRAME) {
@@ -779,25 +749,13 @@
eleStream.close();
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- ASSERT_EQ(mFlushedIndices.empty(), true);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ // TODO: (b/154671521)
+ // Add assert for mWorkResult
ASSERT_EQ(mComponent->stop(), C2_OK);
}
@@ -862,7 +820,7 @@
// consumed
if (!mEos) {
ALOGV("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
}
eleStream.close();
@@ -875,6 +833,109 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
+class Codec2AudioDecCsdInputTests
+ : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+ void getParams() {
+ mInstanceName = std::get<0>(GetParam());
+ mComponentName = std::get<1>(GetParam());
+ }
+};
+
+// Test the codecs for the following
+// start - csd - data... - (with/without) flush - data... - flush - data...
+TEST_P(Codec2AudioDecCsdInputTests, CSDFlushTest) {
+ description("Tests codecs for flush at different states");
+ if (mDisableTest) GTEST_SKIP() << "Test is disabled";
+
+ char mURL[512], info[512];
+ android::Vector<FrameInfo> Info;
+
+ strcpy(mURL, sResourceDir.c_str());
+ strcpy(info, sResourceDir.c_str());
+ GetURLForComponent(mCompName, mURL, info);
+ if (!strcmp(mURL, sResourceDir.c_str())) {
+ ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
+ return;
+ }
+ ALOGV("mURL : %s", mURL);
+
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
+
+ int32_t bitStreamInfo[2] = {0};
+ if (mCompName == raw) {
+ bitStreamInfo[0] = 8000;
+ bitStreamInfo[1] = 1;
+ } else {
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ }
+ if (!setupConfigParam(mComponent, bitStreamInfo)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+
+ ASSERT_EQ(mComponent->start(), C2_OK);
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true);
+
+ bool signalEOS = false;
+ bool flushCsd = !std::get<2>(GetParam()).compare("true");
+ ALOGV("sending %d csd data ", numCsds);
+ int framesToDecode = numCsds;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, &Info, 0,
+ framesToDecode, false));
+
+ c2_status_t err = C2_OK;
+ std::list<std::unique_ptr<C2Work>> flushedWork;
+ if (numCsds && flushCsd) {
+ // We wait for all the CSD buffers to get consumed.
+ // Once we have received all CSD work back, we call flush
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ oBufferMetaData.clear();
+ }
+
+ int offset = framesToDecode;
+ while (1) {
+ framesToDecode = c2_min(FLUSH_INTERVAL, (int)Info.size() - offset);
+ if (framesToDecode < FLUSH_INTERVAL) signalEOS = true;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, &Info,
+ offset, framesToDecode, signalEOS));
+ offset += framesToDecode;
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ // blocking call that ensures the application waits till the remaining
+ // 'non-flushed' inputs are consumed
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ if (signalEOS || offset >= (int)Info.size()) {
+ break;
+ }
+ }
+ if (!signalEOS) {
+ ASSERT_NO_FATAL_FAILURE(testInputBuffer(mComponent, mQueueLock, mWorkQueue,
+ C2FrameData::FLAG_END_OF_STREAM, false));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+ }
+ eleStream.close();
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+}
+
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
android::hardware::PrintInstanceTupleNameToString<>);
@@ -883,6 +944,10 @@
testing::ValuesIn(kDecodeTestParameters),
android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
+ testing::ValuesIn(kCsdFlushTestParameters),
+ android::hardware::PrintInstanceTupleNameToString<>);
+
} // anonymous namespace
int main(int argc, char** argv) {
@@ -896,6 +961,11 @@
std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
kDecodeTestParameters.push_back(
std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+
+ kCsdFlushTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ kCsdFlushTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
}
// Set the resource directory based on command line args.
@@ -909,4 +979,4 @@
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
-}
\ No newline at end of file
+}
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 5f3ae41..e3a4f68 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -95,6 +95,8 @@
mEos = false;
mCsd = false;
mFramesReceived = 0;
+ mWorkResult = C2_OK;
+ mOutputSize = 0u;
if (mCompName == unknown_comp) mDisableTest = true;
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
getInputMaxBufSize();
@@ -115,6 +117,17 @@
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
for (std::unique_ptr<C2Work>& work : workItems) {
if (!work->worklets.empty()) {
+ mWorkResult |= work->result;
+ if (!work->worklets.front()->output.buffers.empty()) {
+ mOutputSize += work->worklets.front()
+ ->output.buffers[0]
+ ->data()
+ .linearBlocks()
+ .front()
+ .map()
+ .get()
+ .capacity();
+ }
workDone(mComponent, work, mFlushedIndices, mQueueLock, mQueueCondition, mWorkQueue,
mEos, mCsd, mFramesReceived);
}
@@ -135,8 +148,11 @@
bool mCsd;
bool mDisableTest;
standardComp mCompName;
+
+ int32_t mWorkResult;
uint32_t mFramesReceived;
int32_t mInputMaxBufSize;
+ uint64_t mOutputSize;
std::list<uint64_t> mFlushedIndices;
C2BlockPool::local_id_t mBlockPoolId;
@@ -236,6 +252,41 @@
return false;
}
+// Get config params for a component
+bool getConfigParams(Codec2AudioEncHidlTest::standardComp compName, int32_t* nChannels,
+ int32_t* nSampleRate, int32_t* samplesPerFrame) {
+ switch (compName) {
+ case Codec2AudioEncHidlTest::aac:
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 1024;
+ break;
+ case Codec2AudioEncHidlTest::flac:
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 1152;
+ break;
+ case Codec2AudioEncHidlTest::opus:
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 960;
+ break;
+ case Codec2AudioEncHidlTest::amrnb:
+ *nChannels = 1;
+ *nSampleRate = 8000;
+ *samplesPerFrame = 160;
+ break;
+ case Codec2AudioEncHidlTest::amrwb:
+ *nChannels = 1;
+ *nSampleRate = 16000;
+ *samplesPerFrame = 160;
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
+
// LookUpTable of clips and metadata for component testing
void GetURLForComponent(Codec2AudioEncHidlTest::standardComp comp, char* mURL) {
struct CompToURL {
@@ -367,36 +418,18 @@
bool signalEOS = !std::get<2>(GetParam()).compare("true");
// Ratio w.r.t to mInputMaxBufSize
int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
- ;
- // Setting default sampleRate
- int32_t nChannels = 2;
- int32_t nSampleRate = 44100;
- switch (mCompName) {
- case aac:
- nChannels = 2;
- nSampleRate = 48000;
- break;
- case flac:
- nChannels = 2;
- nSampleRate = 48000;
- break;
- case opus:
- nChannels = 2;
- nSampleRate = 48000;
- break;
- case amrnb:
- nChannels = 1;
- nSampleRate = 8000;
- break;
- case amrwb:
- nChannels = 1;
- nSampleRate = 16000;
- break;
- default:
- ASSERT_TRUE(false);
+ int32_t nChannels;
+ int32_t nSampleRate;
+ int32_t samplesPerFrame;
+
+ if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
}
- int32_t samplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (nChannels * 2));
+
+ samplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (nChannels * 2));
ALOGV("signalEOS %d mInputMaxBufSize %d samplesPerFrame %d", signalEOS, mInputMaxBufSize,
samplesPerFrame);
@@ -416,7 +449,7 @@
// If EOS is not sent, sending empty input with EOS flag
if (!signalEOS) {
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1);
ASSERT_NO_FATAL_FAILURE(testInputBuffer(mComponent, mQueueLock, mWorkQueue,
C2FrameData::FLAG_END_OF_STREAM, false));
numFrames += 1;
@@ -424,7 +457,7 @@
// blocking call to ensures application to Wait till all the inputs are
// consumed
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
eleStream.close();
if (mFramesReceived != numFrames) {
ALOGE("Input buffer count and Output buffer count mismatch");
@@ -439,6 +472,7 @@
}
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2AudioEncHidlTest, EOSTest) {
@@ -479,50 +513,26 @@
}
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2AudioEncHidlTest, FlushTest) {
description("Test Request for flush");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- typedef std::unique_lock<std::mutex> ULock;
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
GetURLForComponent(mCompName, mURL);
- // Setting default configuration
mFlushedIndices.clear();
- int32_t nChannels = 2;
- int32_t nSampleRate = 44100;
- int32_t samplesPerFrame = 1024;
- switch (mCompName) {
- case aac:
- nChannels = 2;
- nSampleRate = 48000;
- samplesPerFrame = 1024;
- break;
- case flac:
- nChannels = 2;
- nSampleRate = 48000;
- samplesPerFrame = 1152;
- break;
- case opus:
- nChannels = 2;
- nSampleRate = 48000;
- samplesPerFrame = 960;
- break;
- case amrnb:
- nChannels = 1;
- nSampleRate = 8000;
- samplesPerFrame = 160;
- break;
- case amrwb:
- nChannels = 1;
- nSampleRate = 16000;
- samplesPerFrame = 160;
- break;
- default:
- ASSERT_TRUE(false);
+ int32_t nChannels;
+ int32_t nSampleRate;
+ int32_t samplesPerFrame;
+
+ if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
}
if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
@@ -536,33 +546,24 @@
uint32_t numFrames = 128;
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
+ // flush
+ std::list<std::unique_ptr<C2Work>> flushedWork;
+ c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ALOGV("mURL : %s", mURL);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFramesFlushed,
samplesPerFrame, nChannels, nSampleRate));
- std::list<std::unique_ptr<C2Work>> flushedWork;
- c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- uint64_t frameIndex;
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- mFlushedIndices.clear();
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream,
numFrames - numFramesFlushed, samplesPerFrame, nChannels,
@@ -570,28 +571,220 @@
eleStream.close();
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- ASSERT_EQ(mFlushedIndices.empty(), true);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ // TODO: (b/154671521)
+ // Add assert for mWorkResult
ASSERT_EQ(mComponent->stop(), C2_OK);
}
+TEST_P(Codec2AudioEncHidlTest, MultiChannelCountTest) {
+ description("Encodes input file for different channel count");
+ if (mDisableTest) GTEST_SKIP() << "Test is disabled";
+
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mCompName, mURL);
+
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
+ ALOGV("mURL : %s", mURL);
+
+ int32_t nSampleRate;
+ int32_t samplesPerFrame;
+ int32_t nChannels;
+ int32_t numFrames = 16;
+ int32_t maxChannelCount = 8;
+
+ if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+
+ uint64_t prevOutputSize = 0u;
+ uint32_t prevChannelCount = 0u;
+
+ // Looping through the maximum number of channel count supported by encoder
+ for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
+ ALOGV("Configuring %u encoder for channel count = %d", mCompName, nChannels);
+ if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+
+ std::vector<std::unique_ptr<C2Param>> inParams;
+ c2_status_t c2_status = mComponent->query({}, {C2StreamChannelCountInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &inParams);
+ ASSERT_TRUE(!c2_status && inParams.size())
+ << "Query configured channelCount failed => %d" << c2_status;
+
+ size_t offset = sizeof(C2Param);
+ C2Param* param = inParams[0].get();
+ int32_t channelCount = *(int32_t*)((uint8_t*)param + offset);
+ if (channelCount != nChannels) {
+ std::cout << "[ WARN ] Test Skipped for ChannelCount " << nChannels << "\n";
+ continue;
+ }
+
+ // To check if the input stream is sufficient to encode for the higher channel count
+ int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ if (eleStream.gcount() < bytesCount) {
+ std::cout << "[ WARN ] Test Skipped for ChannelCount " << nChannels
+ << " because of insufficient input data\n";
+ continue;
+ }
+
+ ASSERT_EQ(mComponent->start(), C2_OK);
+
+ ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, numFrames,
+ samplesPerFrame, nChannels, nSampleRate));
+
+ // mDisableTest will be set if buffer was not fetched properly.
+ // This may happen when config params is not proper but config succeeded
+ // In such cases, we skip encoding the input stream
+ if (mDisableTest) {
+ std::cout << "[ WARN ] Test Disabled for ChannelCount " << nChannels << "\n";
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+ return;
+ }
+
+ // blocking call that ensures the application waits till all the inputs are consumed
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+
+ // Validate output size based on chosen ChannelCount
+ EXPECT_GE(mOutputSize, prevOutputSize);
+
+ prevChannelCount = nChannels;
+ prevOutputSize = mOutputSize;
+
+ if (mFramesReceived != numFrames) {
+ ALOGE("Input buffer count and Output buffer count mismatch");
+ ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
+ ASSERT_TRUE(false);
+ }
+ if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+ ASSERT_TRUE(mCsd) << "CSD buffer missing";
+ }
+ ASSERT_TRUE(mEos);
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+ mFramesReceived = 0;
+ mOutputSize = 0;
+ mEos = false;
+ mCsd = false;
+ eleStream.seekg(0, eleStream.beg);
+ }
+}
+
+TEST_P(Codec2AudioEncHidlTest, MultiSampleRateTest) {
+ description("Encodes input file for different SampleRate");
+ if (mDisableTest) GTEST_SKIP() << "Test is disabled";
+
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mCompName, mURL);
+
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
+ ALOGV("mURL : %s", mURL);
+
+ int32_t nSampleRate;
+ int32_t samplesPerFrame;
+ int32_t nChannels;
+ int32_t numFrames = 16;
+
+ if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+
+ int32_t sampleRateValues[] = {1000, 8000, 16000, 24000, 48000, 96000, 192000};
+
+ uint64_t prevOutputSize = 0u;
+ uint32_t prevSampleRate = 0u;
+
+ for (int32_t nSampleRate : sampleRateValues) {
+ ALOGV("Configuring %u encoder for SampleRate = %d", mCompName, nSampleRate);
+ if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+
+ std::vector<std::unique_ptr<C2Param>> inParams;
+ c2_status_t c2_status = mComponent->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &inParams);
+
+ ASSERT_TRUE(!c2_status && inParams.size())
+ << "Query configured SampleRate failed => %d" << c2_status;
+ size_t offset = sizeof(C2Param);
+ C2Param* param = inParams[0].get();
+ int32_t configuredSampleRate = *(int32_t*)((uint8_t*)param + offset);
+
+ if (configuredSampleRate != nSampleRate) {
+ std::cout << "[ WARN ] Test Skipped for SampleRate " << nSampleRate << "\n";
+ continue;
+ }
+
+ // To check if the input stream is sufficient to encode for the higher SampleRate
+ int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ if (eleStream.gcount() < bytesCount) {
+ std::cout << "[ WARN ] Test Skipped for SampleRate " << nSampleRate
+ << " because of insufficient input data\n";
+ continue;
+ }
+
+ ASSERT_EQ(mComponent->start(), C2_OK);
+
+ ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, numFrames,
+ samplesPerFrame, nChannels, nSampleRate));
+
+ // mDisableTest will be set if buffer was not fetched properly.
+ // This may happen when config params is not proper but config succeeded
+ // In this case, we skip encoding the input stream
+ if (mDisableTest) {
+ std::cout << "[ WARN ] Test Disabled for SampleRate" << nSampleRate << "\n";
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+ return;
+ }
+
+ // blocking call that ensures the application waits till all the inputs are consumed
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+
+ // Validate output size based on chosen samplerate
+ if (prevSampleRate >= nSampleRate) {
+ EXPECT_LE(mOutputSize, prevOutputSize);
+ } else {
+ EXPECT_GT(mOutputSize, prevOutputSize);
+ }
+ prevSampleRate = nSampleRate;
+ prevOutputSize = mOutputSize;
+
+ if (mFramesReceived != numFrames) {
+ ALOGE("Input buffer count and Output buffer count mismatch");
+ ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
+ ASSERT_TRUE(false);
+ }
+ if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+ ASSERT_TRUE(mCsd) << "CSD buffer missing";
+ }
+ ASSERT_TRUE(mEos);
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+ mFramesReceived = 0;
+ mOutputSize = 0;
+ mEos = false;
+ mCsd = false;
+ eleStream.seekg(0, eleStream.beg);
+ }
+}
+
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
android::hardware::PrintInstanceTupleNameToString<>);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index da8225c..0251ec2 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -92,7 +92,10 @@
for (size_t i = 0; i < updates.size(); ++i) {
C2Param* param = updates[i].get();
if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
- csd = true;
+ C2StreamInitDataInfo::output* csdBuffer =
+ (C2StreamInitDataInfo::output*)(param);
+ size_t csdSize = csdBuffer->flexCount();
+ if (csdSize > 0) csd = true;
} else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
(param->index() == C2StreamChannelCountInfo::output::PARAM_TYPE) ||
(param->index() == C2StreamPictureSizeInfo::output::PARAM_TYPE)) {
@@ -160,4 +163,57 @@
}
return parameters;
-}
\ No newline at end of file
+}
+
+// Populate Info vector and return number of CSDs
+int32_t populateInfoVector(std::string info, android::Vector<FrameInfo>* frameInfo,
+ bool timestampDevTest, std::list<uint64_t>* timestampUslist) {
+ std::ifstream eleInfo;
+ eleInfo.open(info);
+ if (!eleInfo.is_open()) {
+ ALOGE("Can't open info file");
+ return -1;
+ }
+ int32_t numCsds = 0;
+ int32_t bytesCount = 0;
+ uint32_t flags = 0;
+ uint32_t timestamp = 0;
+ while (1) {
+ if (!(eleInfo >> bytesCount)) break;
+ eleInfo >> flags;
+ eleInfo >> timestamp;
+ bool codecConfig = flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
+ if (codecConfig) numCsds++;
+ bool nonDisplayFrame = ((flags & FLAG_NON_DISPLAY_FRAME) != 0);
+ if (timestampDevTest && !codecConfig && !nonDisplayFrame) {
+ timestampUslist->push_back(timestamp);
+ }
+ frameInfo->push_back({bytesCount, flags, timestamp});
+ }
+ ALOGV("numCsds : %d", numCsds);
+ eleInfo.close();
+ return numCsds;
+}
+
+void verifyFlushOutput(std::list<std::unique_ptr<C2Work>>& flushedWork,
+ std::list<std::unique_ptr<C2Work>>& workQueue,
+ std::list<uint64_t>& flushedIndices, std::mutex& queueLock) {
+ // Update mFlushedIndices based on the index received from flush()
+ typedef std::unique_lock<std::mutex> ULock;
+ uint64_t frameIndex;
+ ULock l(queueLock);
+ for (std::unique_ptr<C2Work>& work : flushedWork) {
+ ASSERT_NE(work, nullptr);
+ frameIndex = work->input.ordinal.frameIndex.peeku();
+ std::list<uint64_t>::iterator frameIndexIt =
+ std::find(flushedIndices.begin(), flushedIndices.end(), frameIndex);
+ if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
+ flushedIndices.erase(frameIndexIt);
+ work->input.buffers.clear();
+ work->worklets.clear();
+ workQueue.push_back(std::move(work));
+ }
+ }
+ ASSERT_EQ(flushedIndices.empty(), true);
+ flushedWork.clear();
+}
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 4b5e0a6..50e3ac5 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -25,10 +25,13 @@
#include <gtest/gtest.h>
#include <hidl/HidlSupport.h>
#include <chrono>
+#include <fstream>
+#define FLAG_NON_DISPLAY_FRAME (1 << 4)
#define MAX_RETRY 20
#define TIME_OUT 400ms
#define MAX_INPUT_BUFFERS 8
+#define FLUSH_INTERVAL 30
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;
@@ -39,6 +42,12 @@
static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+struct FrameInfo {
+ int bytesCount;
+ uint32_t flags;
+ int64_t timestamp;
+};
+
/*
* Handle Callback functions onWorkDone(), onTripped(),
* onError(), onDeath(), onFramesRendered()
@@ -123,4 +132,10 @@
int64_t getNowUs();
+int32_t populateInfoVector(std::string info, android::Vector<FrameInfo>* frameInfo,
+ bool timestampDevTest, std::list<uint64_t>* timestampUslist);
+
+void verifyFlushOutput(std::list<std::unique_ptr<C2Work>>& flushedWork,
+ std::list<std::unique_ptr<C2Work>>& workQueue,
+ std::list<uint64_t>& flushedIndices, std::mutex& queueLock);
#endif // MEDIA_C2_HIDL_TEST_COMMON_H
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
new file mode 100644
index 0000000..cb69709
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
new file mode 100644
index 0000000..2693071
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
new file mode 100644
index 0000000..5c802d9
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
new file mode 100644
index 0000000..073f8eb
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
new file mode 100644
index 0000000..83f11c0
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
new file mode 100644
index 0000000..3344881
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
new file mode 100644
index 0000000..738b1da
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5 b/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
new file mode 100644
index 0000000..a52faf2
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
index 760f4da..c7b0c12 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
@@ -26,6 +26,7 @@
"libbinder",
"libgui",
"libutils",
+ "libcrypto",
],
data: [":media_c2_v1_video_decode_res"],
test_config: "VtsHalMediaC2V1_0TargetVideoDecTest.xml",
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f216429..b520c17 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -21,7 +21,8 @@
#include <gtest/gtest.h>
#include <hidl/GtestPrinter.h>
#include <stdio.h>
-#include <fstream>
+
+#include <openssl/md5.h>
#include <C2AllocatorIon.h>
#include <C2Buffer.h>
@@ -39,15 +40,11 @@
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
-struct FrameInfo {
- int bytesCount;
- uint32_t flags;
- int64_t timestamp;
-};
-
static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
kDecodeTestParameters;
+static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+
// Resource directory
static std::string sResourceDir = "";
@@ -111,7 +108,12 @@
mEos = false;
mFramesReceived = 0;
mTimestampUs = 0u;
+ mWorkResult = C2_OK;
+ mReorderDepth = -1;
mTimestampDevTest = false;
+ mMd5Offset = 0;
+ mMd5Enable = false;
+ mRefMd5 = nullptr;
if (mCompName == unknown_comp) mDisableTest = true;
C2SecureModeTuning secureModeTuning{};
@@ -134,43 +136,130 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
+ /* Calculate the CKSUM for the data in inbuf */
+ void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
+ uint32_t u4_height, uint8_t* pu1_cksum_p) {
+ int32_t row;
+ MD5_CTX s_md5_context;
+ MD5_Init(&s_md5_context);
+ for (row = 0; row < u4_height; row++) {
+ MD5_Update(&s_md5_context, pu1_inbuf, u4_width);
+ pu1_inbuf += u4_stride;
+ }
+ MD5_Final(pu1_cksum_p, &s_md5_context);
+ }
+
+ void compareMd5Chksm(std::unique_ptr<C2Work>& work) {
+ uint8_t chksum[48];
+ uint8_t* au1_y_chksum = chksum;
+ uint8_t* au1_u_chksum = chksum + 16;
+ uint8_t* au1_v_chksum = chksum + 32;
+ const C2GraphicView output = work->worklets.front()
+ ->output.buffers[0]
+ ->data()
+ .graphicBlocks()
+ .front()
+ .map()
+ .get();
+ uint8_t* yPlane = const_cast<uint8_t*>(output.data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* uPlane = const_cast<uint8_t*>(output.data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* vPlane = const_cast<uint8_t*>(output.data()[C2PlanarLayout::PLANE_V]);
+ C2PlanarLayout layout = output.layout();
+
+ size_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ size_t uvStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ size_t colInc = layout.planes[C2PlanarLayout::PLANE_U].colInc;
+ size_t bitDepth = layout.planes[C2PlanarLayout::PLANE_Y].bitDepth;
+ uint32_t layoutType = layout.type;
+ size_t cropWidth = output.crop().width;
+ size_t cropHeight = output.crop().height;
+
+ if (bitDepth == 8 && layoutType == C2PlanarLayout::TYPE_YUV && colInc == 1) {
+ calc_md5_cksum(yPlane, yStride, cropWidth, cropHeight, au1_y_chksum);
+ calc_md5_cksum(uPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_u_chksum);
+ calc_md5_cksum(vPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_v_chksum);
+ } else if (bitDepth == 8 && layoutType == C2PlanarLayout::TYPE_YUV && colInc == 2) {
+ uint8_t* cbPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
+ uint8_t* crPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
+ ASSERT_NE(cbPlane, nullptr);
+ ASSERT_NE(crPlane, nullptr);
+ size_t count = 0;
+ for (size_t k = 0; k < (cropHeight / 2); k++) {
+ for (size_t l = 0; l < (cropWidth); l = l + 2) {
+ cbPlane[count] = uPlane[k * uvStride + l];
+ crPlane[count] = vPlane[k * uvStride + l];
+ count++;
+ }
+ }
+ calc_md5_cksum(yPlane, yStride, cropWidth, cropHeight, au1_y_chksum);
+ calc_md5_cksum(cbPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_u_chksum);
+ calc_md5_cksum(crPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_v_chksum);
+ free(cbPlane);
+ free(crPlane);
+ } else {
+ mMd5Enable = false;
+ ALOGV("Disabling MD5 chksm flag");
+ return;
+ }
+ if (memcmp(mRefMd5 + mMd5Offset, chksum, 48)) ASSERT_TRUE(false);
+ mMd5Offset += 48;
+ return;
+ }
+ bool configPixelFormat(uint32_t format);
+
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
for (std::unique_ptr<C2Work>& work : workItems) {
if (!work->worklets.empty()) {
// For decoder components current timestamp always exceeds
- // previous timestamp
+ // previous timestamp if output is in display order
typedef std::unique_lock<std::mutex> ULock;
+ mWorkResult |= work->result;
bool codecConfig = ((work->worklets.front()->output.flags &
C2FrameData::FLAG_CODEC_CONFIG) != 0);
if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
- EXPECT_GE((work->worklets.front()->output.ordinal.timestamp.peeku()),
- mTimestampUs);
- mTimestampUs = work->worklets.front()->output.ordinal.timestamp.peeku();
-
- ULock l(mQueueLock);
- if (mTimestampDevTest) {
- bool tsHit = false;
- std::list<uint64_t>::iterator it = mTimestampUslist.begin();
- while (it != mTimestampUslist.end()) {
- if (*it == mTimestampUs) {
- mTimestampUslist.erase(it);
- tsHit = true;
- break;
- }
- it++;
+ if (mReorderDepth < 0) {
+ C2PortReorderBufferDepthTuning::output reorderBufferDepth;
+ mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
+ nullptr);
+ mReorderDepth = reorderBufferDepth.value;
+ if (mReorderDepth > 0) {
+ // TODO: Add validation for reordered output
+ mTimestampDevTest = false;
}
- if (tsHit == false) {
- if (mTimestampUslist.empty() == false) {
- EXPECT_EQ(tsHit, true) << "TimeStamp not recognized";
- } else {
- std::cout << "[ INFO ] Received non-zero "
- "output / TimeStamp not recognized \n";
+ }
+ if (mTimestampDevTest) {
+ EXPECT_GE((work->worklets.front()->output.ordinal.timestamp.peeku()),
+ mTimestampUs);
+ mTimestampUs = work->worklets.front()->output.ordinal.timestamp.peeku();
+
+ ULock l(mQueueLock);
+ {
+ bool tsHit = false;
+ std::list<uint64_t>::iterator it = mTimestampUslist.begin();
+ while (it != mTimestampUslist.end()) {
+ if (*it == mTimestampUs) {
+ mTimestampUslist.erase(it);
+ tsHit = true;
+ break;
+ }
+ it++;
+ }
+ if (tsHit == false) {
+ if (mTimestampUslist.empty() == false) {
+ EXPECT_EQ(tsHit, true) << "TimeStamp not recognized";
+ } else {
+ std::cout << "[ INFO ] Received non-zero "
+ "output / TimeStamp not recognized \n";
+ }
}
}
}
+ if (mMd5Enable) {
+ compareMd5Chksm(work);
+ }
}
- bool mCsd;
+ bool mCsd = false;
workDone(mComponent, work, mFlushedIndices, mQueueLock, mQueueCondition, mWorkQueue,
mEos, mCsd, mFramesReceived);
(void)mCsd;
@@ -195,11 +284,17 @@
bool mEos;
bool mDisableTest;
+ bool mMd5Enable;
bool mTimestampDevTest;
uint64_t mTimestampUs;
+ uint64_t mMd5Offset;
+ char* mRefMd5;
std::list<uint64_t> mTimestampUslist;
std::list<uint64_t> mFlushedIndices;
standardComp mCompName;
+
+ int32_t mWorkResult;
+ int32_t mReorderDepth;
uint32_t mFramesReceived;
C2BlockPool::local_id_t mBlockPoolId;
std::shared_ptr<C2BlockPool> mLinearPool;
@@ -268,54 +363,73 @@
// number of elementary streams per component
#define STREAM_COUNT 3
-// LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
- size_t streamIndex = 1) {
+// LookUpTable of clips, metadata and chksum for component testing
+void GetURLChksmForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
+ char* chksum, size_t streamIndex = 1) {
struct CompToURL {
Codec2VideoDecHidlTest::standardComp comp;
const char mURL[STREAM_COUNT][512];
const char info[STREAM_COUNT][512];
+ const char chksum[STREAM_COUNT][512];
};
ASSERT_TRUE(streamIndex < STREAM_COUNT);
static const CompToURL kCompToURL[] = {
{Codec2VideoDecHidlTest::standardComp::avc,
{"bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_640x360_768kbps_30fps.h264", ""},
- {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""}},
+ {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""},
+ {"bbb_avc_176x144_300kbps_60fps_chksum.md5",
+ "bbb_avc_640x360_768kbps_30fps_chksum.md5", ""}},
{Codec2VideoDecHidlTest::standardComp::hevc,
{"bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", ""},
- {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""}},
+ {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""},
+ {"bbb_hevc_176x144_176kbps_60fps_chksum.md5",
+ "bbb_hevc_640x360_1600kbps_30fps_chksum.md5", ""}},
{Codec2VideoDecHidlTest::standardComp::mpeg2,
{"bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.m2v", ""},
- {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""}},
+ {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+ {"", "", ""}},
{Codec2VideoDecHidlTest::standardComp::h263,
{"", "bbb_h263_352x288_300kbps_12fps.h263", ""},
- {"", "bbb_h263_352x288_300kbps_12fps.info", ""}},
+ {"", "bbb_h263_352x288_300kbps_12fps.info", ""},
+ {"", "", ""}},
{Codec2VideoDecHidlTest::standardComp::mpeg4,
{"", "bbb_mpeg4_352x288_512kbps_30fps.m4v", ""},
- {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""}},
+ {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
+ {"", "", ""}},
{Codec2VideoDecHidlTest::standardComp::vp8,
{"bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", ""},
- {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""}},
+ {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""},
+ {"", "bbb_vp8_640x360_2mbps_30fps_chksm.md5", ""}},
{Codec2VideoDecHidlTest::standardComp::vp9,
{"bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
"bbb_vp9_704x480_280kbps_24fps_altref_2.vp9"},
{"bbb_vp9_176x144_285kbps_60fps.info", "bbb_vp9_640x360_1600kbps_30fps.info",
- "bbb_vp9_704x480_280kbps_24fps_altref_2.info"}},
+ "bbb_vp9_704x480_280kbps_24fps_altref_2.info"},
+ {"", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5", ""}},
{Codec2VideoDecHidlTest::standardComp::av1,
{"bbb_av1_640_360.av1", "bbb_av1_176_144.av1", ""},
- {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""}},
+ {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""},
+ {"bbb_av1_640_360_chksum.md5", "bbb_av1_176_144_chksm.md5", ""}},
};
for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
if (kCompToURL[i].comp == comp) {
strcat(mURL, kCompToURL[i].mURL[streamIndex]);
strcat(info, kCompToURL[i].info[streamIndex]);
+ strcat(chksum, kCompToURL[i].chksum[streamIndex]);
return;
}
}
}
+void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
+ size_t streamIndex = 1) {
+ char chksum[512];
+ strcpy(chksum, sResourceDir.c_str());
+ GetURLChksmForComponent(comp, mURL, info, chksum, streamIndex);
+}
+
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
std::mutex& queueLock, std::condition_variable& queueCondition,
std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -446,6 +560,19 @@
ASSERT_EQ(producer->setSidebandStream(nativeHandle), NO_ERROR);
}
+// Configure output pixel format
+bool Codec2VideoDecHidlTestBase::configPixelFormat(uint32_t format) {
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ C2StreamPixelFormatInfo::output pixelformat(0u, format);
+
+ std::vector<C2Param*> configParam{&pixelformat};
+ c2_status_t status = mComponent->config(configParam, C2_DONT_BLOCK, &failures);
+ if (status == C2_OK && failures.size() == 0u) {
+ return true;
+ }
+ return false;
+}
+
class Codec2VideoDecDecodeTest
: public Codec2VideoDecHidlTestBase,
public ::testing::WithParamInterface<
@@ -463,40 +590,61 @@
uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
bool signalEOS = !std::get<2>(GetParam()).compare("true");
- char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ mTimestampDevTest = true;
+
+ char mURL[512], info[512], chksum[512];
+ android::Vector<FrameInfo> Info;
+
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info, streamIndex);
+ strcpy(chksum, sResourceDir.c_str());
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- mTimestampDevTest = true;
+ GetURLChksmForComponent(mCompName, mURL, info, chksum, streamIndex);
+ if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
+ ALOGV("Skipping Test, Stream not available");
+ return;
+ }
+ mMd5Enable = true;
+ if (!strcmp(chksum, sResourceDir.c_str())) mMd5Enable = false;
+
+ uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
+ if (!configPixelFormat(format)) {
+ std::cout << "[ WARN ] Test Skipped PixelFormat not configured\n";
+ return;
+ }
+
mFlushedIndices.clear();
mTimestampUslist.clear();
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- bool codecConfig = flags ? ((1 << (flags - 1)) & C2FrameData::FLAG_CODEC_CONFIG) != 0 : 0;
- bool nonDisplayFrame = ((flags & FLAG_NON_DISPLAY_FRAME) != 0);
- if (mTimestampDevTest && !codecConfig && !nonDisplayFrame)
- mTimestampUslist.push_back(timestamp);
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
ASSERT_EQ(mComponent->start(), C2_OK);
// Reset total no of frames received
mFramesReceived = 0;
mTimestampUs = 0;
ALOGV("mURL : %s", mURL);
+ std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
+
+ size_t refChksmSize = 0;
+ std::ifstream refChksum;
+ if (mMd5Enable) {
+ ALOGV("chksum file name: %s", chksum);
+ refChksum.open(chksum, std::ifstream::binary | std::ifstream::ate);
+ ASSERT_EQ(refChksum.is_open(), true);
+ refChksmSize = refChksum.tellg();
+ refChksum.seekg(0, std::ifstream::beg);
+
+ ALOGV("chksum Size %zu ", refChksmSize);
+ mRefMd5 = (char*)malloc(refChksmSize);
+ ASSERT_NE(mRefMd5, nullptr);
+ refChksum.read(mRefMd5, refChksmSize);
+ ASSERT_EQ(refChksum.gcount(), refChksmSize);
+ refChksum.close();
+ }
+
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
(int)Info.size(), signalEOS));
@@ -504,7 +652,7 @@
// If EOS is not sent, sending empty input with EOS flag
size_t infoSize = Info.size();
if (!signalEOS) {
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1);
ASSERT_NO_FATAL_FAILURE(testInputBuffer(mComponent, mQueueLock, mWorkQueue,
C2FrameData::FLAG_END_OF_STREAM, false));
infoSize += 1;
@@ -513,7 +661,7 @@
// consumed
if (!mEos) {
ALOGV("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
}
eleStream.close();
@@ -523,8 +671,17 @@
ASSERT_TRUE(false);
}
+ if (mRefMd5 != nullptr) free(mRefMd5);
+ if (mMd5Enable && refChksmSize != mMd5Offset) {
+ ALOGE("refChksum size and generated chksum size mismatch refChksum size %zu generated "
+ "chksum size %" PRId64 "",
+ refChksmSize, mMd5Offset);
+ ASSERT_TRUE(false);
+ }
+
if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
// Adaptive Test
@@ -577,7 +734,7 @@
}
if (timestampMax < timestamp) timestampMax = timestamp;
}
- timestampOffset = timestampMax;
+ timestampOffset = timestampMax + 33333;
eleInfo.close();
// Reset Total frames before second decode loop
@@ -622,7 +779,7 @@
// blocking call to ensures application to Wait till all the inputs are
// consumed
ALOGV("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
if (mFramesReceived != ((Info.size()) + 1)) {
ALOGE("Input buffer count and Output buffer count mismatch");
@@ -631,6 +788,7 @@
}
if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
// thumbnail test
@@ -639,26 +797,16 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
GetURLForComponent(mCompName, mURL, info);
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true);
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ uint32_t flags = 0;
for (size_t i = 0; i < MAX_ITERATIONS; i++) {
ASSERT_EQ(mComponent->start(), C2_OK);
@@ -671,18 +819,21 @@
if (Info[j].flags) flags = 1u << (Info[j].flags - 1);
} while (!(flags & SYNC_FRAME));
+
+ std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
j + 1));
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
eleStream.close();
EXPECT_GE(mFramesReceived, 1U);
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
}
ASSERT_EQ(mComponent->release(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2VideoDecHidlTest, EOSTest) {
@@ -723,72 +874,59 @@
ASSERT_EQ(mEos, true);
ASSERT_EQ(mWorkQueue.size(), (size_t)MAX_INPUT_BUFFERS);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mWorkResult, C2_OK);
}
TEST_P(Codec2VideoDecHidlTest, FlushTest) {
description("Tests Flush calls");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- typedef std::unique_lock<std::mutex> ULock;
+
ASSERT_EQ(mComponent->start(), C2_OK);
+
char mURL[512], info[512];
- std::ifstream eleStream, eleInfo;
+ android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
GetURLForComponent(mCompName, mURL, info);
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true);
- android::Vector<FrameInfo> Info;
- int bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
mFlushedIndices.clear();
- while (1) {
- if (!(eleInfo >> bytesCount)) break;
- eleInfo >> flags;
- eleInfo >> timestamp;
- Info.push_back({bytesCount, flags, timestamp});
- }
- eleInfo.close();
+
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true);
- // Decode 128 frames and flush. here 128 is chosen to ensure there is a key
- // frame after this so that the below section can be covered for all
- // components
- uint32_t numFramesFlushed = 128;
- ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
- mFlushedIndices, mLinearPool, eleStream, &Info, 0,
- numFramesFlushed, false));
+
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- auto frameIndexIt = std::find(mFlushedIndices.begin(), mFlushedIndices.end(),
- work->input.ordinal.frameIndex.peeku());
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true);
+ // Decode 30 frames and flush. Here 30 is chosen to ensure there is a key
+ // frame after this so that the below section can be covered for all
+ // components
+ uint32_t numFramesFlushed = FLUSH_INTERVAL;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, &Info, 0,
+ numFramesFlushed, false));
+ // flush
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
// Seek to next key frame and start decoding till the end
- mFlushedIndices.clear();
int index = numFramesFlushed;
bool keyFrame = false;
- flags = 0;
+ uint32_t flags = 0;
while (index < (int)Info.size()) {
if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
if ((flags & SYNC_FRAME) == SYNC_FRAME) {
@@ -807,25 +945,13 @@
eleStream.close();
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- ASSERT_EQ(mFlushedIndices.empty(), true);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ // TODO: (b/154671521)
+ // Add assert for mWorkResult
ASSERT_EQ(mComponent->stop(), C2_OK);
}
@@ -880,7 +1006,7 @@
// consumed
if (!mEos) {
ALOGV("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
}
eleStream.close();
@@ -891,6 +1017,110 @@
}
}
+class Codec2VideoDecCsdInputTests
+ : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+ void getParams() {
+ mInstanceName = std::get<0>(GetParam());
+ mComponentName = std::get<1>(GetParam());
+ }
+};
+
+// Test the codecs for the following
+// start - csd - data… - (with/without)flush - data… - flush - data…
+TEST_P(Codec2VideoDecCsdInputTests, CSDFlushTest) {
+ description("Tests codecs for flush at different states");
+ if (mDisableTest) GTEST_SKIP() << "Test is disabled";
+
+ char mURL[512], info[512];
+
+ android::Vector<FrameInfo> Info;
+
+ strcpy(mURL, sResourceDir.c_str());
+ strcpy(info, sResourceDir.c_str());
+ GetURLForComponent(mCompName, mURL, info);
+
+ int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
+
+ ASSERT_EQ(mComponent->start(), C2_OK);
+
+ ALOGV("mURL : %s", mURL);
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true);
+ bool flushedDecoder = false;
+ bool signalEOS = false;
+ bool keyFrame = false;
+ bool flushCsd = !std::get<2>(GetParam()).compare("true");
+
+ ALOGV("sending %d csd data ", numCsds);
+ int framesToDecode = numCsds;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
+ mFlushedIndices, mLinearPool, eleStream, &Info, 0,
+ framesToDecode, false));
+ c2_status_t err = C2_OK;
+ std::list<std::unique_ptr<C2Work>> flushedWork;
+ if (numCsds && flushCsd) {
+ // We wait for all the CSD buffers to get consumed.
+ // Once we have received all CSD work back, we call flush
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ flushedDecoder = true;
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ }
+
+ int offset = framesToDecode;
+ uint32_t flags = 0;
+ while (1) {
+ while (offset < (int)Info.size()) {
+ flags = 0;
+ if (Info[offset].flags) flags = 1u << (Info[offset].flags - 1);
+ if (flags & SYNC_FRAME) {
+ keyFrame = true;
+ break;
+ }
+ eleStream.ignore(Info[offset].bytesCount);
+ offset++;
+ }
+ if (keyFrame) {
+ framesToDecode = c2_min(FLUSH_INTERVAL, (int)Info.size() - offset);
+ if (framesToDecode < FLUSH_INTERVAL) signalEOS = true;
+ ASSERT_NO_FATAL_FAILURE(decodeNFrames(
+ mComponent, mQueueLock, mQueueCondition, mWorkQueue, mFlushedIndices,
+ mLinearPool, eleStream, &Info, offset, framesToDecode, signalEOS));
+ offset += framesToDecode;
+ }
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ keyFrame = false;
+ // blocking call to ensures application to Wait till remaining
+ // 'non-flushed' inputs are consumed
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ if (signalEOS || offset >= (int)Info.size()) {
+ break;
+ }
+ }
+ if (!signalEOS) {
+ ASSERT_NO_FATAL_FAILURE(testInputBuffer(mComponent, mQueueLock, mWorkQueue,
+ C2FrameData::FLAG_END_OF_STREAM, false));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
+ }
+ eleStream.close();
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ ASSERT_EQ(mComponent->stop(), C2_OK);
+}
+
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
android::hardware::PrintInstanceTupleNameToString<>);
@@ -899,6 +1129,10 @@
testing::ValuesIn(kDecodeTestParameters),
android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
+ testing::ValuesIn(kCsdFlushTestParameters),
+ android::hardware::PrintInstanceTupleNameToString<>);
+
} // anonymous namespace
// TODO : Video specific configuration Test
@@ -917,6 +1151,11 @@
std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
kDecodeTestParameters.push_back(
std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+
+ kCsdFlushTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ kCsdFlushTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
}
// Set the resource directory based on command line args.
@@ -930,4 +1169,4 @@
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
-}
\ No newline at end of file
+}
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
index 63e7a69..a1049df 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
@@ -51,6 +51,14 @@
<option name="push-file" key="bbb_av1_176_144.info" value="/data/local/tmp/media/bbb_av1_176_144.info" />
<option name="push-file" key="bbb_vp9_704x480_280kbps_24fps_altref_2.vp9" value="/data/local/tmp/media/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9" />
<option name="push-file" key="bbb_vp9_704x480_280kbps_24fps_altref_2.info" value="/data/local/tmp/media/bbb_vp9_704x480_280kbps_24fps_altref_2.info" />
+ <option name="push-file" key="bbb_avc_176x144_300kbps_60fps_chksum.md5" value="/data/local/tmp/media/bbb_avc_176x144_300kbps_60fps_chksum.md5" />
+ <option name="push-file" key="bbb_avc_640x360_768kbps_30fps_chksum.md5" value="/data/local/tmp/media/bbb_avc_640x360_768kbps_30fps_chksum.md5" />
+ <option name="push-file" key="bbb_hevc_176x144_176kbps_60fps_chksum.md5" value="/data/local/tmp/media/bbb_hevc_176x144_176kbps_60fps_chksum.md5" />
+ <option name="push-file" key="bbb_hevc_640x360_1600kbps_30fps_chksum.md5" value="/data/local/tmp/media/bbb_hevc_640x360_1600kbps_30fps_chksum.md5" />
+ <option name="push-file" key="bbb_vp8_640x360_2mbps_30fps_chksm.md5" value="/data/local/tmp/media/bbb_vp8_640x360_2mbps_30fps_chksm.md5" />
+ <option name="push-file" key="bbb_vp9_640x360_1600kbps_30fps_chksm.md5" value="/data/local/tmp/media/bbb_vp9_640x360_1600kbps_30fps_chksm.md5" />
+ <option name="push-file" key="bbb_av1_640_360_chksum.md5" value="/data/local/tmp/media/bbb_av1_640_360_chksum.md5" />
+ <option name="push-file" key="bbb_av1_176_144_chksm.md5" value="/data/local/tmp/media/bbb_av1_176_144_chksm.md5" />
</target_preparer>
<test class="com.android.tradefed.testtype.GTest" >
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 823e11b..5bcea5b 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -107,6 +107,13 @@
mOutputSize = 0u;
mTimestampDevTest = false;
if (mCompName == unknown_comp) mDisableTest = true;
+
+ C2SecureModeTuning secureModeTuning{};
+ mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
+ if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+ mDisableTest = true;
+ }
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -492,7 +499,7 @@
// If EOS is not sent, sending empty input with EOS flag
inputFrames += ENC_NUM_FRAMES;
if (!signalEOS) {
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue, 1);
ASSERT_NO_FATAL_FAILURE(testInputBuffer(mComponent, mQueueLock, mWorkQueue,
C2FrameData::FLAG_END_OF_STREAM, false));
inputFrames += 1;
@@ -501,7 +508,7 @@
// blocking call to ensures application to Wait till all the inputs are
// consumed
ALOGD("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
eleStream.close();
if (mFramesReceived != inputFrames) {
@@ -510,16 +517,17 @@
ASSERT_TRUE(false);
}
- if (!mCsd && (mCompName != vp8 && mCompName != vp9)) {
- ASSERT_TRUE(false) << "CSD Buffer not received";
- }
-
- if (mCsd && (mCompName == vp8 || mCompName == vp9)) {
- ASSERT_TRUE(false) << "CSD Buffer not expected";
+ if (mCompName == vp8 || mCompName == h263) {
+ ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
+ } else if (mCompName != vp9) {
+ ASSERT_TRUE(mCsd) << "CSD Buffer not received";
}
if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+
+ // TODO: (b/155534991)
+ // Add assert for mFailedWorkReceived
}
TEST_P(Codec2VideoEncHidlTest, EOSTest) {
@@ -560,13 +568,13 @@
}
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
+ ASSERT_EQ(mFailedWorkReceived, 0);
}
TEST_P(Codec2VideoEncHidlTest, FlushTest) {
description("Test Request for flush");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- typedef std::unique_lock<std::mutex> ULock;
char mURL[512];
int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
@@ -587,9 +595,17 @@
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ALOGV("mURL : %s", mURL);
+ // flush
+ std::list<std::unique_ptr<C2Work>> flushedWork;
+ c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ ASSERT_EQ(err, C2_OK);
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream, mDisableTest, 0,
- numFramesFlushed, nWidth, nHeight));
+ numFramesFlushed, nWidth, nHeight, false, false));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when resolution is not proper but config succeeded
// In this cases, we skip encoding the input stream
@@ -599,29 +615,14 @@
return;
}
- std::list<std::unique_ptr<C2Work>> flushedWork;
- c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
+ // flush
+ err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- uint64_t frameIndex;
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- mFlushedIndices.clear();
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream, mDisableTest,
numFramesFlushed, numFrames - numFramesFlushed, nWidth,
@@ -638,25 +639,13 @@
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
- (size_t)MAX_INPUT_BUFFERS - flushedWork.size()));
- {
- // Update mFlushedIndices based on the index received from flush()
- ULock l(mQueueLock);
- for (std::unique_ptr<C2Work>& work : flushedWork) {
- ASSERT_NE(work, nullptr);
- frameIndex = work->input.ordinal.frameIndex.peeku();
- std::list<uint64_t>::iterator frameIndexIt =
- std::find(mFlushedIndices.begin(), mFlushedIndices.end(), frameIndex);
- if (!mFlushedIndices.empty() && (frameIndexIt != mFlushedIndices.end())) {
- mFlushedIndices.erase(frameIndexIt);
- work->input.buffers.clear();
- work->worklets.clear();
- mWorkQueue.push_back(std::move(work));
- }
- }
- }
- ASSERT_EQ(mFlushedIndices.empty(), true);
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
+ (size_t)MAX_INPUT_BUFFERS - flushedWork.size());
+ ASSERT_NO_FATAL_FAILURE(
+ verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
+ ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
+ // TODO: (b/154671521)
+ // Add assert for mFailedWorkReceived
ASSERT_EQ(mComponent->stop(), C2_OK);
}
@@ -691,7 +680,7 @@
// blocking call to ensures application to Wait till all the inputs are
// consumed
ALOGD("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
if (mFramesReceived != 3) {
std::cout << "[ WARN ] Component didn't receive all buffers back \n";
@@ -746,7 +735,7 @@
}
ALOGD("Waiting for input consumption");
- ASSERT_NO_FATAL_FAILURE(waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue));
+ waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
index 9c1a5cb..d3a693b 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
@@ -22,7 +22,6 @@
#define ENC_DEFAULT_FRAME_WIDTH 352
#define ENC_DEFAULT_FRAME_HEIGHT 288
#define MAX_ITERATIONS 128
-#define FLAG_NON_DISPLAY_FRAME (1 << 4)
#define ALIGN(_sz, _align) ((_sz + (_align - 1)) & ~(_align - 1))
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 8fddf98..ab8635b 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -44,6 +44,12 @@
"libstagefright_bufferpool@2.0.1",
"libui",
],
+
+ // Device does not boot when global ThinLTO is enabled for this library.
+ // http://b/170595429
+ lto: {
+ never: true,
+ },
}
@@ -52,6 +58,7 @@
cc_library {
name: "libcodec2_hidl@1.1",
vendor_available: true,
+ min_sdk_version: "29",
defaults: ["hidl_defaults"],
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 7e4352d..4650672 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -843,6 +843,11 @@
return;
}
});
+ if (!transStatus.isOk()) {
+ LOG(DEBUG) << "SimpleParamReflector -- transaction failed: "
+ << transStatus.description();
+ descriptor.reset();
+ }
return descriptor;
}
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index a16b106..3780a5a 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,6 +52,9 @@
// directly in the main device manifest.xml file or via vintf_fragments.
// (Remove the line below if the entry is already in the main manifest.)
vintf_fragments: ["manifest_media_c2_V1_1_default.xml"],
+
+ // Remove this line to enable this module.
+ enabled: false,
}
// seccomp policy file.
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 81bffeb..3ddb039 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -122,6 +122,18 @@
})
.withSetter(SetIonUsage)
.build());
+
+ addParameter(
+ DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+ .withDefault(new C2StoreDmaBufUsageInfo())
+ .withFields({
+ C2F(mDmaBufUsageInfo, usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+ C2F(mDmaBufUsageInfo, capacity).inRange(0, UINT32_MAX, 1024),
+ C2F(mDmaBufUsageInfo, heapName).any(),
+ C2F(mDmaBufUsageInfo, allocFlags).flags({}),
+ })
+ .withSetter(SetDmaBufUsage)
+ .build());
}
virtual ~Interface() = default;
@@ -135,7 +147,16 @@
return C2R::Ok();
}
+ static C2R SetDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+ // Vendor's TODO: put appropriate mapping logic
+ strncpy(me.set().m.heapName, "system", me.v.flexCount());
+ me.set().m.allocFlags = 0;
+ return C2R::Ok();
+ }
+
+
std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+ std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
};
std::shared_ptr<C2ReflectorHelper> mReflectorHelper;
Interface mInterface;
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index c7588e9..dd1f485 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -25,6 +25,7 @@
#include <C2AllocatorGralloc.h>
#include <C2BlockInternal.h>
#include <C2Component.h>
+#include <C2Config.h>
#include <C2PlatformSupport.h>
#include <OMX_Component.h>
@@ -44,6 +45,8 @@
namespace {
+constexpr OMX_U32 kPortIndexInput = 0;
+
class Buffer2D : public C2Buffer {
public:
explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
@@ -200,11 +203,27 @@
return BAD_VALUE;
}
OMX_PARAM_PORTDEFINITIONTYPE *pDef = (OMX_PARAM_PORTDEFINITIONTYPE *)params;
- // TODO: read these from intf()
+ if (pDef->nPortIndex != kPortIndexInput) {
+ break;
+ }
+
pDef->nBufferCountActual = 16;
+
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ C2PortActualDelayTuning::input inputDelay(0);
+ C2ActualPipelineDelayTuning pipelineDelay(0);
+ c2_status_t c2err = comp->query(
+ {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+ if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+ pDef->nBufferCountActual = 4;
+ pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
+ pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+ }
+
pDef->eDomain = OMX_PortDomainVideo;
pDef->format.video.nFrameWidth = mWidth;
pDef->format.video.nFrameHeight = mHeight;
+ pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
err = OK;
break;
}
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 4b4341d..a4d2110 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -246,8 +246,19 @@
if (source == nullptr) {
return NO_INIT;
}
- constexpr size_t kNumSlots = 16;
- for (size_t i = 0; i < kNumSlots; ++i) {
+
+ size_t numSlots = 4;
+ constexpr OMX_U32 kPortIndexInput = 0;
+
+ OMX_PARAM_PORTDEFINITIONTYPE param;
+ param.nPortIndex = kPortIndexInput;
+ status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+ ¶m, sizeof(param));
+ if (err == OK) {
+ numSlots = param.nBufferCountActual;
+ }
+
+ for (size_t i = 0; i < numSlots; ++i) {
source->onInputBufferAdded(i);
}
@@ -1332,8 +1343,6 @@
mCallback->onError(err2, ACTION_CODE_FATAL);
return;
}
- // We're not starting after flush.
- (void)mSentConfigAfterResume.test_and_set();
err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
if (err2 != OK) {
mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -1581,7 +1590,6 @@
return;
}
- mSentConfigAfterResume.clear();
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
@@ -1798,7 +1806,7 @@
// handle configuration changes in work done
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
- bool changed = !mSentConfigAfterResume.test_and_set();
+ bool changed = false;
Config::Watcher<C2StreamInitDataInfo::output> initData =
config->watch<C2StreamInitDataInfo::output>();
if (!work->worklets.empty()
@@ -1869,7 +1877,7 @@
config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
}
mChannel->onWorkDone(
- std::move(work), changed ? config->mOutputFormat : nullptr,
+ std::move(work), changed ? config->mOutputFormat->dup() : nullptr,
initData.hasChanged() ? initData.update().get() : nullptr);
break;
}
@@ -1959,11 +1967,98 @@
inputSurface->getHalInterface()));
}
-static void MaybeLogUnrecognizedName(const char *func, const std::string &name) {
- thread_local std::set<std::string> sLogged{};
- if (sLogged.insert(name).second) {
- ALOGW("%s: Unrecognized interface name: %s", func, name.c_str());
+class IntfCache {
+public:
+ IntfCache() = default;
+
+ status_t init(const std::string &name) {
+ std::shared_ptr<Codec2Client::Interface> intf{
+ Codec2Client::CreateInterfaceByName(name.c_str())};
+ if (!intf) {
+ ALOGW("IntfCache [%s]: Unrecognized interface name", name.c_str());
+ mInitStatus = NO_INIT;
+ return NO_INIT;
+ }
+ const static C2StreamUsageTuning::input sUsage{0u /* stream id */};
+ mFields.push_back(C2FieldSupportedValuesQuery::Possible(
+ C2ParamField{&sUsage, &sUsage.value}));
+ c2_status_t err = intf->querySupportedValues(mFields, C2_MAY_BLOCK);
+ if (err != C2_OK) {
+ ALOGW("IntfCache [%s]: failed to query usage supported value (err=%d)",
+ name.c_str(), err);
+ mFields[0].status = err;
+ }
+ std::vector<std::unique_ptr<C2Param>> params;
+ err = intf->query(
+ {&mApiFeatures},
+ {C2PortAllocatorsTuning::input::PARAM_TYPE},
+ C2_MAY_BLOCK,
+ ¶ms);
+ if (err != C2_OK && err != C2_BAD_INDEX) {
+ ALOGW("IntfCache [%s]: failed to query api features (err=%d)",
+ name.c_str(), err);
+ }
+ while (!params.empty()) {
+ C2Param *param = params.back().release();
+ params.pop_back();
+ if (!param) {
+ continue;
+ }
+ if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
+ mInputAllocators.reset(
+ C2PortAllocatorsTuning::input::From(params[0].get()));
+ }
+ }
+ mInitStatus = OK;
+ return OK;
}
+
+ status_t initCheck() const { return mInitStatus; }
+
+ const C2FieldSupportedValuesQuery &getUsageSupportedValues() const {
+ CHECK_EQ(1u, mFields.size());
+ return mFields[0];
+ }
+
+ const C2ApiFeaturesSetting &getApiFeatures() const {
+ return mApiFeatures;
+ }
+
+ const C2PortAllocatorsTuning::input &getInputAllocators() const {
+ static std::unique_ptr<C2PortAllocatorsTuning::input> sInvalidated = []{
+ std::unique_ptr<C2PortAllocatorsTuning::input> param =
+ C2PortAllocatorsTuning::input::AllocUnique(0);
+ param->invalidate();
+ return param;
+ }();
+ return mInputAllocators ? *mInputAllocators : *sInvalidated;
+ }
+
+private:
+ status_t mInitStatus{NO_INIT};
+
+ std::vector<C2FieldSupportedValuesQuery> mFields;
+ C2ApiFeaturesSetting mApiFeatures;
+ std::unique_ptr<C2PortAllocatorsTuning::input> mInputAllocators;
+};
+
+static const IntfCache &GetIntfCache(const std::string &name) {
+ static IntfCache sNullIntfCache;
+ static std::mutex sMutex;
+ static std::map<std::string, IntfCache> sCache;
+ std::unique_lock<std::mutex> lock{sMutex};
+ auto it = sCache.find(name);
+ if (it == sCache.end()) {
+ lock.unlock();
+ IntfCache intfCache;
+ status_t err = intfCache.init(name);
+ if (err != OK) {
+ return sNullIntfCache;
+ }
+ lock.lock();
+ it = sCache.insert({name, std::move(intfCache)}).first;
+ }
+ return it->second;
}
static status_t GetCommonAllocatorIds(
@@ -1981,24 +2076,16 @@
}
bool firstIteration = true;
for (const std::string &name : names) {
- std::shared_ptr<Codec2Client::Interface> intf{
- Codec2Client::CreateInterfaceByName(name.c_str())};
- if (!intf) {
- MaybeLogUnrecognizedName(__FUNCTION__, name);
+ const IntfCache &intfCache = GetIntfCache(name);
+ if (intfCache.initCheck() != OK) {
continue;
}
- std::vector<std::unique_ptr<C2Param>> params;
- c2_status_t err = intf->query(
- {}, {C2PortAllocatorsTuning::input::PARAM_TYPE}, C2_MAY_BLOCK, ¶ms);
+ const C2PortAllocatorsTuning::input &allocators = intfCache.getInputAllocators();
if (firstIteration) {
firstIteration = false;
- if (err == C2_OK && params.size() == 1u) {
- C2PortAllocatorsTuning::input *allocators =
- C2PortAllocatorsTuning::input::From(params[0].get());
- if (allocators && allocators->flexCount() > 0) {
- ids->insert(allocators->m.values,
- allocators->m.values + allocators->flexCount());
- }
+ if (allocators && allocators.flexCount() > 0) {
+ ids->insert(allocators.m.values,
+ allocators.m.values + allocators.flexCount());
}
if (ids->empty()) {
// The component does not advertise allocators. Use default.
@@ -2007,24 +2094,20 @@
continue;
}
bool filtered = false;
- if (err == C2_OK && params.size() == 1u) {
- C2PortAllocatorsTuning::input *allocators =
- C2PortAllocatorsTuning::input::From(params[0].get());
- if (allocators && allocators->flexCount() > 0) {
- filtered = true;
- for (auto it = ids->begin(); it != ids->end(); ) {
- bool found = false;
- for (size_t j = 0; j < allocators->flexCount(); ++j) {
- if (allocators->m.values[j] == *it) {
- found = true;
- break;
- }
+ if (allocators && allocators.flexCount() > 0) {
+ filtered = true;
+ for (auto it = ids->begin(); it != ids->end(); ) {
+ bool found = false;
+ for (size_t j = 0; j < allocators.flexCount(); ++j) {
+ if (allocators.m.values[j] == *it) {
+ found = true;
+ break;
}
- if (found) {
- ++it;
- } else {
- it = ids->erase(it);
- }
+ }
+ if (found) {
+ ++it;
+ } else {
+ it = ids->erase(it);
}
}
}
@@ -2056,23 +2139,16 @@
*minUsage = 0;
*maxUsage = ~0ull;
for (const std::string &name : names) {
- std::shared_ptr<Codec2Client::Interface> intf{
- Codec2Client::CreateInterfaceByName(name.c_str())};
- if (!intf) {
- MaybeLogUnrecognizedName(__FUNCTION__, name);
+ const IntfCache &intfCache = GetIntfCache(name);
+ if (intfCache.initCheck() != OK) {
continue;
}
- std::vector<C2FieldSupportedValuesQuery> fields;
- fields.push_back(C2FieldSupportedValuesQuery::Possible(
- C2ParamField{&sUsage, &sUsage.value}));
- c2_status_t err = intf->querySupportedValues(fields, C2_MAY_BLOCK);
- if (err != C2_OK) {
+ const C2FieldSupportedValuesQuery &usageSupportedValues =
+ intfCache.getUsageSupportedValues();
+ if (usageSupportedValues.status != C2_OK) {
continue;
}
- if (fields[0].status != C2_OK) {
- continue;
- }
- const C2FieldSupportedValues &supported = fields[0].values;
+ const C2FieldSupportedValues &supported = usageSupportedValues.values;
if (supported.type != C2FieldSupportedValues::FLAGS) {
continue;
}
@@ -2093,15 +2169,28 @@
// static
status_t CCodec::CanFetchLinearBlock(
const std::vector<std::string> &names, const C2MemoryUsage &usage, bool *isCompatible) {
- uint64_t minUsage = usage.expected;
- uint64_t maxUsage = ~0ull;
+ for (const std::string &name : names) {
+ const IntfCache &intfCache = GetIntfCache(name);
+ if (intfCache.initCheck() != OK) {
+ continue;
+ }
+ const C2ApiFeaturesSetting &features = intfCache.getApiFeatures();
+ if (features && !(features.value & API_SAME_INPUT_BUFFER)) {
+ *isCompatible = false;
+ return OK;
+ }
+ }
std::set<C2Allocator::id_t> allocators;
GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
if (allocators.empty()) {
*isCompatible = false;
return OK;
}
+
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ minUsage |= usage.expected;
*isCompatible = ((maxUsage & minUsage) == minUsage);
return OK;
}
@@ -2128,14 +2217,16 @@
// static
std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
- uint64_t minUsage = usage.expected;
- uint64_t maxUsage = ~0ull;
std::set<C2Allocator::id_t> allocators;
GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
if (allocators.empty()) {
allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
}
+
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ minUsage |= usage.expected;
if ((maxUsage & minUsage) != minUsage) {
allocators.clear();
allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 49c04a0..06464b5 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "CCodecBufferChannel"
#include <utils/Log.h>
+#include <algorithm>
+#include <list>
#include <numeric>
#include <C2AllocatorGralloc.h>
@@ -128,97 +130,6 @@
count->value = -1;
}
-// CCodecBufferChannel::ReorderStash
-
-CCodecBufferChannel::ReorderStash::ReorderStash() {
- clear();
-}
-
-void CCodecBufferChannel::ReorderStash::clear() {
- mPending.clear();
- mStash.clear();
- mDepth = 0;
- mKey = C2Config::ORDINAL;
-}
-
-void CCodecBufferChannel::ReorderStash::flush() {
- mPending.clear();
- mStash.clear();
-}
-
-void CCodecBufferChannel::ReorderStash::setDepth(uint32_t depth) {
- mPending.splice(mPending.end(), mStash);
- mDepth = depth;
-}
-
-void CCodecBufferChannel::ReorderStash::setKey(C2Config::ordinal_key_t key) {
- mPending.splice(mPending.end(), mStash);
- mKey = key;
-}
-
-bool CCodecBufferChannel::ReorderStash::pop(Entry *entry) {
- if (mPending.empty()) {
- return false;
- }
- entry->buffer = mPending.front().buffer;
- entry->timestamp = mPending.front().timestamp;
- entry->flags = mPending.front().flags;
- entry->ordinal = mPending.front().ordinal;
- mPending.pop_front();
- return true;
-}
-
-void CCodecBufferChannel::ReorderStash::emplace(
- const std::shared_ptr<C2Buffer> &buffer,
- int64_t timestamp,
- int32_t flags,
- const C2WorkOrdinalStruct &ordinal) {
- bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
- if (!buffer && eos) {
- // TRICKY: we may be violating ordering of the stash here. Because we
- // don't expect any more emplace() calls after this, the ordering should
- // not matter.
- mStash.emplace_back(buffer, timestamp, flags, ordinal);
- } else {
- flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
- auto it = mStash.begin();
- for (; it != mStash.end(); ++it) {
- if (less(ordinal, it->ordinal)) {
- break;
- }
- }
- mStash.emplace(it, buffer, timestamp, flags, ordinal);
- if (eos) {
- mStash.back().flags = mStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
- }
- }
- while (!mStash.empty() && mStash.size() > mDepth) {
- mPending.push_back(mStash.front());
- mStash.pop_front();
- }
-}
-
-void CCodecBufferChannel::ReorderStash::defer(
- const CCodecBufferChannel::ReorderStash::Entry &entry) {
- mPending.push_front(entry);
-}
-
-bool CCodecBufferChannel::ReorderStash::hasPending() const {
- return !mPending.empty();
-}
-
-bool CCodecBufferChannel::ReorderStash::less(
- const C2WorkOrdinalStruct &o1, const C2WorkOrdinalStruct &o2) {
- switch (mKey) {
- case C2Config::ORDINAL: return o1.frameIndex < o2.frameIndex;
- case C2Config::TIMESTAMP: return o1.timestamp < o2.timestamp;
- case C2Config::CUSTOM: return o1.customOrdinal < o2.customOrdinal;
- default:
- ALOGD("Unrecognized key; default to timestamp");
- return o1.frameIndex < o2.frameIndex;
- }
-}
-
// Input
CCodecBufferChannel::Input::Input() : extraBuffers("extra") {}
@@ -707,24 +618,26 @@
}
void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
- if (mInputMetEos ||
- mReorderStash.lock()->hasPending() ||
- mPipelineWatcher.lock()->pipelineFull()) {
+ if (mInputMetEos) {
return;
- } else {
+ }
+ {
Mutexed<Output>::Locked output(mOutput);
- if (!output->buffers || output->buffers->numClientBuffers() >= output->numSlots) {
+ if (!output->buffers ||
+ output->buffers->hasPending() ||
+ output->buffers->numActiveSlots() >= output->numSlots) {
return;
}
}
- size_t numInputSlots = mInput.lock()->numSlots;
- for (size_t i = 0; i < numInputSlots; ++i) {
+ size_t numActiveSlots = 0;
+ while (!mPipelineWatcher.lock()->pipelineFull()) {
sp<MediaCodecBuffer> inBuffer;
size_t index;
{
Mutexed<Input>::Locked input(mInput);
- if (input->buffers->numClientBuffers() >= input->numSlots) {
- return;
+ numActiveSlots = input->buffers->numActiveSlots();
+ if (numActiveSlots >= input->numSlots) {
+ break;
}
if (!input->buffers->requestNewBuffer(&index, &inBuffer)) {
ALOGV("[%s] no new buffer available", mName);
@@ -734,6 +647,7 @@
ALOGV("[%s] new input index = %zu [%p]", mName, index, inBuffer.get());
mCallback->onInputBufferAvailable(index, inBuffer);
}
+ ALOGV("[%s] # active slots after feedInputBufferIfAvailable = %zu", mName, numActiveSlots);
}
status_t CCodecBufferChannel::renderOutputBuffer(
@@ -820,6 +734,9 @@
std::shared_ptr<const C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
std::static_pointer_cast<const C2StreamHdr10PlusInfo::output>(
c2Buffer->getInfo(C2StreamHdr10PlusInfo::output::PARAM_TYPE));
+ if (hdr10PlusInfo && hdr10PlusInfo->flexCount() == 0) {
+ hdr10PlusInfo.reset();
+ }
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
@@ -871,7 +788,7 @@
.maxLuminance = hdrStaticInfo->mastering.maxLuminance,
.minLuminance = hdrStaticInfo->mastering.minLuminance,
};
- hdr.validTypes = HdrMetadata::SMPTE2086;
+ hdr.validTypes |= HdrMetadata::SMPTE2086;
hdr.smpte2086 = smpte2086_meta;
}
// If the content light level fields are 0, do not use them, it
@@ -899,6 +816,9 @@
status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
if (result != OK) {
ALOGI("[%s] queueBuffer failed: %d", mName, result);
+ if (result == NO_INIT) {
+ mCCodecCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ }
return result;
}
ALOGV("[%s] queue buffer successful", mName);
@@ -989,17 +909,6 @@
return UNKNOWN_ERROR;
}
- {
- Mutexed<ReorderStash>::Locked reorder(mReorderStash);
- reorder->clear();
- if (reorderDepth) {
- reorder->setDepth(reorderDepth.value);
- }
- if (reorderKey) {
- reorder->setKey(reorderKey.value);
- }
- }
-
uint32_t inputDelayValue = inputDelay ? inputDelay.value : 0;
uint32_t pipelineDelayValue = pipelineDelay ? pipelineDelay.value : 0;
uint32_t outputDelayValue = outputDelay ? outputDelay.value : 0;
@@ -1016,6 +925,12 @@
if (inputFormat != nullptr) {
bool graphic = (iStreamFormat.value == C2BufferData::GRAPHIC);
+ C2Config::api_feature_t apiFeatures = C2Config::api_feature_t(
+ API_REFLECTION |
+ API_VALUES |
+ API_CURRENT_VALUES |
+ API_DEPENDENCY |
+ API_SAME_INPUT_BUFFER);
std::shared_ptr<C2BlockPool> pool;
{
Mutexed<BlockPools>::Locked pools(mBlockPools);
@@ -1027,14 +942,15 @@
// query C2PortAllocatorsTuning::input from component. If an allocator ID is obtained
// from component, create the input block pool with given ID. Otherwise, use default IDs.
std::vector<std::unique_ptr<C2Param>> params;
- err = mComponent->query({ },
+ C2ApiFeaturesSetting featuresSetting{apiFeatures};
+ err = mComponent->query({ &featuresSetting },
{ C2PortAllocatorsTuning::input::PARAM_TYPE },
C2_DONT_BLOCK,
¶ms);
if ((err != C2_OK && err != C2_BAD_INDEX) || params.size() != 1) {
ALOGD("[%s] Query input allocators returned %zu params => %s (%u)",
mName, params.size(), asString(err), err);
- } else if (err == C2_OK && params.size() == 1) {
+ } else if (params.size() == 1) {
C2PortAllocatorsTuning::input *inputAllocators =
C2PortAllocatorsTuning::input::From(params[0].get());
if (inputAllocators && inputAllocators->flexCount() > 0) {
@@ -1049,6 +965,9 @@
}
}
}
+ if (featuresSetting) {
+ apiFeatures = featuresSetting.value;
+ }
// TODO: use C2Component wrapper to associate this pool with ourselves
if ((poolMask >> pools->inputAllocatorId) & 1) {
@@ -1082,7 +1001,10 @@
input->numSlots = numInputSlots;
input->extraBuffers.flush();
input->numExtraSlots = 0u;
- if (!buffersBoundToCodec) {
+ bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
+ // For encrypted content, framework decrypts source buffer (ashmem) into
+ // C2Buffers. Thus non-conforming codecs can process these.
+ if (!buffersBoundToCodec && (hasCryptoOrDescrambler() || conforming)) {
input->buffers.reset(new SlotInputBuffers(mName));
} else if (graphic) {
if (mInputSurface) {
@@ -1144,9 +1066,6 @@
Mutexed<OutputSurface>::Locked output(mOutputSurface);
output->maxDequeueBuffers = numOutputSlots +
reorderDepth.value + kRenderingDepth;
- if (!secure) {
- output->maxDequeueBuffers += numInputSlots;
- }
outputSurface = output->surface ?
output->surface->getIGraphicBufferProducer() : nullptr;
if (outputSurface) {
@@ -1268,6 +1187,13 @@
}
output->buffers->setFormat(outputFormat);
+ output->buffers->clearStash();
+ if (reorderDepth) {
+ output->buffers->setReorderDepth(reorderDepth.value);
+ }
+ if (reorderKey) {
+ output->buffers->setReorderKey(reorderKey.value);
+ }
// Try to set output surface to created block pool if given.
if (outputSurface) {
@@ -1337,62 +1263,98 @@
return UNKNOWN_ERROR;
}
size_t numInputSlots = mInput.lock()->numSlots;
- std::vector<sp<MediaCodecBuffer>> toBeQueued;
- for (size_t i = 0; i < numInputSlots; ++i) {
+
+ struct ClientInputBuffer {
size_t index;
sp<MediaCodecBuffer> buffer;
- {
- Mutexed<Input>::Locked input(mInput);
- if (!input->buffers->requestNewBuffer(&index, &buffer)) {
- if (i == 0) {
- ALOGW("[%s] start: cannot allocate memory at all", mName);
- return NO_MEMORY;
- } else {
- ALOGV("[%s] start: cannot allocate memory, only %zu buffers allocated",
- mName, i);
- }
+ size_t capacity;
+ };
+ std::list<ClientInputBuffer> clientInputBuffers;
+
+ {
+ Mutexed<Input>::Locked input(mInput);
+ while (clientInputBuffers.size() < numInputSlots) {
+ ClientInputBuffer clientInputBuffer;
+ if (!input->buffers->requestNewBuffer(&clientInputBuffer.index,
+ &clientInputBuffer.buffer)) {
break;
}
+ clientInputBuffer.capacity = clientInputBuffer.buffer->capacity();
+ clientInputBuffers.emplace_back(std::move(clientInputBuffer));
}
- if (buffer) {
- Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
- ALOGV("[%s] input buffer %zu available", mName, index);
- bool post = true;
- if (!configs->empty()) {
+ }
+ if (clientInputBuffers.empty()) {
+ ALOGW("[%s] start: cannot allocate memory at all", mName);
+ return NO_MEMORY;
+ } else if (clientInputBuffers.size() < numInputSlots) {
+ ALOGD("[%s] start: cannot allocate memory for all slots, "
+ "only %zu buffers allocated",
+ mName, clientInputBuffers.size());
+ } else {
+ ALOGV("[%s] %zu initial input buffers available",
+ mName, clientInputBuffers.size());
+ }
+ // Sort input buffers by their capacities in increasing order.
+ clientInputBuffers.sort(
+ [](const ClientInputBuffer& a, const ClientInputBuffer& b) {
+ return a.capacity < b.capacity;
+ });
+
+ {
+ Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
+ if (!configs->empty()) {
+ while (!configs->empty()) {
sp<ABuffer> config = configs->front();
configs->pop_front();
- if (buffer->capacity() >= config->size()) {
- memcpy(buffer->base(), config->data(), config->size());
- buffer->setRange(0, config->size());
- buffer->meta()->clear();
- buffer->meta()->setInt64("timeUs", 0);
- buffer->meta()->setInt32("csd", 1);
- post = false;
- } else {
- ALOGD("[%s] buffer capacity too small for the config (%zu < %zu)",
- mName, buffer->capacity(), config->size());
+ // Find the smallest input buffer that can fit the config.
+ auto i = std::find_if(
+ clientInputBuffers.begin(),
+ clientInputBuffers.end(),
+ [cfgSize = config->size()](const ClientInputBuffer& b) {
+ return b.capacity >= cfgSize;
+ });
+ if (i == clientInputBuffers.end()) {
+ ALOGW("[%s] no input buffer large enough for the config "
+ "(%zu bytes)",
+ mName, config->size());
+ return NO_MEMORY;
}
- } else if (oStreamFormat.value == C2BufferData::LINEAR && i == 0
- && (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
- // WORKAROUND: Some apps expect CSD available without queueing
- // any input. Queue an empty buffer to get the CSD.
- buffer->setRange(0, 0);
+ sp<MediaCodecBuffer> buffer = i->buffer;
+ memcpy(buffer->base(), config->data(), config->size());
+ buffer->setRange(0, config->size());
buffer->meta()->clear();
buffer->meta()->setInt64("timeUs", 0);
- post = false;
+ buffer->meta()->setInt32("csd", 1);
+ if (queueInputBufferInternal(buffer) != OK) {
+ ALOGW("[%s] Error while queueing a flushed config",
+ mName);
+ return UNKNOWN_ERROR;
+ }
+ clientInputBuffers.erase(i);
}
- if (post) {
- mCallback->onInputBufferAvailable(index, buffer);
- } else {
- toBeQueued.emplace_back(buffer);
+ } else if (oStreamFormat.value == C2BufferData::LINEAR &&
+ (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
+ sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+ // WORKAROUND: Some apps expect CSD available without queueing
+ // any input. Queue an empty buffer to get the CSD.
+ buffer->setRange(0, 0);
+ buffer->meta()->clear();
+ buffer->meta()->setInt64("timeUs", 0);
+ if (queueInputBufferInternal(buffer) != OK) {
+ ALOGW("[%s] Error while queueing an empty buffer to get CSD",
+ mName);
+ return UNKNOWN_ERROR;
}
+ clientInputBuffers.pop_front();
}
}
- for (const sp<MediaCodecBuffer> &buffer : toBeQueued) {
- if (queueInputBufferInternal(buffer) != OK) {
- ALOGV("[%s] Error while queueing initial buffers", mName);
- }
+
+ for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
+ mCallback->onInputBufferAvailable(
+ clientInputBuffer.index,
+ clientInputBuffer.buffer);
}
+
return OK;
}
@@ -1402,6 +1364,7 @@
if (mInputSurface != nullptr) {
mInputSurface.reset();
}
+ mPipelineWatcher.lock()->flush();
}
void CCodecBufferChannel::reset() {
@@ -1409,6 +1372,7 @@
{
Mutexed<Input>::Locked input(mInput);
input->buffers.reset(new DummyInputBuffers(""));
+ input->extraBuffers.flush();
}
{
Mutexed<Output>::Locked output(mOutput);
@@ -1425,6 +1389,8 @@
blockPools->inputPool.reset();
blockPools->outputPoolIntf.reset();
}
+ setCrypto(nullptr);
+ setDescrambler(nullptr);
}
@@ -1437,6 +1403,7 @@
continue;
}
if (work->input.buffers.empty()
+ || work->input.buffers.front() == nullptr
|| work->input.buffers.front()->data().linearBlocks().empty()) {
ALOGD("[%s] no linear codec config data found", mName);
continue;
@@ -1460,9 +1427,9 @@
Mutexed<Output>::Locked output(mOutput);
if (output->buffers) {
output->buffers->flush(flushedWork);
+ output->buffers->flushStash();
}
}
- mReorderStash.lock()->flush();
mPipelineWatcher.lock()->flush();
}
@@ -1503,45 +1470,36 @@
if (!output->buffers) {
return false;
}
- if (outputFormat != nullptr) {
- ALOGD("[%s] onWorkDone: output format changed to %s",
- mName, outputFormat->debugString().c_str());
- output->buffers->setFormat(outputFormat);
-
- AString mediaType;
- if (outputFormat->findString(KEY_MIME, &mediaType)
- && mediaType == MIMETYPE_AUDIO_RAW) {
- int32_t channelCount;
- int32_t sampleRate;
- if (outputFormat->findInt32(KEY_CHANNEL_COUNT, &channelCount)
- && outputFormat->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
- output->buffers->updateSkipCutBuffer(sampleRate, channelCount);
- }
- }
- }
}
- if ((work->input.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
+ // Whether the output buffer should be reported to the client or not.
+ bool notifyClient = false;
+
+ if (work->result == C2_OK){
+ notifyClient = true;
+ } else if (work->result == C2_NOT_FOUND) {
+ ALOGD("[%s] flushed work; ignored.", mName);
+ } else {
+ // C2_OK and C2_NOT_FOUND are the only results that we accept for processing
+ // the config update.
+ ALOGD("[%s] work failed to complete: %d", mName, work->result);
+ mCCodecCallback->onError(work->result, ACTION_CODE_FATAL);
+ return false;
+ }
+
+ if ((work->input.ordinal.frameIndex -
+ mFirstValidFrameIndex.load()).peek() < 0) {
// Discard frames from previous generation.
ALOGD("[%s] Discard frames from previous generation.", mName);
- return false;
+ notifyClient = false;
}
if (mInputSurface == nullptr && (work->worklets.size() != 1u
|| !work->worklets.front()
- || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE))) {
- mPipelineWatcher.lock()->onWorkDone(work->input.ordinal.frameIndex.peeku());
- }
-
- if (work->result == C2_NOT_FOUND) {
- ALOGD("[%s] flushed work; ignored.", mName);
- return true;
- }
-
- if (work->result != C2_OK) {
- ALOGD("[%s] work failed to complete: %d", mName, work->result);
- mCCodecCallback->onError(work->result, ACTION_CODE_FATAL);
- return false;
+ || !(work->worklets.front()->output.flags &
+ C2FrameData::FLAG_INCOMPLETE))) {
+ mPipelineWatcher.lock()->onWorkDone(
+ work->input.ordinal.frameIndex.peeku());
}
// NOTE: MediaCodec usage supposedly have only one worklet
@@ -1569,6 +1527,7 @@
}
std::optional<uint32_t> newInputDelay, newPipelineDelay;
+ bool needMaxDequeueBufferCountUpdate = false;
while (!worklet->output.configUpdate.empty()) {
std::unique_ptr<C2Param> param;
worklet->output.configUpdate.back().swap(param);
@@ -1577,30 +1536,20 @@
case C2PortReorderBufferDepthTuning::CORE_INDEX: {
C2PortReorderBufferDepthTuning::output reorderDepth;
if (reorderDepth.updateFrom(*param)) {
- bool secure = mComponent->getName().find(".secure") != std::string::npos;
- mReorderStash.lock()->setDepth(reorderDepth.value);
ALOGV("[%s] onWorkDone: updated reorder depth to %u",
mName, reorderDepth.value);
- size_t numOutputSlots = mOutput.lock()->numSlots;
- size_t numInputSlots = mInput.lock()->numSlots;
- Mutexed<OutputSurface>::Locked output(mOutputSurface);
- output->maxDequeueBuffers = numOutputSlots +
- reorderDepth.value + kRenderingDepth;
- if (!secure) {
- output->maxDequeueBuffers += numInputSlots;
- }
- if (output->surface) {
- output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
- }
+ mOutput.lock()->buffers->setReorderDepth(reorderDepth.value);
+ needMaxDequeueBufferCountUpdate = true;
} else {
- ALOGD("[%s] onWorkDone: failed to read reorder depth", mName);
+ ALOGD("[%s] onWorkDone: failed to read reorder depth",
+ mName);
}
break;
}
case C2PortReorderKeySetting::CORE_INDEX: {
C2PortReorderKeySetting::output reorderKey;
if (reorderKey.updateFrom(*param)) {
- mReorderStash.lock()->setKey(reorderKey.value);
+ mOutput.lock()->buffers->setReorderKey(reorderKey.value);
ALOGV("[%s] onWorkDone: updated reorder key to %u",
mName, reorderKey.value);
} else {
@@ -1615,7 +1564,8 @@
ALOGV("[%s] onWorkDone: updating pipeline delay %u",
mName, pipelineDelay.value);
newPipelineDelay = pipelineDelay.value;
- (void)mPipelineWatcher.lock()->pipelineDelay(pipelineDelay.value);
+ (void)mPipelineWatcher.lock()->pipelineDelay(
+ pipelineDelay.value);
}
}
if (param->forInput()) {
@@ -1624,7 +1574,8 @@
ALOGV("[%s] onWorkDone: updating input delay %u",
mName, inputDelay.value);
newInputDelay = inputDelay.value;
- (void)mPipelineWatcher.lock()->inputDelay(inputDelay.value);
+ (void)mPipelineWatcher.lock()->inputDelay(
+ inputDelay.value);
}
}
if (param->forOutput()) {
@@ -1632,19 +1583,19 @@
if (outputDelay.updateFrom(*param)) {
ALOGV("[%s] onWorkDone: updating output delay %u",
mName, outputDelay.value);
- bool secure = mComponent->getName().find(".secure") != std::string::npos;
(void)mPipelineWatcher.lock()->outputDelay(outputDelay.value);
+ needMaxDequeueBufferCountUpdate = true;
bool outputBuffersChanged = false;
size_t numOutputSlots = 0;
- size_t numInputSlots = mInput.lock()->numSlots;
{
Mutexed<Output>::Locked output(mOutput);
if (!output->buffers) {
return false;
}
output->outputDelay = outputDelay.value;
- numOutputSlots = outputDelay.value + kSmoothnessFactor;
+ numOutputSlots = outputDelay.value +
+ kSmoothnessFactor;
if (output->numSlots < numOutputSlots) {
output->numSlots = numOutputSlots;
if (output->buffers->isArrayMode()) {
@@ -1662,16 +1613,6 @@
if (outputBuffersChanged) {
mCCodecCallback->onOutputBuffersChanged();
}
-
- uint32_t depth = mReorderStash.lock()->depth();
- Mutexed<OutputSurface>::Locked output(mOutputSurface);
- output->maxDequeueBuffers = numOutputSlots + depth + kRenderingDepth;
- if (!secure) {
- output->maxDequeueBuffers += numInputSlots;
- }
- if (output->surface) {
- output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
- }
}
}
break;
@@ -1700,6 +1641,20 @@
input->numSlots = newNumSlots;
}
}
+ if (needMaxDequeueBufferCountUpdate) {
+ size_t numOutputSlots = 0;
+ uint32_t reorderDepth = 0;
+ {
+ Mutexed<Output>::Locked output(mOutput);
+ numOutputSlots = output->numSlots;
+ reorderDepth = output->buffers->getReorderDepth();
+ }
+ Mutexed<OutputSurface>::Locked output(mOutputSurface);
+ output->maxDequeueBuffers = numOutputSlots + reorderDepth + kRenderingDepth;
+ if (output->surface) {
+ output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+ }
+ }
int32_t flags = 0;
if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -1707,9 +1662,6 @@
ALOGV("[%s] onWorkDone: output EOS", mName);
}
- sp<MediaCodecBuffer> outBuffer;
- size_t index;
-
// WORKAROUND: adjust output timestamp based on client input timestamp and codec
// input timestamp. Codec output timestamp (in the timestamp field) shall correspond to
// the codec input timestamp, but client output timestamp should (reported in timeUs)
@@ -1730,8 +1682,18 @@
worklet->output.ordinal.timestamp.peekll(),
timestamp.peekll());
+ // csd cannot be re-ordered and will always arrive first.
if (initData != nullptr) {
Mutexed<Output>::Locked output(mOutput);
+ if (output->buffers && outputFormat) {
+ output->buffers->updateSkipCutBuffer(outputFormat);
+ output->buffers->setFormat(outputFormat);
+ }
+ if (!notifyClient) {
+ return false;
+ }
+ size_t index;
+ sp<MediaCodecBuffer> outBuffer;
if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
outBuffer->meta()->setInt64("timeUs", timestamp.peek());
outBuffer->meta()->setInt32("flags", MediaCodec::BUFFER_FLAG_CODECCONFIG);
@@ -1747,10 +1709,16 @@
}
}
- if (!buffer && !flags && outputFormat == nullptr) {
- ALOGV("[%s] onWorkDone: nothing to report from the work (%lld)",
+ bool drop = false;
+ if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+ ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+ drop = true;
+ }
+
+ if (notifyClient && !buffer && !flags && !(drop && outputFormat)) {
+ ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
mName, work->input.ordinal.frameIndex.peekull());
- return true;
+ notifyClient = false;
}
if (buffer) {
@@ -1769,66 +1737,65 @@
}
{
- Mutexed<ReorderStash>::Locked reorder(mReorderStash);
- reorder->emplace(buffer, timestamp.peek(), flags, worklet->output.ordinal);
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- // Flush reorder stash
- reorder->setDepth(0);
+ Mutexed<Output>::Locked output(mOutput);
+ if (!output->buffers) {
+ return false;
}
+ output->buffers->pushToStash(
+ drop ? nullptr : buffer,
+ notifyClient,
+ timestamp.peek(),
+ flags,
+ outputFormat,
+ worklet->output.ordinal);
}
sendOutputBuffers();
return true;
}
void CCodecBufferChannel::sendOutputBuffers() {
- ReorderStash::Entry entry;
- sp<MediaCodecBuffer> outBuffer;
+ OutputBuffers::BufferAction action;
size_t index;
+ sp<MediaCodecBuffer> outBuffer;
+ std::shared_ptr<C2Buffer> c2Buffer;
while (true) {
- Mutexed<ReorderStash>::Locked reorder(mReorderStash);
- if (!reorder->hasPending()) {
- break;
- }
- if (!reorder->pop(&entry)) {
- break;
- }
-
Mutexed<Output>::Locked output(mOutput);
if (!output->buffers) {
return;
}
- status_t err = output->buffers->registerBuffer(entry.buffer, &index, &outBuffer);
- if (err != OK) {
- bool outputBuffersChanged = false;
- if (err != WOULD_BLOCK) {
- if (!output->buffers->isArrayMode()) {
- output->buffers = output->buffers->toArrayMode(output->numSlots);
- }
- OutputBuffersArray *array = (OutputBuffersArray *)output->buffers.get();
- array->realloc(entry.buffer);
- outputBuffersChanged = true;
- }
- ALOGV("[%s] sendOutputBuffers: unable to register output buffer", mName);
- reorder->defer(entry);
-
+ action = output->buffers->popFromStashAndRegister(
+ &c2Buffer, &index, &outBuffer);
+ switch (action) {
+ case OutputBuffers::SKIP:
+ return;
+ case OutputBuffers::DISCARD:
+ break;
+ case OutputBuffers::NOTIFY_CLIENT:
output.unlock();
- reorder.unlock();
-
- if (outputBuffersChanged) {
- mCCodecCallback->onOutputBuffersChanged();
+ mCallback->onOutputBufferAvailable(index, outBuffer);
+ break;
+ case OutputBuffers::REALLOCATE:
+ if (!output->buffers->isArrayMode()) {
+ output->buffers =
+ output->buffers->toArrayMode(output->numSlots);
}
+ static_cast<OutputBuffersArray*>(output->buffers.get())->
+ realloc(c2Buffer);
+ output.unlock();
+ mCCodecCallback->onOutputBuffersChanged();
+ break;
+ case OutputBuffers::RETRY:
+ ALOGV("[%s] sendOutputBuffers: unable to register output buffer",
+ mName);
+ return;
+ default:
+ LOG_ALWAYS_FATAL("[%s] sendOutputBuffers: "
+ "corrupted BufferAction value (%d) "
+ "returned from popFromStashAndRegister.",
+ mName, int(action));
return;
}
- output.unlock();
- reorder.unlock();
-
- outBuffer->meta()->setInt64("timeUs", entry.timestamp);
- outBuffer->meta()->setInt32("flags", entry.flags);
- ALOGV("[%s] sendOutputBuffers: out buffer index = %zu [%p] => %p + %zu (%lld)",
- mName, index, outBuffer.get(), outBuffer->data(), outBuffer->size(),
- (long long)entry.timestamp);
- mCallback->onOutputBufferAvailable(index, outBuffer);
}
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index f6e7024..046c5c3 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -306,48 +306,6 @@
Mutexed<PipelineWatcher> mPipelineWatcher;
- class ReorderStash {
- public:
- struct Entry {
- inline Entry() : buffer(nullptr), timestamp(0), flags(0), ordinal({0, 0, 0}) {}
- inline Entry(
- const std::shared_ptr<C2Buffer> &b,
- int64_t t,
- int32_t f,
- const C2WorkOrdinalStruct &o)
- : buffer(b), timestamp(t), flags(f), ordinal(o) {}
- std::shared_ptr<C2Buffer> buffer;
- int64_t timestamp;
- int32_t flags;
- C2WorkOrdinalStruct ordinal;
- };
-
- ReorderStash();
-
- void clear();
- void flush();
- void setDepth(uint32_t depth);
- void setKey(C2Config::ordinal_key_t key);
- bool pop(Entry *entry);
- void emplace(
- const std::shared_ptr<C2Buffer> &buffer,
- int64_t timestamp,
- int32_t flags,
- const C2WorkOrdinalStruct &ordinal);
- void defer(const Entry &entry);
- bool hasPending() const;
- uint32_t depth() const { return mDepth; }
-
- private:
- std::list<Entry> mPending;
- std::list<Entry> mStash;
- uint32_t mDepth;
- C2Config::ordinal_key_t mKey;
-
- bool less(const C2WorkOrdinalStruct &o1, const C2WorkOrdinalStruct &o2);
- };
- Mutexed<ReorderStash> mReorderStash;
-
std::atomic_bool mInputMetEos;
std::once_flag mRenderWarningFlag;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index caa826b..dd28b6a 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -21,6 +21,7 @@
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <mediadrm/ICrypto.h>
@@ -90,9 +91,14 @@
newFormat->setInt32(KEY_STRIDE, stride);
ALOGD("[%s] updating stride = %d", mName, stride);
if (img->mNumPlanes > 1 && stride > 0) {
- int32_t vstride = (img->mPlane[1].mOffset - img->mPlane[0].mOffset) / stride;
+ int64_t offsetDelta =
+ (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
+ int32_t vstride = int32_t(offsetDelta / stride);
newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
ALOGD("[%s] updating vstride = %d", mName, vstride);
+ buffer->setRange(
+ img->mPlane[0].mOffset,
+ buffer->size() - img->mPlane[0].mOffset);
}
}
setFormat(newFormat);
@@ -155,16 +161,29 @@
setSkipCutBuffer(delay, padding);
}
+void OutputBuffers::updateSkipCutBuffer(
+ const sp<AMessage> &format, bool notify) {
+ AString mediaType;
+ if (format->findString(KEY_MIME, &mediaType)
+ && mediaType == MIMETYPE_AUDIO_RAW) {
+ int32_t channelCount;
+ int32_t sampleRate;
+ if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)
+ && format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
+ updateSkipCutBuffer(sampleRate, channelCount);
+ }
+ }
+ if (notify) {
+ mUnreportedFormat = nullptr;
+ }
+}
+
void OutputBuffers::submit(const sp<MediaCodecBuffer> &buffer) {
if (mSkipCutBuffer != nullptr) {
mSkipCutBuffer->submit(buffer);
}
}
-void OutputBuffers::transferSkipCutBuffer(const sp<SkipCutBuffer> &scb) {
- mSkipCutBuffer = scb;
-}
-
void OutputBuffers::setSkipCutBuffer(int32_t skip, int32_t cut) {
if (mSkipCutBuffer != nullptr) {
size_t prevSize = mSkipCutBuffer->size();
@@ -175,6 +194,181 @@
mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
}
+void OutputBuffers::clearStash() {
+ mPending.clear();
+ mReorderStash.clear();
+ mDepth = 0;
+ mKey = C2Config::ORDINAL;
+ mUnreportedFormat = nullptr;
+}
+
+void OutputBuffers::flushStash() {
+ for (StashEntry& e : mPending) {
+ e.notify = false;
+ }
+ for (StashEntry& e : mReorderStash) {
+ e.notify = false;
+ }
+}
+
+uint32_t OutputBuffers::getReorderDepth() const {
+ return mDepth;
+}
+
+void OutputBuffers::setReorderDepth(uint32_t depth) {
+ mPending.splice(mPending.end(), mReorderStash);
+ mDepth = depth;
+}
+
+void OutputBuffers::setReorderKey(C2Config::ordinal_key_t key) {
+ mPending.splice(mPending.end(), mReorderStash);
+ mKey = key;
+}
+
+void OutputBuffers::pushToStash(
+ const std::shared_ptr<C2Buffer>& buffer,
+ bool notify,
+ int64_t timestamp,
+ int32_t flags,
+ const sp<AMessage>& format,
+ const C2WorkOrdinalStruct& ordinal) {
+ bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
+ if (!buffer && eos) {
+ // TRICKY: we may be violating ordering of the stash here. Because we
+ // don't expect any more emplace() calls after this, the ordering should
+ // not matter.
+ mReorderStash.emplace_back(
+ buffer, notify, timestamp, flags, format, ordinal);
+ } else {
+ flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
+ auto it = mReorderStash.begin();
+ for (; it != mReorderStash.end(); ++it) {
+ if (less(ordinal, it->ordinal)) {
+ break;
+ }
+ }
+ mReorderStash.emplace(it,
+ buffer, notify, timestamp, flags, format, ordinal);
+ if (eos) {
+ mReorderStash.back().flags =
+ mReorderStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
+ }
+ }
+ while (!mReorderStash.empty() && mReorderStash.size() > mDepth) {
+ mPending.push_back(mReorderStash.front());
+ mReorderStash.pop_front();
+ }
+ ALOGV("[%s] %s: pushToStash -- pending size = %zu", mName, __func__, mPending.size());
+}
+
+OutputBuffers::BufferAction OutputBuffers::popFromStashAndRegister(
+ std::shared_ptr<C2Buffer>* c2Buffer,
+ size_t* index,
+ sp<MediaCodecBuffer>* outBuffer) {
+ if (mPending.empty()) {
+ return SKIP;
+ }
+
+ // Retrieve the first entry.
+ StashEntry &entry = mPending.front();
+
+ *c2Buffer = entry.buffer;
+ sp<AMessage> outputFormat = entry.format;
+
+ // The output format can be processed without a registered slot.
+ if (outputFormat) {
+ updateSkipCutBuffer(outputFormat, entry.notify);
+ }
+
+ if (entry.notify) {
+ if (outputFormat) {
+ setFormat(outputFormat);
+ } else if (mUnreportedFormat) {
+ outputFormat = mUnreportedFormat;
+ setFormat(outputFormat);
+ }
+ mUnreportedFormat = nullptr;
+ } else {
+ if (outputFormat) {
+ mUnreportedFormat = outputFormat;
+ } else if (!mUnreportedFormat) {
+ mUnreportedFormat = mFormat;
+ }
+ }
+
+ // Flushing mReorderStash because no other buffers should come after output
+ // EOS.
+ if (entry.flags & MediaCodec::BUFFER_FLAG_EOS) {
+ // Flush reorder stash
+ setReorderDepth(0);
+ }
+
+ if (!entry.notify) {
+ if (outputFormat) {
+ ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
+ mName, outputFormat->debugString().c_str());
+ }
+ mPending.pop_front();
+ return DISCARD;
+ }
+
+ // Try to register the buffer.
+ status_t err = registerBuffer(*c2Buffer, index, outBuffer);
+ if (err != OK) {
+ if (err != WOULD_BLOCK) {
+ return REALLOCATE;
+ }
+ return RETRY;
+ }
+
+ // Append information from the front stash entry to outBuffer.
+ (*outBuffer)->meta()->setInt64("timeUs", entry.timestamp);
+ (*outBuffer)->meta()->setInt32("flags", entry.flags);
+ if (outputFormat) {
+ ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
+ mName, outputFormat->debugString().c_str());
+ }
+ ALOGV("[%s] popFromStashAndRegister: "
+ "out buffer index = %zu [%p] => %p + %zu (%lld)",
+ mName, *index, outBuffer->get(),
+ (*outBuffer)->data(), (*outBuffer)->size(),
+ (long long)entry.timestamp);
+
+ // The front entry of mPending will be removed now that the registration
+ // succeeded.
+ mPending.pop_front();
+ return NOTIFY_CLIENT;
+}
+
+bool OutputBuffers::popPending(StashEntry *entry) {
+ if (mPending.empty()) {
+ return false;
+ }
+ *entry = mPending.front();
+ mPending.pop_front();
+ return true;
+}
+
+void OutputBuffers::deferPending(const OutputBuffers::StashEntry &entry) {
+ mPending.push_front(entry);
+}
+
+bool OutputBuffers::hasPending() const {
+ return !mPending.empty();
+}
+
+bool OutputBuffers::less(
+ const C2WorkOrdinalStruct &o1, const C2WorkOrdinalStruct &o2) const {
+ switch (mKey) {
+ case C2Config::ORDINAL: return o1.frameIndex < o2.frameIndex;
+ case C2Config::TIMESTAMP: return o1.timestamp < o2.timestamp;
+ case C2Config::CUSTOM: return o1.customOrdinal < o2.customOrdinal;
+ default:
+ ALOGD("Unrecognized key; default to timestamp");
+ return o1.frameIndex < o2.frameIndex;
+ }
+}
+
// LocalBufferPool
constexpr size_t kInitialPoolCapacity = kMaxLinearBufferSize;
@@ -304,11 +498,12 @@
mBuffers.clear();
}
-size_t FlexBuffersImpl::numClientBuffers() const {
+size_t FlexBuffersImpl::numActiveSlots() const {
return std::count_if(
mBuffers.begin(), mBuffers.end(),
[](const Entry &entry) {
- return (entry.clientBuffer != nullptr);
+ return (entry.clientBuffer != nullptr
+ || !entry.compBuffer.expired());
});
}
@@ -454,11 +649,11 @@
}
}
-size_t BuffersArrayImpl::numClientBuffers() const {
+size_t BuffersArrayImpl::numActiveSlots() const {
return std::count_if(
mBuffers.begin(), mBuffers.end(),
[](const Entry &entry) {
- return entry.ownedByClient;
+ return entry.ownedByClient || !entry.compBuffer.expired();
});
}
@@ -508,8 +703,8 @@
mImpl.flush();
}
-size_t InputBuffersArray::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t InputBuffersArray::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
sp<Codec2Buffer> InputBuffersArray::createNewBuffer() {
@@ -546,8 +741,8 @@
return nullptr;
}
-size_t SlotInputBuffers::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t SlotInputBuffers::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
sp<Codec2Buffer> SlotInputBuffers::createNewBuffer() {
@@ -598,8 +793,8 @@
return std::move(array);
}
-size_t LinearInputBuffers::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t LinearInputBuffers::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
// static
@@ -776,8 +971,8 @@
return std::move(array);
}
-size_t GraphicMetadataInputBuffers::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t GraphicMetadataInputBuffers::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
sp<Codec2Buffer> GraphicMetadataInputBuffers::createNewBuffer() {
@@ -841,8 +1036,8 @@
return std::move(array);
}
-size_t GraphicInputBuffers::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t GraphicInputBuffers::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
sp<Codec2Buffer> GraphicInputBuffers::createNewBuffer() {
@@ -932,8 +1127,8 @@
mImpl.getArray(array);
}
-size_t OutputBuffersArray::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t OutputBuffersArray::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
void OutputBuffersArray::realloc(const std::shared_ptr<C2Buffer> &c2buffer) {
@@ -983,6 +1178,16 @@
mImpl.grow(newSize, mAlloc);
}
+void OutputBuffersArray::transferFrom(OutputBuffers* source) {
+ mFormat = source->mFormat;
+ mSkipCutBuffer = source->mSkipCutBuffer;
+ mUnreportedFormat = source->mUnreportedFormat;
+ mPending = std::move(source->mPending);
+ mReorderStash = std::move(source->mReorderStash);
+ mDepth = source->mDepth;
+ mKey = source->mKey;
+}
+
// FlexOutputBuffers
status_t FlexOutputBuffers::registerBuffer(
@@ -1025,17 +1230,16 @@
// track of the flushed work.
}
-std::unique_ptr<OutputBuffers> FlexOutputBuffers::toArrayMode(size_t size) {
+std::unique_ptr<OutputBuffersArray> FlexOutputBuffers::toArrayMode(size_t size) {
std::unique_ptr<OutputBuffersArray> array(new OutputBuffersArray(mComponentName.c_str()));
- array->setFormat(mFormat);
- array->transferSkipCutBuffer(mSkipCutBuffer);
+ array->transferFrom(this);
std::function<sp<Codec2Buffer>()> alloc = getAlloc();
array->initialize(mImpl, size, alloc);
- return std::move(array);
+ return array;
}
-size_t FlexOutputBuffers::numClientBuffers() const {
- return mImpl.numClientBuffers();
+size_t FlexOutputBuffers::numActiveSlots() const {
+ return mImpl.numActiveSlots();
}
// LinearOutputBuffers
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index eec79f1..c383a7c 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -33,8 +33,8 @@
class SkipCutBuffer;
constexpr size_t kLinearBufferSize = 1048576;
-// This can fit 4K RGBA frame, and most likely client won't need more than this.
-constexpr size_t kMaxLinearBufferSize = 4096 * 2304 * 4;
+// This can fit an 8K frame.
+constexpr size_t kMaxLinearBufferSize = 7680 * 4320 * 2;
/**
* Base class for representation of buffers at one port.
@@ -72,7 +72,7 @@
/**
* Return number of buffers the client owns.
*/
- virtual size_t numClientBuffers() const = 0;
+ virtual size_t numActiveSlots() const = 0;
/**
* Examine image data from the buffer and update the format if necessary.
@@ -156,6 +156,8 @@
DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
};
+class OutputBuffersArray;
+
class OutputBuffers : public CCodecBuffers {
public:
OutputBuffers(const char *componentName, const char *name = "Output");
@@ -163,8 +165,12 @@
/**
* Register output C2Buffer from the component and obtain corresponding
- * index and MediaCodecBuffer object. Returns false if registration
- * fails.
+ * index and MediaCodecBuffer object.
+ *
+ * Returns:
+ * OK if registration succeeds.
+ * NO_MEMORY if all buffers are available but not compatible.
+ * WOULD_BLOCK if there are compatible buffers, but they are all in use.
*/
virtual status_t registerBuffer(
const std::shared_ptr<C2Buffer> &buffer,
@@ -199,7 +205,7 @@
* shall retain the internal state so that it will honor index and
* buffer from previous calls of registerBuffer().
*/
- virtual std::unique_ptr<OutputBuffers> toArrayMode(size_t size) = 0;
+ virtual std::unique_ptr<OutputBuffersArray> toArrayMode(size_t size) = 0;
/**
* Initialize SkipCutBuffer object.
@@ -208,6 +214,164 @@
int32_t delay, int32_t padding, int32_t sampleRate, int32_t channelCount);
/**
+ * Update SkipCutBuffer from format. The @p format must not be null.
+ * @p notify determines whether the format comes with a buffer that should
+ * be reported to the client or not.
+ */
+ void updateSkipCutBuffer(const sp<AMessage> &format, bool notify = true);
+
+ /**
+ * Output Stash
+ * ============
+ *
+ * The output stash is a place to hold output buffers temporarily before
+ * they are registered to output slots. It has 2 main functions:
+ * 1. Allow reordering of output frames as the codec may produce frames in a
+ * different order.
+ * 2. Act as a "buffer" between the codec and the client because the codec
+ * may produce more buffers than available slots. This excess of codec's
+ * output buffers should be registered to slots later, after the client
+ * has released some slots.
+ *
+ * The stash consists of 2 lists of buffers: mPending and mReorderStash.
+ * mPending is a normal FIFO queue with no size limit, while mReorderStash
+ * is a sorted list with size limit mDepth.
+ *
+ * The normal flow of a non-csd output buffer is as follows:
+ *
+ * |----------------OutputBuffers---------------|
+ * |----------Output stash----------| |
+ * Codec --|-> mReorderStash --> mPending --|-> slots --|-> client
+ * | | |
+ * pushToStash() popFromStashAndRegister()
+ *
+ * The buffer that comes from the codec first enters mReorderStash. The
+ * first buffer in mReorderStash gets moved to mPending when mReorderStash
+ * overflows. Buffers in mPending are registered to slots and given to the
+ * client as soon as slots are available.
+ *
+ * Every output buffer that is not a csd buffer should be put on the stash
+ * by calling pushToStash(), then later registered to a slot by calling
+ * popFromStashAndRegister() before notifying the client with
+ * onOutputBufferAvailable().
+ *
+ * Reordering
+ * ==========
+ *
+ * mReorderStash is a sorted list with a specified size limit. The size
+ * limit can be set by calling setReorderDepth().
+ *
+ * Every buffer in mReorderStash has a C2WorkOrdinalStruct, which contains 3
+ * members, all of which are comparable. Which member of C2WorkOrdinalStruct
+ * should be used for reordering can be chosen by calling setReorderKey().
+ */
+
+ /**
+ * Return the reorder depth---the size of mReorderStash.
+ */
+ uint32_t getReorderDepth() const;
+
+ /**
+ * Set the reorder depth.
+ */
+ void setReorderDepth(uint32_t depth);
+
+ /**
+ * Set the type of "key" to use in comparisons.
+ */
+ void setReorderKey(C2Config::ordinal_key_t key);
+
+ /**
+ * Return whether the output stash has any pending buffers.
+ */
+ bool hasPending() const;
+
+ /**
+ * Flush the stash and reset the depth and the key to their default values.
+ */
+ void clearStash();
+
+ /**
+ * Flush the stash.
+ */
+ void flushStash();
+
+ /**
+ * Push a buffer to the reorder stash.
+ *
+ * @param buffer C2Buffer object from the returned work.
+ * @param notify Whether the returned work contains a buffer that should
+ * be reported to the client. This may be false if the
+ * caller wants to process the buffer without notifying the
+ * client.
+ * @param timestamp Buffer timestamp to report to the client.
+ * @param flags Buffer flags to report to the client.
+ * @param format Buffer format to report to the client.
+ * @param ordinal Ordinal used in reordering. This determines when the
+ * buffer will be popped from the output stash by
+ * `popFromStashAndRegister()`.
+ */
+ void pushToStash(
+ const std::shared_ptr<C2Buffer>& buffer,
+ bool notify,
+ int64_t timestamp,
+ int32_t flags,
+ const sp<AMessage>& format,
+ const C2WorkOrdinalStruct& ordinal);
+
+ enum BufferAction : int {
+ SKIP,
+ DISCARD,
+ NOTIFY_CLIENT,
+ REALLOCATE,
+ RETRY,
+ };
+
+ /**
+ * Try to atomically pop the first buffer from the reorder stash and
+ * register it to an output slot. The function returns a value that
+ * indicates a recommended course of action for the caller.
+ *
+ * If the stash is empty, the function will return `SKIP`.
+ *
+ * If the stash is not empty, the function will peek at the first (oldest)
+ * entry in mPending and process the buffer in the entry as follows:
+ * - If the buffer should not be sent to the client, the function will
+ * return `DISCARD`. The stash entry will be removed.
+ * - If the buffer should be sent to the client, the function will attempt
+ * to register the buffer to a slot. The registration may have 3 outcomes
+ * corresponding to the following return values:
+ * - `NOTIFY_CLIENT`: The buffer is successfully registered to a slot. The
+ * output arguments @p index and @p outBuffer will contain valid values
+ * that the caller can use to call onOutputBufferAvailable(). The stash
+ * entry will be removed.
+ * - `REALLOCATE`: The buffer is not registered because it is not
+ * compatible with the current slots (which are available). The caller
+ * should reallocate the OutputBuffers with slots that can fit the
+ * returned @p c2Buffer. The stash entry will not be removed.
+ * - `RETRY`: All slots are currently occupied by the client. The caller
+ * should try to call this function again after the client has released
+ * some slots.
+ *
+ * @return What the caller should do afterwards.
+ *
+ * @param[out] c2Buffer Underlying C2Buffer associated to the first buffer
+ * on the stash. This value is guaranteed to be valid
+ * unless the return value is `SKIP`.
+ * @param[out] index Slot index. This value is valid only if the return
+ * value is `NOTIFY_CLIENT`.
+ * @param[out] outBuffer Registered buffer. This value is valid only if the
+ * return value is `NOTIFY_CLIENT`.
+ */
+ BufferAction popFromStashAndRegister(
+ std::shared_ptr<C2Buffer>* c2Buffer,
+ size_t* index,
+ sp<MediaCodecBuffer>* outBuffer);
+
+protected:
+ sp<SkipCutBuffer> mSkipCutBuffer;
+
+ /**
* Update the SkipCutBuffer object. No-op if it's never initialized.
*/
void updateSkipCutBuffer(int32_t sampleRate, int32_t channelCount);
@@ -217,15 +381,8 @@
*/
void submit(const sp<MediaCodecBuffer> &buffer);
- /**
- * Transfer SkipCutBuffer object to the other Buffers object.
- */
- void transferSkipCutBuffer(const sp<SkipCutBuffer> &scb);
-
-protected:
- sp<SkipCutBuffer> mSkipCutBuffer;
-
private:
+ // SkipCutBuffer
int32_t mDelay;
int32_t mPadding;
int32_t mSampleRate;
@@ -233,7 +390,78 @@
void setSkipCutBuffer(int32_t skip, int32_t cut);
+ // Output stash
+
+ // Output format that has not been made available to the client.
+ sp<AMessage> mUnreportedFormat;
+
+ // Struct for an entry in the output stash (mPending and mReorderStash)
+ struct StashEntry {
+ inline StashEntry()
+ : buffer(nullptr),
+ notify(false),
+ timestamp(0),
+ flags(0),
+ format(),
+ ordinal({0, 0, 0}) {}
+ inline StashEntry(
+ const std::shared_ptr<C2Buffer> &b,
+ bool n,
+ int64_t t,
+ int32_t f,
+ const sp<AMessage> &fmt,
+ const C2WorkOrdinalStruct &o)
+ : buffer(b),
+ notify(n),
+ timestamp(t),
+ flags(f),
+ format(fmt),
+ ordinal(o) {}
+ std::shared_ptr<C2Buffer> buffer;
+ bool notify;
+ int64_t timestamp;
+ int32_t flags;
+ sp<AMessage> format;
+ C2WorkOrdinalStruct ordinal;
+ };
+
+ /**
+ * FIFO queue of stash entries.
+ */
+ std::list<StashEntry> mPending;
+ /**
+ * Sorted list of stash entries.
+ */
+ std::list<StashEntry> mReorderStash;
+ /**
+ * Size limit of mReorderStash.
+ */
+ uint32_t mDepth{0};
+ /**
+ * Choice of key to use in ordering of stash entries in mReorderStash.
+ */
+ C2Config::ordinal_key_t mKey{C2Config::ORDINAL};
+
+ /**
+ * Return false if mPending is empty; otherwise, pop the first entry from
+ * mPending and return true.
+ */
+ bool popPending(StashEntry *entry);
+
+ /**
+ * Push an entry as the first entry of mPending.
+ */
+ void deferPending(const StashEntry &entry);
+
+ /**
+ * Comparison of C2WorkOrdinalStruct based on mKey.
+ */
+ bool less(const C2WorkOrdinalStruct &o1,
+ const C2WorkOrdinalStruct &o2) const;
+
DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
+
+ friend OutputBuffersArray;
};
/**
@@ -356,7 +584,7 @@
* Return the number of buffers that are sent to the client but not released
* yet.
*/
- size_t numClientBuffers() const;
+ size_t numActiveSlots() const;
/**
* Return the number of buffers that are sent to the component but not
@@ -477,7 +705,7 @@
* Return the number of buffers that are sent to the client but not released
* yet.
*/
- size_t numClientBuffers() const;
+ size_t numActiveSlots() const;
/**
* Return the size of the array.
@@ -537,7 +765,7 @@
void flush() override;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
protected:
sp<Codec2Buffer> createNewBuffer() override;
@@ -568,7 +796,7 @@
std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
protected:
sp<Codec2Buffer> createNewBuffer() final;
@@ -598,7 +826,7 @@
std::unique_ptr<InputBuffers> toArrayMode(size_t size) override;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
protected:
sp<Codec2Buffer> createNewBuffer() override;
@@ -666,7 +894,7 @@
std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
protected:
sp<Codec2Buffer> createNewBuffer() override;
@@ -696,7 +924,7 @@
std::unique_ptr<InputBuffers> toArrayMode(
size_t size) final;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
protected:
sp<Codec2Buffer> createNewBuffer() override;
@@ -737,7 +965,7 @@
array->clear();
}
- size_t numClientBuffers() const final {
+ size_t numActiveSlots() const final {
return 0u;
}
@@ -770,7 +998,7 @@
bool isArrayMode() const final { return true; }
- std::unique_ptr<OutputBuffers> toArrayMode(size_t) final {
+ std::unique_ptr<OutputBuffersArray> toArrayMode(size_t) final {
return nullptr;
}
@@ -791,7 +1019,7 @@
void getArray(Vector<sp<MediaCodecBuffer>> *array) const final;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
/**
* Reallocate the array, filled with buffers with the same size as given
@@ -809,6 +1037,12 @@
*/
void grow(size_t newSize);
+ /**
+ * Transfer the SkipCutBuffer and the output stash from another
+ * OutputBuffers.
+ */
+ void transferFrom(OutputBuffers* source);
+
private:
BuffersArrayImpl mImpl;
std::function<sp<Codec2Buffer>()> mAlloc;
@@ -837,9 +1071,9 @@
void flush(
const std::list<std::unique_ptr<C2Work>> &flushedWork) override;
- std::unique_ptr<OutputBuffers> toArrayMode(size_t size) override;
+ std::unique_ptr<OutputBuffersArray> toArrayMode(size_t size) override;
- size_t numClientBuffers() const final;
+ size_t numActiveSlots() const final;
/**
* Return an appropriate Codec2Buffer object for the type of buffers.
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 96f86e8..79c6227 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1151,14 +1151,11 @@
bool changed = false;
if (domain & mInputDomain) {
- sp<AMessage> oldFormat = mInputFormat;
- mInputFormat = mInputFormat->dup(); // trigger format changed
+ sp<AMessage> oldFormat = mInputFormat->dup();
mInputFormat->extend(getFormatForDomain(reflected, mInputDomain));
if (mInputFormat->countEntries() != oldFormat->countEntries()
|| mInputFormat->changesFrom(oldFormat)->countEntries() > 0) {
changed = true;
- } else {
- mInputFormat = oldFormat; // no change
}
}
if (domain & mOutputDomain) {
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 25e7da9..19414a0 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -276,20 +276,22 @@
int32_t planeSize = 0;
for (uint32_t i = 0; i < layout.numPlanes; ++i) {
const C2PlaneInfo &plane = layout.planes[i];
- ssize_t minOffset = plane.minOffset(mWidth, mHeight);
- ssize_t maxOffset = plane.maxOffset(mWidth, mHeight);
+ int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+ ssize_t minOffset = plane.minOffset(
+ mWidth / plane.colSampling, mHeight / plane.rowSampling);
+ ssize_t maxOffset = plane.maxOffset(
+ mWidth / plane.colSampling, mHeight / plane.rowSampling);
if (minPtr > mView.data()[i] + minOffset) {
minPtr = mView.data()[i] + minOffset;
}
if (maxPtr < mView.data()[i] + maxOffset) {
maxPtr = mView.data()[i] + maxOffset;
}
- planeSize += std::abs(plane.rowInc) * align(mHeight, 64)
- / plane.rowSampling / plane.colSampling
- * divUp(mAllocatedDepth, 8u);
+ planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+ * align(mHeight, 64) / plane.rowSampling;
}
- if ((maxPtr - minPtr + 1) <= planeSize) {
+ if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
// FIXME: this is risky as reading/writing data out of bound results
// in an undefined behavior, but gralloc does assume a
// contiguous mapping
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index bb35763..479acb1 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -61,24 +61,24 @@
/// Input Surface configuration
struct Config {
// IN PARAMS (GBS)
- float mMinFps; // minimum fps (repeat frame to achieve this)
- float mMaxFps; // max fps (via frame drop)
- float mCaptureFps; // capture fps
- float mCodedFps; // coded fps
- bool mSuspended; // suspended
- int64_t mTimeOffsetUs; // time offset (input => codec)
- int64_t mSuspendAtUs; // suspend/resume time
- int64_t mStartAtUs; // start time
- bool mStopped; // stopped
- int64_t mStopAtUs; // stop time
+ float mMinFps = 0.0; // minimum fps (repeat frame to achieve this)
+ float mMaxFps = 0.0; // max fps (via frame drop)
+ float mCaptureFps = 0.0; // capture fps
+ float mCodedFps = 0.0; // coded fps
+ bool mSuspended = false; // suspended
+ int64_t mTimeOffsetUs = 0; // time offset (input => codec)
+ int64_t mSuspendAtUs = 0; // suspend/resume time
+ int64_t mStartAtUs = 0; // start time
+ bool mStopped = false; // stopped
+ int64_t mStopAtUs = 0; // stop time
// OUT PARAMS (GBS)
- int64_t mInputDelayUs; // delay between encoder input and surface input
+ int64_t mInputDelayUs = 0; // delay between encoder input and surface input
// IN PARAMS (CODEC WRAPPER)
- float mFixedAdjustedFps; // fixed fps via PTS manipulation
- float mMinAdjustedFps; // minimum fps via PTS manipulation
- uint64_t mUsage; // consumer usage
+ float mFixedAdjustedFps = 0.0; // fixed fps via PTS manipulation
+ float mMinAdjustedFps = 0.0; // minimum fps via PTS manipulation
+ uint64_t mUsage = 0; // consumer usage
};
/**
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ecb2506..dbbb5d5 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -193,7 +193,6 @@
Mutexed<std::unique_ptr<CCodecConfig>> mConfig;
Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
- std::atomic_flag mSentConfigAfterResume;
friend class CCodecCallbackImpl;
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 5bee605..ad8f6e5 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -18,22 +18,31 @@
#include <gtest/gtest.h>
+#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaCodecConstants.h>
+#include <C2BlockInternal.h>
#include <C2PlatformSupport.h>
namespace android {
+static std::shared_ptr<RawGraphicOutputBuffers> GetRawGraphicOutputBuffers(
+ int32_t width, int32_t height) {
+ std::shared_ptr<RawGraphicOutputBuffers> buffers =
+ std::make_shared<RawGraphicOutputBuffers>("test");
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_WIDTH, width);
+ format->setInt32(KEY_HEIGHT, height);
+ buffers->setFormat(format);
+ return buffers;
+}
+
TEST(RawGraphicOutputBuffersTest, ChangeNumSlots) {
constexpr int32_t kWidth = 3840;
constexpr int32_t kHeight = 2160;
std::shared_ptr<RawGraphicOutputBuffers> buffers =
- std::make_shared<RawGraphicOutputBuffers>("test");
- sp<AMessage> format{new AMessage};
- format->setInt32("width", kWidth);
- format->setInt32("height", kHeight);
- buffers->setFormat(format);
+ GetRawGraphicOutputBuffers(kWidth, kHeight);
std::shared_ptr<C2BlockPool> pool;
ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool));
@@ -96,4 +105,435 @@
}
}
+class TestGraphicAllocation : public C2GraphicAllocation {
+public:
+ TestGraphicAllocation(
+ uint32_t width,
+ uint32_t height,
+ const C2PlanarLayout &layout,
+ size_t capacity,
+ std::vector<size_t> offsets)
+ : C2GraphicAllocation(width, height),
+ mLayout(layout),
+ mMemory(capacity, 0xAA),
+ mOffsets(offsets) {
+ }
+
+ c2_status_t map(
+ C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+ C2PlanarLayout *layout, uint8_t **addr) override {
+ (void)rect;
+ (void)usage;
+ (void)fence;
+ *layout = mLayout;
+ for (size_t i = 0; i < mLayout.numPlanes; ++i) {
+ addr[i] = mMemory.data() + mOffsets[i];
+ }
+ return C2_OK;
+ }
+
+ c2_status_t unmap(uint8_t **, C2Rect, C2Fence *) override { return C2_OK; }
+
+ C2Allocator::id_t getAllocatorId() const override { return -1; }
+
+ const C2Handle *handle() const override { return nullptr; }
+
+ bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const override {
+ return other.get() == this;
+ }
+
+private:
+ C2PlanarLayout mLayout;
+ std::vector<uint8_t> mMemory;
+ std::vector<uint8_t *> mAddr;
+ std::vector<size_t> mOffsets;
+};
+
+class LayoutTest : public ::testing::TestWithParam<std::tuple<bool, std::string, bool, int32_t>> {
+private:
+ static C2PlanarLayout YUVPlanarLayout(int32_t stride) {
+ C2PlanarLayout layout = {
+ C2PlanarLayout::TYPE_YUV,
+ 3, /* numPlanes */
+ 3, /* rootPlanes */
+ {}, /* planes --- to be filled below */
+ };
+ layout.planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y,
+ 1, /* colInc */
+ stride, /* rowInc */
+ 1, /* colSampling */
+ 1, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_Y, /* rootIx */
+ 0, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB,
+ 1, /* colInc */
+ stride / 2, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_U, /* rootIx */
+ 0, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR,
+ 1, /* colInc */
+ stride / 2, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_V, /* rootIx */
+ 0, /* offset */
+ };
+ return layout;
+ }
+
+ static C2PlanarLayout YUVSemiPlanarLayout(int32_t stride) {
+ C2PlanarLayout layout = {
+ C2PlanarLayout::TYPE_YUV,
+ 3, /* numPlanes */
+ 2, /* rootPlanes */
+ {}, /* planes --- to be filled below */
+ };
+ layout.planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y,
+ 1, /* colInc */
+ stride, /* rowInc */
+ 1, /* colSampling */
+ 1, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_Y, /* rootIx */
+ 0, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB,
+ 2, /* colInc */
+ stride, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_U, /* rootIx */
+ 0, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR,
+ 2, /* colInc */
+ stride, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_U, /* rootIx */
+ 1, /* offset */
+ };
+ return layout;
+ }
+
+ static C2PlanarLayout YVUSemiPlanarLayout(int32_t stride) {
+ C2PlanarLayout layout = {
+ C2PlanarLayout::TYPE_YUV,
+ 3, /* numPlanes */
+ 2, /* rootPlanes */
+ {}, /* planes --- to be filled below */
+ };
+ layout.planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y,
+ 1, /* colInc */
+ stride, /* rowInc */
+ 1, /* colSampling */
+ 1, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_Y, /* rootIx */
+ 0, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB,
+ 2, /* colInc */
+ stride, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_V, /* rootIx */
+ 1, /* offset */
+ };
+ layout.planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR,
+ 2, /* colInc */
+ stride, /* rowInc */
+ 2, /* colSampling */
+ 2, /* rowSampling */
+ 8, /* allocatedDepth */
+ 8, /* bitDepth */
+ 0, /* rightShift */
+ C2PlaneInfo::NATIVE,
+ C2PlanarLayout::PLANE_V, /* rootIx */
+ 0, /* offset */
+ };
+ return layout;
+ }
+
+ static std::shared_ptr<C2GraphicBlock> CreateGraphicBlock(
+ uint32_t width,
+ uint32_t height,
+ const C2PlanarLayout &layout,
+ size_t capacity,
+ std::vector<size_t> offsets) {
+ std::shared_ptr<C2GraphicAllocation> alloc = std::make_shared<TestGraphicAllocation>(
+ width,
+ height,
+ layout,
+ capacity,
+ offsets);
+
+ return _C2BlockFactory::CreateGraphicBlock(alloc);
+ }
+
+ static constexpr uint8_t GetPixelValue(uint8_t value, uint32_t row, uint32_t col) {
+ return (uint32_t(value) * row + col) & 0xFF;
+ }
+
+ static void FillPlane(C2GraphicView &view, size_t index, uint8_t value) {
+ C2PlanarLayout layout = view.layout();
+
+ uint8_t *rowPtr = view.data()[index];
+ C2PlaneInfo plane = layout.planes[index];
+ for (uint32_t row = 0; row < view.height() / plane.rowSampling; ++row) {
+ uint8_t *colPtr = rowPtr;
+ for (uint32_t col = 0; col < view.width() / plane.colSampling; ++col) {
+ *colPtr = GetPixelValue(value, row, col);
+ colPtr += plane.colInc;
+ }
+ rowPtr += plane.rowInc;
+ }
+ }
+
+ static void FillBlock(const std::shared_ptr<C2GraphicBlock> &block) {
+ C2GraphicView view = block->map().get();
+
+ FillPlane(view, C2PlanarLayout::PLANE_Y, 'Y');
+ FillPlane(view, C2PlanarLayout::PLANE_U, 'U');
+ FillPlane(view, C2PlanarLayout::PLANE_V, 'V');
+ }
+
+ static bool VerifyPlane(
+ const MediaImage2 *mediaImage,
+ const uint8_t *base,
+ uint32_t index,
+ uint8_t value,
+ std::string *errorMsg) {
+ *errorMsg = "";
+ MediaImage2::PlaneInfo plane = mediaImage->mPlane[index];
+ const uint8_t *rowPtr = base + plane.mOffset;
+ for (uint32_t row = 0; row < mediaImage->mHeight / plane.mVertSubsampling; ++row) {
+ const uint8_t *colPtr = rowPtr;
+ for (uint32_t col = 0; col < mediaImage->mWidth / plane.mHorizSubsampling; ++col) {
+ if (GetPixelValue(value, row, col) != *colPtr) {
+ *errorMsg = AStringPrintf("row=%u col=%u expected=%02x actual=%02x",
+ row, col, GetPixelValue(value, row, col), *colPtr).c_str();
+ return false;
+ }
+ colPtr += plane.mColInc;
+ }
+ rowPtr += plane.mRowInc;
+ }
+ return true;
+ }
+
+public:
+ static constexpr int32_t kWidth = 320;
+ static constexpr int32_t kHeight = 240;
+ static constexpr int32_t kGapLength = kWidth * kHeight * 10;
+
+ static std::shared_ptr<C2Buffer> CreateAndFillBufferFromParam(const ParamType ¶m) {
+ bool contiguous = std::get<0>(param);
+ std::string planeOrderStr = std::get<1>(param);
+ bool planar = std::get<2>(param);
+ int32_t stride = std::get<3>(param);
+
+ C2PlanarLayout::plane_index_t planeOrder[3];
+ C2PlanarLayout layout;
+
+ if (planeOrderStr.size() != 3) {
+ return nullptr;
+ }
+ for (size_t i = 0; i < 3; ++i) {
+ C2PlanarLayout::plane_index_t planeIndex;
+ switch (planeOrderStr[i]) {
+ case 'Y': planeIndex = C2PlanarLayout::PLANE_Y; break;
+ case 'U': planeIndex = C2PlanarLayout::PLANE_U; break;
+ case 'V': planeIndex = C2PlanarLayout::PLANE_V; break;
+ default: return nullptr;
+ }
+ planeOrder[i] = planeIndex;
+ }
+
+ if (planar) {
+ layout = YUVPlanarLayout(stride);
+ } else { // semi-planar
+ for (size_t i = 0; i < 3; ++i) {
+ if (planeOrder[i] == C2PlanarLayout::PLANE_U) {
+ layout = YUVSemiPlanarLayout(stride);
+ break;
+ }
+ if (planeOrder[i] == C2PlanarLayout::PLANE_V) {
+ layout = YVUSemiPlanarLayout(stride);
+ break;
+ }
+ }
+ }
+
+ size_t yPlaneSize = stride * kHeight;
+ size_t uvPlaneSize = stride * kHeight / 4;
+ size_t capacity = yPlaneSize + uvPlaneSize * 2;
+ std::vector<size_t> offsets(3);
+
+ if (!contiguous) {
+ if (planar) {
+ capacity += kGapLength * 2;
+ } else { // semi-planar
+ capacity += kGapLength;
+ }
+ }
+
+ offsets[planeOrder[0]] = 0;
+ size_t planeSize = (planeOrder[0] == C2PlanarLayout::PLANE_Y) ? yPlaneSize : uvPlaneSize;
+ for (size_t i = 1; i < 3; ++i) {
+ offsets[planeOrder[i]] = offsets[planeOrder[i - 1]] + planeSize;
+ if (!contiguous) {
+ offsets[planeOrder[i]] += kGapLength;
+ }
+ planeSize = (planeOrder[i] == C2PlanarLayout::PLANE_Y) ? yPlaneSize : uvPlaneSize;
+ if (!planar // semi-planar
+ && planeOrder[i - 1] != C2PlanarLayout::PLANE_Y
+ && planeOrder[i] != C2PlanarLayout::PLANE_Y) {
+ offsets[planeOrder[i]] = offsets[planeOrder[i - 1]] + 1;
+ planeSize = uvPlaneSize * 2 - 1;
+ }
+ }
+
+ std::shared_ptr<C2GraphicBlock> block = CreateGraphicBlock(
+ kWidth,
+ kHeight,
+ layout,
+ capacity,
+ offsets);
+ FillBlock(block);
+ return C2Buffer::CreateGraphicBuffer(
+ block->share(block->crop(), C2Fence()));
+ }
+
+ static bool VerifyClientBuffer(
+ const sp<MediaCodecBuffer> &buffer, std::string *errorMsg) {
+ *errorMsg = "";
+ sp<ABuffer> imageData;
+ if (!buffer->format()->findBuffer("image-data", &imageData)) {
+ *errorMsg = "Missing image data";
+ return false;
+ }
+ MediaImage2 *mediaImage = (MediaImage2 *)imageData->data();
+ if (mediaImage->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+ *errorMsg = AStringPrintf("Unexpected type: %d", mediaImage->mType).c_str();
+ return false;
+ }
+ std::string planeErrorMsg;
+ if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::Y, 'Y', &planeErrorMsg)) {
+ *errorMsg = "Y plane does not match: " + planeErrorMsg;
+ return false;
+ }
+ if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::U, 'U', &planeErrorMsg)) {
+ *errorMsg = "U plane does not match: " + planeErrorMsg;
+ return false;
+ }
+ if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::V, 'V', &planeErrorMsg)) {
+ *errorMsg = "V plane does not match: " + planeErrorMsg;
+ return false;
+ }
+
+ int32_t width, height, stride;
+ buffer->format()->findInt32(KEY_WIDTH, &width);
+ buffer->format()->findInt32(KEY_HEIGHT, &height);
+ buffer->format()->findInt32(KEY_STRIDE, &stride);
+
+ MediaImage2 legacyYLayout = {
+ MediaImage2::MEDIA_IMAGE_TYPE_Y,
+ 1, // mNumPlanes
+ uint32_t(width),
+ uint32_t(height),
+ 8,
+ 8,
+ {}, // mPlane
+ };
+ legacyYLayout.mPlane[MediaImage2::Y] = {
+ 0, // mOffset
+ 1, // mColInc
+ stride, // mRowInc
+ 1, // mHorizSubsampling
+ 1, // mVertSubsampling
+ };
+ if (!VerifyPlane(&legacyYLayout, buffer->data(), MediaImage2::Y, 'Y', &planeErrorMsg)) {
+ *errorMsg = "Y plane by legacy layout does not match: " + planeErrorMsg;
+ return false;
+ }
+ return true;
+ }
+
+};
+
+TEST_P(LayoutTest, VerifyLayout) {
+ std::shared_ptr<RawGraphicOutputBuffers> buffers =
+ GetRawGraphicOutputBuffers(kWidth, kHeight);
+
+ std::shared_ptr<C2Buffer> c2Buffer = CreateAndFillBufferFromParam(GetParam());
+ ASSERT_NE(nullptr, c2Buffer);
+ sp<MediaCodecBuffer> clientBuffer;
+ size_t index;
+ ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+ ASSERT_NE(nullptr, clientBuffer);
+ std::string errorMsg;
+ ASSERT_TRUE(VerifyClientBuffer(clientBuffer, &errorMsg)) << errorMsg;
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ RawGraphicOutputBuffersTest,
+ LayoutTest,
+ ::testing::Combine(
+ ::testing::Bool(), /* contiguous */
+ ::testing::Values("YUV", "YVU", "UVY", "VUY"),
+ ::testing::Bool(), /* planar */
+ ::testing::Values(320, 512)),
+ [](const ::testing::TestParamInfo<LayoutTest::ParamType> &info) {
+ std::string contiguous = std::get<0>(info.param) ? "Contiguous" : "Noncontiguous";
+ std::string planar = std::get<2>(info.param) ? "Planar" : "SemiPlanar";
+ return contiguous
+ + std::get<1>(info.param)
+ + planar
+ + std::to_string(std::get<3>(info.param));
+ });
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 205abdc..e7dc92a 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -1,6 +1,7 @@
-cc_library_shared {
+cc_library {
name: "libsfplugin_ccodec_utils",
vendor_available: true,
+ min_sdk_version: "29",
double_loadable: true,
srcs: [
diff --git a/media/codec2/tests/C2Param_test.cpp b/media/codec2/tests/C2Param_test.cpp
index 564d4d2..bb8130c 100644
--- a/media/codec2/tests/C2Param_test.cpp
+++ b/media/codec2/tests/C2Param_test.cpp
@@ -96,7 +96,7 @@
const static std::vector<C2FieldDescriptor> _FIELD_LIST;
static const std::vector<C2FieldDescriptor> FieldList(); // <= needed for C2FieldDescriptor
const static FD::type_t TYPE = (FD::type_t)(CORE_INDEX | FD::STRUCT_FLAG);
-};
+} C2_PACK;
DEFINE_NO_NAMED_VALUES_FOR(C2SizeStruct)
@@ -111,11 +111,13 @@
struct C2TestStruct_A {
int32_t signed32;
+ // 4-byte padding
int64_t signed64[2];
uint32_t unsigned32[1];
+ // 4-byte padding
uint64_t unsigned64;
float fp32;
- C2SizeStruct sz[3];
+ C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
uint8_t blob[100];
char string[100];
bool yesNo[100];
@@ -124,21 +126,21 @@
static const std::vector<C2FieldDescriptor> FieldList();
// enum : uint32_t { CORE_INDEX = kParamIndexTest };
// typedef C2TestStruct_A _type;
-} __attribute__((packed));
+} __attribute__((aligned(4)));
const std::vector<C2FieldDescriptor> C2TestStruct_A::FieldList() {
return _FIELD_LIST;
}
const std::vector<C2FieldDescriptor> C2TestStruct_A::_FIELD_LIST =
{ { FD::INT32, 1, "s32", 0, 4 },
- { FD::INT64, 2, "s64", 4, 8 },
- { FD::UINT32, 1, "u32", 20, 4 },
- { FD::UINT64, 1, "u64", 24, 8 },
- { FD::FLOAT, 1, "fp", 32, 4 },
- { C2SizeStruct::TYPE, 3, "size", 36, 8 },
- { FD::BLOB, 100, "blob", 60, 1 },
- { FD::STRING, 100, "str", 160, 1 },
- { FD::BLOB, 100, "y-n", 260, 1 } };
+ { FD::INT64, 2, "s64", 8, 8 },
+ { FD::UINT32, 1, "u32", 24, 4 },
+ { FD::UINT64, 1, "u64", 32, 8 },
+ { FD::FLOAT, 1, "fp", 40, 4 },
+ { C2SizeStruct::TYPE, 3, "size", 44, 8 },
+ { FD::BLOB, 100, "blob", 68, 1 },
+ { FD::STRING, 100, "str", 168, 1 },
+ { FD::BLOB, 100, "y-n", 268, 1 } };
TEST_P(C2ParamTest_ParamFieldList, VerifyStruct) {
std::vector<C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::_FIELD_LIST;
@@ -198,11 +200,13 @@
struct C2TestAStruct {
int32_t signed32;
+ // 4-byte padding
int64_t signed64[2];
uint32_t unsigned32[1];
+ // 4-byte padding
uint64_t unsigned64;
float fp32;
- C2SizeStruct sz[3];
+ C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
uint8_t blob[100];
char string[100];
bool yesNo[100];
@@ -229,11 +233,13 @@
struct C2TestBStruct {
int32_t signed32;
+ // 4-byte padding
int64_t signed64[2];
uint32_t unsigned32[1];
+ // 4-byte padding
uint64_t unsigned64;
float fp32;
- C2SizeStruct sz[3];
+ C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
uint8_t blob[100];
char string[100];
bool yesNo[100];
@@ -286,7 +292,7 @@
if (fields.size() > 1) {
EXPECT_EQ(2u, fields.size());
EXPECT_EQ(C2FieldDescriptor(FD::INT32, 1, "s32", 0, 4), fields[0]);
- EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", 4, this->FLEX_SIZE),
+ EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", alignof(TypeParam) /* offset */, this->FLEX_SIZE),
fields[1]);
} else {
EXPECT_EQ(1u, fields.size());
@@ -392,6 +398,7 @@
struct C2TestStruct_FlexEndS64 {
int32_t signed32;
+ // 4-byte padding
int64_t mSigned64Flex[];
const static std::vector<C2FieldDescriptor> _FIELD_LIST;
@@ -406,7 +413,7 @@
}
const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndS64::_FIELD_LIST = {
{ FD::INT32, 1, "s32", 0, 4 },
- { FD::INT64, 0, "flex", 4, 8 },
+ { FD::INT64, 0, "flex", 8, 8 },
};
struct C2TestFlexS64Struct {
@@ -419,6 +426,7 @@
struct C2TestFlexEndS64Struct {
int32_t signed32;
+ // 4-byte padding
int64_t mFlexSigned64[];
C2TestFlexEndS64Struct() {}
@@ -468,7 +476,7 @@
// enum : uint32_t { CORE_INDEX = C2TestStruct_FlexEndSize, FLEX_SIZE = 8 };
// typedef C2TestStruct_FlexEndSize _type;
// typedef C2SizeStruct FlexType;
-};
+} __attribute__((aligned(4)));
const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndSize::FieldList() {
return _FIELD_LIST;
@@ -539,14 +547,14 @@
TEST_F(C2ParamTest, FieldId) {
// pointer constructor
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->signed32));
- EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
- EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
- EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
- EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
- EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
- EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
- EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
- EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
+ EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
+ EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
+ EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
+ EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
+ EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
+ EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
+ EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
+ EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->signed32));
EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mFlexSize));
@@ -556,14 +564,14 @@
// member pointer constructor
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed32));
- EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
- EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
- EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
- EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
- EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
- EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
- EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
- EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
+ EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
+ EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
+ EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
+ EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
+ EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
+ EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
+ EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
+ EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::signed32));
EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mFlexSize));
@@ -573,14 +581,14 @@
// member pointer sans type pointer
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::signed32));
- EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::signed64));
- EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
- EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
- EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::fp32));
- EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::sz));
- EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::blob));
- EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::string));
- EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::yesNo));
+ EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&C2TestStruct_A::signed64));
+ EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
+ EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
+ EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&C2TestStruct_A::fp32));
+ EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&C2TestStruct_A::sz));
+ EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&C2TestStruct_A::blob));
+ EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&C2TestStruct_A::string));
+ EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&C2TestStruct_A::yesNo));
EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::signed32));
EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestFlexEndSizeStruct::mFlexSize));
@@ -594,14 +602,14 @@
// pointer constructor in C2Param
EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->signed32));
- EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
- EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
- EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
- EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
- EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
- EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
- EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
- EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
+ EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
+ EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
+ EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
+ EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
+ EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
+ EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
+ EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
+ EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.signed32));
EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mFlexSize));
@@ -611,14 +619,14 @@
// member pointer in C2Param
EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed32));
- EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
- EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
- EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
- EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
- EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
- EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
- EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
- EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
+ EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
+ EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
+ EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
+ EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
+ EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
+ EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
+ EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
+ EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
// NOTE: cannot use a member pointer for flex params due to introduction of 'm'
// EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.signed32));
@@ -2328,6 +2336,17 @@
static_assert(std::is_same<decltype(blobValue->m.value), uint8_t[]>::value, "should be uint8_t[]");
EXPECT_EQ(0, memcmp(blobValue->m.value, "ABCD\0", 6));
EXPECT_EQ(6u, blobValue->flexCount());
+ blobValue->setFlexCount(7u); // increasing the count does not change it
+ EXPECT_EQ(6u, blobValue->flexCount());
+ blobValue->setFlexCount(2u); // decreasing the count changes it to the new value
+ EXPECT_EQ(2u, blobValue->flexCount());
+ blobValue->setFlexCount(0u); // can decrease to 0 and blob remains valid
+ EXPECT_EQ(0u, blobValue->flexCount());
+ EXPECT_TRUE(*blobValue);
+ blobValue->invalidate(); // flex params can be invalidated => results in 0 size
+ EXPECT_FALSE(*blobValue);
+ EXPECT_EQ(0u, blobValue->size());
+
std::vector<C2FieldDescriptor> fields = blobValue->FieldList();
EXPECT_EQ(1u, fields.size());
EXPECT_EQ(FD::BLOB, fields.cbegin()->type());
diff --git a/media/codec2/tests/C2UtilTest.cpp b/media/codec2/tests/C2UtilTest.cpp
index 59cd313..2d66df1 100644
--- a/media/codec2/tests/C2UtilTest.cpp
+++ b/media/codec2/tests/C2UtilTest.cpp
@@ -78,7 +78,7 @@
{ "value2", Enum3Value2 },
{ "value4", Enum3Value4 },
{ "invalid", Invalid } });
- Enum3 e3;
+ Enum3 e3(Invalid);
C2FieldDescriptor::namedValuesFor(e3);
// upper case
diff --git a/media/codec2/tests/vndk/C2BufferTest.cpp b/media/codec2/tests/vndk/C2BufferTest.cpp
index 780994a..a9f8e17 100644
--- a/media/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/codec2/tests/vndk/C2BufferTest.cpp
@@ -765,4 +765,54 @@
}
}
+TEST_F(C2BufferTest, InfoBufferTest) {
+ constexpr size_t kCapacity = 524288u;
+
+ // allocate a linear block
+ std::shared_ptr<C2BlockPool> linearPool(makeLinearBlockPool());
+ std::shared_ptr<C2LinearBlock> linearBlock;
+ ASSERT_EQ(C2_OK, linearPool->fetchLinearBlock(
+ kCapacity,
+ { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+ &linearBlock));
+
+ C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+ kParamIndexNumber1, linearBlock->share(1024, kCapacity / 2, C2Fence()));
+ std::shared_ptr<C2InfoBuffer> spInfo(new C2InfoBuffer(info));
+ ASSERT_EQ(kParamIndexNumber1, spInfo->index().coreIndex());
+ ASSERT_TRUE(spInfo->index().isGlobal());
+ ASSERT_EQ(C2Param::INFO, spInfo->index().kind());
+ ASSERT_EQ(C2BufferData::LINEAR, spInfo->data().type());
+ ASSERT_EQ(1024, spInfo->data().linearBlocks()[0].offset());
+ ASSERT_EQ(kCapacity / 2, spInfo->data().linearBlocks()[0].size());
+ // handles must actually be identical after sharing into an info buffer
+ ASSERT_EQ(linearBlock->handle(), spInfo->data().linearBlocks()[0].handle());
+ ASSERT_EQ(linearPool->getAllocatorId(), spInfo->data().linearBlocks()[0].getAllocatorId());
+
+ C2InfoBuffer streamInfo = info.asStream(false /* output */, 1u);
+ ASSERT_EQ(kParamIndexNumber1, streamInfo.index().coreIndex());
+ ASSERT_TRUE(streamInfo.index().forStream());
+ ASSERT_TRUE(streamInfo.index().forInput());
+ ASSERT_EQ(1u, streamInfo.index().stream());
+ ASSERT_EQ(C2Param::INFO, streamInfo.index().kind());
+ ASSERT_EQ(C2BufferData::LINEAR, streamInfo.data().type());
+ ASSERT_EQ(1024, streamInfo.data().linearBlocks()[0].offset());
+ ASSERT_EQ(kCapacity / 2, streamInfo.data().linearBlocks()[0].size());
+ // handles must actually be identical after sharing into an info buffer
+ ASSERT_EQ(linearBlock->handle(), streamInfo.data().linearBlocks()[0].handle());
+ ASSERT_EQ(linearPool->getAllocatorId(), streamInfo.data().linearBlocks()[0].getAllocatorId());
+
+ C2InfoBuffer portInfo = streamInfo.asPort(true /* output */);
+ ASSERT_EQ(kParamIndexNumber1, portInfo.index().coreIndex());
+ ASSERT_TRUE(portInfo.index().forPort());
+ ASSERT_TRUE(portInfo.index().forOutput());
+ ASSERT_EQ(C2Param::INFO, portInfo.index().kind());
+ ASSERT_EQ(C2BufferData::LINEAR, portInfo.data().type());
+ ASSERT_EQ(1024, portInfo.data().linearBlocks()[0].offset());
+ ASSERT_EQ(kCapacity / 2, portInfo.data().linearBlocks()[0].size());
+ // handles must actually be identical after sharing into an info buffer
+ ASSERT_EQ(linearBlock->handle(), portInfo.data().linearBlocks()[0].handle());
+ ASSERT_EQ(linearPool->getAllocatorId(), portInfo.data().linearBlocks()[0].getAllocatorId());
+}
+
} // namespace android
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index f3e37e0..19afccf 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -7,13 +7,16 @@
// TODO: Remove this when this module is moved back to frameworks/av.
vendor_available: true,
+
+ min_sdk_version: "29",
}
// !!!DO NOT DEPEND ON THIS SHARED LIBRARY DIRECTLY!!!
// use libcodec2-impl-defaults instead
-cc_library_shared {
+cc_library {
name: "libcodec2_vndk",
vendor_available: true,
+ min_sdk_version: "29",
// TODO: b/147147883
double_loadable: true,
@@ -23,6 +26,7 @@
"C2AllocatorGralloc.cpp",
"C2Buffer.cpp",
"C2Config.cpp",
+ "C2DmaBufAllocator.cpp",
"C2PlatformStorePluginLoader.cpp",
"C2Store.cpp",
"platform/C2BqBuffer.cpp",
@@ -61,6 +65,7 @@
"libhardware",
"libhidlbase",
"libion",
+ "libdmabufheap",
"libfmq",
"liblog",
"libnativewindow",
@@ -87,6 +92,8 @@
"libcodec2_vndk",
"libutils",
],
+
+ min_sdk_version: "29",
}
// public dependency for implementing Codec 2 framework utilities
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 50c9e59..6340cba 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -17,6 +17,8 @@
// #define LOG_NDEBUG 0
#define LOG_TAG "C2AllocatorBlob"
+#include <set>
+
#include <C2AllocatorBlob.h>
#include <C2PlatformSupport.h>
@@ -67,6 +69,10 @@
private:
const std::shared_ptr<C2GraphicAllocation> mGraphicAllocation;
const C2Allocator::id_t mAllocatorId;
+
+ std::mutex mMapLock;
+ std::multiset<std::pair<size_t, size_t>> mMappedOffsetSize;
+ uint8_t *mMappedAddr;
};
C2AllocationBlob::C2AllocationBlob(
@@ -74,20 +80,74 @@
C2Allocator::id_t allocatorId)
: C2LinearAllocation(capacity),
mGraphicAllocation(std::move(graphicAllocation)),
- mAllocatorId(allocatorId) {}
+ mAllocatorId(allocatorId),
+ mMappedAddr(nullptr) {}
-C2AllocationBlob::~C2AllocationBlob() {}
+C2AllocationBlob::~C2AllocationBlob() {
+ if (mMappedAddr) {
+ C2Rect rect(capacity(), kLinearBufferHeight);
+ mGraphicAllocation->unmap(&mMappedAddr, rect, nullptr);
+ }
+}
c2_status_t C2AllocationBlob::map(size_t offset, size_t size, C2MemoryUsage usage,
C2Fence* fence, void** addr /* nonnull */) {
+ *addr = nullptr;
+ if (size > capacity() || offset > capacity() || offset > capacity() - size) {
+ ALOGV("C2AllocationBlob: map: bad offset / size: offset=%zu size=%zu capacity=%u",
+ offset, size, capacity());
+ return C2_BAD_VALUE;
+ }
+ std::unique_lock<std::mutex> lock(mMapLock);
+ if (mMappedAddr) {
+ *addr = mMappedAddr + offset;
+ mMappedOffsetSize.insert({offset, size});
+ ALOGV("C2AllocationBlob: mapped from existing mapping: offset=%zu size=%zu capacity=%u",
+ offset, size, capacity());
+ return C2_OK;
+ }
C2PlanarLayout layout;
- C2Rect rect = C2Rect(size, kLinearBufferHeight).at(offset, 0u);
- return mGraphicAllocation->map(rect, usage, fence, &layout, reinterpret_cast<uint8_t**>(addr));
+ C2Rect rect = C2Rect(capacity(), kLinearBufferHeight);
+ c2_status_t err = mGraphicAllocation->map(rect, usage, fence, &layout, &mMappedAddr);
+ if (err != C2_OK) {
+ ALOGV("C2AllocationBlob: map failed: offset=%zu size=%zu capacity=%u err=%d",
+ offset, size, capacity(), err);
+ mMappedAddr = nullptr;
+ return err;
+ }
+ *addr = mMappedAddr + offset;
+ mMappedOffsetSize.insert({offset, size});
+ ALOGV("C2AllocationBlob: new map succeeded: offset=%zu size=%zu capacity=%u",
+ offset, size, capacity());
+ return C2_OK;
}
c2_status_t C2AllocationBlob::unmap(void* addr, size_t size, C2Fence* fenceFd) {
- C2Rect rect(size, kLinearBufferHeight);
- return mGraphicAllocation->unmap(reinterpret_cast<uint8_t**>(&addr), rect, fenceFd);
+ std::unique_lock<std::mutex> lock(mMapLock);
+ uint8_t *u8Addr = static_cast<uint8_t *>(addr);
+ if (u8Addr < mMappedAddr || mMappedAddr + capacity() < u8Addr + size) {
+ ALOGV("C2AllocationBlob: unmap: Bad addr / size: addr=%p size=%zu capacity=%u",
+ addr, size, capacity());
+ return C2_BAD_VALUE;
+ }
+ auto it = mMappedOffsetSize.find(std::make_pair(u8Addr - mMappedAddr, size));
+ if (it == mMappedOffsetSize.end()) {
+ ALOGV("C2AllocationBlob: unrecognized map: addr=%p size=%zu capacity=%u",
+ addr, size, capacity());
+ return C2_BAD_VALUE;
+ }
+ mMappedOffsetSize.erase(it);
+ if (!mMappedOffsetSize.empty()) {
+ ALOGV("C2AllocationBlob: still maintain mapping: addr=%p size=%zu capacity=%u",
+ addr, size, capacity());
+ return C2_OK;
+ }
+ C2Rect rect(capacity(), kLinearBufferHeight);
+ c2_status_t err = mGraphicAllocation->unmap(&mMappedAddr, rect, fenceFd);
+ ALOGV("C2AllocationBlob: last unmap: addr=%p size=%zu capacity=%u err=%d",
+ addr, size, capacity(), err);
+ mMappedAddr = nullptr;
+ return err;
}
/* ====================================== BLOB ALLOCATOR ====================================== */
@@ -175,12 +235,12 @@
}
// static
-bool C2AllocatorBlob::isValid(const C2Handle* const o) {
+bool C2AllocatorBlob::CheckHandle(const C2Handle* const o) {
size_t capacity;
// Distinguish C2Handle purely allocated by C2AllocatorGralloc, or one allocated through
// C2AllocatorBlob, by checking the handle's height is 1, and its format is
// PixelFormat::BLOB by GetCapacityFromHandle().
- return C2AllocatorGralloc::isValid(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
+ return C2AllocatorGralloc::CheckHandle(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
}
} // namespace android
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index e1e1377..59471a2 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -25,6 +25,7 @@
#include <hardware/gralloc.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
+#include <ui/Rect.h>
#include <C2AllocatorGralloc.h>
#include <C2Buffer.h>
@@ -103,7 +104,7 @@
const static uint32_t MAGIC = '\xc2gr\x00';
static
- const ExtraData* getExtraData(const C2Handle *const handle) {
+ const ExtraData* GetExtraData(const C2Handle *const handle) {
if (handle == nullptr
|| native_handle_is_invalid(handle)
|| handle->numInts < NUM_INTS) {
@@ -114,23 +115,23 @@
}
static
- ExtraData *getExtraData(C2Handle *const handle) {
- return const_cast<ExtraData *>(getExtraData(const_cast<const C2Handle *const>(handle)));
+ ExtraData *GetExtraData(C2Handle *const handle) {
+ return const_cast<ExtraData *>(GetExtraData(const_cast<const C2Handle *const>(handle)));
}
public:
void getIgbpData(uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) const {
- const ExtraData *ed = getExtraData(this);
+ const ExtraData *ed = GetExtraData(this);
*generation = ed->generation;
*igbp_id = unsigned(ed->igbp_id_lo) | uint64_t(unsigned(ed->igbp_id_hi)) << 32;
*igbp_slot = ed->igbp_slot;
}
- static bool isValid(const C2Handle *const o) {
+ static bool IsValid(const C2Handle *const o) {
if (o == nullptr) { // null handle is always valid
return true;
}
- const ExtraData *xd = getExtraData(o);
+ const ExtraData *xd = GetExtraData(o);
// we cannot validate width/height/format/usage without accessing gralloc driver
return xd != nullptr && xd->magic == MAGIC;
}
@@ -152,7 +153,7 @@
native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
if (res != nullptr) {
memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
- *getExtraData(res) = xd;
+ *GetExtraData(res) = xd;
}
return reinterpret_cast<C2HandleGralloc *>(res);
}
@@ -180,10 +181,10 @@
static bool MigrateNativeHandle(
native_handle_t *handle,
uint32_t generation, uint64_t igbp_id, uint32_t igbp_slot) {
- if (handle == nullptr || !isValid(handle)) {
+ if (handle == nullptr || !IsValid(handle)) {
return false;
}
- ExtraData *ed = getExtraData(handle);
+ ExtraData *ed = GetExtraData(handle);
if (!ed) return false;
ed->generation = generation;
ed->igbp_id_lo = uint32_t(igbp_id & 0xFFFFFFFF);
@@ -195,7 +196,7 @@
static native_handle_t* UnwrapNativeHandle(
const C2Handle *const handle) {
- const ExtraData *xd = getExtraData(handle);
+ const ExtraData *xd = GetExtraData(handle);
if (xd == nullptr || xd->magic != MAGIC) {
return nullptr;
}
@@ -211,7 +212,7 @@
uint32_t *width, uint32_t *height, uint32_t *format,
uint64_t *usage, uint32_t *stride,
uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
- const ExtraData *xd = getExtraData(handle);
+ const ExtraData *xd = GetExtraData(handle);
if (xd == nullptr) {
return nullptr;
}
@@ -253,7 +254,7 @@
virtual ~C2AllocationGralloc() override;
virtual c2_status_t map(
- C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+ C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
virtual c2_status_t unmap(
uint8_t **addr /* nonnull */, C2Rect rect, C2Fence *fence /* nullable */) override;
@@ -336,8 +337,12 @@
}
c2_status_t C2AllocationGralloc::map(
- C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+ C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) {
+ const Rect rect{(int32_t)c2Rect.left, (int32_t)c2Rect.top,
+ (int32_t)(c2Rect.left + c2Rect.width) /* right */,
+ (int32_t)(c2Rect.top + c2Rect.height) /* bottom */};
+
uint64_t grallocUsage = static_cast<C2AndroidMemoryUsage>(usage).asGrallocUsage();
ALOGV("mapping buffer with usage %#llx => %#llx",
(long long)usage.expected, (long long)grallocUsage);
@@ -386,10 +391,7 @@
void *pointer = nullptr;
// TODO: fence
status_t err = GraphicBufferMapper::get().lock(
- const_cast<native_handle_t *>(mBuffer), grallocUsage,
- { (int32_t)rect.left, (int32_t)rect.top,
- (int32_t)rect.width, (int32_t)rect.height },
- &pointer);
+ const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
if (err) {
ALOGE("failed transaction: lock(RGBA_1010102)");
return C2_CORRUPTED;
@@ -464,10 +466,7 @@
void *pointer = nullptr;
// TODO: fence
status_t err = GraphicBufferMapper::get().lock(
- const_cast<native_handle_t*>(mBuffer), grallocUsage,
- { (int32_t)rect.left, (int32_t)rect.top,
- (int32_t)rect.width, (int32_t)rect.height },
- &pointer);
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
if (err) {
ALOGE("failed transaction: lock(RGBA_8888)");
return C2_CORRUPTED;
@@ -524,10 +523,7 @@
void *pointer = nullptr;
// TODO: fence
status_t err = GraphicBufferMapper::get().lock(
- const_cast<native_handle_t*>(mBuffer), grallocUsage,
- { (int32_t)rect.left, (int32_t)rect.top,
- (int32_t)rect.width, (int32_t)rect.height },
- &pointer);
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
if (err) {
ALOGE("failed transaction: lock(BLOB)");
return C2_CORRUPTED;
@@ -544,10 +540,7 @@
android_ycbcr ycbcrLayout;
status_t err = GraphicBufferMapper::get().lockYCbCr(
- const_cast<native_handle_t*>(mBuffer), grallocUsage,
- { (int32_t)rect.left, (int32_t)rect.top,
- (int32_t)rect.width, (int32_t)rect.height },
- &ycbcrLayout);
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
if (err) {
ALOGE("failed transaction: lockYCbCr");
return C2_CORRUPTED;
@@ -784,8 +777,9 @@
return mImpl->status();
}
-bool C2AllocatorGralloc::isValid(const C2Handle* const o) {
- return C2HandleGralloc::isValid(o);
+// static
+bool C2AllocatorGralloc::CheckHandle(const C2Handle* const o) {
+ return C2HandleGralloc::IsValid(o);
}
} // namespace android
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 6d27a02..85623b8 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -73,7 +73,7 @@
};
// static
-bool C2HandleIon::isValid(const C2Handle * const o) {
+bool C2HandleIon::IsValid(const C2Handle * const o) {
if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
return false;
}
@@ -579,7 +579,7 @@
return mInit;
}
- if (!C2HandleIon::isValid(handle)) {
+ if (!C2HandleIon::IsValid(handle)) {
return C2_BAD_VALUE;
}
@@ -596,9 +596,8 @@
return ret;
}
-bool C2AllocatorIon::isValid(const C2Handle* const o) {
- return C2HandleIon::isValid(o);
+bool C2AllocatorIon::CheckHandle(const C2Handle* const o) {
+ return C2HandleIon::IsValid(o);
}
} // namespace android
-
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 0b08f31..143355f 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -106,6 +106,7 @@
class BufferDataBuddy : public C2BufferData {
using C2BufferData::C2BufferData;
friend class ::C2Buffer;
+ friend class ::C2InfoBuffer;
};
} // namespace
@@ -396,26 +397,18 @@
std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
const C2Handle *handle) {
// TODO: get proper allocator? and mutex?
- static std::unique_ptr<C2Allocator> sAllocator = []{
- std::unique_ptr<C2Allocator> allocator;
- if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
- android::C2PlatformAllocatorStore::BLOB) {
- allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
- } else {
- allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
- }
+ static std::shared_ptr<C2Allocator> sAllocator = []{
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+ allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
return allocator;
}();
if (sAllocator == nullptr)
return nullptr;
- bool isValidHandle = false;
- if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
- isValidHandle = C2AllocatorBlob::isValid(handle);
- } else {
- isValidHandle = C2AllocatorIon::isValid(handle);
- }
+ bool isValidHandle = sAllocator->checkHandle(handle);
std::shared_ptr<C2LinearAllocation> alloc;
if (isValidHandle) {
@@ -431,26 +424,18 @@
std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
const C2Handle *cHandle, const std::shared_ptr<BufferPoolData> &data) {
// TODO: get proper allocator? and mutex?
- static std::unique_ptr<C2Allocator> sAllocator = []{
- std::unique_ptr<C2Allocator> allocator;
- if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
- android::C2PlatformAllocatorStore::BLOB) {
- allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
- } else {
- allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
- }
+ static std::shared_ptr<C2Allocator> sAllocator = []{
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+ allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
return allocator;
}();
if (sAllocator == nullptr)
return nullptr;
- bool isValidHandle = false;
- if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
- isValidHandle = C2AllocatorBlob::isValid(cHandle);
- } else {
- isValidHandle = C2AllocatorIon::isValid(cHandle);
- }
+ bool isValidHandle = sAllocator->checkHandle(cHandle);
std::shared_ptr<C2LinearAllocation> alloc;
if (isValidHandle) {
@@ -1148,7 +1133,7 @@
static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
std::shared_ptr<C2GraphicAllocation> alloc;
- if (C2AllocatorGralloc::isValid(cHandle)) {
+ if (sAllocator->isValid(cHandle)) {
c2_status_t err = sAllocator->priorGraphicAllocation(cHandle, &alloc);
const std::shared_ptr<C2PooledBlockPoolData> poolData =
std::make_shared<C2PooledBlockPoolData>(data);
@@ -1185,6 +1170,7 @@
type_t mType;
std::vector<C2ConstLinearBlock> mLinearBlocks;
std::vector<C2ConstGraphicBlock> mGraphicBlocks;
+ friend class C2InfoBuffer;
};
C2BufferData::C2BufferData(const std::vector<C2ConstLinearBlock> &blocks) : mImpl(new Impl(blocks)) {}
@@ -1200,6 +1186,35 @@
return mImpl->graphicBlocks();
}
+C2InfoBuffer::C2InfoBuffer(
+ C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks)
+ : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+ C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks)
+ : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+ C2Param::Index index, const C2BufferData &data)
+ : mIndex(index), mData(data) {
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateLinearBuffer(
+ C2Param::CoreIndex index, const C2ConstLinearBlock &block) {
+ return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+ { block });
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateGraphicBuffer(
+ C2Param::CoreIndex index, const C2ConstGraphicBlock &block) {
+ return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+ { block });
+}
+
class C2Buffer::Impl {
public:
Impl(C2Buffer *thiz, const std::vector<C2ConstLinearBlock> &blocks)
@@ -1330,4 +1345,3 @@
std::shared_ptr<C2Buffer> C2Buffer::CreateGraphicBuffer(const C2ConstGraphicBlock &block) {
return std::shared_ptr<C2Buffer>(new C2Buffer({ block }));
}
-
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
new file mode 100644
index 0000000..59e82e2
--- /dev/null
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -0,0 +1,401 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2DmaBufAllocator"
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <C2Debug.h>
+#include <C2DmaBufAllocator.h>
+#include <C2ErrnoUtils.h>
+#include <linux/ion.h>
+#include <sys/mman.h>
+#include <unistd.h> // getpagesize, size_t, close, dup
+#include <utils/Log.h>
+
+#include <list>
+
+#ifdef __ANDROID_APEX__
+#include <android-base/properties.h>
+#endif
+
+namespace android {
+
+namespace {
+constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+}
+
+/* =========================== BUFFER HANDLE =========================== */
+/**
+ * Buffer handle
+ *
+ * Stores dmabuf fd & metadata
+ *
+ * This handle will not capture mapped fd-s as updating that would require a
+ * global mutex.
+ */
+
+struct C2HandleBuf : public C2Handle {
+ C2HandleBuf(int bufferFd, size_t size)
+ : C2Handle(cHeader),
+ mFds{bufferFd},
+ mInts{int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+ static bool IsValid(const C2Handle* const o);
+
+ int bufferFd() const { return mFds.mBuffer; }
+ size_t size() const {
+ return size_t(unsigned(mInts.mSizeLo)) | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+ }
+
+ protected:
+ struct {
+ int mBuffer; // dmabuf fd
+ } mFds;
+ struct {
+ int mSizeLo; // low 32-bits of size
+ int mSizeHi; // high 32-bits of size
+ int mMagic;
+ } mInts;
+
+ private:
+ typedef C2HandleBuf _type;
+ enum {
+ kMagic = '\xc2io\x00',
+ numFds = sizeof(mFds) / sizeof(int),
+ numInts = sizeof(mInts) / sizeof(int),
+ version = sizeof(C2Handle)
+ };
+ // constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+ const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleBuf::cHeader = {
+ C2HandleBuf::version, C2HandleBuf::numFds, C2HandleBuf::numInts, {}};
+
+// static
+bool C2HandleBuf::IsValid(const C2Handle* const o) {
+ if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+ return false;
+ }
+ const C2HandleBuf* other = static_cast<const C2HandleBuf*>(o);
+ return other->mInts.mMagic == kMagic;
+}
+
+/* =========================== DMABUF ALLOCATION =========================== */
+class C2DmaBufAllocation : public C2LinearAllocation {
+ public:
+ /* Interface methods */
+ virtual c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+ void** addr /* nonnull */) override;
+ virtual c2_status_t unmap(void* addr, size_t size, C2Fence* fenceFd) override;
+ virtual ~C2DmaBufAllocation() override;
+ virtual const C2Handle* handle() const override;
+ virtual id_t getAllocatorId() const override;
+ virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
+
+ // internal methods
+ C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name, unsigned flags,
+ C2Allocator::id_t id);
+ C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
+
+ c2_status_t status() const;
+
+ protected:
+ virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+ int prot, int flags, void** base, void** addr) {
+ c2_status_t err = C2_OK;
+ *base = mmap(nullptr, mapSize, prot, flags, mHandle.bufferFd(), mapOffset);
+ ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+ "returned (%d)",
+ mapSize, prot, flags, mHandle.bufferFd(), mapOffset, errno);
+ if (*base == MAP_FAILED) {
+ *base = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(errno);
+ } else {
+ *addr = (uint8_t*)*base + alignmentBytes;
+ }
+ return err;
+ }
+
+ C2Allocator::id_t mId;
+ C2HandleBuf mHandle;
+ c2_status_t mInit;
+ struct Mapping {
+ void* addr;
+ size_t alignmentBytes;
+ size_t size;
+ };
+ std::list<Mapping> mMappings;
+
+ // TODO: we could make this encapsulate shared_ptr and copiable
+ C2_DO_NOT_COPY(C2DmaBufAllocation);
+};
+
+c2_status_t C2DmaBufAllocation::map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+ void** addr) {
+ (void)fence; // TODO: wait for fence
+ *addr = nullptr;
+ if (!mMappings.empty()) {
+ ALOGV("multiple map");
+ // TODO: technically we should return DUPLICATE here, but our block views
+ // don't actually unmap, so we end up remapping the buffer multiple times.
+ //
+ // return C2_DUPLICATE;
+ }
+ if (size == 0) {
+ return C2_BAD_VALUE;
+ }
+
+ int prot = PROT_NONE;
+ int flags = MAP_SHARED;
+ if (usage.expected & C2MemoryUsage::CPU_READ) {
+ prot |= PROT_READ;
+ }
+ if (usage.expected & C2MemoryUsage::CPU_WRITE) {
+ prot |= PROT_WRITE;
+ }
+
+ size_t alignmentBytes = offset % PAGE_SIZE;
+ size_t mapOffset = offset - alignmentBytes;
+ size_t mapSize = size + alignmentBytes;
+ Mapping map = {nullptr, alignmentBytes, mapSize};
+
+ c2_status_t err =
+ mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
+ if (map.addr) {
+ mMappings.push_back(map);
+ }
+ return err;
+}
+
+c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
+ if (mMappings.empty()) {
+ ALOGD("tried to unmap unmapped buffer");
+ return C2_NOT_FOUND;
+ }
+ for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+ if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
+ size + it->alignmentBytes != it->size) {
+ continue;
+ }
+ int err = munmap(it->addr, it->size);
+ if (err != 0) {
+ ALOGD("munmap failed");
+ return c2_map_errno<EINVAL>(errno);
+ }
+ if (fence) {
+ *fence = C2Fence(); // not using fences
+ }
+ (void)mMappings.erase(it);
+ ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+ return C2_OK;
+ }
+ ALOGD("unmap failed to find specified map");
+ return C2_BAD_VALUE;
+}
+
+c2_status_t C2DmaBufAllocation::status() const {
+ return mInit;
+}
+
+C2Allocator::id_t C2DmaBufAllocation::getAllocatorId() const {
+ return mId;
+}
+
+bool C2DmaBufAllocation::equals(const std::shared_ptr<C2LinearAllocation>& other) const {
+ if (!other || other->getAllocatorId() != getAllocatorId()) {
+ return false;
+ }
+ // get user handle to compare objects
+ std::shared_ptr<C2DmaBufAllocation> otherAsBuf =
+ std::static_pointer_cast<C2DmaBufAllocation>(other);
+ return mHandle.bufferFd() == otherAsBuf->mHandle.bufferFd();
+}
+
+const C2Handle* C2DmaBufAllocation::handle() const {
+ return &mHandle;
+}
+
+C2DmaBufAllocation::~C2DmaBufAllocation() {
+ if (!mMappings.empty()) {
+ ALOGD("Dangling mappings!");
+ for (const Mapping& map : mMappings) {
+ int err = munmap(map.addr, map.size);
+ if (err) ALOGD("munmap failed");
+ }
+ }
+ if (mInit == C2_OK) {
+ native_handle_close(&mHandle);
+ }
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name,
+ unsigned flags, C2Allocator::id_t id)
+ : C2LinearAllocation(size), mHandle(-1, 0) {
+ int bufferFd = -1;
+ int ret = 0;
+
+ bufferFd = alloc.Alloc(heap_name, size, flags);
+ if (bufferFd < 0) ret = bufferFd;
+
+ mHandle = C2HandleBuf(bufferFd, size);
+ mId = id;
+ mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id)
+ : C2LinearAllocation(size), mHandle(-1, 0) {
+ mHandle = C2HandleBuf(shareFd, size);
+ mId = id;
+ mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(0));
+}
+
+/* =========================== DMABUF ALLOCATOR =========================== */
+C2DmaBufAllocator::C2DmaBufAllocator(id_t id) : mInit(C2_OK) {
+ C2MemoryUsage minUsage = {0, 0};
+ C2MemoryUsage maxUsage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ Traits traits = {"android.allocator.dmabuf", id, LINEAR, minUsage, maxUsage};
+ mTraits = std::make_shared<Traits>(traits);
+}
+
+C2Allocator::id_t C2DmaBufAllocator::getId() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits->id;
+}
+
+C2String C2DmaBufAllocator::getName() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits->name;
+}
+
+std::shared_ptr<const C2Allocator::Traits> C2DmaBufAllocator::getTraits() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits;
+}
+
+void C2DmaBufAllocator::setUsageMapper(const UsageMapperFn& mapper __unused, uint64_t minUsage,
+ uint64_t maxUsage, uint64_t blockSize) {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ mUsageMapperCache.clear();
+ mUsageMapperLru.clear();
+ mUsageMapper = mapper;
+ Traits traits = {mTraits->name, mTraits->id, LINEAR, C2MemoryUsage(minUsage),
+ C2MemoryUsage(maxUsage)};
+ mTraits = std::make_shared<Traits>(traits);
+ mBlockSize = blockSize;
+}
+
+std::size_t C2DmaBufAllocator::MapperKeyHash::operator()(const MapperKey& k) const {
+ return std::hash<uint64_t>{}(k.first) ^ std::hash<size_t>{}(k.second);
+}
+
+c2_status_t C2DmaBufAllocator::mapUsage(C2MemoryUsage usage, size_t capacity, C2String* heap_name,
+ unsigned* flags) {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ c2_status_t res = C2_OK;
+ // align capacity
+ capacity = (capacity + mBlockSize - 1) & ~(mBlockSize - 1);
+ MapperKey key = std::make_pair(usage.expected, capacity);
+ auto entry = mUsageMapperCache.find(key);
+ if (entry == mUsageMapperCache.end()) {
+ if (mUsageMapper) {
+ res = mUsageMapper(usage, capacity, heap_name, flags);
+ } else {
+ // No system-uncached yet, so disabled for now
+ if (0 && !(usage.expected & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE)))
+ *heap_name = "system-uncached";
+ else
+ *heap_name = "system";
+ *flags = 0;
+ res = C2_NO_INIT;
+ }
+ // add usage to cache
+ MapperValue value = std::make_tuple(*heap_name, *flags, res);
+ mUsageMapperLru.emplace_front(key, value);
+ mUsageMapperCache.emplace(std::make_pair(key, mUsageMapperLru.begin()));
+ if (mUsageMapperCache.size() > USAGE_LRU_CACHE_SIZE) {
+ // remove LRU entry
+ MapperKey lruKey = mUsageMapperLru.front().first;
+ mUsageMapperCache.erase(lruKey);
+ mUsageMapperLru.pop_back();
+ }
+ } else {
+ // move entry to MRU
+ mUsageMapperLru.splice(mUsageMapperLru.begin(), mUsageMapperLru, entry->second);
+ const MapperValue& value = entry->second->second;
+ std::tie(*heap_name, *flags, res) = value;
+ }
+ return res;
+}
+
+c2_status_t C2DmaBufAllocator::newLinearAllocation(
+ uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation>* allocation) {
+ if (allocation == nullptr) {
+ return C2_BAD_VALUE;
+ }
+
+ allocation->reset();
+ if (mInit != C2_OK) {
+ return mInit;
+ }
+
+ C2String heap_name;
+ unsigned flags = 0;
+ c2_status_t ret = mapUsage(usage, capacity, &heap_name, &flags);
+ if (ret && ret != C2_NO_INIT) {
+ return ret;
+ }
+
+ std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
+ mBufferAllocator, capacity, heap_name, flags, getId());
+ ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ }
+ return ret;
+}
+
+c2_status_t C2DmaBufAllocator::priorLinearAllocation(
+ const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) {
+ *allocation = nullptr;
+ if (mInit != C2_OK) {
+ return mInit;
+ }
+
+ if (!C2HandleBuf::IsValid(handle)) {
+ return C2_BAD_VALUE;
+ }
+
+ // TODO: get capacity and validate it
+ const C2HandleBuf* h = static_cast<const C2HandleBuf*>(handle);
+ std::shared_ptr<C2DmaBufAllocation> alloc =
+ std::make_shared<C2DmaBufAllocation>(h->size(), h->bufferFd(), getId());
+ c2_status_t ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ native_handle_delete(
+ const_cast<native_handle_t*>(reinterpret_cast<const native_handle_t*>(handle)));
+ }
+ return ret;
+}
+
+// static
+bool C2DmaBufAllocator::CheckHandle(const C2Handle* const o) {
+ return C2HandleBuf::IsValid(o);
+}
+
+} // namespace android
diff --git a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
index 4c330e5..bee028a 100644
--- a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
+++ b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
@@ -33,7 +33,8 @@
} // unnamed
C2PlatformStorePluginLoader::C2PlatformStorePluginLoader(const char *libPath)
- : mCreateBlockPool(nullptr) {
+ : mCreateBlockPool(nullptr),
+ mCreateAllocator(nullptr) {
mLibHandle = dlopen(libPath, RTLD_NOW | RTLD_NODELETE);
if (mLibHandle == nullptr) {
ALOGD("Failed to load library: %s (%s)", libPath, dlerror());
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d16527e..1e907c1 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -21,6 +21,7 @@
#include <C2AllocatorBlob.h>
#include <C2AllocatorGralloc.h>
#include <C2AllocatorIon.h>
+#include <C2DmaBufAllocator.h>
#include <C2BufferPriv.h>
#include <C2BqBufferPriv.h>
#include <C2Component.h>
@@ -82,6 +83,7 @@
/// returns a shared-singleton ion allocator
std::shared_ptr<C2Allocator> fetchIonAllocator();
+ std::shared_ptr<C2Allocator> fetchDmaBufAllocator();
/// returns a shared-singleton gralloc allocator
std::shared_ptr<C2Allocator> fetchGrallocAllocator();
@@ -99,6 +101,20 @@
C2PlatformAllocatorStoreImpl::C2PlatformAllocatorStoreImpl() {
}
+static bool using_ion(void) {
+ static int cached_result = -1;
+
+ if (cached_result == -1) {
+ struct stat buffer;
+ cached_result = (stat("/dev/ion", &buffer) == 0);
+ if (cached_result)
+ ALOGD("Using ION\n");
+ else
+ ALOGD("Using DMABUF Heaps\n");
+ }
+ return (cached_result == 1);
+}
+
c2_status_t C2PlatformAllocatorStoreImpl::fetchAllocator(
id_t id, std::shared_ptr<C2Allocator> *const allocator) {
allocator->reset();
@@ -107,8 +123,11 @@
}
switch (id) {
// TODO: should we implement a generic registry for all, and use that?
- case C2PlatformAllocatorStore::ION:
- *allocator = fetchIonAllocator();
+ case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
+ if (using_ion())
+ *allocator = fetchIonAllocator();
+ else
+ *allocator = fetchDmaBufAllocator();
break;
case C2PlatformAllocatorStore::GRALLOC:
@@ -142,7 +161,9 @@
namespace {
std::mutex gIonAllocatorMutex;
+std::mutex gDmaBufAllocatorMutex;
std::weak_ptr<C2AllocatorIon> gIonAllocator;
+std::weak_ptr<C2DmaBufAllocator> gDmaBufAllocator;
void UseComponentStoreForIonAllocator(
const std::shared_ptr<C2AllocatorIon> allocator,
@@ -197,6 +218,65 @@
allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
}
+void UseComponentStoreForDmaBufAllocator(const std::shared_ptr<C2DmaBufAllocator> allocator,
+ std::shared_ptr<C2ComponentStore> store) {
+ C2DmaBufAllocator::UsageMapperFn mapper;
+ const size_t maxHeapNameLen = 128;
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected;
+ size_t blockSize = getpagesize();
+
+ // query min and max usage as well as block size via supported values
+ std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+ usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen);
+
+ std::vector<C2FieldSupportedValuesQuery> query = {
+ C2FieldSupportedValuesQuery::Possible(C2ParamField::Make(*usageInfo, usageInfo->m.usage)),
+ C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(*usageInfo, usageInfo->m.capacity)),
+ };
+ c2_status_t res = store->querySupportedValues_sm(query);
+ if (res == C2_OK) {
+ if (query[0].status == C2_OK) {
+ const C2FieldSupportedValues& fsv = query[0].values;
+ if (fsv.type == C2FieldSupportedValues::FLAGS && !fsv.values.empty()) {
+ minUsage = fsv.values[0].u64;
+ maxUsage = 0;
+ for (C2Value::Primitive v : fsv.values) {
+ maxUsage |= v.u64;
+ }
+ }
+ }
+ if (query[1].status == C2_OK) {
+ const C2FieldSupportedValues& fsv = query[1].values;
+ if (fsv.type == C2FieldSupportedValues::RANGE && fsv.range.step.u32 > 0) {
+ blockSize = fsv.range.step.u32;
+ }
+ }
+
+ mapper = [store](C2MemoryUsage usage, size_t capacity, C2String* heapName,
+ unsigned* flags) -> c2_status_t {
+ if (capacity > UINT32_MAX) {
+ return C2_BAD_VALUE;
+ }
+
+ std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+ usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen, usage.expected, capacity);
+ std::vector<std::unique_ptr<C2SettingResult>> failures; // TODO: remove
+
+ c2_status_t res = store->config_sm({&*usageInfo}, &failures);
+ if (res == C2_OK) {
+ *heapName = C2String(usageInfo->m.heapName);
+ *flags = usageInfo->m.allocFlags;
+ }
+
+ return res;
+ };
+ }
+
+ allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
+}
+
}
void C2PlatformAllocatorStoreImpl::setComponentStore(std::shared_ptr<C2ComponentStore> store) {
@@ -233,6 +313,22 @@
return allocator;
}
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchDmaBufAllocator() {
+ std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+ std::shared_ptr<C2DmaBufAllocator> allocator = gDmaBufAllocator.lock();
+ if (allocator == nullptr) {
+ std::shared_ptr<C2ComponentStore> componentStore;
+ {
+ std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
+ componentStore = _mComponentStore;
+ }
+ allocator = std::make_shared<C2DmaBufAllocator>(C2PlatformAllocatorStore::DMABUFHEAP);
+ UseComponentStoreForDmaBufAllocator(allocator, componentStore);
+ gDmaBufAllocator = allocator;
+ }
+ return allocator;
+}
+
std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchBlobAllocator() {
static std::mutex mutex;
static std::weak_ptr<C2Allocator> blobAllocator;
@@ -347,7 +443,7 @@
allocatorId = GetPreferredLinearAllocatorId(GetCodec2PoolMask());
}
switch(allocatorId) {
- case C2PlatformAllocatorStore::ION:
+ case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
res = allocatorStore->fetchAllocator(
C2PlatformAllocatorStore::ION, &allocator);
if (res == C2_OK) {
@@ -645,6 +741,7 @@
struct Interface : public C2InterfaceHelper {
std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+ std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
Interface(std::shared_ptr<C2ReflectorHelper> reflector)
: C2InterfaceHelper(reflector) {
@@ -680,7 +777,13 @@
me.set().minAlignment = 0;
#endif
return C2R::Ok();
- }
+ };
+
+ static C2R setDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+ strncpy(me.set().m.heapName, "system", me.v.flexCount());
+ me.set().m.allocFlags = 0;
+ return C2R::Ok();
+ };
};
addParameter(
@@ -695,6 +798,18 @@
})
.withSetter(Setter::setIonUsage)
.build());
+
+ addParameter(
+ DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+ .withDefault(C2StoreDmaBufUsageInfo::AllocShared(0))
+ .withFields({
+ C2F(mDmaBufUsageInfo, m.usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+ C2F(mDmaBufUsageInfo, m.capacity).inRange(0, UINT32_MAX, 1024),
+ C2F(mDmaBufUsageInfo, m.allocFlags).flags({}),
+ C2F(mDmaBufUsageInfo, m.heapName).any(),
+ })
+ .withSetter(Setter::setDmaBufUsage)
+ .build());
}
};
diff --git a/media/codec2/vndk/include/C2AllocatorBlob.h b/media/codec2/vndk/include/C2AllocatorBlob.h
index 89ce949..fc67af7 100644
--- a/media/codec2/vndk/include/C2AllocatorBlob.h
+++ b/media/codec2/vndk/include/C2AllocatorBlob.h
@@ -44,7 +44,12 @@
virtual ~C2AllocatorBlob() override;
- static bool isValid(const C2Handle* const o);
+ virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+ static bool CheckHandle(const C2Handle* const o);
+
+ // deprecated
+ static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
private:
std::shared_ptr<const Traits> mTraits;
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index ee7524e..578cf76 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -84,7 +84,12 @@
virtual ~C2AllocatorGralloc() override;
- static bool isValid(const C2Handle* const o);
+ virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+ static bool CheckHandle(const C2Handle* const o);
+
+ // deprecated
+ static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
private:
class Impl;
diff --git a/media/codec2/vndk/include/C2AllocatorIon.h b/media/codec2/vndk/include/C2AllocatorIon.h
index 1b2051f..6a49b7d 100644
--- a/media/codec2/vndk/include/C2AllocatorIon.h
+++ b/media/codec2/vndk/include/C2AllocatorIon.h
@@ -57,7 +57,12 @@
virtual ~C2AllocatorIon() override;
- static bool isValid(const C2Handle* const o);
+ virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+ static bool CheckHandle(const C2Handle* const o);
+
+ // deprecated
+ static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
/**
* Updates the usage mapper for subsequent new allocations, as well as the supported
diff --git a/media/codec2/vndk/include/C2DmaBufAllocator.h b/media/codec2/vndk/include/C2DmaBufAllocator.h
new file mode 100644
index 0000000..abb8307
--- /dev/null
+++ b/media/codec2/vndk/include/C2DmaBufAllocator.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <sys/stat.h> // stat
+
+#include <functional>
+#include <list>
+#include <mutex>
+#include <tuple>
+#include <unordered_map>
+
+namespace android {
+
+class C2DmaBufAllocator : public C2Allocator {
+ public:
+ virtual c2_status_t newLinearAllocation(
+ uint32_t capacity, C2MemoryUsage usage,
+ std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+ virtual c2_status_t priorLinearAllocation(
+ const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+ C2DmaBufAllocator(id_t id);
+
+ virtual c2_status_t status() const { return mInit; }
+
+ virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+ static bool CheckHandle(const C2Handle* const o);
+
+ virtual id_t getId() const override;
+
+ virtual C2String getName() const override;
+
+ virtual std::shared_ptr<const Traits> getTraits() const override;
+
+ // Usage mapper function used by the allocator
+ // (usage, capacity) => (heapName, flags)
+ //
+ // capacity is aligned to the default block-size (defaults to page size) to
+ // reduce caching overhead
+ typedef std::function<c2_status_t(C2MemoryUsage, size_t,
+ /* => */ C2String*, unsigned*)>
+ UsageMapperFn;
+
+ /**
+ * Updates the usage mapper for subsequent new allocations, as well as the
+ * supported minimum and maximum usage masks and default block-size to use
+ * for the mapper.
+ *
+ * \param mapper This method is called to map Codec 2.0 buffer usage
+ * to dmabuf heap name and flags required by the dma
+ * buf heap device
+ *
+ * \param minUsage Minimum buffer usage required for supported
+ * allocations (defaults to 0)
+ *
+ * \param maxUsage Maximum buffer usage supported by the ion allocator
+ * (defaults to SW_READ | SW_WRITE)
+ *
+ * \param blockSize Alignment used prior to calling |mapper| for the
+ * buffer capacity. This also helps reduce the size of
+ * cache required for caching mapper results.
+ * (defaults to the page size)
+ */
+ void setUsageMapper(const UsageMapperFn& mapper, uint64_t minUsage, uint64_t maxUsage,
+ uint64_t blockSize);
+
+ private:
+ c2_status_t mInit;
+ BufferAllocator mBufferAllocator;
+
+ c2_status_t mapUsage(C2MemoryUsage usage, size_t size,
+ /* => */ C2String* heap_name, unsigned* flags);
+
+ // this locks mTraits, mBlockSize, mUsageMapper, mUsageMapperLru and
+ // mUsageMapperCache
+ mutable std::mutex mUsageMapperLock;
+ std::shared_ptr<const Traits> mTraits;
+ size_t mBlockSize;
+ UsageMapperFn mUsageMapper;
+ typedef std::pair<uint64_t, size_t> MapperKey;
+ struct MapperKeyHash {
+ std::size_t operator()(const MapperKey&) const;
+ };
+ typedef std::tuple<C2String, unsigned, c2_status_t> MapperValue;
+ typedef std::pair<MapperKey, MapperValue> MapperKeyValue;
+ typedef std::list<MapperKeyValue>::iterator MapperKeyValuePointer;
+ std::list<MapperKeyValue> mUsageMapperLru;
+ std::unordered_map<MapperKey, MapperKeyValuePointer, MapperKeyHash> mUsageMapperCache;
+};
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index a14e0d3..4814494 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -47,6 +47,17 @@
*/
ION = PLATFORM_START,
+ /**
+ * ID of the DMA-Buf Heap (ion replacement) backed platform allocator.
+ *
+ * C2Handle consists of:
+ * fd shared dmabuf buffer handle
+ * int size (lo 32 bits)
+ * int size (hi 32 bits)
+ * int magic '\xc2io\x00'
+ */
+ DMABUFHEAP = ION,
+
/**
* ID of the gralloc backed platform allocator.
*
diff --git a/media/codec2/vndk/internal/C2HandleIonInternal.h b/media/codec2/vndk/internal/C2HandleIonInternal.h
index c0e1d83..c67698c 100644
--- a/media/codec2/vndk/internal/C2HandleIonInternal.h
+++ b/media/codec2/vndk/internal/C2HandleIonInternal.h
@@ -28,7 +28,10 @@
mFds{ bufferFd },
mInts{ int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic } { }
- static bool isValid(const C2Handle * const o);
+ static bool IsValid(const C2Handle * const o);
+
+ // deprecated
+ static bool isValid(const C2Handle * const o) { return IsValid(o); }
int bufferFd() const { return mFds.mBuffer; }
size_t size() const {
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 62936f6..fff12c4 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -223,7 +223,7 @@
static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
std::shared_ptr<C2GraphicAllocation> alloc;
- if (C2AllocatorGralloc::isValid(handle)) {
+ if (C2AllocatorGralloc::CheckHandle(handle)) {
uint32_t width;
uint32_t height;
uint32_t format;
diff --git a/media/codecs/amrnb/TEST_MAPPING b/media/codecs/amrnb/TEST_MAPPING
new file mode 100644
index 0000000..343d08a
--- /dev/null
+++ b/media/codecs/amrnb/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/codecs/amrnb
+{
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "AmrnbDecoderTest"},
+ { "name": "AmrnbEncoderTest"}
+ ]
+}
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/codecs/amrnb/common/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/Android.bp
rename to media/codecs/amrnb/common/Android.bp
diff --git a/media/libstagefright/codecs/amrnb/common/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/common/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/common/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/common/NOTICE b/media/codecs/amrnb/common/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/NOTICE
rename to media/codecs/amrnb/common/NOTICE
diff --git a/media/libstagefright/codecs/amrnb/common/include/abs_s.h b/media/codecs/amrnb/common/include/abs_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/abs_s.h
rename to media/codecs/amrnb/common/include/abs_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/add.h b/media/codecs/amrnb/common/include/add.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/add.h
rename to media/codecs/amrnb/common/include/add.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/az_lsp.h b/media/codecs/amrnb/common/include/az_lsp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/az_lsp.h
rename to media/codecs/amrnb/common/include/az_lsp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op.h b/media/codecs/amrnb/common/include/basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op.h
rename to media/codecs/amrnb/common/include/basic_op.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h b/media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
rename to media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_arm_v5.h b/media/codecs/amrnb/common/include/basic_op_arm_v5.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_arm_v5.h
rename to media/codecs/amrnb/common/include/basic_op_arm_v5.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/codecs/amrnb/common/include/basic_op_c_equivalent.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h
rename to media/codecs/amrnb/common/include/basic_op_c_equivalent.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basicop_malloc.h b/media/codecs/amrnb/common/include/basicop_malloc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basicop_malloc.h
rename to media/codecs/amrnb/common/include/basicop_malloc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bitno_tab.h b/media/codecs/amrnb/common/include/bitno_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bitno_tab.h
rename to media/codecs/amrnb/common/include/bitno_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bitreorder_tab.h b/media/codecs/amrnb/common/include/bitreorder_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bitreorder_tab.h
rename to media/codecs/amrnb/common/include/bitreorder_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bits2prm.h b/media/codecs/amrnb/common/include/bits2prm.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bits2prm.h
rename to media/codecs/amrnb/common/include/bits2prm.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/cnst.h b/media/codecs/amrnb/common/include/cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/cnst.h
rename to media/codecs/amrnb/common/include/cnst.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/cnst_vad.h b/media/codecs/amrnb/common/include/cnst_vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/cnst_vad.h
rename to media/codecs/amrnb/common/include/cnst_vad.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/copy.h b/media/codecs/amrnb/common/include/copy.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/copy.h
rename to media/codecs/amrnb/common/include/copy.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_gain_c.h b/media/codecs/amrnb/common/include/d_gain_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_gain_c.h
rename to media/codecs/amrnb/common/include/d_gain_c.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_gain_p.h b/media/codecs/amrnb/common/include/d_gain_p.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_gain_p.h
rename to media/codecs/amrnb/common/include/d_gain_p.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_plsf.h b/media/codecs/amrnb/common/include/d_plsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_plsf.h
rename to media/codecs/amrnb/common/include/d_plsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/div_32.h b/media/codecs/amrnb/common/include/div_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/div_32.h
rename to media/codecs/amrnb/common/include/div_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/div_s.h b/media/codecs/amrnb/common/include/div_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/div_s.h
rename to media/codecs/amrnb/common/include/div_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/dtx_common_def.h b/media/codecs/amrnb/common/include/dtx_common_def.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/dtx_common_def.h
rename to media/codecs/amrnb/common/include/dtx_common_def.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/extract_h.h b/media/codecs/amrnb/common/include/extract_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/extract_h.h
rename to media/codecs/amrnb/common/include/extract_h.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/extract_l.h b/media/codecs/amrnb/common/include/extract_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/extract_l.h
rename to media/codecs/amrnb/common/include/extract_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/frame.h b/media/codecs/amrnb/common/include/frame.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/frame.h
rename to media/codecs/amrnb/common/include/frame.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/frame_type_3gpp.h b/media/codecs/amrnb/common/include/frame_type_3gpp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/frame_type_3gpp.h
rename to media/codecs/amrnb/common/include/frame_type_3gpp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gc_pred.h b/media/codecs/amrnb/common/include/gc_pred.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gc_pred.h
rename to media/codecs/amrnb/common/include/gc_pred.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gmed_n.h b/media/codecs/amrnb/common/include/gmed_n.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gmed_n.h
rename to media/codecs/amrnb/common/include/gmed_n.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gsm_amr_typedefs.h b/media/codecs/amrnb/common/include/gsm_amr_typedefs.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gsm_amr_typedefs.h
rename to media/codecs/amrnb/common/include/gsm_amr_typedefs.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/int_lpc.h b/media/codecs/amrnb/common/include/int_lpc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/int_lpc.h
rename to media/codecs/amrnb/common/include/int_lpc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/int_lsf.h b/media/codecs/amrnb/common/include/int_lsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/int_lsf.h
rename to media/codecs/amrnb/common/include/int_lsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h b/media/codecs/amrnb/common/include/inv_sqrt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h
rename to media/codecs/amrnb/common/include/inv_sqrt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_abs.h b/media/codecs/amrnb/common/include/l_abs.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_abs.h
rename to media/codecs/amrnb/common/include/l_abs.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_add.h b/media/codecs/amrnb/common/include/l_add.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_add.h
rename to media/codecs/amrnb/common/include/l_add.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_add_c.h b/media/codecs/amrnb/common/include/l_add_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_add_c.h
rename to media/codecs/amrnb/common/include/l_add_c.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_comp.h b/media/codecs/amrnb/common/include/l_comp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_comp.h
rename to media/codecs/amrnb/common/include/l_comp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_deposit_h.h b/media/codecs/amrnb/common/include/l_deposit_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_deposit_h.h
rename to media/codecs/amrnb/common/include/l_deposit_h.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_deposit_l.h b/media/codecs/amrnb/common/include/l_deposit_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_deposit_l.h
rename to media/codecs/amrnb/common/include/l_deposit_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_extract.h b/media/codecs/amrnb/common/include/l_extract.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_extract.h
rename to media/codecs/amrnb/common/include/l_extract.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_mac.h b/media/codecs/amrnb/common/include/l_mac.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_mac.h
rename to media/codecs/amrnb/common/include/l_mac.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_msu.h b/media/codecs/amrnb/common/include/l_msu.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_msu.h
rename to media/codecs/amrnb/common/include/l_msu.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_mult.h b/media/codecs/amrnb/common/include/l_mult.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_mult.h
rename to media/codecs/amrnb/common/include/l_mult.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_negate.h b/media/codecs/amrnb/common/include/l_negate.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_negate.h
rename to media/codecs/amrnb/common/include/l_negate.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shl.h b/media/codecs/amrnb/common/include/l_shl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shl.h
rename to media/codecs/amrnb/common/include/l_shl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shr.h b/media/codecs/amrnb/common/include/l_shr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shr.h
rename to media/codecs/amrnb/common/include/l_shr.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shr_r.h b/media/codecs/amrnb/common/include/l_shr_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shr_r.h
rename to media/codecs/amrnb/common/include/l_shr_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_sub.h b/media/codecs/amrnb/common/include/l_sub.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_sub.h
rename to media/codecs/amrnb/common/include/l_sub.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/log2.h b/media/codecs/amrnb/common/include/log2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/log2.h
rename to media/codecs/amrnb/common/include/log2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/log2_norm.h b/media/codecs/amrnb/common/include/log2_norm.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/log2_norm.h
rename to media/codecs/amrnb/common/include/log2_norm.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsfwt.h b/media/codecs/amrnb/common/include/lsfwt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsfwt.h
rename to media/codecs/amrnb/common/include/lsfwt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp.h b/media/codecs/amrnb/common/include/lsp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp.h
rename to media/codecs/amrnb/common/include/lsp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_az.h b/media/codecs/amrnb/common/include/lsp_az.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_az.h
rename to media/codecs/amrnb/common/include/lsp_az.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_lsf.h b/media/codecs/amrnb/common/include/lsp_lsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_lsf.h
rename to media/codecs/amrnb/common/include/lsp_lsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_tab.h b/media/codecs/amrnb/common/include/lsp_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_tab.h
rename to media/codecs/amrnb/common/include/lsp_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mac_32.h b/media/codecs/amrnb/common/include/mac_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mac_32.h
rename to media/codecs/amrnb/common/include/mac_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mode.h b/media/codecs/amrnb/common/include/mode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mode.h
rename to media/codecs/amrnb/common/include/mode.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mpy_32.h b/media/codecs/amrnb/common/include/mpy_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mpy_32.h
rename to media/codecs/amrnb/common/include/mpy_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mpy_32_16.h b/media/codecs/amrnb/common/include/mpy_32_16.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mpy_32_16.h
rename to media/codecs/amrnb/common/include/mpy_32_16.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mult.h b/media/codecs/amrnb/common/include/mult.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mult.h
rename to media/codecs/amrnb/common/include/mult.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mult_r.h b/media/codecs/amrnb/common/include/mult_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mult_r.h
rename to media/codecs/amrnb/common/include/mult_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/n_proc.h b/media/codecs/amrnb/common/include/n_proc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/n_proc.h
rename to media/codecs/amrnb/common/include/n_proc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/negate.h b/media/codecs/amrnb/common/include/negate.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/negate.h
rename to media/codecs/amrnb/common/include/negate.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/norm_l.h b/media/codecs/amrnb/common/include/norm_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/norm_l.h
rename to media/codecs/amrnb/common/include/norm_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/norm_s.h b/media/codecs/amrnb/common/include/norm_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/norm_s.h
rename to media/codecs/amrnb/common/include/norm_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/oper_32b.h b/media/codecs/amrnb/common/include/oper_32b.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/oper_32b.h
rename to media/codecs/amrnb/common/include/oper_32b.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/p_ol_wgh.h b/media/codecs/amrnb/common/include/p_ol_wgh.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/p_ol_wgh.h
rename to media/codecs/amrnb/common/include/p_ol_wgh.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/pow2.h b/media/codecs/amrnb/common/include/pow2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/pow2.h
rename to media/codecs/amrnb/common/include/pow2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/pred_lt.h b/media/codecs/amrnb/common/include/pred_lt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/pred_lt.h
rename to media/codecs/amrnb/common/include/pred_lt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf.h b/media/codecs/amrnb/common/include/q_plsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf.h
rename to media/codecs/amrnb/common/include/q_plsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf_3_tbl.h b/media/codecs/amrnb/common/include/q_plsf_3_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf_3_tbl.h
rename to media/codecs/amrnb/common/include/q_plsf_3_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf_5_tbl.h b/media/codecs/amrnb/common/include/q_plsf_5_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf_5_tbl.h
rename to media/codecs/amrnb/common/include/q_plsf_5_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qgain475_tab.h b/media/codecs/amrnb/common/include/qgain475_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qgain475_tab.h
rename to media/codecs/amrnb/common/include/qgain475_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qua_gain.h b/media/codecs/amrnb/common/include/qua_gain.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qua_gain.h
rename to media/codecs/amrnb/common/include/qua_gain.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qua_gain_tbl.h b/media/codecs/amrnb/common/include/qua_gain_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qua_gain_tbl.h
rename to media/codecs/amrnb/common/include/qua_gain_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/reorder.h b/media/codecs/amrnb/common/include/reorder.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/reorder.h
rename to media/codecs/amrnb/common/include/reorder.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/residu.h b/media/codecs/amrnb/common/include/residu.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/residu.h
rename to media/codecs/amrnb/common/include/residu.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/reverse_bits.h b/media/codecs/amrnb/common/include/reverse_bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/reverse_bits.h
rename to media/codecs/amrnb/common/include/reverse_bits.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/round.h b/media/codecs/amrnb/common/include/round.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/round.h
rename to media/codecs/amrnb/common/include/round.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/set_zero.h b/media/codecs/amrnb/common/include/set_zero.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/set_zero.h
rename to media/codecs/amrnb/common/include/set_zero.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shl.h b/media/codecs/amrnb/common/include/shl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shl.h
rename to media/codecs/amrnb/common/include/shl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shr.h b/media/codecs/amrnb/common/include/shr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shr.h
rename to media/codecs/amrnb/common/include/shr.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shr_r.h b/media/codecs/amrnb/common/include/shr_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shr_r.h
rename to media/codecs/amrnb/common/include/shr_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h b/media/codecs/amrnb/common/include/sqrt_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/sqrt_l.h
rename to media/codecs/amrnb/common/include/sqrt_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/sub.h b/media/codecs/amrnb/common/include/sub.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/sub.h
rename to media/codecs/amrnb/common/include/sub.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/syn_filt.h b/media/codecs/amrnb/common/include/syn_filt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/syn_filt.h
rename to media/codecs/amrnb/common/include/syn_filt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/typedef.h b/media/codecs/amrnb/common/include/typedef.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/typedef.h
rename to media/codecs/amrnb/common/include/typedef.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad.h b/media/codecs/amrnb/common/include/vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad.h
rename to media/codecs/amrnb/common/include/vad.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad1.h b/media/codecs/amrnb/common/include/vad1.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad1.h
rename to media/codecs/amrnb/common/include/vad1.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad2.h b/media/codecs/amrnb/common/include/vad2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad2.h
rename to media/codecs/amrnb/common/include/vad2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/weight_a.h b/media/codecs/amrnb/common/include/weight_a.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/weight_a.h
rename to media/codecs/amrnb/common/include/weight_a.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/window_tab.h b/media/codecs/amrnb/common/include/window_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/window_tab.h
rename to media/codecs/amrnb/common/include/window_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/wmf_to_ets.h b/media/codecs/amrnb/common/include/wmf_to_ets.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/wmf_to_ets.h
rename to media/codecs/amrnb/common/include/wmf_to_ets.h
diff --git a/media/libstagefright/codecs/amrnb/common/src/add.cpp b/media/codecs/amrnb/common/src/add.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/add.cpp
rename to media/codecs/amrnb/common/src/add.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp b/media/codecs/amrnb/common/src/az_lsp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
rename to media/codecs/amrnb/common/src/az_lsp.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp b/media/codecs/amrnb/common/src/bitno_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp
rename to media/codecs/amrnb/common/src/bitno_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp b/media/codecs/amrnb/common/src/bitreorder_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp
rename to media/codecs/amrnb/common/src/bitreorder_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bits2prm.cpp b/media/codecs/amrnb/common/src/bits2prm.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bits2prm.cpp
rename to media/codecs/amrnb/common/src/bits2prm.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp b/media/codecs/amrnb/common/src/c2_9pf_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp
rename to media/codecs/amrnb/common/src/c2_9pf_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/copy.cpp b/media/codecs/amrnb/common/src/copy.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/copy.cpp
rename to media/codecs/amrnb/common/src/copy.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/div_32.cpp b/media/codecs/amrnb/common/src/div_32.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/div_32.cpp
rename to media/codecs/amrnb/common/src/div_32.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/div_s.cpp b/media/codecs/amrnb/common/src/div_s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/div_s.cpp
rename to media/codecs/amrnb/common/src/div_s.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/extract_h.cpp b/media/codecs/amrnb/common/src/extract_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/extract_h.cpp
rename to media/codecs/amrnb/common/src/extract_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/extract_l.cpp b/media/codecs/amrnb/common/src/extract_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/extract_l.cpp
rename to media/codecs/amrnb/common/src/extract_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp b/media/codecs/amrnb/common/src/gains_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp
rename to media/codecs/amrnb/common/src/gains_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp b/media/codecs/amrnb/common/src/gc_pred.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp
rename to media/codecs/amrnb/common/src/gc_pred.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp b/media/codecs/amrnb/common/src/gmed_n.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp
rename to media/codecs/amrnb/common/src/gmed_n.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp b/media/codecs/amrnb/common/src/gray_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp
rename to media/codecs/amrnb/common/src/gray_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp b/media/codecs/amrnb/common/src/grid_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp
rename to media/codecs/amrnb/common/src/grid_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/int_lpc.cpp b/media/codecs/amrnb/common/src/int_lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/int_lpc.cpp
rename to media/codecs/amrnb/common/src/int_lpc.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt.cpp b/media/codecs/amrnb/common/src/inv_sqrt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/inv_sqrt.cpp
rename to media/codecs/amrnb/common/src/inv_sqrt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp b/media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
rename to media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_abs.cpp b/media/codecs/amrnb/common/src/l_abs.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_abs.cpp
rename to media/codecs/amrnb/common/src/l_abs.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_deposit_h.cpp b/media/codecs/amrnb/common/src/l_deposit_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_deposit_h.cpp
rename to media/codecs/amrnb/common/src/l_deposit_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_deposit_l.cpp b/media/codecs/amrnb/common/src/l_deposit_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_deposit_l.cpp
rename to media/codecs/amrnb/common/src/l_deposit_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_shr_r.cpp b/media/codecs/amrnb/common/src/l_shr_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_shr_r.cpp
rename to media/codecs/amrnb/common/src/l_shr_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2.cpp b/media/codecs/amrnb/common/src/log2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2.cpp
rename to media/codecs/amrnb/common/src/log2.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2_norm.cpp b/media/codecs/amrnb/common/src/log2_norm.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2_norm.cpp
rename to media/codecs/amrnb/common/src/log2_norm.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp b/media/codecs/amrnb/common/src/log2_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp
rename to media/codecs/amrnb/common/src/log2_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsfwt.cpp b/media/codecs/amrnb/common/src/lsfwt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsfwt.cpp
rename to media/codecs/amrnb/common/src/lsfwt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp.cpp b/media/codecs/amrnb/common/src/lsp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp.cpp
rename to media/codecs/amrnb/common/src/lsp.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp b/media/codecs/amrnb/common/src/lsp_az.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp
rename to media/codecs/amrnb/common/src/lsp_az.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf.cpp b/media/codecs/amrnb/common/src/lsp_lsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_lsf.cpp
rename to media/codecs/amrnb/common/src/lsp_lsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp b/media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
rename to media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp b/media/codecs/amrnb/common/src/lsp_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp
rename to media/codecs/amrnb/common/src/lsp_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp b/media/codecs/amrnb/common/src/mult_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/mult_r.cpp
rename to media/codecs/amrnb/common/src/mult_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/negate.cpp b/media/codecs/amrnb/common/src/negate.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/negate.cpp
rename to media/codecs/amrnb/common/src/negate.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp b/media/codecs/amrnb/common/src/norm_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/norm_l.cpp
rename to media/codecs/amrnb/common/src/norm_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp b/media/codecs/amrnb/common/src/norm_s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/norm_s.cpp
rename to media/codecs/amrnb/common/src/norm_s.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp b/media/codecs/amrnb/common/src/ph_disp_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp
rename to media/codecs/amrnb/common/src/ph_disp_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pow2.cpp b/media/codecs/amrnb/common/src/pow2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pow2.cpp
rename to media/codecs/amrnb/common/src/pow2.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp b/media/codecs/amrnb/common/src/pow2_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp
rename to media/codecs/amrnb/common/src/pow2_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp b/media/codecs/amrnb/common/src/pred_lt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp
rename to media/codecs/amrnb/common/src/pred_lt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf.cpp b/media/codecs/amrnb/common/src/q_plsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf.cpp
rename to media/codecs/amrnb/common/src/q_plsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp b/media/codecs/amrnb/common/src/q_plsf_3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp
rename to media/codecs/amrnb/common/src/q_plsf_3.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3_tbl.cpp b/media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
rename to media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5.cpp b/media/codecs/amrnb/common/src/q_plsf_5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_5.cpp
rename to media/codecs/amrnb/common/src/q_plsf_5.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp b/media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
rename to media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp b/media/codecs/amrnb/common/src/qua_gain_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp
rename to media/codecs/amrnb/common/src/qua_gain_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/reorder.cpp b/media/codecs/amrnb/common/src/reorder.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/reorder.cpp
rename to media/codecs/amrnb/common/src/reorder.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/residu.cpp b/media/codecs/amrnb/common/src/residu.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/residu.cpp
rename to media/codecs/amrnb/common/src/residu.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/round.cpp b/media/codecs/amrnb/common/src/round.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/round.cpp
rename to media/codecs/amrnb/common/src/round.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/set_zero.cpp b/media/codecs/amrnb/common/src/set_zero.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/set_zero.cpp
rename to media/codecs/amrnb/common/src/set_zero.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/shr.cpp b/media/codecs/amrnb/common/src/shr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/shr.cpp
rename to media/codecs/amrnb/common/src/shr.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/shr_r.cpp b/media/codecs/amrnb/common/src/shr_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/shr_r.cpp
rename to media/codecs/amrnb/common/src/shr_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sqrt_l.cpp b/media/codecs/amrnb/common/src/sqrt_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sqrt_l.cpp
rename to media/codecs/amrnb/common/src/sqrt_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp b/media/codecs/amrnb/common/src/sqrt_l_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp
rename to media/codecs/amrnb/common/src/sqrt_l_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sub.cpp b/media/codecs/amrnb/common/src/sub.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sub.cpp
rename to media/codecs/amrnb/common/src/sub.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/syn_filt.cpp b/media/codecs/amrnb/common/src/syn_filt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/syn_filt.cpp
rename to media/codecs/amrnb/common/src/syn_filt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/vad1.cpp b/media/codecs/amrnb/common/src/vad1.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/vad1.cpp
rename to media/codecs/amrnb/common/src/vad1.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp b/media/codecs/amrnb/common/src/weight_a.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/weight_a.cpp
rename to media/codecs/amrnb/common/src/weight_a.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp b/media/codecs/amrnb/common/src/window_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/window_tab.cpp
rename to media/codecs/amrnb/common/src/window_tab.cpp
diff --git a/media/codecs/amrnb/dec/Android.bp b/media/codecs/amrnb/dec/Android.bp
new file mode 100644
index 0000000..944ff75
--- /dev/null
+++ b/media/codecs/amrnb/dec/Android.bp
@@ -0,0 +1,106 @@
+cc_library_static {
+ name: "libstagefright_amrnbdec",
+ vendor_available: true,
+ host_supported: true,
+ min_sdk_version: "29",
+
+ srcs: [
+ "src/a_refl.cpp",
+ "src/agc.cpp",
+ "src/amrdecode.cpp",
+ "src/b_cn_cod.cpp",
+ "src/bgnscd.cpp",
+ "src/c_g_aver.cpp",
+ "src/d1035pf.cpp",
+ "src/d2_11pf.cpp",
+ "src/d2_9pf.cpp",
+ "src/d3_14pf.cpp",
+ "src/d4_17pf.cpp",
+ "src/d8_31pf.cpp",
+ "src/d_gain_c.cpp",
+ "src/d_gain_p.cpp",
+ "src/d_plsf.cpp",
+ "src/d_plsf_3.cpp",
+ "src/d_plsf_5.cpp",
+ "src/dec_amr.cpp",
+ "src/dec_gain.cpp",
+ "src/dec_input_format_tab.cpp",
+ "src/dec_lag3.cpp",
+ "src/dec_lag6.cpp",
+ "src/dtx_dec.cpp",
+ "src/ec_gains.cpp",
+ "src/ex_ctrl.cpp",
+ "src/if2_to_ets.cpp",
+ "src/int_lsf.cpp",
+ "src/lsp_avg.cpp",
+ "src/ph_disp.cpp",
+ "src/post_pro.cpp",
+ "src/preemph.cpp",
+ "src/pstfilt.cpp",
+ "src/qgain475_tab.cpp",
+ "src/sp_dec.cpp",
+ "src/wmf_to_ets.cpp",
+ ],
+
+ export_include_dirs: ["src"],
+
+ cflags: [
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+ "-DOSCL_IMPORT_REF=",
+
+ "-Werror",
+ ],
+
+ //sanitize: {
+ // misc_undefined: [
+ // "signed-integer-overflow",
+ // ],
+ //},
+
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ "liblog",
+ ],
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+//###############################################################################
+cc_test {
+ name: "libstagefright_amrnbdec_test",
+ gtest: false,
+ host_supported: true,
+
+ srcs: ["test/amrnbdec_test.cpp"],
+
+ cflags: ["-Wall", "-Werror"],
+
+ local_include_dirs: ["src"],
+
+ static_libs: [
+ "libstagefright_amrnbdec",
+ "libsndfile",
+ ],
+
+ shared_libs: [
+ "libstagefright_amrnb_common",
+ "libaudioutils",
+ "liblog",
+ ],
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
+ //sanitize: {
+ // misc_undefined: [
+ // "signed-integer-overflow",
+ // ],
+ //},
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/dec/NOTICE b/media/codecs/amrnb/dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/NOTICE
rename to media/codecs/amrnb/dec/NOTICE
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp b/media/codecs/amrnb/dec/src/a_refl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
rename to media/codecs/amrnb/dec/src/a_refl.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.h b/media/codecs/amrnb/dec/src/a_refl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/a_refl.h
rename to media/codecs/amrnb/dec/src/a_refl.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/agc.cpp b/media/codecs/amrnb/dec/src/agc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/agc.cpp
rename to media/codecs/amrnb/dec/src/agc.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/agc.h b/media/codecs/amrnb/dec/src/agc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/agc.h
rename to media/codecs/amrnb/dec/src/agc.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/amrdecode.cpp b/media/codecs/amrnb/dec/src/amrdecode.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/amrdecode.cpp
rename to media/codecs/amrnb/dec/src/amrdecode.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/amrdecode.h b/media/codecs/amrnb/dec/src/amrdecode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/amrdecode.h
rename to media/codecs/amrnb/dec/src/amrdecode.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.cpp b/media/codecs/amrnb/dec/src/b_cn_cod.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.cpp
rename to media/codecs/amrnb/dec/src/b_cn_cod.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.h b/media/codecs/amrnb/dec/src/b_cn_cod.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.h
rename to media/codecs/amrnb/dec/src/b_cn_cod.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/bgnscd.cpp b/media/codecs/amrnb/dec/src/bgnscd.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/bgnscd.cpp
rename to media/codecs/amrnb/dec/src/bgnscd.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/bgnscd.h b/media/codecs/amrnb/dec/src/bgnscd.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/bgnscd.h
rename to media/codecs/amrnb/dec/src/bgnscd.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/c_g_aver.cpp b/media/codecs/amrnb/dec/src/c_g_aver.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/c_g_aver.cpp
rename to media/codecs/amrnb/dec/src/c_g_aver.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/c_g_aver.h b/media/codecs/amrnb/dec/src/c_g_aver.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/c_g_aver.h
rename to media/codecs/amrnb/dec/src/c_g_aver.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp b/media/codecs/amrnb/dec/src/d1035pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp
rename to media/codecs/amrnb/dec/src/d1035pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.h b/media/codecs/amrnb/dec/src/d1035pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d1035pf.h
rename to media/codecs/amrnb/dec/src/d1035pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_11pf.cpp b/media/codecs/amrnb/dec/src/d2_11pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_11pf.cpp
rename to media/codecs/amrnb/dec/src/d2_11pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_11pf.h b/media/codecs/amrnb/dec/src/d2_11pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_11pf.h
rename to media/codecs/amrnb/dec/src/d2_11pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_9pf.cpp b/media/codecs/amrnb/dec/src/d2_9pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_9pf.cpp
rename to media/codecs/amrnb/dec/src/d2_9pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_9pf.h b/media/codecs/amrnb/dec/src/d2_9pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_9pf.h
rename to media/codecs/amrnb/dec/src/d2_9pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d3_14pf.cpp b/media/codecs/amrnb/dec/src/d3_14pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d3_14pf.cpp
rename to media/codecs/amrnb/dec/src/d3_14pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d3_14pf.h b/media/codecs/amrnb/dec/src/d3_14pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d3_14pf.h
rename to media/codecs/amrnb/dec/src/d3_14pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d4_17pf.cpp b/media/codecs/amrnb/dec/src/d4_17pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d4_17pf.cpp
rename to media/codecs/amrnb/dec/src/d4_17pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d4_17pf.h b/media/codecs/amrnb/dec/src/d4_17pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d4_17pf.h
rename to media/codecs/amrnb/dec/src/d4_17pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d8_31pf.cpp b/media/codecs/amrnb/dec/src/d8_31pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d8_31pf.cpp
rename to media/codecs/amrnb/dec/src/d8_31pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d8_31pf.h b/media/codecs/amrnb/dec/src/d8_31pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d8_31pf.h
rename to media/codecs/amrnb/dec/src/d8_31pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_gain_c.cpp b/media/codecs/amrnb/dec/src/d_gain_c.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_gain_c.cpp
rename to media/codecs/amrnb/dec/src/d_gain_c.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_gain_p.cpp b/media/codecs/amrnb/dec/src/d_gain_p.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_gain_p.cpp
rename to media/codecs/amrnb/dec/src/d_gain_p.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf.cpp b/media/codecs/amrnb/dec/src/d_plsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf.cpp
rename to media/codecs/amrnb/dec/src/d_plsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_3.cpp b/media/codecs/amrnb/dec/src/d_plsf_3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf_3.cpp
rename to media/codecs/amrnb/dec/src/d_plsf_3.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp b/media/codecs/amrnb/dec/src/d_plsf_5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp
rename to media/codecs/amrnb/dec/src/d_plsf_5.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_amr.cpp b/media/codecs/amrnb/dec/src/dec_amr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_amr.cpp
rename to media/codecs/amrnb/dec/src/dec_amr.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_amr.h b/media/codecs/amrnb/dec/src/dec_amr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_amr.h
rename to media/codecs/amrnb/dec/src/dec_amr.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_gain.cpp b/media/codecs/amrnb/dec/src/dec_gain.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_gain.cpp
rename to media/codecs/amrnb/dec/src/dec_gain.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_gain.h b/media/codecs/amrnb/dec/src/dec_gain.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_gain.h
rename to media/codecs/amrnb/dec/src/dec_gain.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp b/media/codecs/amrnb/dec/src/dec_input_format_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp
rename to media/codecs/amrnb/dec/src/dec_input_format_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag3.cpp b/media/codecs/amrnb/dec/src/dec_lag3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag3.cpp
rename to media/codecs/amrnb/dec/src/dec_lag3.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag3.h b/media/codecs/amrnb/dec/src/dec_lag3.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag3.h
rename to media/codecs/amrnb/dec/src/dec_lag3.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag6.cpp b/media/codecs/amrnb/dec/src/dec_lag6.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag6.cpp
rename to media/codecs/amrnb/dec/src/dec_lag6.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag6.h b/media/codecs/amrnb/dec/src/dec_lag6.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag6.h
rename to media/codecs/amrnb/dec/src/dec_lag6.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dtx_dec.cpp b/media/codecs/amrnb/dec/src/dtx_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dtx_dec.cpp
rename to media/codecs/amrnb/dec/src/dtx_dec.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dtx_dec.h b/media/codecs/amrnb/dec/src/dtx_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dtx_dec.h
rename to media/codecs/amrnb/dec/src/dtx_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ec_gains.cpp b/media/codecs/amrnb/dec/src/ec_gains.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ec_gains.cpp
rename to media/codecs/amrnb/dec/src/ec_gains.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ec_gains.h b/media/codecs/amrnb/dec/src/ec_gains.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ec_gains.h
rename to media/codecs/amrnb/dec/src/ec_gains.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.cpp b/media/codecs/amrnb/dec/src/ex_ctrl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.cpp
rename to media/codecs/amrnb/dec/src/ex_ctrl.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.h b/media/codecs/amrnb/dec/src/ex_ctrl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.h
rename to media/codecs/amrnb/dec/src/ex_ctrl.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/gsmamr_dec.h b/media/codecs/amrnb/dec/src/gsmamr_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/gsmamr_dec.h
rename to media/codecs/amrnb/dec/src/gsmamr_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.cpp b/media/codecs/amrnb/dec/src/if2_to_ets.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.cpp
rename to media/codecs/amrnb/dec/src/if2_to_ets.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.h b/media/codecs/amrnb/dec/src/if2_to_ets.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.h
rename to media/codecs/amrnb/dec/src/if2_to_ets.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp b/media/codecs/amrnb/dec/src/int_lsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp
rename to media/codecs/amrnb/dec/src/int_lsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/lsp_avg.cpp b/media/codecs/amrnb/dec/src/lsp_avg.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/lsp_avg.cpp
rename to media/codecs/amrnb/dec/src/lsp_avg.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/lsp_avg.h b/media/codecs/amrnb/dec/src/lsp_avg.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/lsp_avg.h
rename to media/codecs/amrnb/dec/src/lsp_avg.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp b/media/codecs/amrnb/dec/src/ph_disp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp
rename to media/codecs/amrnb/dec/src/ph_disp.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.h b/media/codecs/amrnb/dec/src/ph_disp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ph_disp.h
rename to media/codecs/amrnb/dec/src/ph_disp.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/post_pro.cpp b/media/codecs/amrnb/dec/src/post_pro.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/post_pro.cpp
rename to media/codecs/amrnb/dec/src/post_pro.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/post_pro.h b/media/codecs/amrnb/dec/src/post_pro.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/post_pro.h
rename to media/codecs/amrnb/dec/src/post_pro.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/preemph.cpp b/media/codecs/amrnb/dec/src/preemph.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/preemph.cpp
rename to media/codecs/amrnb/dec/src/preemph.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/preemph.h b/media/codecs/amrnb/dec/src/preemph.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/preemph.h
rename to media/codecs/amrnb/dec/src/preemph.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp b/media/codecs/amrnb/dec/src/pstfilt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp
rename to media/codecs/amrnb/dec/src/pstfilt.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.h b/media/codecs/amrnb/dec/src/pstfilt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/pstfilt.h
rename to media/codecs/amrnb/dec/src/pstfilt.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp b/media/codecs/amrnb/dec/src/qgain475_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp
rename to media/codecs/amrnb/dec/src/qgain475_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/sp_dec.cpp b/media/codecs/amrnb/dec/src/sp_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/sp_dec.cpp
rename to media/codecs/amrnb/dec/src/sp_dec.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/sp_dec.h b/media/codecs/amrnb/dec/src/sp_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/sp_dec.h
rename to media/codecs/amrnb/dec/src/sp_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/wmf_to_ets.cpp b/media/codecs/amrnb/dec/src/wmf_to_ets.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/wmf_to_ets.cpp
rename to media/codecs/amrnb/dec/src/wmf_to_ets.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h b/media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
rename to media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
rename to media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/test/Android.bp b/media/codecs/amrnb/dec/test/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/Android.bp
rename to media/codecs/amrnb/dec/test/Android.bp
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AndroidTest.xml b/media/codecs/amrnb/dec/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/AndroidTest.xml
rename to media/codecs/amrnb/dec/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrnb/dec/test/README.md b/media/codecs/amrnb/dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/README.md
rename to media/codecs/amrnb/dec/test/README.md
diff --git a/media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp b/media/codecs/amrnb/dec/test/amrnbdec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp
rename to media/codecs/amrnb/dec/test/amrnbdec_test.cpp
diff --git a/media/codecs/amrnb/enc/Android.bp b/media/codecs/amrnb/enc/Android.bp
new file mode 100644
index 0000000..534ce04
--- /dev/null
+++ b/media/codecs/amrnb/enc/Android.bp
@@ -0,0 +1,113 @@
+cc_library_static {
+ name: "libstagefright_amrnbenc",
+ vendor_available: true,
+ min_sdk_version: "29",
+
+ srcs: [
+ "src/amrencode.cpp",
+ "src/autocorr.cpp",
+ "src/c1035pf.cpp",
+ "src/c2_11pf.cpp",
+ "src/c2_9pf.cpp",
+ "src/c3_14pf.cpp",
+ "src/c4_17pf.cpp",
+ "src/c8_31pf.cpp",
+ "src/calc_cor.cpp",
+ "src/calc_en.cpp",
+ "src/cbsearch.cpp",
+ "src/cl_ltp.cpp",
+ "src/cod_amr.cpp",
+ "src/convolve.cpp",
+ "src/cor_h.cpp",
+ "src/cor_h_x.cpp",
+ "src/cor_h_x2.cpp",
+ "src/corrwght_tab.cpp",
+ "src/dtx_enc.cpp",
+ "src/enc_lag3.cpp",
+ "src/enc_lag6.cpp",
+ "src/enc_output_format_tab.cpp",
+ "src/ets_to_if2.cpp",
+ "src/ets_to_wmf.cpp",
+ "src/g_adapt.cpp",
+ "src/g_code.cpp",
+ "src/g_pitch.cpp",
+ "src/gain_q.cpp",
+ "src/hp_max.cpp",
+ "src/inter_36.cpp",
+ "src/inter_36_tab.cpp",
+ "src/l_comp.cpp",
+ "src/l_extract.cpp",
+ "src/l_negate.cpp",
+ "src/lag_wind.cpp",
+ "src/lag_wind_tab.cpp",
+ "src/levinson.cpp",
+ "src/lpc.cpp",
+ "src/ol_ltp.cpp",
+ "src/p_ol_wgh.cpp",
+ "src/pitch_fr.cpp",
+ "src/pitch_ol.cpp",
+ "src/pre_big.cpp",
+ "src/pre_proc.cpp",
+ "src/prm2bits.cpp",
+ "src/q_gain_c.cpp",
+ "src/q_gain_p.cpp",
+ "src/qgain475.cpp",
+ "src/qgain795.cpp",
+ "src/qua_gain.cpp",
+ "src/s10_8pf.cpp",
+ "src/set_sign.cpp",
+ "src/sid_sync.cpp",
+ "src/sp_enc.cpp",
+ "src/spreproc.cpp",
+ "src/spstproc.cpp",
+ "src/ton_stab.cpp",
+ ],
+
+ header_libs: ["libstagefright_headers"],
+ export_include_dirs: ["src"],
+
+ cflags: [
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+ "-Werror",
+ ],
+
+ //addressing b/25409744
+ //sanitize: {
+ // misc_undefined: [
+ // "signed-integer-overflow",
+ // ],
+ //},
+
+ shared_libs: ["libstagefright_amrnb_common"],
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+//###############################################################################
+
+cc_test {
+ name: "libstagefright_amrnbenc_test",
+ gtest: false,
+
+ srcs: ["test/amrnb_enc_test.cpp"],
+
+ cflags: ["-Wall", "-Werror"],
+
+ local_include_dirs: ["src"],
+
+ static_libs: ["libstagefright_amrnbenc"],
+
+ shared_libs: ["libstagefright_amrnb_common"],
+
+ //addressing b/25409744
+ //sanitize: {
+ // misc_undefined: [
+ // "signed-integer-overflow",
+ // ],
+ //},
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/enc/NOTICE b/media/codecs/amrnb/enc/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/NOTICE
rename to media/codecs/amrnb/enc/NOTICE
diff --git a/media/codecs/amrnb/enc/fuzzer/Android.bp b/media/codecs/amrnb/enc/fuzzer/Android.bp
new file mode 100644
index 0000000..e88e5eb
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/Android.bp
@@ -0,0 +1,41 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "amrnb_enc_fuzzer",
+ host_supported: true,
+
+ srcs: [
+ "amrnb_enc_fuzzer.cpp",
+ ],
+
+ static_libs: [
+ "liblog",
+ "libstagefright_amrnbenc",
+ "libstagefright_amrnb_common",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/codecs/amrnb/enc/fuzzer/README.md b/media/codecs/amrnb/enc/fuzzer/README.md
new file mode 100644
index 0000000..239b4a8
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrnbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR-NB is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AMR-NB supports the following parameters:
+1. Output Format (parameter name: `outputFormat`)
+2. Mode (parameter name: `mode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `outputFormat` | 0. `AMR_TX_WMF` 1. `AMR_TX_IF2` 2. `AMR_TX_ETS` | Bits 0, 1 and 2 of 1st byte of data. |
+| `mode` | 0. `MR475` 1. `MR515` 2. `MR59` 3. `MR67` 4. `MR74 ` 5. `MR795` 6. `MR102` 7. `MR122` 8. `MRDTX` | Bits 3, 4, 5 and 6 of 1st byte of data. |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation was successful, the input is advanced by the frame size.
+If the encode operation was unsuccessful, the input is still advanced by the frame size
+so that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes steps to build amrnb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) amrnb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some pcm files to that folder.
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp b/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
new file mode 100644
index 0000000..2fcbf24
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
@@ -0,0 +1,111 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "gsmamr_enc.h"
+
+// Constants for AMR-NB
+const int32_t kNumInputSamples = L_FRAME; // 160 samples
+const int32_t kOutputBufferSize = 2 * kNumInputSamples * sizeof(Word16);
+const Mode kModes[9] = {MR475, /* 4.75 kbps */
+                        MR515, /* 5.15 kbps */
+                        MR59,  /* 5.90 kbps */
+                        MR67,  /* 6.70 kbps */
+                        MR74,  /* 7.40 kbps */
+                        MR795, /* 7.95 kbps */
+                        MR102, /* 10.2 kbps */
+                        MR122, /* 12.2 kbps */
+                        MRDTX, /* DTX */};
+const Word16 kOutputFormat[3] = {AMR_TX_WMF, AMR_TX_IF2, AMR_TX_ETS};
+
+// Thin RAII wrapper around the AMR-NB encoder C API; the destructor
+// guarantees encoder state is released on every exit path.
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    Word16 initEncoder(const uint8_t *data);
+    void deInitEncoder();
+    void encodeFrames(const uint8_t *data, size_t size);
+
+   private:
+    void *mEncState = nullptr;
+    void *mSidState = nullptr;
+};
+
+// Initializes encoder and SID state. Bit 1 of the first input byte selects
+// the DTX flag so the fuzzer exercises both DTX paths deterministically.
+Word16 Codec::initEncoder(const uint8_t *data) {
+    return AMREncodeInit(&mEncState, &mSidState, (*data >> 1) & 0x01 /* dtx_enable flag */);
+}
+
+// Releases encoder state; safe to call more than once.
+void Codec::deInitEncoder() {
+    if (mEncState) {
+        AMREncodeExit(&mEncState, &mSidState);
+        mEncState = nullptr;
+        mSidState = nullptr;
+    }
+}
+
+// Derives mode and output format from the first byte, then feeds the
+// remaining bytes to the encoder one frame at a time. The input pointer is
+// always advanced, so the loop terminates regardless of encode status.
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    AMREncodeReset(mEncState, mSidState);
+    uint8_t startByte = *data;
+    int modeIndex = ((startByte >> 3) % 9);
+    int outputFormatIndex = (startByte % 3);
+    Mode mode = kModes[modeIndex];
+    Word16 outputFormat = kOutputFormat[outputFormatIndex];
+
+    // Consume startByte
+    data++;
+    size--;
+
+    while (size > 0) {
+        Frame_Type_3GPP frameType = (Frame_Type_3GPP)mode;
+
+        Word16 inputBuf[kNumInputSamples] = {};
+        // size_t avoids narrowing the std::min(size_t, size_t) result.
+        size_t minSize = std::min(size, sizeof(inputBuf));
+
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        memcpy(inputBuf, data, minSize);
+
+        AMREncode(mEncState, mSidState, mode, inputBuf, outputBuf, &frameType, outputFormat);
+
+        data += minSize;
+        size -= minSize;
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    // Stack allocation: 'new' throws rather than returning nullptr, so the
+    // previous null check was dead code; RAII also guarantees cleanup.
+    Codec codec;
+    if (codec.initEncoder(data) == 0) {
+        codec.encodeFrames(data, size);
+    }
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/amrencode.cpp b/media/codecs/amrnb/enc/src/amrencode.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/amrencode.cpp
rename to media/codecs/amrnb/enc/src/amrencode.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/amrencode.h b/media/codecs/amrnb/enc/src/amrencode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/amrencode.h
rename to media/codecs/amrnb/enc/src/amrencode.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp b/media/codecs/amrnb/enc/src/autocorr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp
rename to media/codecs/amrnb/enc/src/autocorr.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/autocorr.h b/media/codecs/amrnb/enc/src/autocorr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/autocorr.h
rename to media/codecs/amrnb/enc/src/autocorr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c1035pf.cpp b/media/codecs/amrnb/enc/src/c1035pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c1035pf.cpp
rename to media/codecs/amrnb/enc/src/c1035pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c1035pf.h b/media/codecs/amrnb/enc/src/c1035pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c1035pf.h
rename to media/codecs/amrnb/enc/src/c1035pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_11pf.cpp b/media/codecs/amrnb/enc/src/c2_11pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_11pf.cpp
rename to media/codecs/amrnb/enc/src/c2_11pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_11pf.h b/media/codecs/amrnb/enc/src/c2_11pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_11pf.h
rename to media/codecs/amrnb/enc/src/c2_11pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp b/media/codecs/amrnb/enc/src/c2_9pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp
rename to media/codecs/amrnb/enc/src/c2_9pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.h b/media/codecs/amrnb/enc/src/c2_9pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_9pf.h
rename to media/codecs/amrnb/enc/src/c2_9pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c3_14pf.cpp b/media/codecs/amrnb/enc/src/c3_14pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c3_14pf.cpp
rename to media/codecs/amrnb/enc/src/c3_14pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c3_14pf.h b/media/codecs/amrnb/enc/src/c3_14pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c3_14pf.h
rename to media/codecs/amrnb/enc/src/c3_14pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c4_17pf.cpp b/media/codecs/amrnb/enc/src/c4_17pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c4_17pf.cpp
rename to media/codecs/amrnb/enc/src/c4_17pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c4_17pf.h b/media/codecs/amrnb/enc/src/c4_17pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c4_17pf.h
rename to media/codecs/amrnb/enc/src/c4_17pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c8_31pf.cpp b/media/codecs/amrnb/enc/src/c8_31pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c8_31pf.cpp
rename to media/codecs/amrnb/enc/src/c8_31pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c8_31pf.h b/media/codecs/amrnb/enc/src/c8_31pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c8_31pf.h
rename to media/codecs/amrnb/enc/src/c8_31pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_cor.cpp b/media/codecs/amrnb/enc/src/calc_cor.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_cor.cpp
rename to media/codecs/amrnb/enc/src/calc_cor.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_cor.h b/media/codecs/amrnb/enc/src/calc_cor.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_cor.h
rename to media/codecs/amrnb/enc/src/calc_cor.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_en.cpp b/media/codecs/amrnb/enc/src/calc_en.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_en.cpp
rename to media/codecs/amrnb/enc/src/calc_en.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_en.h b/media/codecs/amrnb/enc/src/calc_en.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_en.h
rename to media/codecs/amrnb/enc/src/calc_en.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cbsearch.cpp b/media/codecs/amrnb/enc/src/cbsearch.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cbsearch.cpp
rename to media/codecs/amrnb/enc/src/cbsearch.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cbsearch.h b/media/codecs/amrnb/enc/src/cbsearch.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cbsearch.h
rename to media/codecs/amrnb/enc/src/cbsearch.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp b/media/codecs/amrnb/enc/src/cl_ltp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp
rename to media/codecs/amrnb/enc/src/cl_ltp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.h b/media/codecs/amrnb/enc/src/cl_ltp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cl_ltp.h
rename to media/codecs/amrnb/enc/src/cl_ltp.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cod_amr.cpp b/media/codecs/amrnb/enc/src/cod_amr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cod_amr.cpp
rename to media/codecs/amrnb/enc/src/cod_amr.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cod_amr.h b/media/codecs/amrnb/enc/src/cod_amr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cod_amr.h
rename to media/codecs/amrnb/enc/src/cod_amr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp b/media/codecs/amrnb/enc/src/convolve.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/convolve.cpp
rename to media/codecs/amrnb/enc/src/convolve.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/convolve.h b/media/codecs/amrnb/enc/src/convolve.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/convolve.h
rename to media/codecs/amrnb/enc/src/convolve.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp b/media/codecs/amrnb/enc/src/cor_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp
rename to media/codecs/amrnb/enc/src/cor_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h.h b/media/codecs/amrnb/enc/src/cor_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h.h
rename to media/codecs/amrnb/enc/src/cor_h.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp b/media/codecs/amrnb/enc/src/cor_h_x.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp
rename to media/codecs/amrnb/enc/src/cor_h_x.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.h b/media/codecs/amrnb/enc/src/cor_h_x.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x.h
rename to media/codecs/amrnb/enc/src/cor_h_x.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/codecs/amrnb/enc/src/cor_h_x2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
rename to media/codecs/amrnb/enc/src/cor_h_x2.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.h b/media/codecs/amrnb/enc/src/cor_h_x2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.h
rename to media/codecs/amrnb/enc/src/cor_h_x2.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp b/media/codecs/amrnb/enc/src/corrwght_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp
rename to media/codecs/amrnb/enc/src/corrwght_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp b/media/codecs/amrnb/enc/src/dtx_enc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp
rename to media/codecs/amrnb/enc/src/dtx_enc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.h b/media/codecs/amrnb/enc/src/dtx_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/dtx_enc.h
rename to media/codecs/amrnb/enc/src/dtx_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag3.cpp b/media/codecs/amrnb/enc/src/enc_lag3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag3.cpp
rename to media/codecs/amrnb/enc/src/enc_lag3.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag3.h b/media/codecs/amrnb/enc/src/enc_lag3.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag3.h
rename to media/codecs/amrnb/enc/src/enc_lag3.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag6.cpp b/media/codecs/amrnb/enc/src/enc_lag6.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag6.cpp
rename to media/codecs/amrnb/enc/src/enc_lag6.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag6.h b/media/codecs/amrnb/enc/src/enc_lag6.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag6.h
rename to media/codecs/amrnb/enc/src/enc_lag6.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp b/media/codecs/amrnb/enc/src/enc_output_format_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp
rename to media/codecs/amrnb/enc/src/enc_output_format_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.cpp b/media/codecs/amrnb/enc/src/ets_to_if2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.cpp
rename to media/codecs/amrnb/enc/src/ets_to_if2.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.h b/media/codecs/amrnb/enc/src/ets_to_if2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.h
rename to media/codecs/amrnb/enc/src/ets_to_if2.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.cpp b/media/codecs/amrnb/enc/src/ets_to_wmf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.cpp
rename to media/codecs/amrnb/enc/src/ets_to_wmf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.h b/media/codecs/amrnb/enc/src/ets_to_wmf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.h
rename to media/codecs/amrnb/enc/src/ets_to_wmf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_adapt.cpp b/media/codecs/amrnb/enc/src/g_adapt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_adapt.cpp
rename to media/codecs/amrnb/enc/src/g_adapt.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_adapt.h b/media/codecs/amrnb/enc/src/g_adapt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_adapt.h
rename to media/codecs/amrnb/enc/src/g_adapt.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_code.cpp b/media/codecs/amrnb/enc/src/g_code.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_code.cpp
rename to media/codecs/amrnb/enc/src/g_code.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_code.h b/media/codecs/amrnb/enc/src/g_code.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_code.h
rename to media/codecs/amrnb/enc/src/g_code.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_pitch.cpp b/media/codecs/amrnb/enc/src/g_pitch.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_pitch.cpp
rename to media/codecs/amrnb/enc/src/g_pitch.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_pitch.h b/media/codecs/amrnb/enc/src/g_pitch.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_pitch.h
rename to media/codecs/amrnb/enc/src/g_pitch.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gain_q.cpp b/media/codecs/amrnb/enc/src/gain_q.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gain_q.cpp
rename to media/codecs/amrnb/enc/src/gain_q.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gain_q.h b/media/codecs/amrnb/enc/src/gain_q.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gain_q.h
rename to media/codecs/amrnb/enc/src/gain_q.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gsmamr_enc.h b/media/codecs/amrnb/enc/src/gsmamr_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gsmamr_enc.h
rename to media/codecs/amrnb/enc/src/gsmamr_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/hp_max.cpp b/media/codecs/amrnb/enc/src/hp_max.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/hp_max.cpp
rename to media/codecs/amrnb/enc/src/hp_max.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/hp_max.h b/media/codecs/amrnb/enc/src/hp_max.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/hp_max.h
rename to media/codecs/amrnb/enc/src/hp_max.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36.cpp b/media/codecs/amrnb/enc/src/inter_36.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36.cpp
rename to media/codecs/amrnb/enc/src/inter_36.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36.h b/media/codecs/amrnb/enc/src/inter_36.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36.h
rename to media/codecs/amrnb/enc/src/inter_36.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp b/media/codecs/amrnb/enc/src/inter_36_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp
rename to media/codecs/amrnb/enc/src/inter_36_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.h b/media/codecs/amrnb/enc/src/inter_36_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.h
rename to media/codecs/amrnb/enc/src/inter_36_tab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_comp.cpp b/media/codecs/amrnb/enc/src/l_comp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_comp.cpp
rename to media/codecs/amrnb/enc/src/l_comp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_extract.cpp b/media/codecs/amrnb/enc/src/l_extract.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_extract.cpp
rename to media/codecs/amrnb/enc/src/l_extract.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_negate.cpp b/media/codecs/amrnb/enc/src/l_negate.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_negate.cpp
rename to media/codecs/amrnb/enc/src/l_negate.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind.cpp b/media/codecs/amrnb/enc/src/lag_wind.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind.cpp
rename to media/codecs/amrnb/enc/src/lag_wind.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind.h b/media/codecs/amrnb/enc/src/lag_wind.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind.h
rename to media/codecs/amrnb/enc/src/lag_wind.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp b/media/codecs/amrnb/enc/src/lag_wind_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp
rename to media/codecs/amrnb/enc/src/lag_wind_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.h b/media/codecs/amrnb/enc/src/lag_wind_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.h
rename to media/codecs/amrnb/enc/src/lag_wind_tab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp b/media/codecs/amrnb/enc/src/levinson.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/levinson.cpp
rename to media/codecs/amrnb/enc/src/levinson.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/levinson.h b/media/codecs/amrnb/enc/src/levinson.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/levinson.h
rename to media/codecs/amrnb/enc/src/levinson.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lpc.cpp b/media/codecs/amrnb/enc/src/lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lpc.cpp
rename to media/codecs/amrnb/enc/src/lpc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lpc.h b/media/codecs/amrnb/enc/src/lpc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lpc.h
rename to media/codecs/amrnb/enc/src/lpc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ol_ltp.cpp b/media/codecs/amrnb/enc/src/ol_ltp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ol_ltp.cpp
rename to media/codecs/amrnb/enc/src/ol_ltp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ol_ltp.h b/media/codecs/amrnb/enc/src/ol_ltp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ol_ltp.h
rename to media/codecs/amrnb/enc/src/ol_ltp.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/p_ol_wgh.cpp b/media/codecs/amrnb/enc/src/p_ol_wgh.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/p_ol_wgh.cpp
rename to media/codecs/amrnb/enc/src/p_ol_wgh.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.cpp b/media/codecs/amrnb/enc/src/pitch_fr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_fr.cpp
rename to media/codecs/amrnb/enc/src/pitch_fr.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.h b/media/codecs/amrnb/enc/src/pitch_fr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_fr.h
rename to media/codecs/amrnb/enc/src/pitch_fr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp b/media/codecs/amrnb/enc/src/pitch_ol.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp
rename to media/codecs/amrnb/enc/src/pitch_ol.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.h b/media/codecs/amrnb/enc/src/pitch_ol.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_ol.h
rename to media/codecs/amrnb/enc/src/pitch_ol.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_big.cpp b/media/codecs/amrnb/enc/src/pre_big.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_big.cpp
rename to media/codecs/amrnb/enc/src/pre_big.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_big.h b/media/codecs/amrnb/enc/src/pre_big.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_big.h
rename to media/codecs/amrnb/enc/src/pre_big.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp b/media/codecs/amrnb/enc/src/pre_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp
rename to media/codecs/amrnb/enc/src/pre_proc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.h b/media/codecs/amrnb/enc/src/pre_proc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_proc.h
rename to media/codecs/amrnb/enc/src/pre_proc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/prm2bits.cpp b/media/codecs/amrnb/enc/src/prm2bits.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/prm2bits.cpp
rename to media/codecs/amrnb/enc/src/prm2bits.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/prm2bits.h b/media/codecs/amrnb/enc/src/prm2bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/prm2bits.h
rename to media/codecs/amrnb/enc/src/prm2bits.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_c.cpp b/media/codecs/amrnb/enc/src/q_gain_c.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_c.cpp
rename to media/codecs/amrnb/enc/src/q_gain_c.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_c.h b/media/codecs/amrnb/enc/src/q_gain_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_c.h
rename to media/codecs/amrnb/enc/src/q_gain_c.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_p.cpp b/media/codecs/amrnb/enc/src/q_gain_p.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_p.cpp
rename to media/codecs/amrnb/enc/src/q_gain_p.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_p.h b/media/codecs/amrnb/enc/src/q_gain_p.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_p.h
rename to media/codecs/amrnb/enc/src/q_gain_p.h
diff --git a/media/codecs/amrnb/enc/src/qgain475.cpp b/media/codecs/amrnb/enc/src/qgain475.cpp
new file mode 100644
index 0000000..08a5c15
--- /dev/null
+++ b/media/codecs/amrnb/enc/src/qgain475.cpp
@@ -0,0 +1,1445 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+ 3GPP TS 26.073
+ ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
+ Available from http://www.3gpp.org
+
+(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Pathname: ./audio/gsm-amr/c/src/qgain475.c
+ Functions: MR475_quant_store_results
+ MR475_update_unq_pred
+ MR475_gain_quant
+
+------------------------------------------------------------------------------
+ MODULE DESCRIPTION
+
+ These modules handle the quantization of pitch and codebook gains for MR475.
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "qgain475.h"
+#include "typedef.h"
+#include "basic_op.h"
+#include "mode.h"
+#include "cnst.h"
+#include "pow2.h"
+#include "log2.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+#define MR475_VQ_SIZE 256
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL VARIABLE DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/* The table contains the following data:
+ *
+ * g_pitch(0) (Q14) // for sub-
+ * g_fac(0) (Q12) // frame 0 and 2
+ * g_pitch(1) (Q14) // for sub-
+ * g_fac(1) (Q12) // frame 1 and 3
+ *
+ */
+/* Joint vector-quantizer codebook for MR475 gain quantization:
+ * MR475_VQ_SIZE (256) entries of 4 Word16 values each, laid out as
+ * documented above. An entry is addressed as table_gain_MR475[4*index]
+ * (see MR475_quant_store_results usage with shl(index, 2)). */
+static const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
+{
+ /*g_pit(0), g_fac(0), g_pit(1), g_fac(1) */
+ 812, 128, 542, 140,
+ 2873, 1135, 2266, 3402,
+ 2067, 563, 12677, 647,
+ 4132, 1798, 5601, 5285,
+ 7689, 374, 3735, 441,
+ 10912, 2638, 11807, 2494,
+ 20490, 797, 5218, 675,
+ 6724, 8354, 5282, 1696,
+ 1488, 428, 5882, 452,
+ 5332, 4072, 3583, 1268,
+ 2469, 901, 15894, 1005,
+ 14982, 3271, 10331, 4858,
+ 3635, 2021, 2596, 835,
+ 12360, 4892, 12206, 1704,
+ 13432, 1604, 9118, 2341,
+ 3968, 1538, 5479, 9936,
+ 3795, 417, 1359, 414,
+ 3640, 1569, 7995, 3541,
+ 11405, 645, 8552, 635,
+ 4056, 1377, 16608, 6124,
+ 11420, 700, 2007, 607,
+ 12415, 1578, 11119, 4654,
+ 13680, 1708, 11990, 1229,
+ 7996, 7297, 13231, 5715,
+ 2428, 1159, 2073, 1941,
+ 6218, 6121, 3546, 1804,
+ 8925, 1802, 8679, 1580,
+ 13935, 3576, 13313, 6237,
+ 6142, 1130, 5994, 1734,
+ 14141, 4662, 11271, 3321,
+ 12226, 1551, 13931, 3015,
+ 5081, 10464, 9444, 6706,
+ 1689, 683, 1436, 1306,
+ 7212, 3933, 4082, 2713,
+ 7793, 704, 15070, 802,
+ 6299, 5212, 4337, 5357,
+ 6676, 541, 6062, 626,
+ 13651, 3700, 11498, 2408,
+ 16156, 716, 12177, 751,
+ 8065, 11489, 6314, 2256,
+ 4466, 496, 7293, 523,
+ 10213, 3833, 8394, 3037,
+ 8403, 966, 14228, 1880,
+ 8703, 5409, 16395, 4863,
+ 7420, 1979, 6089, 1230,
+ 9371, 4398, 14558, 3363,
+ 13559, 2873, 13163, 1465,
+ 5534, 1678, 13138, 14771,
+ 7338, 600, 1318, 548,
+ 4252, 3539, 10044, 2364,
+ 10587, 622, 13088, 669,
+ 14126, 3526, 5039, 9784,
+ 15338, 619, 3115, 590,
+ 16442, 3013, 15542, 4168,
+ 15537, 1611, 15405, 1228,
+ 16023, 9299, 7534, 4976,
+ 1990, 1213, 11447, 1157,
+ 12512, 5519, 9475, 2644,
+ 7716, 2034, 13280, 2239,
+ 16011, 5093, 8066, 6761,
+ 10083, 1413, 5002, 2347,
+ 12523, 5975, 15126, 2899,
+ 18264, 2289, 15827, 2527,
+ 16265, 10254, 14651, 11319,
+ 1797, 337, 3115, 397,
+ 3510, 2928, 4592, 2670,
+ 7519, 628, 11415, 656,
+ 5946, 2435, 6544, 7367,
+ 8238, 829, 4000, 863,
+ 10032, 2492, 16057, 3551,
+ 18204, 1054, 6103, 1454,
+ 5884, 7900, 18752, 3468,
+ 1864, 544, 9198, 683,
+ 11623, 4160, 4594, 1644,
+ 3158, 1157, 15953, 2560,
+ 12349, 3733, 17420, 5260,
+ 6106, 2004, 2917, 1742,
+ 16467, 5257, 16787, 1680,
+ 17205, 1759, 4773, 3231,
+ 7386, 6035, 14342, 10012,
+ 4035, 442, 4194, 458,
+ 9214, 2242, 7427, 4217,
+ 12860, 801, 11186, 825,
+ 12648, 2084, 12956, 6554,
+ 9505, 996, 6629, 985,
+ 10537, 2502, 15289, 5006,
+ 12602, 2055, 15484, 1653,
+ 16194, 6921, 14231, 5790,
+ 2626, 828, 5615, 1686,
+ 13663, 5778, 3668, 1554,
+ 11313, 2633, 9770, 1459,
+ 14003, 4733, 15897, 6291,
+ 6278, 1870, 7910, 2285,
+ 16978, 4571, 16576, 3849,
+ 15248, 2311, 16023, 3244,
+ 14459, 17808, 11847, 2763,
+ 1981, 1407, 1400, 876,
+ 4335, 3547, 4391, 4210,
+ 5405, 680, 17461, 781,
+ 6501, 5118, 8091, 7677,
+ 7355, 794, 8333, 1182,
+ 15041, 3160, 14928, 3039,
+ 20421, 880, 14545, 852,
+ 12337, 14708, 6904, 1920,
+ 4225, 933, 8218, 1087,
+ 10659, 4084, 10082, 4533,
+ 2735, 840, 20657, 1081,
+ 16711, 5966, 15873, 4578,
+ 10871, 2574, 3773, 1166,
+ 14519, 4044, 20699, 2627,
+ 15219, 2734, 15274, 2186,
+ 6257, 3226, 13125, 19480,
+ 7196, 930, 2462, 1618,
+ 4515, 3092, 13852, 4277,
+ 10460, 833, 17339, 810,
+ 16891, 2289, 15546, 8217,
+ 13603, 1684, 3197, 1834,
+ 15948, 2820, 15812, 5327,
+ 17006, 2438, 16788, 1326,
+ 15671, 8156, 11726, 8556,
+ 3762, 2053, 9563, 1317,
+ 13561, 6790, 12227, 1936,
+ 8180, 3550, 13287, 1778,
+ 16299, 6599, 16291, 7758,
+ 8521, 2551, 7225, 2645,
+ 18269, 7489, 16885, 2248,
+ 17882, 2884, 17265, 3328,
+ 9417, 20162, 11042, 8320,
+ 1286, 620, 1431, 583,
+ 5993, 2289, 3978, 3626,
+ 5144, 752, 13409, 830,
+ 5553, 2860, 11764, 5908,
+ 10737, 560, 5446, 564,
+ 13321, 3008, 11946, 3683,
+ 19887, 798, 9825, 728,
+ 13663, 8748, 7391, 3053,
+ 2515, 778, 6050, 833,
+ 6469, 5074, 8305, 2463,
+ 6141, 1865, 15308, 1262,
+ 14408, 4547, 13663, 4515,
+ 3137, 2983, 2479, 1259,
+ 15088, 4647, 15382, 2607,
+ 14492, 2392, 12462, 2537,
+ 7539, 2949, 12909, 12060,
+ 5468, 684, 3141, 722,
+ 5081, 1274, 12732, 4200,
+ 15302, 681, 7819, 592,
+ 6534, 2021, 16478, 8737,
+ 13364, 882, 5397, 899,
+ 14656, 2178, 14741, 4227,
+ 14270, 1298, 13929, 2029,
+ 15477, 7482, 15815, 4572,
+ 2521, 2013, 5062, 1804,
+ 5159, 6582, 7130, 3597,
+ 10920, 1611, 11729, 1708,
+ 16903, 3455, 16268, 6640,
+ 9306, 1007, 9369, 2106,
+ 19182, 5037, 12441, 4269,
+ 15919, 1332, 15357, 3512,
+ 11898, 14141, 16101, 6854,
+ 2010, 737, 3779, 861,
+ 11454, 2880, 3564, 3540,
+ 9057, 1241, 12391, 896,
+ 8546, 4629, 11561, 5776,
+ 8129, 589, 8218, 588,
+ 18728, 3755, 12973, 3149,
+ 15729, 758, 16634, 754,
+ 15222, 11138, 15871, 2208,
+ 4673, 610, 10218, 678,
+ 15257, 4146, 5729, 3327,
+ 8377, 1670, 19862, 2321,
+ 15450, 5511, 14054, 5481,
+ 5728, 2888, 7580, 1346,
+ 14384, 5325, 16236, 3950,
+ 15118, 3744, 15306, 1435,
+ 14597, 4070, 12301, 15696,
+ 7617, 1699, 2170, 884,
+ 4459, 4567, 18094, 3306,
+ 12742, 815, 14926, 907,
+ 15016, 4281, 15518, 8368,
+ 17994, 1087, 2358, 865,
+ 16281, 3787, 15679, 4596,
+ 16356, 1534, 16584, 2210,
+ 16833, 9697, 15929, 4513,
+ 3277, 1085, 9643, 2187,
+ 11973, 6068, 9199, 4462,
+ 8955, 1629, 10289, 3062,
+ 16481, 5155, 15466, 7066,
+ 13678, 2543, 5273, 2277,
+ 16746, 6213, 16655, 3408,
+ 20304, 3363, 18688, 1985,
+ 14172, 12867, 15154, 15703,
+ 4473, 1020, 1681, 886,
+ 4311, 4301, 8952, 3657,
+ 5893, 1147, 11647, 1452,
+ 15886, 2227, 4582, 6644,
+ 6929, 1205, 6220, 799,
+ 12415, 3409, 15968, 3877,
+ 19859, 2109, 9689, 2141,
+ 14742, 8830, 14480, 2599,
+ 1817, 1238, 7771, 813,
+ 19079, 4410, 5554, 2064,
+ 3687, 2844, 17435, 2256,
+ 16697, 4486, 16199, 5388,
+ 8028, 2763, 3405, 2119,
+ 17426, 5477, 13698, 2786,
+ 19879, 2720, 9098, 3880,
+ 18172, 4833, 17336, 12207,
+ 5116, 996, 4935, 988,
+ 9888, 3081, 6014, 5371,
+ 15881, 1667, 8405, 1183,
+ 15087, 2366, 19777, 7002,
+ 11963, 1562, 7279, 1128,
+ 16859, 1532, 15762, 5381,
+ 14708, 2065, 20105, 2155,
+ 17158, 8245, 17911, 6318,
+ 5467, 1504, 4100, 2574,
+ 17421, 6810, 5673, 2888,
+ 16636, 3382, 8975, 1831,
+ 20159, 4737, 19550, 7294,
+ 6658, 2781, 11472, 3321,
+ 19397, 5054, 18878, 4722,
+ 16439, 2373, 20430, 4386,
+ 11353, 26526, 11593, 3068,
+ 2866, 1566, 5108, 1070,
+ 9614, 4915, 4939, 3536,
+ 7541, 878, 20717, 851,
+ 6938, 4395, 16799, 7733,
+ 10137, 1019, 9845, 964,
+ 15494, 3955, 15459, 3430,
+ 18863, 982, 20120, 963,
+ 16876, 12887, 14334, 4200,
+ 6599, 1220, 9222, 814,
+ 16942, 5134, 5661, 4898,
+ 5488, 1798, 20258, 3962,
+ 17005, 6178, 17929, 5929,
+ 9365, 3420, 7474, 1971,
+ 19537, 5177, 19003, 3006,
+ 16454, 3788, 16070, 2367,
+ 8664, 2743, 9445, 26358,
+ 10856, 1287, 3555, 1009,
+ 5606, 3622, 19453, 5512,
+ 12453, 797, 20634, 911,
+ 15427, 3066, 17037, 10275,
+ 18883, 2633, 3913, 1268,
+ 19519, 3371, 18052, 5230,
+ 19291, 1678, 19508, 3172,
+ 18072, 10754, 16625, 6845,
+ 3134, 2298, 10869, 2437,
+ 15580, 6913, 12597, 3381,
+ 11116, 3297, 16762, 2424,
+ 18853, 6715, 17171, 9887,
+ 12743, 2605, 8937, 3140,
+ 19033, 7764, 18347, 3880,
+ 20475, 3682, 19602, 3380,
+ 13044, 19373, 10526, 23124
+};
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_quant_store_results
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+ pred_st = pointer to structure of type gc_predState
+ p = pointer to selected quantizer table entry (const Word16)
+ gcode0 = predicted CB gain (Word16)
+ exp_gcode0 = exponent of predicted CB gain (Word16)
+ gain_pit = pointer to Pitch gain (Word16)
+ gain_cod = pointer to Code gain (Word16)
+
+ Outputs:
+ pred_st points to the updated structure of type gc_predState
+ gain_pit points to Pitch gain
+ gain_cod points to Code gain
+ pOverflow points to overflow indicator (Flag)
+
+ Returns:
+ None.
+
+ Global Variables Used:
+ None.
+
+ Local Variables Needed:
+ None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This function calculates the final fixed codebook gain and the predictor
+ update values, and updates the gain predictor.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+static void MR475_quant_store_results(
+
+ gc_predState *pred_st, // i/o: gain predictor state struct
+ const Word16 *p, // i : pointer to selected quantizer table entry
+ Word16 gcode0, // i : predicted CB gain, Q(14 - exp_gcode0)
+ Word16 exp_gcode0, // i : exponent of predicted CB gain, Q0
+ Word16 *gain_pit, // o : Pitch gain, Q14
+ Word16 *gain_cod // o : Code gain, Q1
+)
+{
+
+ Word16 g_code, exp, frac, tmp;
+ Word32 L_tmp;
+
+ Word16 qua_ener_MR122; // o : quantized energy error, MR122 version Q10
+ Word16 qua_ener; // o : quantized energy error, Q10
+
+ // Read the quantized gains
+ *gain_pit = *p++;
+ g_code = *p++;
+
+ //------------------------------------------------------------------*
+ * calculate final fixed codebook gain: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * gc = gc0 * g *
+ *------------------------------------------------------------------
+
+ L_tmp = L_mult(g_code, gcode0);
+ L_tmp = L_shr(L_tmp, sub(10, exp_gcode0));
+ *gain_cod = extract_h(L_tmp);
+
+ //------------------------------------------------------------------*
+ * calculate predictor update values and update gain predictor: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * qua_ener = log2(g) *
+ * qua_ener_MR122 = 20*log10(g) *
+ *------------------------------------------------------------------
+
+ Log2 (L_deposit_l (g_code), &exp, &frac); // Log2(x Q12) = log2(x) + 12
+ exp = sub(exp, 12);
+
+ tmp = shr_r (frac, 5);
+ qua_ener_MR122 = add (tmp, shl (exp, 10));
+
+ L_tmp = Mpy_32_16(exp, frac, 24660); // 24660 Q12 ~= 6.0206 = 20*log10(2)
+ qua_ener = pv_round (L_shl (L_tmp, 13)); // Q12 * Q0 = Q13 -> Q10
+
+ gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor the
+ the resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+ used to represent cycle count for each subroutine
+ called)
+ where: (cycle count variable) = cycle count for [subroutine
+ name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+/*
+ * Reads the selected quantizer table entry (pitch gain, code gain factor),
+ * computes the final fixed codebook gain gc = gcode0 * g_code scaled to Q1,
+ * and updates the MA gain predictor with the quantized energy errors.
+ * Matches the reference pseudo-code above, with the ETSI basic operators
+ * L_mult / extract_h / pv_round inlined as plain integer arithmetic.
+ */
+static void MR475_quant_store_results(
+ gc_predState *pred_st, /* i/o: gain predictor state struct */
+ const Word16 *p, /* i : pointer to selected quantizer table entry */
+ Word16 gcode0, /* i : predicted CB gain, Q(14 - exp_gcode0) */
+ Word16 exp_gcode0, /* i : exponent of predicted CB gain, Q0 */
+ Word16 *gain_pit, /* o : Pitch gain, Q14 */
+ Word16 *gain_cod, /* o : Code gain, Q1 */
+ Flag *pOverflow /* o : overflow indicator */
+)
+{
+ Word16 g_code;
+ Word16 exp;
+ Word16 frac;
+ Word16 tmp;
+ Word32 L_tmp;
+
+ Word16 qua_ener_MR122; /* o : quantized energy error, MR122 version Q10 */
+ Word16 qua_ener; /* o : quantized energy error, Q10 */
+
+
+ /* Read the quantized gains */
+ *gain_pit = *p++;
+ g_code = *p++;
+
+ /*------------------------------------------------------------------*
+ * calculate final fixed codebook gain: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * gc = gc0 * g *
+ *------------------------------------------------------------------*/
+
+ /* inlined L_mult(g_code, gcode0): 16x16 fractional multiply, <<1 */
+ L_tmp = ((Word32) g_code * gcode0) << 1;
+ tmp = 10 - exp_gcode0;
+ L_tmp = L_shr(L_tmp, tmp, pOverflow);
+ /* inlined extract_h: high word of L_tmp is the Q1 code gain */
+ *gain_cod = (Word16)(L_tmp >> 16);
+
+ /*------------------------------------------------------------------*
+ * calculate predictor update values and update gain predictor: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * qua_ener = log2(g) *
+ * qua_ener_MR122 = 20*log10(g) *
+ *------------------------------------------------------------------*/
+
+ /* Log2(x Q12) = log2(x) + 12 */
+ Log2((Word32) g_code, &exp, &frac, pOverflow);
+ /* remove the implicit Q12 offset of the Log2 input */
+ exp -= 12;
+
+ /* assemble log2(g) in Q10: integer part in bits 10.., rounded fraction */
+ tmp = shr_r(frac, 5, pOverflow);
+ qua_ener_MR122 = exp << 10;
+ qua_ener_MR122 = tmp + qua_ener_MR122;
+
+ /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
+ L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
+ L_tmp = L_tmp << 13;
+
+ /* Q12 * Q0 = Q13 -> Q10 */
+ /* inlined pv_round: add 0x8000 then take the high word */
+ qua_ener = (Word16)((L_tmp + (Word32) 0x00008000L) >> 16);
+
+ gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+
+ return;
+}
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_update_unq_pred
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+ pred_st = pointer to structure of type gc_predState
+ exp_gcode0 = predicted CB gain (exponent MSW) (Word16)
+ frac_gcode0 = predicted CB gain (exponent LSW) (Word16)
+ cod_gain_exp = optimum codebook gain (exponent)(Word16)
+ cod_gain_frac = optimum codebook gain (fraction) (Word16)
+
+ Outputs:
+ pred_st points to the updated structure of type gc_predState
+ pOverflow points to overflow indicator (Flag)
+
+ Returns:
+ None.
+
+ Global Variables Used:
+ None.
+
+ Local Variables Needed:
+ None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This module uses the optimum codebook gain and updates the "unquantized"
+ gain predictor with the (bounded) prediction error.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+void
+MR475_update_unq_pred(
+ gc_predState *pred_st, // i/o: gain predictor state struct
+ Word16 exp_gcode0, // i : predicted CB gain (exponent MSW), Q0
+ Word16 frac_gcode0, // i : predicted CB gain (exponent LSW), Q15
+ Word16 cod_gain_exp, // i : optimum codebook gain (exponent), Q0
+ Word16 cod_gain_frac // i : optimum codebook gain (fraction), Q15
+)
+{
+ Word16 tmp, exp, frac;
+ Word16 qua_ener, qua_ener_MR122;
+ Word32 L_tmp;
+
+ // calculate prediction error factor (given optimum CB gain gcu):
+ // predErrFact = gcu / gcode0
+ // (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
+ // -> limit qua_ener*)
+ //
+ // calculate prediction error (log):
+ //
+ // qua_ener_MR122 = log2(predErrFact)
+ // qua_ener = 20*log10(predErrFact)
+
+ if (cod_gain_frac <= 0)
+ {
+ // if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT
+ // -> set qua_ener(_MR122) directly
+ qua_ener = MIN_QUA_ENER;
+ qua_ener_MR122 = MIN_QUA_ENER_MR122;
+ }
+ else
+ {
+ // convert gcode0 from DPF to standard fraction/exponent format
+ // with normalized frac, i.e. 16384 <= frac <= 32767
+ // Note: exponent correction (exp=exp-14) is done after div_s
+ frac_gcode0 = extract_l (Pow2 (14, frac_gcode0));
+
+ // make sure cod_gain_frac < frac_gcode0 for div_s
+ if (sub(cod_gain_frac, frac_gcode0) >= 0)
+ {
+ cod_gain_frac = shr (cod_gain_frac, 1);
+ cod_gain_exp = add (cod_gain_exp, 1);
+ }
+
+ // predErrFact
+ // = gcu / gcode0
+ // = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
+ // = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
+ // = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
+
+ frac = div_s (cod_gain_frac, frac_gcode0);
+ tmp = sub (sub (cod_gain_exp, exp_gcode0), 1);
+
+ Log2 (L_deposit_l (frac), &exp, &frac);
+ exp = add (exp, tmp);
+
+ // calculate prediction error (log2, Q10)
+ qua_ener_MR122 = shr_r (frac, 5);
+ qua_ener_MR122 = add (qua_ener_MR122, shl (exp, 10));
+
+ if (sub(qua_ener_MR122, MIN_QUA_ENER_MR122) < 0)
+ {
+ qua_ener = MIN_QUA_ENER;
+ qua_ener_MR122 = MIN_QUA_ENER_MR122;
+ }
+ else if (sub(qua_ener_MR122, MAX_QUA_ENER_MR122) > 0)
+ {
+ qua_ener = MAX_QUA_ENER;
+ qua_ener_MR122 = MAX_QUA_ENER_MR122;
+ }
+ else
+ {
+ // calculate prediction error (20*log10, Q10)
+ L_tmp = Mpy_32_16(exp, frac, 24660);
+ // 24660 Q12 ~= 6.0206 = 20*log10(2)
+ qua_ener = pv_round (L_shl (L_tmp, 13));
+ // Q12 * Q0 = Q13 -> Q26 -> Q10
+ }
+ }
+
+ // update MA predictor memory
+ gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor the
+ the resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+ used to represent cycle count for each subroutine
+ called)
+ where: (cycle count variable) = cycle count for [subroutine
+ name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+/*
+ * Computes the prediction error factor predErrFact = gcu / gcode0 from the
+ * optimum (unquantized) codebook gain, converts it to the log domain
+ * (qua_ener_MR122 = log2, qua_ener = 20*log10, both Q10), bounds it, and
+ * feeds it into the "unquantized" MA gain predictor via gc_pred_update().
+ */
+void MR475_update_unq_pred(
+ gc_predState *pred_st, /* i/o: gain predictor state struct */
+ Word16 exp_gcode0, /* i : predicted CB gain (exponent MSW), Q0 */
+ Word16 frac_gcode0, /* i : predicted CB gain (exponent LSW), Q15 */
+ Word16 cod_gain_exp, /* i : optimum codebook gain (exponent), Q0 */
+ Word16 cod_gain_frac, /* i : optimum codebook gain (fraction), Q15 */
+ Flag *pOverflow /* o : overflow indicator */
+)
+{
+ Word16 tmp;
+ Word16 exp;
+ Word16 frac;
+ Word16 qua_ener;
+ Word16 qua_ener_MR122;
+ Word32 L_tmp;
+
+ /* calculate prediction error factor (given optimum CB gain gcu):
+ *
+ * predErrFact = gcu / gcode0
+ * (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
+ * -> limit qua_ener*)
+ *
+ * calculate prediction error (log):
+ *
+ * qua_ener_MR122 = log2(predErrFact)
+ * qua_ener = 20*log10(predErrFact)
+ *
+ */
+
+ if (cod_gain_frac <= 0)
+ {
+ /* if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT */
+ /* -> set qua_ener(_MR122) directly */
+ qua_ener = MIN_QUA_ENER;
+ qua_ener_MR122 = MIN_QUA_ENER_MR122;
+ }
+ else
+ {
+ /* convert gcode0 from DPF to standard fraction/exponent format */
+ /* with normalized frac, i.e. 16384 <= frac <= 32767 */
+ /* Note: exponent correction (exp=exp-14) is done after div_s */
+ frac_gcode0 = (Word16)(Pow2(14, frac_gcode0, pOverflow));
+
+ /* make sure cod_gain_frac < frac_gcode0 for div_s */
+ if (cod_gain_frac >= frac_gcode0)
+ {
+ /* halve fraction, bump exponent: value unchanged */
+ cod_gain_frac >>= 1;
+ cod_gain_exp += 1;
+ }
+
+ /*
+ predErrFact
+ = gcu / gcode0
+ = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
+ = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
+ = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
+ */
+ frac = div_s(cod_gain_frac, frac_gcode0);
+ tmp = cod_gain_exp - exp_gcode0;
+ tmp -= 1;
+
+ /* log2 of the fractional quotient; add exponent correction */
+ Log2((Word32) frac, &exp, &frac, pOverflow);
+ exp += tmp;
+
+ /* calculate prediction error (log2, Q10) */
+ qua_ener_MR122 = shr_r(frac, 5, pOverflow);
+ tmp = exp << 10;
+ qua_ener_MR122 += tmp;
+
+ /* NOTE(review): the reference pseudo-code above also clamps to
+ * MIN_QUA_ENER(_MR122) when qua_ener_MR122 < MIN_QUA_ENER_MR122;
+ * this implementation only applies the upper bound for positive
+ * cod_gain_frac -- confirm the omitted lower bound is intentional. */
+ if (qua_ener_MR122 > MAX_QUA_ENER_MR122)
+ {
+ qua_ener = MAX_QUA_ENER;
+ qua_ener_MR122 = MAX_QUA_ENER_MR122;
+ }
+ else
+ {
+ /* calculate prediction error (20*log10, Q10) */
+ L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
+ /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
+ L_tmp = L_shl(L_tmp, 13, pOverflow);
+ qua_ener = pv_round(L_tmp, pOverflow);
+
+ /* Q12 * Q0 = Q13 -> Q26 -> Q10 */
+ }
+ }
+
+ /* update MA predictor memory */
+ gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+
+
+ return;
+}
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_gain_quant
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+ pred_st = pointer to structure of type gc_predState
+ sf0_exp_gcode0 = predicted CB gain (exponent) (Word16)
+ sf0_frac_gcode0 = predicted CB gain (fraction) (Word16)
+ sf0_exp_coeff = energy coeff. (exponent part) (Word16)
+ sf0_frac_coeff = energy coeff. (fraction part) (Word16)
+ sf0_exp_target_en = exponent of target energy (Word16)
+ sf0_frac_target_en = fraction of target energy (Word16)
+ sf1_code_nosharp = innovative codebook vector (Word16)
+ sf1_exp_gcode0 = predicted CB gain (exponent) (Word16)
+ sf1_frac_gcode0 = predicted CB gain (fraction) (Word16)
+ sf1_exp_coeff = energy coeff. (exponent part) (Word16)
+ sf1_frac_coeff = energy coeff. (fraction part) (Word16)
+ sf1_exp_target_en = exponent of target energy (Word16)
+ sf1_frac_target_en = fraction of target energy (Word16)
+ gp_limit = pitch gain limit (Word16)
+ sf0_gain_pit = pointer to Pitch gain (Word16)
+ sf0_gain_cod = pointer to Code gain (Word16)
+ sf1_gain_pit = pointer to Pitch gain (Word16)
+ sf1_gain_cod = pointer to Code gain (Word16)
+
+ Outputs:
+ pred_st points to the updated structure of type gc_predState
+ sf0_gain_pit points to Pitch gain
+ sf0_gain_cod points to Code gain
+ sf1_gain_pit points to Pitch gain
+ sf1_gain_cod points to Code gain
+
+ Returns:
+ index = index of quantization
+
+ Global Variables Used:
+ None.
+
+ Local Variables Needed:
+ None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This module provides quantization of pitch and codebook gains for two
+ subframes using the predicted codebook gain.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+Word16
+MR475_gain_quant( // o : index of quantization.
+ gc_predState *pred_st, // i/o: gain predictor state struct
+
+ // data from subframe 0 (or 2)
+ Word16 sf0_exp_gcode0, // i : predicted CB gain (exponent), Q0
+ Word16 sf0_frac_gcode0, // i : predicted CB gain (fraction), Q15
+ Word16 sf0_exp_coeff[], // i : energy coeff. (5), exponent part, Q0
+ Word16 sf0_frac_coeff[], // i : energy coeff. (5), fraction part, Q15
+ // (frac_coeff and exp_coeff computed in
+ // calc_filt_energies())
+ Word16 sf0_exp_target_en, // i : exponent of target energy, Q0
+ Word16 sf0_frac_target_en, // i : fraction of target energy, Q15
+
+ // data from subframe 1 (or 3)
+ Word16 sf1_code_nosharp[], // i : innovative codebook vector (L_SUBFR)
+ // (whithout pitch sharpening)
+ Word16 sf1_exp_gcode0, // i : predicted CB gain (exponent), Q0
+ Word16 sf1_frac_gcode0, // i : predicted CB gain (fraction), Q15
+ Word16 sf1_exp_coeff[], // i : energy coeff. (5), exponent part, Q0
+ Word16 sf1_frac_coeff[], // i : energy coeff. (5), fraction part, Q15
+ // (frac_coeff and exp_coeff computed in
+ // calc_filt_energies())
+ Word16 sf1_exp_target_en, // i : exponent of target energy, Q0
+ Word16 sf1_frac_target_en, // i : fraction of target energy, Q15
+
+ Word16 gp_limit, // i : pitch gain limit
+
+ Word16 *sf0_gain_pit, // o : Pitch gain, Q14
+ Word16 *sf0_gain_cod, // o : Code gain, Q1
+
+ Word16 *sf1_gain_pit, // o : Pitch gain, Q14
+ Word16 *sf1_gain_cod // o : Code gain, Q1
+)
+{
+ const Word16 *p;
+ Word16 i, index = 0;
+ Word16 tmp;
+ Word16 exp;
+ Word16 sf0_gcode0, sf1_gcode0;
+ Word16 g_pitch, g2_pitch, g_code, g2_code, g_pit_cod;
+ Word16 coeff[10], coeff_lo[10], exp_max[10]; // 0..4: sf0; 5..9: sf1
+ Word32 L_tmp, dist_min;
+
+ *-------------------------------------------------------------------*
+ * predicted codebook gain *
+ * ~~~~~~~~~~~~~~~~~~~~~~~ *
+ * gc0 = 2^exp_gcode0 + 2^frac_gcode0 *
+ * *
+ * gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0) *
+ *-------------------------------------------------------------------*
+
+ sf0_gcode0 = extract_l(Pow2(14, sf0_frac_gcode0));
+ sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
+
+ * For each subframe, the error energy (sum) to be minimized consists
+ * of five terms, t[0..4].
+ *
+ * t[0] = gp^2 * <y1 y1>
+ * t[1] = -2*gp * <xn y1>
+ * t[2] = gc^2 * <y2 y2>
+ * t[3] = -2*gc * <xn y2>
+ * t[4] = 2*gp*gc * <y1 y2>
+ *
+
+ // sf 0
+ // determine the scaling exponent for g_code: ec = ec0 - 11
+ exp = sub(sf0_exp_gcode0, 11);
+
+ // calculate exp_max[i] = s[i]-1
+ exp_max[0] = sub(sf0_exp_coeff[0], 13);
+ exp_max[1] = sub(sf0_exp_coeff[1], 14);
+ exp_max[2] = add(sf0_exp_coeff[2], add(15, shl(exp, 1)));
+ exp_max[3] = add(sf0_exp_coeff[3], exp);
+ exp_max[4] = add(sf0_exp_coeff[4], add(1, exp));
+
+ // sf 1
+ // determine the scaling exponent for g_code: ec = ec0 - 11
+ exp = sub(sf1_exp_gcode0, 11);
+
+ // calculate exp_max[i] = s[i]-1
+ exp_max[5] = sub(sf1_exp_coeff[0], 13);
+ exp_max[6] = sub(sf1_exp_coeff[1], 14);
+ exp_max[7] = add(sf1_exp_coeff[2], add(15, shl(exp, 1)));
+ exp_max[8] = add(sf1_exp_coeff[3], exp);
+ exp_max[9] = add(sf1_exp_coeff[4], add(1, exp));
+
+ *-------------------------------------------------------------------*
+ * Gain search equalisation: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * The MSE for the two subframes is weighted differently if there *
+ * is a big difference in the corresponding target energies *
+ *-------------------------------------------------------------------*
+
+ // make the target energy exponents the same by de-normalizing the
+ // fraction of the smaller one. This is necessary to be able to compare
+ // them
+
+ exp = sf0_exp_target_en - sf1_exp_target_en;
+ if (exp > 0)
+ {
+ sf1_frac_target_en = shr (sf1_frac_target_en, exp);
+ }
+ else
+ {
+ sf0_frac_target_en = shl (sf0_frac_target_en, exp);
+ }
+
+ // assume no change of exponents
+ exp = 0;
+
+ // test for target energy difference; set exp to +1 or -1 to scale
+ // up/down coefficients for sf 1
+
+ tmp = shr_r (sf1_frac_target_en, 1); // tmp = ceil(0.5*en(sf1))
+ if (sub (tmp, sf0_frac_target_en) > 0) // tmp > en(sf0)?
+ {
+ // target_energy(sf1) > 2*target_energy(sf0)
+ // -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
+ exp = 1;
+ }
+ else
+ {
+ tmp = shr (add (sf0_frac_target_en, 3), 2); // tmp=ceil(0.25*en(sf0))
+ if (sub (tmp, sf1_frac_target_en) > 0) // tmp > en(sf1)?
+ {
+ // target_energy(sf1) < 0.25*target_energy(sf0)
+ // -> scale down MSE(sf0) by 0.5 by subtracting 1 from
+ // coefficients 0..4
+ exp = -1;
+ }
+ }
+
+ for (i = 0; i < 5; i++)
+ {
+ exp_max[i] = add (exp_max[i], exp);
+ }
+
+ *-------------------------------------------------------------------*
+ * Find maximum exponent: *
+ * ~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * For the sum operation, all terms must have the same scaling; *
+ * that scaling should be low enough to prevent overflow. There- *
+ * fore, the maximum scale is determined and all coefficients are *
+ * re-scaled: *
+ * *
+ * exp = max(exp_max[i]) + 1; *
+ * e = exp_max[i]-exp; e <= 0! *
+ * c[i] = c[i]*2^e *
+ *-------------------------------------------------------------------*
+
+ exp = exp_max[0];
+ for (i = 1; i < 10; i++)
+ {
+ if (sub(exp_max[i], exp) > 0)
+ {
+ exp = exp_max[i];
+ }
+ }
+ exp = add(exp, 1); // To avoid overflow
+
+ p = &sf0_frac_coeff[0];
+ for (i = 0; i < 5; i++) {
+ tmp = sub(exp, exp_max[i]);
+ L_tmp = L_deposit_h(*p++);
+ L_tmp = L_shr(L_tmp, tmp);
+ L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
+ }
+ p = &sf1_frac_coeff[0];
+ for (; i < 10; i++) {
+ tmp = sub(exp, exp_max[i]);
+ L_tmp = L_deposit_h(*p++);
+ L_tmp = L_shr(L_tmp, tmp);
+ L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
+ }
+
+ //-------------------------------------------------------------------*
+ * Codebook search: *
+ * ~~~~~~~~~~~~~~~~ *
+ * *
+ * For each pair (g_pitch, g_fac) in the table calculate the *
+ * terms t[0..4] and sum them up; the result is the mean squared *
+ * error for the quantized gains from the table. The index for the *
+ * minimum MSE is stored and finally used to retrieve the quantized *
+ * gains *
+ *-------------------------------------------------------------------
+
+ // start with "infinite" MSE
+ dist_min = MAX_32;
+
+ p = &table_gain_MR475[0];
+
+ for (i = 0; i < MR475_VQ_SIZE; i++)
+ {
+ // subframe 0 (and 2) calculations
+ g_pitch = *p++;
+ g_code = *p++;
+
+ g_code = mult(g_code, sf0_gcode0);
+ g2_pitch = mult(g_pitch, g_pitch);
+ g2_code = mult(g_code, g_code);
+ g_pit_cod = mult(g_code, g_pitch);
+
+ L_tmp = Mpy_32_16( coeff[0], coeff_lo[0], g2_pitch);
+ L_tmp = Mac_32_16(L_tmp, coeff[1], coeff_lo[1], g_pitch);
+ L_tmp = Mac_32_16(L_tmp, coeff[2], coeff_lo[2], g2_code);
+ L_tmp = Mac_32_16(L_tmp, coeff[3], coeff_lo[3], g_code);
+ L_tmp = Mac_32_16(L_tmp, coeff[4], coeff_lo[4], g_pit_cod);
+
+ tmp = sub (g_pitch, gp_limit);
+
+ // subframe 1 (and 3) calculations
+ g_pitch = *p++;
+ g_code = *p++;
+
+ if (tmp <= 0 && sub(g_pitch, gp_limit) <= 0)
+ {
+ g_code = mult(g_code, sf1_gcode0);
+ g2_pitch = mult(g_pitch, g_pitch);
+ g2_code = mult(g_code, g_code);
+ g_pit_cod = mult(g_code, g_pitch);
+
+ L_tmp = Mac_32_16(L_tmp, coeff[5], coeff_lo[5], g2_pitch);
+ L_tmp = Mac_32_16(L_tmp, coeff[6], coeff_lo[6], g_pitch);
+ L_tmp = Mac_32_16(L_tmp, coeff[7], coeff_lo[7], g2_code);
+ L_tmp = Mac_32_16(L_tmp, coeff[8], coeff_lo[8], g_code);
+ L_tmp = Mac_32_16(L_tmp, coeff[9], coeff_lo[9], g_pit_cod);
+
+ // store table index if MSE for this index is lower
+ // than the minimum MSE seen so far
+ if (L_sub(L_tmp, dist_min) < (Word32) 0)
+ {
+ dist_min = L_tmp;
+ index = i;
+ }
+ }
+ }
+
+ *------------------------------------------------------------------*
+ * read quantized gains and update MA predictor memories *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ *------------------------------------------------------------------*
+
+ // for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
+ // as those calculated from the "real" predictor using quantized gains
+ tmp = shl(index, 2);
+ MR475_quant_store_results(pred_st,
+ &table_gain_MR475[tmp],
+ sf0_gcode0,
+ sf0_exp_gcode0,
+ sf0_gain_pit,
+ sf0_gain_cod);
+
+ // calculate new predicted gain for subframe 1 (this time using
+ // the real, quantized gains)
+ gc_pred(pred_st, MR475, sf1_code_nosharp,
+ &sf1_exp_gcode0, &sf1_frac_gcode0,
+ &sf0_exp_gcode0, &sf0_gcode0); // last two args are unused
+ sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
+
+ tmp = add (tmp, 2);
+ MR475_quant_store_results(pred_st,
+ &table_gain_MR475[tmp],
+ sf1_gcode0,
+ sf1_exp_gcode0,
+ sf1_gain_pit,
+ sf1_gain_cod);
+
+ return index;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor the
+ the resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+ used to represent cycle count for each subroutine
+ called)
+ where: (cycle count variable) = cycle count for [subroutine
+ name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+Word16 MR475_gain_quant( /* o : index of quantization. */
+ gc_predState *pred_st, /* i/o: gain predictor state struct */
+
+ /* data from subframe 0 (or 2) */
+ Word16 sf0_exp_gcode0, /* i : predicted CB gain (exponent), Q0 */
+ Word16 sf0_frac_gcode0, /* i : predicted CB gain (fraction), Q15 */
+ Word16 sf0_exp_coeff[], /* i : energy coeff. (5), exponent part, Q0 */
+ Word16 sf0_frac_coeff[], /* i : energy coeff. (5), fraction part, Q15 */
+ /* (frac_coeff and exp_coeff computed in */
+ /* calc_filt_energies()) */
+ Word16 sf0_exp_target_en, /* i : exponent of target energy, Q0 */
+ Word16 sf0_frac_target_en, /* i : fraction of target energy, Q15 */
+
+ /* data from subframe 1 (or 3) */
+ Word16 sf1_code_nosharp[], /* i : innovative codebook vector (L_SUBFR) */
+ /* (without pitch sharpening) */
+ Word16 sf1_exp_gcode0, /* i : predicted CB gain (exponent), Q0 */
+ Word16 sf1_frac_gcode0, /* i : predicted CB gain (fraction), Q15 */
+ Word16 sf1_exp_coeff[], /* i : energy coeff. (5), exponent part, Q0 */
+ Word16 sf1_frac_coeff[], /* i : energy coeff. (5), fraction part, Q15 */
+ /* (frac_coeff and exp_coeff computed in */
+ /* calc_filt_energies()) */
+ Word16 sf1_exp_target_en, /* i : exponent of target energy, Q0 */
+ Word16 sf1_frac_target_en, /* i : fraction of target energy, Q15 */
+
+ Word16 gp_limit, /* i : pitch gain limit */
+
+ Word16 *sf0_gain_pit, /* o : Pitch gain, Q14 */
+ Word16 *sf0_gain_cod, /* o : Code gain, Q1 */
+
+ Word16 *sf1_gain_pit, /* o : Pitch gain, Q14 */
+ Word16 *sf1_gain_cod, /* o : Code gain, Q1 */
+ Flag *pOverflow /* o : overflow indicator */
+)
+{
+ const Word16 *p;
+ Word16 i;
+ Word16 index = 0;
+ Word16 tmp;
+ Word16 exp;
+ Word16 sf0_gcode0;
+ Word16 sf1_gcode0;
+ Word16 g_pitch;
+ Word16 g2_pitch;
+ Word16 g_code;
+ Word16 g2_code;
+ Word16 g_pit_cod;
+ Word16 coeff[10];
+ Word16 coeff_lo[10];
+ Word16 exp_max[10]; /* 0..4: sf0; 5..9: sf1 */
+ Word32 L_tmp;
+ Word32 dist_min;
+
+ /*-------------------------------------------------------------------*
+ * predicted codebook gain *
+ * ~~~~~~~~~~~~~~~~~~~~~~~ *
+ * gc0 = 2^exp_gcode0 * 2^frac_gcode0 *
+ * *
+ * gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0) *
+ *-------------------------------------------------------------------*/
+
+ sf0_gcode0 = (Word16)(Pow2(14, sf0_frac_gcode0, pOverflow));
+ sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
+
+ /*
+ * For each subframe, the error energy (sum) to be minimized consists
+ * of five terms, t[0..4].
+ *
+ * t[0] = gp^2 * <y1 y1>
+ * t[1] = -2*gp * <xn y1>
+ * t[2] = gc^2 * <y2 y2>
+ * t[3] = -2*gc * <xn y2>
+ * t[4] = 2*gp*gc * <y1 y2>
+ *
+ */
+
+ /* sf 0 */
+ /* determine the scaling exponent for g_code: ec = ec0 - 11 */
+ exp = sf0_exp_gcode0 - 11;
+
+ /* calculate exp_max[i] = s[i]-1 */
+ exp_max[0] = (sf0_exp_coeff[0] - 13);
+ exp_max[1] = (sf0_exp_coeff[1] - 14);
+ exp_max[2] = (sf0_exp_coeff[2] + (15 + (exp << 1)));
+ exp_max[3] = (sf0_exp_coeff[3] + exp);
+ exp_max[4] = (sf0_exp_coeff[4] + (1 + exp));
+
+ /* sf 1 */
+ /* determine the scaling exponent for g_code: ec = ec0 - 11 */
+ exp = sf1_exp_gcode0 - 11;
+
+ /* calculate exp_max[i] = s[i]-1 */
+ exp_max[5] = (sf1_exp_coeff[0] - 13);
+ exp_max[6] = (sf1_exp_coeff[1] - 14);
+ exp_max[7] = (sf1_exp_coeff[2] + (15 + (exp << 1)));
+ exp_max[8] = (sf1_exp_coeff[3] + exp);
+ exp_max[9] = (sf1_exp_coeff[4] + (1 + exp));
+
+ /*-------------------------------------------------------------------*
+ * Gain search equalisation: *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ * The MSE for the two subframes is weighted differently if there *
+ * is a big difference in the corresponding target energies *
+ *-------------------------------------------------------------------*/
+
+ /* make the target energy exponents the same by de-normalizing the
+ fraction of the smaller one. This is necessary to be able to compare
+ them
+ */
+ exp = sf0_exp_target_en - sf1_exp_target_en;
+ if (exp > 0)
+ {
+ sf1_frac_target_en >>= exp;
+ }
+ else
+ {
+ sf0_frac_target_en >>= (-exp);
+ }
+
+ /* assume no change of exponents */
+ exp = 0;
+
+ /* test for target energy difference; set exp to +1 or -1 to scale
+ * up/down coefficients for sf 1
+ */
+ tmp = shr_r(sf1_frac_target_en, 1, pOverflow); /* tmp = ceil(0.5*en(sf1)) */
+
+ if (tmp > sf0_frac_target_en) /* tmp > en(sf0)? */
+ {
+ /*
+ * target_energy(sf1) > 2*target_energy(sf0)
+ * -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
+ */
+ exp = 1;
+ }
+ else
+ {
+ tmp = ((sf0_frac_target_en + 3) >> 2); /* tmp=ceil(0.25*en(sf0)) */
+
+ if (tmp > sf1_frac_target_en) /* tmp > en(sf1)? */
+ {
+ /*
+ * target_energy(sf1) < 0.25*target_energy(sf0)
+ * -> scale down MSE(sf0) by 0.5 by subtracting 1 from
+ * coefficients 0..4
+ */
+ exp = -1;
+ }
+ }
+
+ /* apply the equalisation to the subframe-0 coefficient exponents */
+ for (i = 0; i < 5; i++)
+ {
+ exp_max[i] += exp;
+ }
+
+ /*-------------------------------------------------------------------*
+ * Find maximum exponent: *
+ * ~~~~~~~~~~~~~~~~~~~~~~ *
+ * *
+ * For the sum operation, all terms must have the same scaling; *
+ * that scaling should be low enough to prevent overflow. There- *
+ * fore, the maximum scale is determined and all coefficients are *
+ * re-scaled: *
+ * *
+ * exp = max(exp_max[i]) + 1; *
+ * e = exp_max[i]-exp; e <= 0! *
+ * c[i] = c[i]*2^e *
+ *-------------------------------------------------------------------*/
+
+ exp = exp_max[0];
+ for (i = 9; i > 0; i--)
+ {
+ if (exp_max[i] > exp)
+ {
+ exp = exp_max[i];
+ }
+ }
+ exp++; /* To avoid overflow */
+
+ p = &sf0_frac_coeff[0];
+ for (i = 0; i < 5; i++)
+ {
+ tmp = (exp - exp_max[i]);
+ L_tmp = ((Word32)(*p++) << 16);
+ L_tmp = L_shr(L_tmp, tmp, pOverflow);
+ /* inlined L_Extract(): split L_tmp into DPF hi word (coeff[i]) and
+ the remaining 15 low bits (coeff_lo[i]) for Mpy_32_16() below */
+ coeff[i] = (Word16)(L_tmp >> 16);
+ coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
+ }
+ p = &sf1_frac_coeff[0];
+ for (; i < 10; i++)
+ {
+ tmp = exp - exp_max[i];
+ L_tmp = ((Word32)(*p++) << 16);
+ L_tmp = L_shr(L_tmp, tmp, pOverflow);
+ /* inlined L_Extract(), same as in the sf0 loop above */
+ coeff[i] = (Word16)(L_tmp >> 16);
+ coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
+ }
+
+
+ /*-------------------------------------------------------------------*
+ * Codebook search: *
+ * ~~~~~~~~~~~~~~~~ *
+ * *
+ * For each pair (g_pitch, g_fac) in the table calculate the *
+ * terms t[0..4] and sum them up; the result is the mean squared *
+ * error for the quantized gains from the table. The index for the *
+ * minimum MSE is stored and finally used to retrieve the quantized *
+ * gains *
+ *-------------------------------------------------------------------*/
+
+ /* start with "infinite" MSE */
+ dist_min = MAX_32;
+
+ p = &table_gain_MR475[0];
+
+ for (i = 0; i < MR475_VQ_SIZE; i++)
+ {
+ /* subframe 0 (and 2) calculations */
+ g_pitch = *p++;
+ g_code = *p++;
+
+ /* inlined mult(): fixed-point Q15 multiply, (a*b) >> 15 */
+ g_code = (Word16)(((Word32) g_code * sf0_gcode0) >> 15);
+ g2_pitch = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
+ g2_code = (Word16)(((Word32) g_code * g_code) >> 15);
+ g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
+
+
+ L_tmp = Mpy_32_16(coeff[0], coeff_lo[0], g2_pitch, pOverflow) +
+ Mpy_32_16(coeff[1], coeff_lo[1], g_pitch, pOverflow) +
+ Mpy_32_16(coeff[2], coeff_lo[2], g2_code, pOverflow) +
+ Mpy_32_16(coeff[3], coeff_lo[3], g_code, pOverflow) +
+ Mpy_32_16(coeff[4], coeff_lo[4], g_pit_cod, pOverflow);
+
+ /* remember whether the sf0 pitch gain respects gp_limit */
+ tmp = (g_pitch - gp_limit);
+
+ /* subframe 1 (and 3) calculations */
+ g_pitch = *p++;
+ g_code = *p++;
+
+ /* consider this entry only if BOTH subframe pitch gains obey gp_limit */
+ if ((tmp <= 0) && (g_pitch <= gp_limit))
+ {
+ /* inlined mult(): fixed-point Q15 multiply, (a*b) >> 15 */
+ g_code = (Word16)(((Word32) g_code * sf1_gcode0) >> 15);
+ g2_pitch = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
+ g2_code = (Word16)(((Word32) g_code * g_code) >> 15);
+ g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
+
+ L_tmp += (Mpy_32_16(coeff[5], coeff_lo[5], g2_pitch, pOverflow) +
+ Mpy_32_16(coeff[6], coeff_lo[6], g_pitch, pOverflow) +
+ Mpy_32_16(coeff[7], coeff_lo[7], g2_code, pOverflow) +
+ Mpy_32_16(coeff[8], coeff_lo[8], g_code, pOverflow) +
+ Mpy_32_16(coeff[9], coeff_lo[9], g_pit_cod, pOverflow));
+
+ /* store table index if MSE for this index is lower
+ than the minimum MSE seen so far */
+ if (L_tmp < dist_min)
+ {
+ dist_min = L_tmp;
+ index = i;
+ }
+ }
+ }
+
+ /*------------------------------------------------------------------*
+ * read quantized gains and update MA predictor memories *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
+ *------------------------------------------------------------------*/
+
+ /* for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
+ as those calculated from the "real" predictor using quantized gains */
+ /* table has 4 entries per index: gp(sf0), gc(sf0), gp(sf1), gc(sf1) */
+ tmp = index << 2;
+ MR475_quant_store_results(pred_st,
+ &table_gain_MR475[tmp],
+ sf0_gcode0,
+ sf0_exp_gcode0,
+ sf0_gain_pit,
+ sf0_gain_cod,
+ pOverflow);
+
+ /* calculate new predicted gain for subframe 1 (this time using
+ the real, quantized gains) */
+ gc_pred(pred_st, MR475, sf1_code_nosharp,
+ &sf1_exp_gcode0, &sf1_frac_gcode0,
+ &sf0_exp_gcode0, &sf0_gcode0, /* unused args */
+ pOverflow);
+
+ sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
+
+ /* advance to the subframe-1 pair within the selected table entry */
+ tmp += 2;
+ MR475_quant_store_results(
+ pred_st,
+ &table_gain_MR475[tmp],
+ sf1_gcode0,
+ sf1_exp_gcode0,
+ sf1_gain_pit,
+ sf1_gain_cod,
+ pOverflow);
+
+ return(index);
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain475.h b/media/codecs/amrnb/enc/src/qgain475.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain475.h
rename to media/codecs/amrnb/enc/src/qgain475.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain795.cpp b/media/codecs/amrnb/enc/src/qgain795.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain795.cpp
rename to media/codecs/amrnb/enc/src/qgain795.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain795.h b/media/codecs/amrnb/enc/src/qgain795.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain795.h
rename to media/codecs/amrnb/enc/src/qgain795.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qua_gain.cpp b/media/codecs/amrnb/enc/src/qua_gain.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qua_gain.cpp
rename to media/codecs/amrnb/enc/src/qua_gain.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/s10_8pf.cpp b/media/codecs/amrnb/enc/src/s10_8pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/s10_8pf.cpp
rename to media/codecs/amrnb/enc/src/s10_8pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/s10_8pf.h b/media/codecs/amrnb/enc/src/s10_8pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/s10_8pf.h
rename to media/codecs/amrnb/enc/src/s10_8pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp b/media/codecs/amrnb/enc/src/set_sign.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
rename to media/codecs/amrnb/enc/src/set_sign.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.h b/media/codecs/amrnb/enc/src/set_sign.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/set_sign.h
rename to media/codecs/amrnb/enc/src/set_sign.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sid_sync.cpp b/media/codecs/amrnb/enc/src/sid_sync.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sid_sync.cpp
rename to media/codecs/amrnb/enc/src/sid_sync.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sid_sync.h b/media/codecs/amrnb/enc/src/sid_sync.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sid_sync.h
rename to media/codecs/amrnb/enc/src/sid_sync.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sp_enc.cpp b/media/codecs/amrnb/enc/src/sp_enc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sp_enc.cpp
rename to media/codecs/amrnb/enc/src/sp_enc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sp_enc.h b/media/codecs/amrnb/enc/src/sp_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sp_enc.h
rename to media/codecs/amrnb/enc/src/sp_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spreproc.cpp b/media/codecs/amrnb/enc/src/spreproc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spreproc.cpp
rename to media/codecs/amrnb/enc/src/spreproc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spreproc.h b/media/codecs/amrnb/enc/src/spreproc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spreproc.h
rename to media/codecs/amrnb/enc/src/spreproc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spstproc.cpp b/media/codecs/amrnb/enc/src/spstproc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spstproc.cpp
rename to media/codecs/amrnb/enc/src/spstproc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spstproc.h b/media/codecs/amrnb/enc/src/spstproc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spstproc.h
rename to media/codecs/amrnb/enc/src/spstproc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp b/media/codecs/amrnb/enc/src/ton_stab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp
rename to media/codecs/amrnb/enc/src/ton_stab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.h b/media/codecs/amrnb/enc/src/ton_stab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ton_stab.h
rename to media/codecs/amrnb/enc/src/ton_stab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h b/media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
rename to media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp b/media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
rename to media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/test/Android.bp b/media/codecs/amrnb/enc/test/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/Android.bp
rename to media/codecs/amrnb/enc/test/Android.bp
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AndroidTest.xml b/media/codecs/amrnb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AndroidTest.xml
rename to media/codecs/amrnb/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrnb/enc/test/README.md b/media/codecs/amrnb/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/README.md
rename to media/codecs/amrnb/enc/test/README.md
diff --git a/media/libstagefright/codecs/amrnb/enc/test/amrnb_enc_test.cpp b/media/codecs/amrnb/enc/test/amrnb_enc_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/amrnb_enc_test.cpp
rename to media/codecs/amrnb/enc/test/amrnb_enc_test.cpp
diff --git a/media/codecs/amrnb/fuzzer/Android.bp b/media/codecs/amrnb/fuzzer/Android.bp
new file mode 100644
index 0000000..c1eaa53
--- /dev/null
+++ b/media/codecs/amrnb/fuzzer/Android.bp
@@ -0,0 +1,43 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "amrnb_dec_fuzzer",
+ host_supported: true,
+ srcs: [
+ "amrnb_dec_fuzzer.cpp",
+ ],
+ static_libs: [
+ "libstagefright_amrnbdec",
+ "libstagefright_amrnb_common",
+ "liblog",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/README.md b/media/codecs/amrnb/fuzzer/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrnb/fuzzer/README.md
rename to media/codecs/amrnb/fuzzer/README.md
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp b/media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
rename to media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
diff --git a/media/libstagefright/codecs/amrnb/patent_disclaimer.txt b/media/codecs/amrnb/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/amrnb/patent_disclaimer.txt
rename to media/codecs/amrnb/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/amrwb/Android.bp b/media/codecs/amrwb/dec/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/Android.bp
rename to media/codecs/amrwb/dec/Android.bp
diff --git a/media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
copy to media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/dec/NOTICE b/media/codecs/amrwb/dec/NOTICE
similarity index 100%
copy from media/libstagefright/codecs/amrnb/dec/NOTICE
copy to media/codecs/amrwb/dec/NOTICE
diff --git a/media/codecs/amrwb/dec/TEST_MAPPING b/media/codecs/amrwb/dec/TEST_MAPPING
new file mode 100644
index 0000000..0278d26
--- /dev/null
+++ b/media/codecs/amrwb/dec/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/codecs/amrwb/dec
+{
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "AmrwbDecoderTest"}
+
+ ]
+}
diff --git a/media/codecs/amrwb/dec/fuzzer/Android.bp b/media/codecs/amrwb/dec/fuzzer/Android.bp
new file mode 100644
index 0000000..7106a30
--- /dev/null
+++ b/media/codecs/amrwb/dec/fuzzer/Android.bp
@@ -0,0 +1,41 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "amrwb_dec_fuzzer",
+ host_supported: true,
+ srcs: [
+ "amrwb_dec_fuzzer.cpp",
+ ],
+ static_libs: [
+ "libstagefright_amrwbdec",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/README.md b/media/codecs/amrwb/dec/fuzzer/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwb/fuzzer/README.md
rename to media/codecs/amrwb/dec/fuzzer/README.md
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/amrwb_dec_fuzzer.cpp b/media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/fuzzer/amrwb_dec_fuzzer.cpp
rename to media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
diff --git a/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h b/media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h
rename to media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
diff --git a/media/libstagefright/codecs/mp3dec/patent_disclaimer.txt b/media/codecs/amrwb/dec/patent_disclaimer.txt
similarity index 100%
copy from media/libstagefright/codecs/mp3dec/patent_disclaimer.txt
copy to media/codecs/amrwb/dec/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/amrwb/src/agc2_amr_wb.cpp b/media/codecs/amrwb/dec/src/agc2_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/agc2_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/agc2_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/band_pass_6k_7k.cpp b/media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/band_pass_6k_7k.cpp
rename to media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_acelp_2p_in_64.cpp b/media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_acelp_2p_in_64.cpp
rename to media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_acelp_4p_in_64.cpp b/media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_acelp_4p_in_64.cpp
rename to media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_alg_codebook.cpp b/media/codecs/amrwb/dec/src/dec_alg_codebook.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_alg_codebook.cpp
rename to media/codecs/amrwb/dec/src/dec_alg_codebook.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_gain2_amr_wb.cpp b/media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_gain2_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp b/media/codecs/amrwb/dec/src/deemphasis_32.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp
rename to media/codecs/amrwb/dec/src/deemphasis_32.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dtx.h b/media/codecs/amrwb/dec/src/dtx.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dtx.h
rename to media/codecs/amrwb/dec/src/dtx.h
diff --git a/media/libstagefright/codecs/amrwb/src/dtx_decoder_amr_wb.cpp b/media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dtx_decoder_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/e_pv_amrwbdec.h b/media/codecs/amrwb/dec/src/e_pv_amrwbdec.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/e_pv_amrwbdec.h
rename to media/codecs/amrwb/dec/src/e_pv_amrwbdec.h
diff --git a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp b/media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp
rename to media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.h b/media/codecs/amrwb/dec/src/get_amr_wb_bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.h
rename to media/codecs/amrwb/dec/src/get_amr_wb_bits.h
diff --git a/media/libstagefright/codecs/amrwb/src/highpass_400hz_at_12k8.cpp b/media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/highpass_400hz_at_12k8.cpp
rename to media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/highpass_50hz_at_12k8.cpp b/media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/highpass_50hz_at_12k8.cpp
rename to media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp b/media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp
rename to media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/interpolate_isp.cpp b/media/codecs/amrwb/dec/src/interpolate_isp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/interpolate_isp.cpp
rename to media/codecs/amrwb/dec/src/interpolate_isp.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isf_extrapolation.cpp b/media/codecs/amrwb/dec/src/isf_extrapolation.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isf_extrapolation.cpp
rename to media/codecs/amrwb/dec/src/isf_extrapolation.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isp_az.cpp b/media/codecs/amrwb/dec/src/isp_az.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isp_az.cpp
rename to media/codecs/amrwb/dec/src/isp_az.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isp_isf.cpp b/media/codecs/amrwb/dec/src/isp_isf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isp_isf.cpp
rename to media/codecs/amrwb/dec/src/isp_isf.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/lagconceal.cpp b/media/codecs/amrwb/dec/src/lagconceal.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/lagconceal.cpp
rename to media/codecs/amrwb/dec/src/lagconceal.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/low_pass_filt_7k.cpp b/media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/low_pass_filt_7k.cpp
rename to media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/median5.cpp b/media/codecs/amrwb/dec/src/median5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/median5.cpp
rename to media/codecs/amrwb/dec/src/median5.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/mime_io.cpp b/media/codecs/amrwb/dec/src/mime_io.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/mime_io.cpp
rename to media/codecs/amrwb/dec/src/mime_io.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/mime_io.h b/media/codecs/amrwb/dec/src/mime_io.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/mime_io.h
rename to media/codecs/amrwb/dec/src/mime_io.h
diff --git a/media/libstagefright/codecs/amrwb/src/noise_gen_amrwb.cpp b/media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/noise_gen_amrwb.cpp
rename to media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/normalize_amr_wb.cpp b/media/codecs/amrwb/dec/src/normalize_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/normalize_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/normalize_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/normalize_amr_wb.h b/media/codecs/amrwb/dec/src/normalize_amr_wb.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/normalize_amr_wb.h
rename to media/codecs/amrwb/dec/src/normalize_amr_wb.h
diff --git a/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp b/media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp
rename to media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp b/media/codecs/amrwb/dec/src/phase_dispersion.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp
rename to media/codecs/amrwb/dec/src/phase_dispersion.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pit_shrp.cpp b/media/codecs/amrwb/dec/src/pit_shrp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pit_shrp.cpp
rename to media/codecs/amrwb/dec/src/pit_shrp.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pred_lt4.cpp b/media/codecs/amrwb/dec/src/pred_lt4.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pred_lt4.cpp
rename to media/codecs/amrwb/dec/src/pred_lt4.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/preemph_amrwb_dec.cpp b/media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/preemph_amrwb_dec.cpp
rename to media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pv_amr_wb_type_defs.h b/media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pv_amr_wb_type_defs.h
rename to media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp b/media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp
rename to media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.h b/media/codecs/amrwb/dec/src/pvamrwb_math_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.h
rename to media/codecs/amrwb/dec/src/pvamrwb_math_op.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp b/media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_acelp.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_acelp.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_armv5.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_armv5.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_gcc_armv5.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_gcc_armv5.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_cnst.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_cnst.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_mem_funcs.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_mem_funcs.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
diff --git a/media/libstagefright/codecs/amrwb/src/q_gain2_tab.cpp b/media/codecs/amrwb/dec/src/q_gain2_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/q_gain2_tab.cpp
rename to media/codecs/amrwb/dec/src/q_gain2_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/q_pulse.h b/media/codecs/amrwb/dec/src/q_pulse.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/q_pulse.h
rename to media/codecs/amrwb/dec/src/q_pulse.h
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns.cpp b/media/codecs/amrwb/dec/src/qisf_ns.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns.cpp
rename to media/codecs/amrwb/dec/src/qisf_ns.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns.h b/media/codecs/amrwb/dec/src/qisf_ns.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns.h
rename to media/codecs/amrwb/dec/src/qisf_ns.h
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns_tab.cpp b/media/codecs/amrwb/dec/src/qisf_ns_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns_tab.cpp
rename to media/codecs/amrwb/dec/src/qisf_ns_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s.cpp b/media/codecs/amrwb/dec/src/qpisf_2s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s.cpp
rename to media/codecs/amrwb/dec/src/qpisf_2s.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s.h b/media/codecs/amrwb/dec/src/qpisf_2s.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s.h
rename to media/codecs/amrwb/dec/src/qpisf_2s.h
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s_tab.cpp b/media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s_tab.cpp
rename to media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/scale_signal.cpp b/media/codecs/amrwb/dec/src/scale_signal.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/scale_signal.cpp
rename to media/codecs/amrwb/dec/src/scale_signal.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.cpp b/media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.h b/media/codecs/amrwb/dec/src/synthesis_amr_wb.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.h
rename to media/codecs/amrwb/dec/src/synthesis_amr_wb.h
diff --git a/media/libstagefright/codecs/amrwb/src/voice_factor.cpp b/media/codecs/amrwb/dec/src/voice_factor.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/voice_factor.cpp
rename to media/codecs/amrwb/dec/src/voice_factor.cpp
diff --git a/media/codecs/amrwb/dec/src/wb_syn_filt.cpp b/media/codecs/amrwb/dec/src/wb_syn_filt.cpp
new file mode 100644
index 0000000..d960322
--- /dev/null
+++ b/media/codecs/amrwb/dec/src/wb_syn_filt.cpp
@@ -0,0 +1,307 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+ 3GPP TS 26.173
+ ANSI-C code for the Adaptive Multi-Rate - Wideband (AMR-WB) speech codec
+ Available from http://www.3gpp.org
+
+(C) 2007, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Filename: wb_syn_filt.cpp
+
+ Date: 05/08/2004
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+wb_syn_filt
+
+ int16 a[], (i) Q12 : a[m+1] prediction coefficients
+ int16 m, (i) : order of LP filter
+ int16 x[], (i) : input signal
+ int16 y[], (o) : output signal
+ int16 lg, (i) : size of filtering
+ int16 mem[], (i/o) : memory associated with this filtering.
+ int16 update, (i) : 0=no update, 1=update of memory.
+ int16 y_buf[]
+
+Syn_filt_32
+
+ int16 a[], (i) Q12 : a[m+1] prediction coefficients
+ int16 m, (i) : order of LP filter
+ int16 exc[], (i) Qnew: excitation (exc[i] >> Qnew)
+ int16 Qnew, (i) : exc scaling = 0(min) to 8(max)
+ int16 sig_hi[], (o) /16 : synthesis high
+ int16 sig_lo[], (o) /16 : synthesis low
+ int16 lg (i) : size of filtering
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ Do the synthesis filtering 1/A(z) 16 and 32-bits version
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+
+#include "pv_amr_wb_type_defs.h"
+#include "pvamrwbdecoder_mem_funcs.h"
+#include "pvamrwbdecoder_basic_op.h"
+#include "pvamrwb_math_op.h"
+#include "pvamrwbdecoder_cnst.h"
+#include "pvamrwbdecoder_acelp.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+void wb_syn_filt(
+ int16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
+ int16 m, /* (i) : order of LP filter */
+ int16 x[], /* (i) : input signal */
+ int16 y[], /* (o) : output signal */
+ int16 lg, /* (i) : size of filtering */
+ int16 mem[], /* (i/o) : memory associated with this filtering. */
+ int16 update, /* (i) : 0=no update, 1=update of memory. */
+ int16 y_buf[]
+)
+{
+
+ int16 i, j;
+ int32 L_tmp1;
+ int32 L_tmp2;
+ int32 L_tmp3;
+ int32 L_tmp4;
+ int16 *yy;
+
+ /* copy initial filter states into synthesis buffer */
+ pv_memcpy(y_buf, mem, m*sizeof(*yy));
+
+ yy = &y_buf[m];
+
+ /* Do the filtering. */
+
+ for (i = 0; i < lg >> 2; i++)
+ {
+ L_tmp1 = -((int32)x[(i<<2)] << 11);
+ L_tmp2 = -((int32)x[(i<<2)+1] << 11);
+ L_tmp3 = -((int32)x[(i<<2)+2] << 11);
+ L_tmp4 = -((int32)x[(i<<2)+3] << 11);
+
+ /* a[] uses Q12 and abs(a) =< 1 */
+
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2) -3], a[3], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2) -2], a[3], L_tmp2);
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2) -2], a[2], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2) -1], a[2], L_tmp2);
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2) -1], a[1], L_tmp1);
+
+ for (j = 4; j < m; j += 2)
+ {
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2)-1 - j], a[j+1], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2) - j], a[j+1], L_tmp2);
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2) - j], a[j ], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j ], L_tmp2);
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j+1], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j+1], L_tmp4);
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j ], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2)+3 - j], a[j ], L_tmp4);
+ }
+
+ L_tmp1 = fxp_mac_16by16(yy[(i<<2) - j], a[j], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j], L_tmp2);
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2)+3 - j], a[j], L_tmp4);
+
+ L_tmp1 = shl_int32(L_tmp1, 4);
+
+ y[(i<<2)] = yy[(i<<2)] = amr_wb_round(-L_tmp1);
+
+ L_tmp2 = fxp_mac_16by16(yy[(i<<2)], a[1], L_tmp2);
+
+ L_tmp2 = shl_int32(L_tmp2, 4);
+
+ y[(i<<2)+1] = yy[(i<<2)+1] = amr_wb_round(-L_tmp2);
+
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2) - 1], a[3], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2)], a[3], L_tmp4);
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2)], a[2], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2) + 1], a[2], L_tmp4);
+ L_tmp3 = fxp_mac_16by16(yy[(i<<2) + 1], a[1], L_tmp3);
+
+ L_tmp3 = shl_int32(L_tmp3, 4);
+
+ y[(i<<2)+2] = yy[(i<<2)+2] = amr_wb_round(-L_tmp3);
+
+ L_tmp4 = fxp_mac_16by16(yy[(i<<2)+2], a[1], L_tmp4);
+
+ L_tmp4 = shl_int32(L_tmp4, 4);
+
+ y[(i<<2)+3] = yy[(i<<2)+3] = amr_wb_round(-L_tmp4);
+ }
+
+
+ /* Update memory if required */
+
+ if (update)
+ {
+ pv_memcpy(mem, &y[lg - m], m*sizeof(*y));
+ }
+
+ return;
+}
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+void Syn_filt_32(
+ int16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
+ int16 m, /* (i) : order of LP filter */
+ int16 exc[], /* (i) Qnew: excitation (exc[i] >> Qnew) */
+ int16 Qnew, /* (i) : exc scaling = 0(min) to 8(max) */
+ int16 sig_hi[], /* (o) /16 : synthesis high */
+ int16 sig_lo[], /* (o) /16 : synthesis low */
+ int16 lg /* (i) : size of filtering */
+)
+{
+ int16 i, k, a0;
+ int32 L_tmp1;
+ int32 L_tmp2;
+ int32 L_tmp3;
+ int32 L_tmp4;
+
+ a0 = 9 - Qnew; /* input / 16 and >>Qnew */
+
+ /* Do the filtering. */
+
+ for (i = 0; i < lg >> 1; i++)
+ {
+
+ L_tmp3 = 0;
+ L_tmp4 = 0;
+
+ L_tmp1 = fxp_mul_16by16(sig_lo[(i<<1) - 1], a[1]);
+ L_tmp2 = fxp_mul_16by16(sig_hi[(i<<1) - 1], a[1]);
+
+ for (k = 2; k < m; k += 2)
+ {
+
+ L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)-1 - k], a[k+1], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)-1 - k], a[k+1], L_tmp2);
+ L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1) - k], a[k ], L_tmp1);
+ L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k ], L_tmp2);
+ L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1) - k], a[k+1], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k+1], L_tmp4);
+ L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k ], L_tmp3);
+ L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k ], L_tmp4);
+ }
+
+ L_tmp1 = -fxp_mac_16by16(sig_lo[(i<<1) - k], a[k], L_tmp1);
+ L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k], L_tmp3);
+ L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k], L_tmp2);
+ L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k], L_tmp4);
+
+
+
+ L_tmp1 >>= 11; /* -4 : sig_lo[i] << 4 */
+
+ int64 sig_tmp;
+ sig_tmp = (int64)L_tmp1 + (int32)(exc[(i<<1)] << a0);
+ L_tmp1 = (int32)(sig_tmp - (L_tmp2 << 1));
+
+ /* sig_hi = bit16 to bit31 of synthesis */
+ L_tmp1 = shl_int32(L_tmp1, 3); /* ai in Q12 */
+
+ sig_hi[(i<<1)] = (int16)(L_tmp1 >> 16);
+
+ L_tmp4 = fxp_mac_16by16((int16)(L_tmp1 >> 16), a[1], L_tmp4);
+
+ /* sig_lo = bit4 to bit15 of synthesis */
+ /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
+ sig_lo[(i<<1)] = (int16)((L_tmp1 >> 4) - ((L_tmp1 >> 16) << 12));
+
+ L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)], a[1], L_tmp3);
+ L_tmp3 = -L_tmp3 >> 11;
+
+ sig_tmp = (int64)L_tmp3 + (int32)(exc[(i<<1)+1] << a0);
+ L_tmp3 = (int32)(sig_tmp - (L_tmp4 << 1));
+ /* sig_hi = bit16 to bit31 of synthesis */
+ L_tmp3 = shl_int32(L_tmp3, 3); /* ai in Q12 */
+ sig_hi[(i<<1)+1] = (int16)(L_tmp3 >> 16);
+
+ /* sig_lo = bit4 to bit15 of synthesis */
+ /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
+ sig_lo[(i<<1)+1] = (int16)((L_tmp3 >> 4) - (sig_hi[(i<<1)+1] << 12));
+ }
+
+}
+
+
diff --git a/media/libstagefright/codecs/amrwb/src/weight_amrwb_lpc.cpp b/media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/weight_amrwb_lpc.cpp
rename to media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h b/media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h
rename to media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp b/media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp
rename to media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
diff --git a/media/codecs/amrwb/dec/test/Android.bp b/media/codecs/amrwb/dec/test/Android.bp
new file mode 100644
index 0000000..e8a2aa9
--- /dev/null
+++ b/media/codecs/amrwb/dec/test/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "AmrwbDecoderTest",
+ test_suites: ["device-tests"],
+ gtest: true,
+
+ srcs: [
+ "AmrwbDecoderTest.cpp",
+ ],
+
+ static_libs: [
+ "libstagefright_amrwbdec",
+ "libsndfile",
+ "libaudioutils",
+ ],
+
+ shared_libs: [
+ "liblog",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libstagefright/codecs/amrwb/test/AndroidTest.xml b/media/codecs/amrwb/dec/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AndroidTest.xml
rename to media/codecs/amrwb/dec/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrwb/test/README.md b/media/codecs/amrwb/dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/README.md
rename to media/codecs/amrwb/dec/test/README.md
diff --git a/media/libstagefright/codecs/amrwb/test/amrwbdec_test.cpp b/media/codecs/amrwb/dec/test/amrwbdec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/amrwbdec_test.cpp
rename to media/codecs/amrwb/dec/test/amrwbdec_test.cpp
diff --git a/media/codecs/amrwb/enc/Android.bp b/media/codecs/amrwb/enc/Android.bp
new file mode 100644
index 0000000..1521a45
--- /dev/null
+++ b/media/codecs/amrwb/enc/Android.bp
@@ -0,0 +1,149 @@
+cc_library_static {
+ name: "libstagefright_amrwbenc",
+ vendor_available: true,
+ min_sdk_version: "29",
+
+ srcs: [
+ "src/autocorr.c",
+ "src/az_isp.c",
+ "src/bits.c",
+ "src/c2t64fx.c",
+ "src/c4t64fx.c",
+ "src/convolve.c",
+ "src/cor_h_x.c",
+ "src/decim54.c",
+ "src/deemph.c",
+ "src/dtx.c",
+ "src/g_pitch.c",
+ "src/gpclip.c",
+ "src/homing.c",
+ "src/hp400.c",
+ "src/hp50.c",
+ "src/hp6k.c",
+ "src/hp_wsp.c",
+ "src/int_lpc.c",
+ "src/isp_az.c",
+ "src/isp_isf.c",
+ "src/lag_wind.c",
+ "src/levinson.c",
+ "src/log2.c",
+ "src/lp_dec2.c",
+ "src/math_op.c",
+ "src/oper_32b.c",
+ "src/p_med_ol.c",
+ "src/pit_shrp.c",
+ "src/pitch_f4.c",
+ "src/pred_lt4.c",
+ "src/preemph.c",
+ "src/q_gain2.c",
+ "src/q_pulse.c",
+ "src/qisf_ns.c",
+ "src/qpisf_2s.c",
+ "src/random.c",
+ "src/residu.c",
+ "src/scale.c",
+ "src/stream.c",
+ "src/syn_filt.c",
+ "src/updt_tar.c",
+ "src/util.c",
+ "src/voAMRWBEnc.c",
+ "src/voicefac.c",
+ "src/wb_vad.c",
+ "src/weight_a.c",
+ "src/mem_align.c",
+ ],
+
+ arch: {
+ arm: {
+ srcs: [
+ "src/asm/ARMV5E/convolve_opt.s",
+ "src/asm/ARMV5E/cor_h_vec_opt.s",
+ "src/asm/ARMV5E/Deemph_32_opt.s",
+ "src/asm/ARMV5E/Dot_p_opt.s",
+ "src/asm/ARMV5E/Filt_6k_7k_opt.s",
+ "src/asm/ARMV5E/Norm_Corr_opt.s",
+ "src/asm/ARMV5E/pred_lt4_1_opt.s",
+ "src/asm/ARMV5E/residu_asm_opt.s",
+ "src/asm/ARMV5E/scale_sig_opt.s",
+ "src/asm/ARMV5E/Syn_filt_32_opt.s",
+ "src/asm/ARMV5E/syn_filt_opt.s",
+ ],
+
+ cflags: [
+ "-DARM",
+ "-DASM_OPT",
+ ],
+ local_include_dirs: ["src/asm/ARMV5E"],
+
+ instruction_set: "arm",
+
+ neon: {
+ exclude_srcs: [
+ "src/asm/ARMV5E/convolve_opt.s",
+ "src/asm/ARMV5E/cor_h_vec_opt.s",
+ "src/asm/ARMV5E/Deemph_32_opt.s",
+ "src/asm/ARMV5E/Dot_p_opt.s",
+ "src/asm/ARMV5E/Filt_6k_7k_opt.s",
+ "src/asm/ARMV5E/Norm_Corr_opt.s",
+ "src/asm/ARMV5E/pred_lt4_1_opt.s",
+ "src/asm/ARMV5E/residu_asm_opt.s",
+ "src/asm/ARMV5E/scale_sig_opt.s",
+ "src/asm/ARMV5E/Syn_filt_32_opt.s",
+ "src/asm/ARMV5E/syn_filt_opt.s",
+ ],
+
+ srcs: [
+ "src/asm/ARMV7/convolve_neon.s",
+ "src/asm/ARMV7/cor_h_vec_neon.s",
+ "src/asm/ARMV7/Deemph_32_neon.s",
+ "src/asm/ARMV7/Dot_p_neon.s",
+ "src/asm/ARMV7/Filt_6k_7k_neon.s",
+ "src/asm/ARMV7/Norm_Corr_neon.s",
+ "src/asm/ARMV7/pred_lt4_1_neon.s",
+ "src/asm/ARMV7/residu_asm_neon.s",
+ "src/asm/ARMV7/scale_sig_neon.s",
+ "src/asm/ARMV7/Syn_filt_32_neon.s",
+ "src/asm/ARMV7/syn_filt_neon.s",
+ ],
+
+ // don't actually generate neon instructions, see bug 26932980
+ cflags: [
+ "-DARMV7",
+ "-mfpu=vfpv3",
+ ],
+ local_include_dirs: [
+ "src/asm/ARMV5E",
+ "src/asm/ARMV7",
+ ],
+ },
+
+ },
+ },
+
+ include_dirs: [
+ "frameworks/av/include",
+ "frameworks/av/media/libstagefright/include",
+ ],
+
+ local_include_dirs: ["src"],
+ export_include_dirs: ["inc"],
+
+ shared_libs: [
+ "libstagefright_enc_common",
+ "liblog",
+ ],
+
+ cflags: ["-Werror"],
+ sanitize: {
+ cfi: true,
+ },
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+
diff --git a/media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
copy to media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/enc/NOTICE b/media/codecs/amrwb/enc/NOTICE
similarity index 100%
copy from media/libstagefright/codecs/amrnb/enc/NOTICE
copy to media/codecs/amrwb/enc/NOTICE
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c b/media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c
rename to media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp b/media/codecs/amrwb/enc/SampleCode/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
rename to media/codecs/amrwb/enc/SampleCode/Android.bp
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/MODULE_LICENSE_APACHE2
rename to media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/NOTICE b/media/codecs/amrwb/enc/SampleCode/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/NOTICE
rename to media/codecs/amrwb/enc/SampleCode/NOTICE
diff --git a/media/codecs/amrwb/enc/TEST_MAPPING b/media/codecs/amrwb/enc/TEST_MAPPING
new file mode 100644
index 0000000..045e8b3
--- /dev/null
+++ b/media/codecs/amrwb/enc/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
+{
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "AmrwbEncoderTest"}
+
+ ]
+}
diff --git a/media/libstagefright/codecs/amrwbenc/doc/voAMRWBEncoderSDK.pdf b/media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/doc/voAMRWBEncoderSDK.pdf
rename to media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
Binary files differ
diff --git a/media/codecs/amrwb/enc/fuzzer/Android.bp b/media/codecs/amrwb/enc/fuzzer/Android.bp
new file mode 100644
index 0000000..e3473d6
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/Android.bp
@@ -0,0 +1,41 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "amrwb_enc_fuzzer",
+ host_supported: true,
+
+ srcs: [
+ "amrwb_enc_fuzzer.cpp",
+ ],
+
+ static_libs: [
+ "liblog",
+ "libstagefright_amrwbenc",
+ "libstagefright_enc_common",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/codecs/amrwb/enc/fuzzer/README.md b/media/codecs/amrwb/enc/fuzzer/README.md
new file mode 100644
index 0000000..447fbfa
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrwbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR-WB is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AMR-WB supports the following parameters:
+1. Frame Type (parameter name: `frameType`)
+2. Mode (parameter name: `mode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `frameType` | 0. `VOAMRWB_DEFAULT` 1. `VOAMRWB_ITU` 2. `VOAMRWB_RFC3267` | Bits 0, 1 and 2 of 1st byte of data. |
+| `mode` | 0. `VOAMRWB_MD66` 1. `VOAMRWB_MD885` 2. `VOAMRWB_MD1265` 3. `VOAMRWB_MD1425` 4. `VOAMRWB_MD1585 ` 5. `VOAMRWB_MD1825` 6. `VOAMRWB_MD1985` 7. `VOAMRWB_MD2305` 8. `VOAMRWB_MD2385` 9. `VOAMRWB_N_MODES` | Bits 4, 5, 6 and 7 of 1st byte of data. |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation was successful, the input is advanced by the frame size.
+If the encode operation was un-successful, the input is still advanced by frame size so
+that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc) and doesnt `exit()` on any input and thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes steps to build amrwb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) amrwb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some pcm files to that folder
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp b/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
new file mode 100644
index 0000000..4773a1f
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
@@ -0,0 +1,142 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "cmnMemory.h"
+#include "voAMRWB.h"
+#include "cnst.h"
+
+typedef int(VO_API *VOGETAUDIOENCAPI)(VO_AUDIO_CODECAPI *pEncHandle);
+const int32_t kInputBufferSize = L_FRAME16k * sizeof(int16_t) * 2;
+const int32_t kOutputBufferSize = 2 * kInputBufferSize;
+const int32_t kModes[] = {VOAMRWB_MD66 /* 6.60kbps */, VOAMRWB_MD885 /* 8.85kbps */,
+ VOAMRWB_MD1265 /* 12.65kbps */, VOAMRWB_MD1425 /* 14.25kbps */,
+ VOAMRWB_MD1585 /* 15.85kbps */, VOAMRWB_MD1825 /* 18.25kbps */,
+ VOAMRWB_MD1985 /* 19.85kbps */, VOAMRWB_MD2305 /* 23.05kbps */,
+ VOAMRWB_MD2385 /* 23.85kbps */, VOAMRWB_N_MODES /* Invalid Mode */};
+const VOAMRWBFRAMETYPE kFrameTypes[] = {VOAMRWB_DEFAULT, VOAMRWB_ITU, VOAMRWB_RFC3267};
+
+// Thin RAII-style wrapper around the VisualOn AMR-WB encoder API used by
+// this fuzzer. Owns the codec API table, the memory-operator table and the
+// encoder handle; deInitEncoder() (also run by the destructor) releases all
+// three, so a partially-initialized instance is still cleaned up safely.
+class Codec {
+  public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    // Creates the encoder and configures frame type / mode from the first
+    // byte at *data (caller guarantees at least one readable byte).
+    // Returns false on any API failure.
+    bool initEncoder(const uint8_t *data);
+    // Releases the encoder handle and the heap-allocated helper tables.
+    // Safe to call multiple times.
+    void deInitEncoder();
+    // Feeds the whole [data, data+size) buffer to the encoder in chunks of
+    // at most kInputBufferSize bytes.
+    void encodeFrames(const uint8_t *data, size_t size);
+
+  private:
+    VO_AUDIO_CODECAPI *mApiHandle = nullptr;     // encoder vtable from voGetAMRWBEncAPI()
+    VO_MEM_OPERATOR *mMemOperator = nullptr;     // cmnMemory allocator callbacks
+    VO_HANDLE mEncoderHandle = nullptr;          // opaque per-encoder instance
+};
+
+// Creates and configures an AMR-WB encoder instance.
+//
+// The first input byte seeds both knobs deterministically:
+//   - high nibble (mod 10) selects the bitrate mode from kModes
+//     (including the deliberately-invalid VOAMRWB_N_MODES entry),
+//   - low bits (mod 3) select the output frame type from kFrameTypes.
+//
+// Returns false on any API failure. Allocations made before a failure are
+// intentionally left for deInitEncoder() / the destructor to release.
+bool Codec::initEncoder(const uint8_t *data) {
+    uint8_t startByte = *data;
+    int32_t mode = kModes[(startByte >> 4) % 10];
+    VOAMRWBFRAMETYPE frameType = kFrameTypes[startByte % 3];
+    // NOTE(review): non-placement `new` throws on failure rather than
+    // returning nullptr, so these null checks are effectively dead code.
+    mMemOperator = new VO_MEM_OPERATOR;
+    if (!mMemOperator) {
+        return false;
+    }
+
+    // Route all codec-internal allocations through the cmnMemory helpers.
+    mMemOperator->Alloc = cmnMemAlloc;
+    mMemOperator->Copy = cmnMemCopy;
+    mMemOperator->Free = cmnMemFree;
+    mMemOperator->Set = cmnMemSet;
+    mMemOperator->Check = cmnMemCheck;
+
+    VO_CODEC_INIT_USERDATA userData;
+    memset(&userData, 0, sizeof(userData));
+    userData.memflag = VO_IMF_USERMEMOPERATOR;
+    userData.memData = (VO_PTR)mMemOperator;
+
+    mApiHandle = new VO_AUDIO_CODECAPI;
+    if (!mApiHandle) {
+        return false;
+    }
+    if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) {
+        // Failed to get api handle
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->Init(&mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) {
+        // Failed to init AMRWB encoder
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &frameType)) {
+        // Failed to set AMRWB encoder frame type
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_MODE, &mode)) {
+        // Failed to set AMRWB encoder mode
+        return false;
+    }
+    return true;
+}
+
+// Tears down the encoder and frees the helper tables. Each member is
+// nulled after release, so repeated calls (destructor after an explicit
+// call) are harmless. Note the Uninit branch implicitly relies on
+// mApiHandle being non-null whenever mEncoderHandle is non-null, which
+// initEncoder()'s ordering guarantees.
+void Codec::deInitEncoder() {
+    if (mEncoderHandle) {
+        mApiHandle->Uninit(mEncoderHandle);
+        mEncoderHandle = nullptr;
+    }
+    if (mApiHandle) {
+        delete mApiHandle;
+        mApiHandle = nullptr;
+    }
+    if (mMemOperator) {
+        delete mMemOperator;
+        mMemOperator = nullptr;
+    }
+}
+
+// Feeds the entire buffer to the encoder, kInputBufferSize bytes at a time.
+// Return codes from SetInputData/GetOutputData are deliberately ignored:
+// the input cursor advances either way so the fuzzer always makes progress
+// and terminates. A do/while is used so a zero-length buffer still performs
+// one (empty) encode call.
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    do {
+        int32_t minSize = std::min((int32_t)size, kInputBufferSize);
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        VO_CODECBUFFER inData;
+        VO_CODECBUFFER outData;
+        VO_AUDIO_OUTPUTINFO outFormat;
+        inData.Buffer = (unsigned char *)data;
+        inData.Length = minSize;
+        // NOTE(review): outData.Length is never initialized here —
+        // presumably GetOutputData treats it as an output-only field;
+        // confirm against the VO_CODECBUFFER contract.
+        outData.Buffer = outputBuf;
+        mApiHandle->SetInputData(mEncoderHandle, &inData);
+        mApiHandle->GetOutputData(mEncoderHandle, &outData, &outFormat);
+        data += minSize;
+        size -= minSize;
+    } while (size > 0);
+}
+
+// libFuzzer entry point. The first byte configures the encoder; the rest of
+// the buffer is fed to it as PCM input. Always returns 0 per the libFuzzer
+// contract.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        // Need at least one byte to derive the encoder configuration.
+        return 0;
+    }
+    // Stack allocation: non-placement `operator new` throws std::bad_alloc
+    // rather than returning nullptr, so the previous `if (!codec)` check was
+    // dead code. The automatic destructor also guarantees deInitEncoder()
+    // runs on every path, with no delete to forget.
+    Codec codec;
+    if (codec.initEncoder(data)) {
+        // Consume first byte
+        ++data;
+        --size;
+        codec.encodeFrames(data, size);
+    }
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrwbenc/inc/acelp.h b/media/codecs/amrwb/enc/inc/acelp.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/acelp.h
rename to media/codecs/amrwb/enc/inc/acelp.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/basic_op.h b/media/codecs/amrwb/enc/inc/basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/basic_op.h
rename to media/codecs/amrwb/enc/inc/basic_op.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/bits.h b/media/codecs/amrwb/enc/inc/bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/bits.h
rename to media/codecs/amrwb/enc/inc/bits.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/cnst.h b/media/codecs/amrwb/enc/inc/cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/cnst.h
rename to media/codecs/amrwb/enc/inc/cnst.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/cod_main.h b/media/codecs/amrwb/enc/inc/cod_main.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/cod_main.h
rename to media/codecs/amrwb/enc/inc/cod_main.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/dtx.h b/media/codecs/amrwb/enc/inc/dtx.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/dtx.h
rename to media/codecs/amrwb/enc/inc/dtx.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/grid100.tab b/media/codecs/amrwb/enc/inc/grid100.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/grid100.tab
rename to media/codecs/amrwb/enc/inc/grid100.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/ham_wind.tab b/media/codecs/amrwb/enc/inc/ham_wind.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/ham_wind.tab
rename to media/codecs/amrwb/enc/inc/ham_wind.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/homing.tab b/media/codecs/amrwb/enc/inc/homing.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/homing.tab
rename to media/codecs/amrwb/enc/inc/homing.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab b/media/codecs/amrwb/enc/inc/isp_isf.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab
rename to media/codecs/amrwb/enc/inc/isp_isf.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/lag_wind.tab b/media/codecs/amrwb/enc/inc/lag_wind.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/lag_wind.tab
rename to media/codecs/amrwb/enc/inc/lag_wind.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/log2.h b/media/codecs/amrwb/enc/inc/log2.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/log2.h
rename to media/codecs/amrwb/enc/inc/log2.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/log2_tab.h b/media/codecs/amrwb/enc/inc/log2_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/log2_tab.h
rename to media/codecs/amrwb/enc/inc/log2_tab.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/main.h b/media/codecs/amrwb/enc/inc/main.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/main.h
rename to media/codecs/amrwb/enc/inc/main.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/math_op.h b/media/codecs/amrwb/enc/inc/math_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/math_op.h
rename to media/codecs/amrwb/enc/inc/math_op.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/mem_align.h b/media/codecs/amrwb/enc/inc/mem_align.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/mem_align.h
rename to media/codecs/amrwb/enc/inc/mem_align.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/mime_io.tab b/media/codecs/amrwb/enc/inc/mime_io.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/mime_io.tab
rename to media/codecs/amrwb/enc/inc/mime_io.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/oper_32b.h b/media/codecs/amrwb/enc/inc/oper_32b.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/oper_32b.h
rename to media/codecs/amrwb/enc/inc/oper_32b.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/p_med_o.h b/media/codecs/amrwb/enc/inc/p_med_o.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/p_med_o.h
rename to media/codecs/amrwb/enc/inc/p_med_o.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/p_med_ol.tab b/media/codecs/amrwb/enc/inc/p_med_ol.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/p_med_ol.tab
rename to media/codecs/amrwb/enc/inc/p_med_ol.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/q_gain2.tab b/media/codecs/amrwb/enc/inc/q_gain2.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/q_gain2.tab
rename to media/codecs/amrwb/enc/inc/q_gain2.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/q_pulse.h b/media/codecs/amrwb/enc/inc/q_pulse.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/q_pulse.h
rename to media/codecs/amrwb/enc/inc/q_pulse.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/qisf_ns.tab b/media/codecs/amrwb/enc/inc/qisf_ns.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/qisf_ns.tab
rename to media/codecs/amrwb/enc/inc/qisf_ns.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/qpisf_2s.tab b/media/codecs/amrwb/enc/inc/qpisf_2s.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/qpisf_2s.tab
rename to media/codecs/amrwb/enc/inc/qpisf_2s.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/stream.h b/media/codecs/amrwb/enc/inc/stream.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/stream.h
rename to media/codecs/amrwb/enc/inc/stream.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/typedef.h b/media/codecs/amrwb/enc/inc/typedef.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/typedef.h
rename to media/codecs/amrwb/enc/inc/typedef.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/typedefs.h b/media/codecs/amrwb/enc/inc/typedefs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/typedefs.h
rename to media/codecs/amrwb/enc/inc/typedefs.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/wb_vad.h b/media/codecs/amrwb/enc/inc/wb_vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/wb_vad.h
rename to media/codecs/amrwb/enc/inc/wb_vad.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/wb_vad_c.h b/media/codecs/amrwb/enc/inc/wb_vad_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/wb_vad_c.h
rename to media/codecs/amrwb/enc/inc/wb_vad_c.h
diff --git a/media/libstagefright/codecs/amrwbenc/patent_disclaimer.txt b/media/codecs/amrwb/enc/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/patent_disclaimer.txt
rename to media/codecs/amrwb/enc/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Deemph_32_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Deemph_32_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Dot_p_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Dot_p_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Norm_Corr_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Norm_Corr_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Syn_filt_32_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Syn_filt_32_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/convolve_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/convolve_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/cor_h_vec_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/cor_h_vec_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/residu_asm_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/residu_asm_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/scale_sig_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/scale_sig_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/syn_filt_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/syn_filt_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Deemph_32_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Deemph_32_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Dot_p_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Dot_p_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Norm_Corr_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Norm_Corr_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Syn_filt_32_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Syn_filt_32_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/convolve_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/convolve_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/cor_h_vec_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/cor_h_vec_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/residu_asm_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/residu_asm_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/scale_sig_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/scale_sig_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/syn_filt_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/syn_filt_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/codecs/amrwb/enc/src/autocorr.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/autocorr.c
rename to media/codecs/amrwb/enc/src/autocorr.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/az_isp.c b/media/codecs/amrwb/enc/src/az_isp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/az_isp.c
rename to media/codecs/amrwb/enc/src/az_isp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/bits.c b/media/codecs/amrwb/enc/src/bits.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/bits.c
rename to media/codecs/amrwb/enc/src/bits.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/c2t64fx.c b/media/codecs/amrwb/enc/src/c2t64fx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/c2t64fx.c
rename to media/codecs/amrwb/enc/src/c2t64fx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/codecs/amrwb/enc/src/c4t64fx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
rename to media/codecs/amrwb/enc/src/c4t64fx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/codecs/amrwb/enc/src/convolve.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/convolve.c
rename to media/codecs/amrwb/enc/src/convolve.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/cor_h_x.c b/media/codecs/amrwb/enc/src/cor_h_x.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/cor_h_x.c
rename to media/codecs/amrwb/enc/src/cor_h_x.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/decim54.c b/media/codecs/amrwb/enc/src/decim54.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/decim54.c
rename to media/codecs/amrwb/enc/src/decim54.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/deemph.c b/media/codecs/amrwb/enc/src/deemph.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/deemph.c
rename to media/codecs/amrwb/enc/src/deemph.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/dtx.c b/media/codecs/amrwb/enc/src/dtx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/dtx.c
rename to media/codecs/amrwb/enc/src/dtx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/g_pitch.c b/media/codecs/amrwb/enc/src/g_pitch.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/g_pitch.c
rename to media/codecs/amrwb/enc/src/g_pitch.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/gpclip.c b/media/codecs/amrwb/enc/src/gpclip.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/gpclip.c
rename to media/codecs/amrwb/enc/src/gpclip.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/homing.c b/media/codecs/amrwb/enc/src/homing.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/homing.c
rename to media/codecs/amrwb/enc/src/homing.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp400.c b/media/codecs/amrwb/enc/src/hp400.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp400.c
rename to media/codecs/amrwb/enc/src/hp400.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp50.c b/media/codecs/amrwb/enc/src/hp50.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp50.c
rename to media/codecs/amrwb/enc/src/hp50.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp6k.c b/media/codecs/amrwb/enc/src/hp6k.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp6k.c
rename to media/codecs/amrwb/enc/src/hp6k.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp_wsp.c b/media/codecs/amrwb/enc/src/hp_wsp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp_wsp.c
rename to media/codecs/amrwb/enc/src/hp_wsp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/int_lpc.c b/media/codecs/amrwb/enc/src/int_lpc.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/int_lpc.c
rename to media/codecs/amrwb/enc/src/int_lpc.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/isp_az.c b/media/codecs/amrwb/enc/src/isp_az.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/isp_az.c
rename to media/codecs/amrwb/enc/src/isp_az.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/isp_isf.c b/media/codecs/amrwb/enc/src/isp_isf.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/isp_isf.c
rename to media/codecs/amrwb/enc/src/isp_isf.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/lag_wind.c b/media/codecs/amrwb/enc/src/lag_wind.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/lag_wind.c
rename to media/codecs/amrwb/enc/src/lag_wind.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/levinson.c b/media/codecs/amrwb/enc/src/levinson.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/levinson.c
rename to media/codecs/amrwb/enc/src/levinson.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/log2.c b/media/codecs/amrwb/enc/src/log2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/log2.c
rename to media/codecs/amrwb/enc/src/log2.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/lp_dec2.c b/media/codecs/amrwb/enc/src/lp_dec2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/lp_dec2.c
rename to media/codecs/amrwb/enc/src/lp_dec2.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/math_op.c b/media/codecs/amrwb/enc/src/math_op.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/math_op.c
rename to media/codecs/amrwb/enc/src/math_op.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/mem_align.c b/media/codecs/amrwb/enc/src/mem_align.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/mem_align.c
rename to media/codecs/amrwb/enc/src/mem_align.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/oper_32b.c b/media/codecs/amrwb/enc/src/oper_32b.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/oper_32b.c
rename to media/codecs/amrwb/enc/src/oper_32b.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/p_med_ol.c b/media/codecs/amrwb/enc/src/p_med_ol.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/p_med_ol.c
rename to media/codecs/amrwb/enc/src/p_med_ol.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pit_shrp.c b/media/codecs/amrwb/enc/src/pit_shrp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pit_shrp.c
rename to media/codecs/amrwb/enc/src/pit_shrp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/codecs/amrwb/enc/src/pitch_f4.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
rename to media/codecs/amrwb/enc/src/pitch_f4.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pred_lt4.c b/media/codecs/amrwb/enc/src/pred_lt4.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pred_lt4.c
rename to media/codecs/amrwb/enc/src/pred_lt4.c
diff --git a/media/codecs/amrwb/enc/src/preemph.c b/media/codecs/amrwb/enc/src/preemph.c
new file mode 100644
index 0000000..a43841a
--- /dev/null
+++ b/media/codecs/amrwb/enc/src/preemph.c
@@ -0,0 +1,101 @@
+/*
+ ** Copyright 2003-2010, VisualOn, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ ** http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License.
+ */
+
+/***********************************************************************
+* File: preemph.c *
+* *
+* Description: Preemphasis: filtering through 1 - g z^-1 *
+* Preemph2 --> signal is multiplied by 2 *
+* *
+************************************************************************/
+
+#include "typedef.h"
+#include "basic_op.h"
+#include <stdint.h>
+
+/*
+ * Preemphasis filter: y[n] = x[n] - mu * x[n-1], computed in place on x[]
+ * from the last sample backwards so each x[i-1] read sees the original
+ * value. *mem carries x[-1] in and is updated to the last input sample on
+ * the way out, so consecutive calls filter a continuous signal.
+ *
+ * NOTE(review): unlike Preemph2 below, the `(x[i-1] * mu) << 1` products
+ * here are not clamped — presumably the callers' mu values keep this in
+ * range; confirm before reusing with other coefficients.
+ */
+void Preemph(
+        Word16 x[],   /* (i/o) : input signal overwritten by the output */
+        Word16 mu,    /* (i) Q15 : preemphasis coefficient */
+        Word16 lg,    /* (i)     : length of filtering */
+        Word16 * mem  /* (i/o)   : memory (x[-1]) */
+        )
+{
+    Word16 temp;
+    Word32 i, L_tmp;
+
+    temp = x[lg - 1];                   /* save last input sample for *mem */
+
+    for (i = lg - 1; i > 0; i--)
+    {
+        L_tmp = L_deposit_h(x[i]);      /* x[i] in Q16 of a 32-bit accumulator */
+        L_tmp -= (x[i - 1] * mu)<<1;    /* subtract mu * x[i-1], scaled to match */
+        x[i] = (L_tmp + 0x8000)>>16;    /* round back to Word16 */
+    }
+
+    /* first sample uses the carried-over x[-1] from *mem */
+    L_tmp = L_deposit_h(x[0]);
+    L_tmp -= ((*mem) * mu)<<1;
+    x[0] = (L_tmp + 0x8000)>>16;
+
+    *mem = temp;
+
+    return;
+}
+
+
+/*
+ * Preemphasis filter with output multiplied by 2:
+ * y[n] = 2 * (x[n] - mu * x[n-1]), in place, processed from the last sample
+ * backwards so each x[i-1] read sees the original value. *mem carries x[-1]
+ * in and is updated to the last input sample on the way out.
+ *
+ * Unlike Preemph, the intermediate value is saturated twice: once before
+ * the <<1 doubling (clamped to INT32_MAX/2 so the shift cannot overflow)
+ * and once before the +0x8000 rounding add, keeping the arithmetic within
+ * Word32 range.
+ */
+void Preemph2(
+        Word16 x[],   /* (i/o) : input signal overwritten by the output */
+        Word16 mu,    /* (i) Q15 : preemphasis coefficient */
+        Word16 lg,    /* (i)     : length of filtering */
+        Word16 * mem  /* (i/o)   : memory (x[-1]) */
+        )
+{
+    Word16 temp;
+    Word32 i, L_tmp;
+
+    temp = x[lg - 1];                   /* save last input sample for *mem */
+
+    for (i = (Word16) (lg - 1); i > 0; i--)
+    {
+        L_tmp = L_deposit_h(x[i]);      /* x[i] in Q16 of a 32-bit accumulator */
+        L_tmp -= (x[i - 1] * mu)<<1; // only called with mu == 22282, so this won't overflow
+        if (L_tmp > INT32_MAX / 2) {    /* clamp so the doubling below can't overflow */
+            L_tmp = INT32_MAX / 2;
+        }
+        L_tmp = (L_tmp << 1);           /* the "multiplied by 2" part */
+        if (L_tmp > INT32_MAX - 0x8000) { /* clamp so rounding add can't overflow */
+            L_tmp = INT32_MAX - 0x8000;
+        }
+        x[i] = (L_tmp + 0x8000)>>16;    /* round back to Word16 */
+    }
+
+    /* first sample uses the carried-over x[-1] from *mem */
+    L_tmp = L_deposit_h(x[0]);
+    L_tmp -= ((*mem) * mu)<<1;
+    if (L_tmp > INT32_MAX / 2) {
+        L_tmp = INT32_MAX / 2;
+    }
+    L_tmp = (L_tmp << 1);
+    if (L_tmp > INT32_MAX - 0x8000) {
+        L_tmp = INT32_MAX - 0x8000;
+    }
+    x[0] = (L_tmp + 0x8000)>>16;
+
+    *mem = temp;
+
+    return;
+}
+
+
+
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_gain2.c b/media/codecs/amrwb/enc/src/q_gain2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/q_gain2.c
rename to media/codecs/amrwb/enc/src/q_gain2.c
diff --git a/media/codecs/amrwb/enc/src/q_pulse.c b/media/codecs/amrwb/enc/src/q_pulse.c
new file mode 100644
index 0000000..657b6fe
--- /dev/null
+++ b/media/codecs/amrwb/enc/src/q_pulse.c
@@ -0,0 +1,401 @@
+/*
+ ** Copyright 2003-2010, VisualOn, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ ** http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License.
+ */
+
+/***********************************************************************
+* File: q_pulse.c *
+* *
+* Description: Coding and decoding of algebraic codebook *
+* *
+************************************************************************/
+
+#include <stdio.h>
+#include "typedef.h"
+#include "basic_op.h"
+#include "q_pulse.h"
+
+#define NB_POS 16 /* pos in track, mask for sign bit */
+#define UNUSED_VAR __attribute__((unused))
+
+/* Encode one pulse in N+1 bits: the low N bits are the in-track
+ * position, and bit N is the sign taken from the NB_POS bit of 'pos'. */
+Word32 quant_1p_N1(                        /* (o) return N+1 bits             */
+        Word16 pos,                        /* (i) position of the pulse       */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 mask;
+    Word32 index;
+
+    mask = (1 << N) - 1;                  /* mask = ((1<<N)-1); */
+    /*-------------------------------------------------------*
+     * Quantization of 1 pulse with N+1 bits:                *
+     *-------------------------------------------------------*/
+    index = L_deposit_l((Word16) (pos & mask));
+    if ((pos & NB_POS) != 0)              /* sign bit set -> set bit N */
+    {
+        index = vo_L_add(index, L_deposit_l(1 << N));    /* index += 1 << N; */
+    }
+    return (index);
+}
+
+
+/* Encode two pulses on the same track in 2*N+1 bits: two N-bit
+ * positions plus one extra bit.  The ordering of the two positions in
+ * the index carries the remaining sign information. */
+Word32 quant_2p_2N1(                       /* (o) return (2*N)+1 bits         */
+        Word16 pos1,                       /* (i) position of the pulse 1     */
+        Word16 pos2,                       /* (i) position of the pulse 2     */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 mask, tmp;
+    Word32 index;
+    mask = (1 << N) - 1;                  /* mask = ((1<<N)-1); */
+    /*-------------------------------------------------------*
+     * Quantization of 2 pulses with 2*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos2 ^ pos1) & NB_POS) == 0)
+    {
+        /* sign of 1st pulse == sign of 2nd pulse */
+        if(pos1 <= pos2)                  /* ((pos1 - pos2) <= 0) */
+        {
+            /* index = ((pos1 & mask) << N) + (pos2 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
+        } else
+        {
+            /* ((pos2 & mask) << N) + (pos1 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
+        }
+        if ((pos1 & NB_POS) != 0)
+        {
+            tmp = (N << 1);
+            index = vo_L_add(index, (1L << tmp));    /* index += 1 << (2*N); */
+        }
+    } else
+    {
+        /* sign of 1st pulse != sign of 2nd pulse */
+        if (vo_sub((Word16) (pos1 & mask), (Word16) (pos2 & mask)) <= 0)
+        {
+            /* index = ((pos2 & mask) << N) + (pos1 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
+            if ((pos2 & NB_POS) != 0)
+            {
+                tmp = (N << 1);           /* index += 1 << (2*N); */
+                index = vo_L_add(index, (1L << tmp));
+            }
+        } else
+        {
+            /* index = ((pos1 & mask) << N) + (pos2 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
+            if ((pos1 & NB_POS) != 0)
+            {
+                tmp = (N << 1);
+                /* NOTE(review): '1 << tmp' (int) here vs '1L << tmp' in the
+                 * sibling branches above -- equivalent for the small N used
+                 * in this codec, but inconsistent. */
+                index = vo_L_add(index, (1 << tmp));    /* index += 1 << (2*N); */
+            }
+        }
+    }
+    return (index);
+}
+
+
+/* Encode three pulses in 3*N+1 bits: the two pulses that share the
+ * nb_pos half are paired via quant_2p_2N1 on N-1 bits, and the third
+ * pulse is encoded with quant_1p_N1 in the upper bits. */
+Word32 quant_3p_3N1(                       /* (o) return (3*N)+1 bits         */
+        Word16 pos1,                       /* (i) position of the pulse 1     */
+        Word16 pos2,                       /* (i) position of the pulse 2     */
+        Word16 pos3,                       /* (i) position of the pulse 3     */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 nb_pos;
+    Word32 index;
+
+    nb_pos =(1 <<(N - 1));                /* nb_pos = (1<<(N-1)); */
+    /*-------------------------------------------------------*
+     * Quantization of 3 pulses with 3*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos1 ^ pos2) & nb_pos) == 0)    /* pulses 1 and 2 in same half */
+    {
+        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_1p_N1(pos3, N) << (2*N); */
+        index = vo_L_add(index, (quant_1p_N1(pos3, N)<<(N << 1)));
+
+    } else if (((pos1 ^ pos3) & nb_pos) == 0)    /* pulses 1 and 3 in same half */
+    {
+        index = quant_2p_2N1(pos1, pos3, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos3, (N-1)); */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (quant_1p_N1(pos2, N) << (N << 1)));
+        /* index += quant_1p_N1(pos2, N) <<
+         * (2*N); */
+    } else                                /* pulses 2 and 3 in same half */
+    {
+        /* NOTE(review): plain (N - 1) here vs sub(N, 1) in the branches
+         * above -- same result, inconsistent basic-op usage. */
+        index = quant_2p_2N1(pos2, pos3, (N - 1));      /* index = quant_2p_2N1(pos2, pos3, (N-1)); */
+        /* index += (pos2 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
+        /* index += quant_1p_N1(pos1, N) << (2*N); */
+        index = vo_L_add(index, (quant_1p_N1(pos1, N) << (N << 1)));
+    }
+    return (index);
+}
+
+
+/* Encode four pulses in 4*N+1 bits: pair the two pulses sharing the
+ * nb_pos half (on N-1 bits), then encode the remaining pair in the
+ * upper bits with a full-width quant_2p_2N1. */
+Word32 quant_4p_4N1(                       /* (o) return (4*N)+1 bits         */
+        Word16 pos1,                       /* (i) position of the pulse 1     */
+        Word16 pos2,                       /* (i) position of the pulse 2     */
+        Word16 pos3,                       /* (i) position of the pulse 3     */
+        Word16 pos4,                       /* (i) position of the pulse 4     */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 nb_pos;
+    Word32 index;
+
+    nb_pos = 1 << (N - 1);                /* nb_pos = (1<<(N-1)); */
+    /*-------------------------------------------------------*
+     * Quantization of 4 pulses with 4*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos1 ^ pos2) & nb_pos) == 0)    /* pulses 1 and 2 in same half */
+    {
+        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos3, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos3, pos4, N) << (N << 1)));
+    } else if (((pos1 ^ pos3) & nb_pos) == 0)    /* pulses 1 and 3 in same half */
+    {
+        index = quant_2p_2N1(pos1, pos3, (N - 1));
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos2, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos2, pos4, N) << (N << 1)));
+    } else                                /* pulses 2 and 3 in same half */
+    {
+        index = quant_2p_2N1(pos2, pos3, (N - 1));
+        /* index += (pos2 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos1, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos1, pos4, N) << (N << 1)));
+    }
+    return (index);
+}
+
+
+/* Encode four pulses in 4*N bits: split the pulses by the nb_pos bit
+ * into sets A (bit clear) and B (bit set), dispatch on |A|, and store
+ * the set-A count in the top 2 bits of the index. */
+Word32 quant_4p_4N(                        /* (o) return 4*N bits             */
+        Word16 pos[],                      /* (i) position of the pulse 1..4  */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    /* NOTE(review): 'mask' is computed but never used -- hence the
+     * UNUSED_VAR annotation to silence the compiler. */
+    Word16 nb_pos, mask UNUSED_VAR, n_1, tmp;
+    Word16 posA[4], posB[4];
+    Word32 i, j, k, index;
+
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+    mask = vo_sub((1 << N), 1);           /* mask = ((1<<N)-1); */
+
+    /* Partition: posA gets pulses with the nb_pos bit clear, posB the
+     * rest; i and j count each set. */
+    i = 0;
+    j = 0;
+    for (k = 0; k < 4; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            tmp = vo_sub((N << 2), 3);    /* index = 1 << ((4*N)-3); */
+            index = (1L << tmp);
+            /* index += quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1); */
+            index = vo_L_add(index, quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1));
+            break;
+        case 1:
+            /* index = quant_1p_N1(posA[0], n_1) << ((3*n_1)+1); */
+            tmp = add1((Word16)((vo_L_mult(3, n_1) >> 1)), 1);
+            index = L_shl(quant_1p_N1(posA[0], n_1), tmp);
+            /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
+            index = vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
+            break;
+        case 2:
+            tmp = ((n_1 << 1) + 1);       /* index = quant_2p_2N1(posA[0], posA[1], n_1) << ((2*n_1)+1); */
+            index = L_shl(quant_2p_2N1(posA[0], posA[1], n_1), tmp);
+            /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));
+            break;
+        case 3:
+            /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << N; */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), N);
+            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));    /* index += quant_1p_N1(posB[0], n_1); */
+            break;
+        case 4:
+            index = quant_4p_4N1(posA[0], posA[1], posA[2], posA[3], n_1);
+            break;
+        default:
+            /* Unreachable for valid input: i is always 0..4. */
+            index = 0;
+            fprintf(stderr, "Error in function quant_4p_4N\n");
+    }
+    /* Store the set-A count (mod 4) in the top 2 bits. */
+    tmp = ((N << 2) - 2);                 /* index += (i & 3) << ((4*N)-2); */
+    index = vo_L_add(index, L_shl((L_deposit_l(i) & (3L)), tmp));
+
+    return (index);
+}
+
+
+
+/* Encode five pulses in 5*N bits: split by the nb_pos bit into sets A
+ * and B and dispatch on |A|.  Cases 0..2 set the top marker bit
+ * (1 << (5*N-1)) and encode a 3+2 split of mostly-B pulses; cases 3..5
+ * omit the marker and encode 3 A-pulses plus a pair. */
+Word32 quant_5p_5N(                        /* (o) return 5*N bits             */
+        Word16 pos[],                      /* (i) position of the pulse 1..5  */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 nb_pos, n_1, tmp;
+    Word16 posA[5], posB[5];
+    Word32 i, j, k, index, tmp2;
+
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+
+    /* Partition pulses into A (nb_pos bit clear) and B (bit set). */
+    i = 0;
+    j = 0;
+    for (k = 0; k < 5; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);    /* ((5*N)-1)) */
+            index = L_shl(1L, tmp);       /* index = 1 << ((5*N)-1); */
+            tmp = add1((N << 1), 1);      /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);*/
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posB[3], posB[4], N));    /* index += quant_2p_2N1(posB[3], posB[4], N); */
+            break;
+        case 1:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);    /* index = 1 << ((5*N)-1); */
+            index = L_shl(1L, tmp);
+            tmp = add1((N << 1), 1);      /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) <<((2*N)+1); */
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posB[3], posA[0], N));    /* index += quant_2p_2N1(posB[3], posA[0], N); */
+            break;
+        case 2:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);    /* ((5*N)-1)) */
+            index = L_shl(1L, tmp);       /* index = 1 << ((5*N)-1); */
+            tmp = add1((N << 1), 1);      /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1); */
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], N));    /* index += quant_2p_2N1(posA[0], posA[1], N); */
+            break;
+        case 3:
+            tmp = add1((N << 1), 1);      /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], N));    /* index += quant_2p_2N1(posB[0], posB[1], N); */
+            break;
+        case 4:
+            tmp = add1((N << 1), 1);      /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posA[3], posB[0], N));    /* index += quant_2p_2N1(posA[3], posB[0], N); */
+            break;
+        case 5:
+            tmp = add1((N << 1), 1);      /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posA[3], posA[4], N));    /* index += quant_2p_2N1(posA[3], posA[4], N); */
+            break;
+        default:
+            /* Unreachable for valid input: i is always 0..5. */
+            index = 0;
+            fprintf(stderr, "Error in function quant_5p_5N\n");
+    }
+
+    return (index);
+}
+
+
+/* Encode six pulses in (6*N)-2 bits: split by the nb_pos bit into sets
+ * A and B, dispatch on |A|, and store a 2-bit class in the top bits.
+ * In cases 4..6 'i' is re-mapped (to 2, 1, 0) before that 2-bit field
+ * is written -- presumably so mirrored partitions share a class code;
+ * matches the reference encoder layout (confirm against spec). */
+Word32 quant_6p_6N_2(                      /* (o) return (6*N)-2 bits         */
+        Word16 pos[],                      /* (i) position of the pulse 1..6  */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 nb_pos, n_1;
+    Word16 posA[6], posB[6];
+    Word32 i, j, k, index;
+
+    /* !!  N and n_1 are constants -> it doesn't need to be operated by Basic Operators */
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+
+    /* Partition pulses into A (nb_pos bit clear) and B (bit set). */
+    i = 0;
+    j = 0;
+    for (k = 0; k < 6; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            /* NOTE(review): '1 <<' (int) here vs '1L <<' in cases 1 and 2
+             * below -- equivalent for the N values used, but inconsistent. */
+            index = (1 << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
+            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N));    /* index += quant_5p_5N(posB, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posB[5], n_1));        /* index += quant_1p_N1(posB[5], n_1); */
+            break;
+        case 1:
+            index = (1L << (Word16) (6 * N - 5));       /* index = 1 << ((6*N)-5); */
+            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N));    /* index += quant_5p_5N(posB, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posA[0], n_1));        /* index += quant_1p_N1(posA[0], n_1); */
+            break;
+        case 2:
+            index = (1L << (Word16) (6 * N - 5));       /* index = 1 << ((6*N)-5); */
+            /* index += quant_4p_4N(posB, n_1) << ((2*n_1)+1); */
+            index = vo_L_add(index, (quant_4p_4N(posB, n_1) << (Word16) (2 * n_1 + 1)));
+            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], n_1));    /* index += quant_2p_2N1(posA[0], posA[1], n_1); */
+            break;
+        case 3:
+            index = (quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << (Word16) (3 * n_1 + 1));
+            /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((3*n_1)+1); */
+            index =vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
+            /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
+            break;
+        case 4:
+            i = 2;    /* re-map class code before the final 2-bit field */
+            index = (quant_4p_4N(posA, n_1) << (Word16) (2 * n_1 + 1));    /* index = quant_4p_4N(posA, n_1) << ((2*n_1)+1); */
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));  /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
+            break;
+        case 5:
+            i = 1;    /* re-map class code */
+            index = (quant_5p_5N(posA, n_1) << N);      /* index = quant_5p_5N(posA, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));    /* index += quant_1p_N1(posB[0], n_1); */
+            break;
+        case 6:
+            i = 0;    /* re-map class code */
+            index = (quant_5p_5N(posA, n_1) << N);      /* index = quant_5p_5N(posA, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posA[5], n_1));    /* index += quant_1p_N1(posA[5], n_1); */
+            break;
+        default:
+            /* Unreachable for valid input: i is always 0..6. */
+            index = 0;
+            fprintf(stderr, "Error in function quant_6p_6N_2\n");
+    }
+    /* Store the 2-bit class in the top bits of the index. */
+    index = vo_L_add(index, ((L_deposit_l(i) & 3L) << (Word16) (6 * N - 4)));    /* index += (i & 3) << ((6*N)-4); */
+
+    return (index);
+}
+
+
diff --git a/media/libstagefright/codecs/amrwbenc/src/qisf_ns.c b/media/codecs/amrwb/enc/src/qisf_ns.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/qisf_ns.c
rename to media/codecs/amrwb/enc/src/qisf_ns.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/qpisf_2s.c b/media/codecs/amrwb/enc/src/qpisf_2s.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/qpisf_2s.c
rename to media/codecs/amrwb/enc/src/qpisf_2s.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/random.c b/media/codecs/amrwb/enc/src/random.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/random.c
rename to media/codecs/amrwb/enc/src/random.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/residu.c b/media/codecs/amrwb/enc/src/residu.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/residu.c
rename to media/codecs/amrwb/enc/src/residu.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/scale.c b/media/codecs/amrwb/enc/src/scale.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/scale.c
rename to media/codecs/amrwb/enc/src/scale.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/stream.c b/media/codecs/amrwb/enc/src/stream.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/stream.c
rename to media/codecs/amrwb/enc/src/stream.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/codecs/amrwb/enc/src/syn_filt.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/syn_filt.c
rename to media/codecs/amrwb/enc/src/syn_filt.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/updt_tar.c b/media/codecs/amrwb/enc/src/updt_tar.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/updt_tar.c
rename to media/codecs/amrwb/enc/src/updt_tar.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/util.c b/media/codecs/amrwb/enc/src/util.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/util.c
rename to media/codecs/amrwb/enc/src/util.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/codecs/amrwb/enc/src/voAMRWBEnc.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
rename to media/codecs/amrwb/enc/src/voAMRWBEnc.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/voicefac.c b/media/codecs/amrwb/enc/src/voicefac.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/voicefac.c
rename to media/codecs/amrwb/enc/src/voicefac.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/wb_vad.c b/media/codecs/amrwb/enc/src/wb_vad.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/wb_vad.c
rename to media/codecs/amrwb/enc/src/wb_vad.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/weight_a.c b/media/codecs/amrwb/enc/src/weight_a.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/weight_a.c
rename to media/codecs/amrwb/enc/src/weight_a.c
diff --git a/media/libstagefright/codecs/amrwbenc/test/AmrwbEncTestEnvironment.h b/media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AmrwbEncTestEnvironment.h
rename to media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.cpp b/media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.cpp
rename to media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
diff --git a/media/codecs/amrwb/enc/test/Android.bp b/media/codecs/amrwb/enc/test/Android.bp
new file mode 100644
index 0000000..0872570
--- /dev/null
+++ b/media/codecs/amrwb/enc/test/Android.bp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "AmrwbEncoderTest",
+ test_suites: ["device-tests"],
+ gtest: true,
+
+ srcs: [
+ "AmrwbEncoderTest.cpp",
+ ],
+
+ static_libs: [
+ "libstagefright_enc_common",
+ "libstagefright_amrwbenc",
+ "libaudioutils",
+ "libsndfile",
+ ],
+
+ shared_libs: [
+ "liblog",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libstagefright/codecs/amrwbenc/test/AndroidTest.xml b/media/codecs/amrwb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AndroidTest.xml
rename to media/codecs/amrwb/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrwbenc/test/README.md b/media/codecs/amrwb/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/README.md
rename to media/codecs/amrwb/enc/test/README.md
diff --git a/media/codecs/g711/decoder/Android.bp b/media/codecs/g711/decoder/Android.bp
index 377833f..51f4c38 100644
--- a/media/codecs/g711/decoder/Android.bp
+++ b/media/codecs/g711/decoder/Android.bp
@@ -35,7 +35,14 @@
],
cfi: true,
},
- apex_available: ["com.android.media.swcodec"],
+
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ "test_com.android.media.swcodec",
+ ],
+
+ min_sdk_version: "29",
target: {
darwin: {
diff --git a/media/codecs/g711/fuzzer/Android.bp b/media/codecs/g711/fuzzer/Android.bp
index 1aee7f5..ff5efa9 100644
--- a/media/codecs/g711/fuzzer/Android.bp
+++ b/media/codecs/g711/fuzzer/Android.bp
@@ -30,6 +30,12 @@
cflags: [
"-DALAW",
],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
}
cc_fuzz {
@@ -41,4 +47,10 @@
static_libs: [
"codecs_g711dec",
],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
}
diff --git a/media/codecs/m4v_h263/TEST_MAPPING b/media/codecs/m4v_h263/TEST_MAPPING
new file mode 100644
index 0000000..ba3ff1c
--- /dev/null
+++ b/media/codecs/m4v_h263/TEST_MAPPING
@@ -0,0 +1,18 @@
+// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
+{
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+
+ // the decoder reports something bad about an unexpected newline in the *config file
+ // and the config file looks like the AndroidTest.xml file that we put in there.
+ // I don't get this from the Encoder -- and I don't see any substantive difference
+ // between decode and encode AndroidTest.xml files -- except that encode does NOT
+ // finish with a newline.
+ // strange.
+ { "name": "Mpeg4H263DecoderTest"},
+ { "name": "Mpeg4H263EncoderTest"}
+
+ ]
+}
diff --git a/media/codecs/m4v_h263/dec/Android.bp b/media/codecs/m4v_h263/dec/Android.bp
new file mode 100644
index 0000000..b40745a
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/Android.bp
@@ -0,0 +1,58 @@
+cc_library_static {
+ name: "libstagefright_m4vh263dec",
+ vendor_available: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ ],
+ min_sdk_version: "29",
+ host_supported: true,
+ shared_libs: ["liblog"],
+
+ srcs: [
+ "src/bitstream.cpp",
+ "src/block_idct.cpp",
+ "src/cal_dc_scaler.cpp",
+ "src/combined_decode.cpp",
+ "src/conceal.cpp",
+ "src/datapart_decode.cpp",
+ "src/dcac_prediction.cpp",
+ "src/dec_pred_intra_dc.cpp",
+ "src/get_pred_adv_b_add.cpp",
+ "src/get_pred_outside.cpp",
+ "src/idct.cpp",
+ "src/idct_vca.cpp",
+ "src/mb_motion_comp.cpp",
+ "src/mb_utils.cpp",
+ "src/packet_util.cpp",
+ "src/post_filter.cpp",
+ "src/pvdec_api.cpp",
+ "src/scaling_tab.cpp",
+ "src/vlc_decode.cpp",
+ "src/vlc_dequant.cpp",
+ "src/vlc_tab.cpp",
+ "src/vop.cpp",
+ "src/zigzag_tab.cpp",
+ ],
+
+ local_include_dirs: ["src"],
+ export_include_dirs: ["include"],
+
+ cflags: [
+ "-Werror",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2 b/media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
rename to media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/m4v_h263/dec/NOTICE b/media/codecs/m4v_h263/dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/NOTICE
rename to media/codecs/m4v_h263/dec/NOTICE
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h b/media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
rename to media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h b/media/codecs/m4v_h263/dec/include/mp4dec_api.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
rename to media/codecs/m4v_h263/dec/include/mp4dec_api.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/visual_header.h b/media/codecs/m4v_h263/dec/include/visual_header.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/include/visual_header.h
rename to media/codecs/m4v_h263/dec/include/visual_header.h
diff --git a/media/codecs/m4v_h263/dec/src/bitstream.cpp b/media/codecs/m4v_h263/dec/src/bitstream.cpp
new file mode 100644
index 0000000..5b19db4
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/bitstream.cpp
@@ -0,0 +1,1014 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "bitstream.h"
+#include "mp4dec_lib.h"
+
+
+#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
+/* to mask the n least significant bits of an integer */
+static const uint32 msk[33] =
+{
+ 0x00000000, 0x00000001, 0x00000003, 0x00000007,
+ 0x0000000f, 0x0000001f, 0x0000003f, 0x0000007f,
+ 0x000000ff, 0x000001ff, 0x000003ff, 0x000007ff,
+ 0x00000fff, 0x00001fff, 0x00003fff, 0x00007fff,
+ 0x0000ffff, 0x0001ffff, 0x0003ffff, 0x0007ffff,
+ 0x000fffff, 0x001fffff, 0x003fffff, 0x007fffff,
+ 0x00ffffff, 0x01ffffff, 0x03ffffff, 0x07ffffff,
+ 0x0fffffff, 0x1fffffff, 0x3fffffff, 0x7fffffff,
+ 0xffffffff
+};
+
+
+/* ======================================================================== */
+/* Function : BitstreamFillCache() */
+/* Date : 08/29/2000 */
+/* Purpose : Read more bitstream data into buffer & the 24-byte cache. */
+/* This function is different from BitstreamFillBuffer in */
+/* that the buffer is the frame-based buffer provided by */
+/* the application. */
+/* In/out : */
+/* Return : PV_SUCCESS if succeeded, PV_FAIL if failed. */
+/* Modified : 4/16/01 : removed return of PV_END_OF_BUFFER */
+/* ======================================================================== */
+PV_STATUS BitstreamFillCache(BitstreamDecVideo *stream)
+{
+    uint8 *bitstreamBuffer = stream->bitstreamBuffer;
+    uint8 *v;
+    int num_bits, i;
+
+    /* Fold the bits still pending in next_word into curr_word so that
+     * curr_word is left-aligned; incnt counts the valid top bits. */
+    stream->curr_word |= (stream->next_word >> stream->incnt);    // stream->incnt cannot be 32
+    /* Shift in two steps: a single shift by 32 would be undefined. */
+    stream->next_word <<= (31 - stream->incnt);
+    stream->next_word <<= 1;
+    num_bits = stream->incnt_next + stream->incnt;
+    if (num_bits >= 32)
+    {
+        /* The two-word cache already holds a full 32 bits -- no read. */
+        stream->incnt_next -= (32 - stream->incnt);
+        stream->incnt = 32;
+        return PV_SUCCESS;
+    }
+    /* this check can be removed if there is additional extra 4 bytes at the end of the bitstream */
+    v = bitstreamBuffer + stream->read_point;
+
+    if (stream->read_point > stream->data_end_pos - 4)
+    {
+        /* Fewer than 4 bytes remain: assemble a partial big-endian word. */
+        if (stream->data_end_pos <= stream->read_point)
+        {
+            /* Buffer exhausted: keep whatever bits are already cached. */
+            stream->incnt = num_bits;
+            stream->incnt_next = 0;
+            return PV_SUCCESS;
+        }
+
+        stream->next_word = 0;
+
+        for (i = 0; i < stream->data_end_pos - stream->read_point; i++)
+        {
+            stream->next_word |= (v[i] << ((3 - i) << 3));    /* byte i -> bits 31-8i..24-8i */
+        }
+
+        stream->read_point = stream->data_end_pos;
+        stream->curr_word |= (stream->next_word >> num_bits);    // this is safe
+
+        stream->next_word <<= (31 - num_bits);
+        stream->next_word <<= 1;
+        num_bits = i << 3;    /* number of bits actually read (bytes * 8) */
+        stream->incnt += stream->incnt_next;
+        stream->incnt_next = num_bits - (32 - stream->incnt);
+        if (stream->incnt_next < 0)
+        {
+            /* Everything read fits into curr_word. */
+            stream->incnt += num_bits;
+            stream->incnt_next = 0;
+        }
+        else
+        {
+            stream->incnt = 32;
+        }
+        return PV_SUCCESS;
+    }
+
+    /* Common path: read the next 4 bytes big-endian into next_word. */
+    stream->next_word = ((uint32)v[0] << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
+    stream->read_point += 4;
+
+    stream->curr_word |= (stream->next_word >> num_bits);    // this is safe
+    stream->next_word <<= (31 - num_bits);
+    stream->next_word <<= 1;
+    stream->incnt_next += stream->incnt;
+    stream->incnt = 32;
+    return PV_SUCCESS;
+}
+
+
+/* ======================================================================== */
+/* Function : BitstreamReset() */
+/* Date : 08/29/2000 */
+/* Purpose : Initialize the bitstream buffer for frame-based decoding. */
+/* In/out : */
+/* Return : */
+/* Modified : */
+/* ======================================================================== */
+void BitstreamReset(BitstreamDecVideo *stream, uint8 *buffer, int32 buffer_size)
+{
+    /* set up frame-based bitstream buffer */
+    /* Zero the entire stream state, then attach the caller-owned frame
+     * buffer (no copy is made; the caller keeps ownership). */
+    oscl_memset(stream, 0, sizeof(BitstreamDecVideo));
+    stream->data_end_pos = buffer_size;
+    stream->bitstreamBuffer = buffer;
+}
+
+
+/* ======================================================================== */
+/* Function : BitstreamOpen() */
+/* Purpose : Initialize the bitstream data structure. */
+/* In/out : */
+/* Return : */
+/* Modified : */
+/* ======================================================================== */
+/* The second (unnamed) int parameter is accepted for interface
+ * compatibility but ignored. */
+int BitstreamOpen(BitstreamDecVideo *stream, int)
+{
+    int buffer_size = 0;
+    /* set up linear bitstream buffer */
+//  stream->currentBytePos = 0;
+    stream->data_end_pos = 0;
+
+    /* Reset the two-word read cache and all bit counters. */
+    stream->incnt = 0;
+    stream->incnt_next = 0;
+    stream->bitcnt = 0;
+    stream->curr_word = stream->next_word = 0;
+    stream->read_point = stream->data_end_pos;
+    /* Always returns 0: no internal buffer is allocated here (the frame
+     * buffer is supplied later via BitstreamReset). */
+    return buffer_size;
+}
+
+
+/* ======================================================================== */
+/* Function : BitstreamClose() */
+/* Purpose : Cleanup the bitstream data structure. */
+/* In/out : */
+/* Return : */
+/* Modified : */
+/* ======================================================================== */
+/* No-op: the stream owns no resources (the frame buffer belongs to the
+ * caller), so there is nothing to release. */
+void BitstreamClose(BitstreamDecVideo *)
+{
+    return;
+}
+
+
+/***********************************************************CommentBegin******
+*
+* -- BitstreamShowBits32HC
+* Shows 32 bits
+***********************************************************CommentEnd********/
+
+PV_STATUS BitstreamShowBits32HC(BitstreamDecVideo *stream, uint32 *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    /* Peek only: refill the cache if needed, but do not consume bits. */
+    if (stream->incnt < 32)
+    {
+        /* frame-based decoding */
+        status = BitstreamFillCache(stream);
+    }
+    *code = stream->curr_word;    /* full 32-bit left-aligned cache word */
+    return status;
+}
+
+/***********************************************************CommentBegin******
+*
+* -- BitstreamShowBits32
+* Shows upto and including 31 bits
+***********************************************************CommentEnd********/
+/* Peek only; nbits must be <= 31 (a shift by 32 - nbits == 32 when
+ * nbits == 0 would be undefined -- use BitstreamShowBits32HC for 32). */
+PV_STATUS BitstreamShowBits32(BitstreamDecVideo *stream, int nbits, uint32 *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    if (stream->incnt < nbits)
+    {
+        /* frame-based decoding */
+        status = BitstreamFillCache(stream);
+    }
+    *code = stream->curr_word >> (32 - nbits);    /* top nbits of the cache */
+    return status;
+}
+
+
+#ifndef PV_BS_INLINE
+/*========================================================================= */
+/* Function: BitstreamShowBits16() */
+/* Date: 12/18/2000 */
+/* Purpose: To see the next "nbits"(nbits<=16) bitstream bits */
+/* without advancing the read pointer */
+/* */
+/* =========================================================================*/
+/* Peek the next nbits (nbits <= 16) without consuming them. */
+PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+
+    if (stream->incnt < nbits)
+    {
+        /* frame-based decoding */
+        status = BitstreamFillCache(stream);
+    }
+
+    *code = stream->curr_word >> (32 - nbits);    /* top nbits of the cache */
+    return status;
+}
+
+
+/*========================================================================= */
+/* Function: BitstreamShow15Bits() */
+/* Date: 01/23/2001 */
+/* Purpose: To see the next 15 bitstream bits */
+/* without advancing the read pointer */
+/* */
+/* =========================================================================*/
+/* Peek the next 15 bits without consuming them (fixed-width fast path). */
+PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    if (stream->incnt < 15)
+    {
+        /* frame-based decoding */
+        status = BitstreamFillCache(stream);
+    }
+    *code = stream->curr_word >> 17;    /* 32 - 15 = 17: keep the top 15 bits */
+    return status;
+}
+/*========================================================================= */
+/* Function: BitstreamShow13Bits */
+/* Date: 050923 */
+/* Purpose: Facilitate and speed up showing 13 bits from bitstream */
+/* used in VlcTCOEFF decoding */
+/* Modified: */
+/* =========================================================================*/
+PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    if (stream->incnt < 13)
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        status = BitstreamFillCache(stream);
+    }
+    *code = stream->curr_word >> 19;  /* 32 - 13: peek top 13 bits */
+    return status;
+}
+
+/* Read nbits (nbits <= 16) from the bitstream and advance past them.
+ * Same contract as BitstreamReadBits16(); this variant exists for the
+ * non-PV_BS_INLINE build. The BitstreamFillCache() return value is
+ * deliberately ignored, matching BitstreamReadBits16(); the previous
+ * set-but-never-read `status` local triggered compiler warnings. */
+uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits)
+{
+    uint code;
+
+    if (stream->incnt < nbits)
+    {
+        /* frame-based decoding */
+        BitstreamFillCache(stream);
+    }
+    code = stream->curr_word >> (32 - nbits);   /* top nbits of cache */
+    PV_BitstreamFlushBits(stream, nbits);       /* consume them */
+    return code;
+}
+
+
+/* Read a single bit from the bitstream and advance past it.
+ * Same contract as BitstreamRead1Bits(); this variant exists for the
+ * non-PV_BS_INLINE build. The BitstreamFillCache() return value is
+ * deliberately ignored, matching BitstreamRead1Bits(); the previous
+ * set-but-never-read `status` local triggered compiler warnings. */
+uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
+{
+    uint code;
+
+    if (stream->incnt < 1)
+    {
+        /* frame-based decoding */
+        BitstreamFillCache(stream);
+    }
+    code = stream->curr_word >> 31;     /* top bit of the cache */
+    PV_BitstreamFlushBits(stream, 1);   /* consume it */
+
+    return code;
+}
+
+#endif
+
+/* ======================================================================== */
+/* Function : BitstreamReadBits16() */
+/* Purpose : Read bits (nbits <=16) from bitstream buffer. */
+/* In/out : */
+/* Return : the nbits read, right-aligned; the read position advances. */
+/* Note : BitstreamFillCache() status is intentionally ignored; a */
+/* short cache yields whatever bits remain (best effort). */
+/* ======================================================================== */
+uint BitstreamReadBits16(BitstreamDecVideo *stream, int nbits)
+{
+    uint code;
+
+    if (stream->incnt < nbits)
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        BitstreamFillCache(stream);
+    }
+    code = stream->curr_word >> (32 - nbits);  /* top nbits of cache */
+    PV_BitstreamFlushBits(stream, nbits);      /* consume them */
+    return code;
+}
+
+/* ======================================================================== */
+/* Function : BitstreamRead1Bits() */
+/* Date : 10/23/2000 */
+/* Purpose : Facilitate and speed up reading 1 bit from bitstream. */
+/* In/out : */
+/* Return : the bit read (0 or 1); the read position advances by 1. */
+/* ======================================================================== */
+
+uint BitstreamRead1Bits(BitstreamDecVideo *stream)
+{
+    uint code;
+
+    if (stream->incnt < 1)
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        BitstreamFillCache(stream);
+    }
+    code = stream->curr_word >> 31;    /* top bit of the cache */
+    PV_BitstreamFlushBits(stream, 1);  /* consume it */
+
+    return code;
+}
+
+/* ======================================================================== */
+/* Function : PV_BitstreamFlushBitsCheck() */
+/* Purpose : Flush nbits bits from bitstream buffer. Check for cache */
+/* In/out : */
+/* Return : status of the refill, PV_SUCCESS when none was needed. */
+/* Modified : */
+/* Note : Unlike the PV_BitstreamFlushBits macro, this refills the */
+/* cache when it underflows, and clamps bitcnt/incnt so the */
+/* position never runs past the data actually available. */
+/* ======================================================================== */
+PV_STATUS PV_BitstreamFlushBitsCheck(BitstreamDecVideo *stream, int nbits)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    stream->bitcnt += nbits;
+    stream->incnt -= nbits;
+    if (stream->incnt < 0)
+    {
+        /* frame-based decoding: try to refill the drained cache */
+        status = BitstreamFillCache(stream);
+
+        if (stream->incnt < 0)
+        {
+            /* still short: clamp the position to the end of the data */
+            stream->bitcnt += stream->incnt;
+            stream->incnt = 0;
+        }
+    }
+    stream->curr_word <<= nbits;
+    return status;
+}
+
+/* ======================================================================== */
+/* Function : BitstreamReadBits32() */
+/* Purpose : Read bits from bitstream buffer. */
+/* In/out : */
+/* Return : the nbits read, right-aligned; the read position advances. */
+/* Note : NOTE(review): assumes 1 <= nbits <= 32; nbits == 0 would */
+/* shift by 32 (undefined) -- confirm callers. */
+/* ======================================================================== */
+uint32 BitstreamReadBits32(BitstreamDecVideo *stream, int nbits)
+{
+    uint32 code;
+
+    if (stream->incnt < nbits)
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        BitstreamFillCache(stream);
+    }
+    code = stream->curr_word >> (32 - nbits);  /* top nbits of cache */
+    PV_BitstreamFlushBits(stream, nbits);      /* consume them */
+    return code;
+}
+
+/* Read exactly 32 bits ("hard-coded" fast path of BitstreamReadBits32).
+ * Consumes the entire cache word: bitcnt advances by 32 and the cache is
+ * left empty, forcing a refill on the next access. */
+uint32 BitstreamReadBits32HC(BitstreamDecVideo *stream)
+{
+    uint32 code;
+
+    BitstreamShowBits32HC(stream, &code);
+    stream->bitcnt += 32;
+    stream->incnt = 0;       /* cache fully consumed */
+    stream->curr_word = 0;
+    return code;
+}
+
+/* ======================================================================== */
+/* Function : BitstreamCheckEndBuffer() */
+/* Date : 03/30/2001 */
+/* Purpose : Check to see if we are at the end of buffer */
+/* In/out : */
+/* Return : PV_END_OF_VOP only when the read pointer is at/past the */
+/* end of data AND the bit cache is drained, else PV_SUCCESS */
+/* Modified : */
+/* ======================================================================== */
+PV_STATUS BitstreamCheckEndBuffer(BitstreamDecVideo *stream)
+{
+    if (stream->read_point >= stream->data_end_pos && stream->incnt <= 0) return PV_END_OF_VOP;
+    return PV_SUCCESS;
+}
+
+
+/* Peek nbits as if the stream were first advanced to the next byte
+ * boundary, without moving the read position. A full 8 stuffing bits are
+ * assumed when already aligned (n_stuffed is 1..8, never 0) -- contrast
+ * with the NoForceStuffing variant below. */
+PV_STATUS PV_BitstreamShowBitsByteAlign(BitstreamDecVideo *stream, int nbits, uint32 *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    int n_stuffed;
+
+    n_stuffed = 8 - (stream->bitcnt & 0x7); /* 07/05/01 */
+
+    if (stream->incnt < (nbits + n_stuffed))
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        status = BitstreamFillCache(stream);
+    }
+
+    /* skip the stuffing bits, then extract the top nbits */
+    *code = (stream->curr_word << n_stuffed) >> (32 - nbits);
+    return status;
+}
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+/* H.263 Annex I/J/K/T variant of PV_BitstreamShowBitsByteAlign: when the
+ * stream is already byte-aligned no stuffing is skipped (n_stuffed may be
+ * 0), as H.263 does not force a stuffing byte at alignment. */
+PV_STATUS PV_BitstreamShowBitsByteAlignNoForceStuffing(BitstreamDecVideo *stream, int nbits, uint32 *code)
+{
+    PV_STATUS status = PV_SUCCESS;
+
+    int n_stuffed;
+
+    n_stuffed = (8 - (stream->bitcnt & 0x7)) & 7;  /* 0..7, 0 when aligned */
+
+    if (stream->incnt < (nbits + n_stuffed))
+    {
+        /* frame-based decoding: refill the 32-bit cache */
+        status = BitstreamFillCache(stream);
+    }
+
+    /* skip the stuffing bits, then extract the top nbits */
+    *code = (stream->curr_word << n_stuffed) >> (32 - nbits);
+    return status;
+}
+#endif
+
+/* Advance the read position to the next byte boundary, consuming 1..8
+ * stuffing bits (a full stuffing byte is consumed when already aligned).
+ * Clamps bitcnt/incnt if the cache cannot supply the stuffing bits. */
+PV_STATUS PV_BitstreamByteAlign(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = PV_SUCCESS;
+    int n_stuffed;
+
+    n_stuffed = 8 - (stream->bitcnt & 0x7); /* 07/05/01 */
+
+    /* We have to make sure we have enough bits in the cache. 08/15/2000 */
+    if (stream->incnt < n_stuffed)
+    {
+        /* frame-based decoding */
+        status = BitstreamFillCache(stream);
+    }
+
+
+    stream->bitcnt += n_stuffed;
+    stream->incnt -= n_stuffed;
+    stream->curr_word <<= n_stuffed;
+    if (stream->incnt < 0)
+    {
+        /* cache ran dry: clamp the position to the end of the data */
+        stream->bitcnt += stream->incnt;
+        stream->incnt = 0;
+    }
+    return status;
+}
+
+
+/* Advance to the next byte boundary WITHOUT consuming a stuffing byte when
+ * already aligned (n_stuffed may be 0). No cache refill is attempted here;
+ * an underflow is simply clamped. Always returns PV_SUCCESS. */
+PV_STATUS BitstreamByteAlignNoForceStuffing(BitstreamDecVideo *stream)
+{
+    uint n_stuffed;
+
+    n_stuffed = (8 - (stream->bitcnt & 0x7)) & 0x7; /* 07/05/01 */
+
+    stream->bitcnt += n_stuffed;
+    stream->incnt -= n_stuffed;
+
+    if (stream->incnt < 0)
+    {
+        /* cache ran dry: clamp the position to the end of the data */
+        stream->bitcnt += stream->incnt;
+        stream->incnt = 0;
+    }
+    stream->curr_word <<= n_stuffed;
+    return PV_SUCCESS;
+}
+
+
+/* ==================================================================== */
+/* Function : getPointer() */
+/* Date : 10/98 */
+/* Purpose : get current position of file pointer */
+/* In/out : */
+/* Return : current absolute bit position in the stream (bitcnt) */
+/* ==================================================================== */
+int32 getPointer(BitstreamDecVideo *stream)
+{
+    return stream->bitcnt;
+}
+
+
+
+
+/* ====================================================================== /
+Function : movePointerTo()
+Date : 05/14/2004
+Purpose : move bitstream pointer to a desired position
+In/out :
+Return :
+Modified :
+/ ====================================================================== */
+/* Reposition the bitstream to absolute bit position `pos`.
+ * `pos` is clamped to [0, data_end_pos * 8]. The read point is dropped to
+ * a 4-byte boundary, the cache is rebuilt, and the residual bit offset is
+ * flushed so subsequent reads start exactly at `pos`.
+ * Always returns PV_SUCCESS. (Also removes a stray double semicolon.) */
+PV_STATUS movePointerTo(BitstreamDecVideo *stream, int32 pos)
+{
+    int32 byte_pos;
+    if (pos < 0)
+    {
+        pos = 0;
+    }
+
+    byte_pos = pos >> 3;
+
+    if (byte_pos > stream->data_end_pos)
+    {
+        byte_pos = stream->data_end_pos;
+    }
+
+    stream->read_point = byte_pos & -4;         /* back up to 32-bit boundary */
+    stream->bitcnt = stream->read_point << 3;
+    stream->curr_word = 0;
+    stream->next_word = 0;
+    stream->incnt = 0;
+    stream->incnt_next = 0;
+    BitstreamFillCache(stream);
+    /* flush the sub-word remainder: bits within the byte + bytes within the word */
+    PV_BitstreamFlushBits(stream, ((pos & 0x7) + ((byte_pos & 0x3) << 3)));
+    return PV_SUCCESS;
+}
+
+
+/* ======================================================================== */
+/* Function : validStuffing() */
+/* Date : 04/11/2000 */
+/* Purpose : Check whether we have valid stuffing at current position. */
+/* Valid MPEG-4 stuffing is a '0' followed by n-1 '1' bits up */
+/* to the byte boundary; msk[n_stuffed-1] is that pattern. */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : 12/18/2000 : changed the pattern type to uint */
+/* 04/01/2001 : removed PV_END_OF_BUFFER */
+/* ======================================================================== */
+Bool validStuffing(BitstreamDecVideo *stream)
+{
+    uint n_stuffed;
+    uint pattern;
+
+
+    n_stuffed = 8 - (stream->bitcnt & 0x7);  /* 1..8 bits to the boundary */
+    BitstreamShowBits16(stream, n_stuffed, &pattern);
+    if (pattern == msk[n_stuffed-1]) return PV_TRUE;
+    return PV_FALSE;
+}
+#ifdef PV_ANNEX_IJKT_SUPPORT
+/* H.263 variant: stuffing bits up to the byte boundary must all be zero,
+ * and an already-aligned position is trivially valid. */
+Bool validStuffing_h263(BitstreamDecVideo *stream)
+{
+    uint n_stuffed;
+    uint pattern;
+
+
+    n_stuffed = (8 - (stream->bitcnt & 0x7)) & 7; // stream->incnt % 8
+    if (n_stuffed == 0)
+    {
+        return PV_TRUE;  /* already byte-aligned: nothing to check */
+    }
+    BitstreamShowBits16(stream, n_stuffed, &pattern);
+    if (pattern == 0) return PV_TRUE;
+    return PV_FALSE;
+}
+#endif
+
+
+/* ======================================================================== */
+/* Function : PVSearchNextH263Frame() */
+/* Date : 04/08/2005 */
+/* Purpose : search for 0x00 0x00 0x80 */
+/* In/out : */
+/* Return : PV_SUCCESS if succeeded or PV_END_OF_VOP if failed */
+/* Modified : */
+/* Note : movePointerTo() is called even on PV_END_OF_VOP so the */
+/* stream is left positioned at the end of the data. */
+/* ======================================================================== */
+PV_STATUS PVSearchNextH263Frame(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = PV_SUCCESS;
+    uint8 *ptr;
+    int32 i;
+    int32 initial_byte_aligned_position = (stream->bitcnt + 7) >> 3;
+
+    ptr = stream->bitstreamBuffer + initial_byte_aligned_position;
+
+    /* offset (in bytes) of the next frame header from the aligned position */
+    i = PVLocateH263FrameHeader(ptr, stream->data_end_pos - initial_byte_aligned_position);
+    if (stream->data_end_pos <= initial_byte_aligned_position + i)
+    {
+        status = PV_END_OF_VOP;  /* no header before the end of data */
+    }
+    (void)movePointerTo(stream, ((i + initial_byte_aligned_position) << 3)); /* ptr + i */
+    return status;
+}
+
+
+/* ======================================================================== */
+/* Function : PVSearchNextM4VFrame() */
+/* Date : 04/08/2005 */
+/* Purpose : search for 0x00 0x00 0x01 and move the pointer to the */
+/* beginning of the start code */
+/* In/out : */
+/* Return : PV_SUCCESS if succeeded or PV_END_OF_VOP if failed */
+/* Modified : */
+/* Note : mirrors PVSearchNextH263Frame(); movePointerTo() is */
+/* called even on PV_END_OF_VOP to park at end of data. */
+/* ======================================================================== */
+
+PV_STATUS PVSearchNextM4VFrame(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = PV_SUCCESS;
+    uint8 *ptr;
+    int32 i;
+    int32 initial_byte_aligned_position = (stream->bitcnt + 7) >> 3;
+
+    ptr = stream->bitstreamBuffer + initial_byte_aligned_position;
+
+    /* offset (in bytes) of the next start code from the aligned position */
+    i = PVLocateFrameHeader(ptr, stream->data_end_pos - initial_byte_aligned_position);
+    if (stream->data_end_pos <= initial_byte_aligned_position + i)
+    {
+        status = PV_END_OF_VOP;  /* no start code before the end of data */
+    }
+    (void)movePointerTo(stream, ((i + initial_byte_aligned_position) << 3)); /* ptr + i */
+    return status;
+}
+
+
+
+/* Trim data_end_pos to the next MPEG-4 start code at/after the current
+ * byte position, so subsequent searches stop at the frame boundary.
+ * Sets searched_frame_boundary as a "done once" flag for this frame. */
+PV_STATUS PVLocateM4VFrameBoundary(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = BitstreamCheckEndBuffer(stream);
+    if (status == PV_END_OF_VOP) return status;
+
+    uint8 *ptr;
+    int32 byte_pos = (stream->bitcnt >> 3);
+
+    stream->searched_frame_boundary = 1;
+    ptr = stream->bitstreamBuffer + byte_pos;
+
+    stream->data_end_pos = PVLocateFrameHeader(ptr, (int32)stream->data_end_pos - byte_pos) + byte_pos;
+    return PV_SUCCESS;
+}
+
+/* H.263 counterpart of PVLocateM4VFrameBoundary: trim data_end_pos to the
+ * next H.263 frame header at/after the current byte position and mark the
+ * boundary search as done for this frame. */
+PV_STATUS PVLocateH263FrameBoundary(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = BitstreamCheckEndBuffer(stream);
+    if (status == PV_END_OF_VOP) return status;
+
+    uint8 *ptr;
+    int32 byte_pos = (stream->bitcnt >> 3);
+
+    stream->searched_frame_boundary = 1;
+    ptr = stream->bitstreamBuffer + byte_pos;
+
+    stream->data_end_pos = PVLocateH263FrameHeader(ptr, (int32)stream->data_end_pos - byte_pos) + byte_pos;
+    return PV_SUCCESS;
+}
+
+/* ======================================================================== */
+/* Function : quickSearchVideoPacketHeader() */
+/* Date : 05/08/2000 */
+/* Purpose : Quick search for the next video packet header */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* Note : scans a byte at a time, peeking marker_length bits at each */
+/* byte-aligned position until RESYNC_MARKER is found or the */
+/* frame boundary (set by PVLocateM4VFrameBoundary) is hit. */
+/* ======================================================================== */
+PV_STATUS quickSearchVideoPacketHeader(BitstreamDecVideo *stream, int marker_length)
+{
+    PV_STATUS status = PV_SUCCESS;
+    uint32 tmpvar;
+
+
+    if (stream->searched_frame_boundary == 0)
+    {
+        /* first search in this frame: clamp data_end_pos to the boundary */
+        status = PVLocateM4VFrameBoundary(stream);
+        if (status != PV_SUCCESS) return status;
+    }
+
+    do
+    {
+        status = BitstreamCheckEndBuffer(stream);
+        if (status == PV_END_OF_VOP) break;
+        PV_BitstreamShowBitsByteAlign(stream, marker_length, &tmpvar);
+        if (tmpvar == RESYNC_MARKER) break;
+        PV_BitstreamFlushBits(stream, 8);  /* advance one byte and retry */
+    }
+    while (status == PV_SUCCESS);
+
+    return status;
+}
+#ifdef PV_ANNEX_IJKT_SUPPORT
+/* H.263 Annex K counterpart of quickSearchVideoPacketHeader(): scans a
+ * byte at a time for the 17-bit slice resync marker within the current
+ * frame (boundary set by PVLocateH263FrameBoundary). */
+PV_STATUS quickSearchH263SliceHeader(BitstreamDecVideo *stream)
+{
+    PV_STATUS status = PV_SUCCESS;
+    uint32 tmpvar;
+
+
+    if (stream->searched_frame_boundary == 0)
+    {
+        /* first search in this frame: clamp data_end_pos to the boundary */
+        status = PVLocateH263FrameBoundary(stream);
+        if (status != PV_SUCCESS) return status;
+    }
+
+    do
+    {
+        status = BitstreamCheckEndBuffer(stream);
+        if (status == PV_END_OF_VOP) break;
+        PV_BitstreamShowBitsByteAlignNoForceStuffing(stream, 17, &tmpvar);
+        if (tmpvar == RESYNC_MARKER) break;
+        PV_BitstreamFlushBits(stream, 8);  /* advance one byte and retry */
+    }
+    while (status == PV_SUCCESS);
+
+    return status;
+}
+#endif
+/* ======================================================================== */
+/* The following functions are for Error Concealment. */
+/* ======================================================================== */
+
+/****************************************************/
+// 01/22/99 Quick search of Resync Marker
+// (actually the first part of it, i.e. 16 0's and a 1.
+
+/* We are not using the fastest algorithm possible. What this function does is
+to locate 11 consecutive 0's and then check if the 5 bits before them and
+the 1 bit after them are all 1's.
+*/
+
+// Table used for quick search of markers. Gives the last `1' in
+// 4 bits. The MSB is bit #1, the LSB is bit #4.
+// (Index is the 4-bit value; entry 0 means "no 1 bit present".)
+const int lastOne[] =
+{
+    0, 4, 3, 4, 2, 4, 3, 4,
+    1, 4, 3, 4, 2, 4, 3, 4
+};
+
+// Table used for quick search of markers. Gives the last `0' in
+// 4 bits. The MSB is bit #1, the LSB is bit #4.
+/*const int lastZero[]=
+{
+    4, 3, 4, 2, 4, 3, 4, 1,
+    4, 3, 4, 2, 4, 3, 4, 0
+};
+*/
+// Table used for quick search of markers. Gives the first `0' in
+// 4 bits. The MSB is bit #1, the LSB is bit #4.
+// (Entry 0 at index 15 means "no 0 bit present".)
+const int firstZero[] =
+{
+    1, 1, 1, 1, 1, 1, 1, 1,
+    2, 2, 2, 2, 3, 3, 4, 0
+};
+
+// Table used for quick search of markers. Gives the first `1' in
+// 4 bits. The MSB is bit #1, the LSB is bit #4.
+// (Entry 0 at index 0 means "no 1 bit present".)
+const int firstOne[] =
+{
+    0, 4, 3, 3, 2, 2, 2, 2,
+    1, 1, 1, 1, 1, 1, 1, 1
+};
+
+
+/* ======================================================================== */
+/* Function : quickSearchMarkers() */
+/* Date : 01/25/99 */
+/* Purpose : Quick search for Motion marker */
+/* In/out : */
+/* Return : Boolean true or false */
+/* Modified : 12/18/2000 : 32-bit version */
+/* Note : Scans for the 17-bit motion marker by peeking 17 bits and */
+/* using the lastOne/firstZero nibble tables to skip past */
+/* positions that cannot start the marker's run of zeros. */
+/* ======================================================================== */
+PV_STATUS quickSearchMotionMarker(BitstreamDecVideo *stream)
+// MM: (11111000000000001)
+{
+    PV_STATUS status;
+    uint32 tmpvar, tmpvar2;
+
+    if (stream->searched_frame_boundary == 0)
+    {
+        /* first search in this frame: clamp data_end_pos to the boundary */
+        status = PVLocateM4VFrameBoundary(stream);
+        if (status != PV_SUCCESS) return status;
+    }
+
+    while (TRUE)
+    {
+        status = BitstreamCheckEndBuffer(stream);
+        if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
+
+        BitstreamShowBits32(stream, 17, &tmpvar);
+        if (!tmpvar) return PV_FAIL;  /* all-zero window: give up */
+
+        if (tmpvar & 1) // Check if the 17th bit from the curr bit pos is a '1'
+        {
+            if (tmpvar == MOTION_MARKER_COMB)
+            {
+                return PV_SUCCESS; // Found
+            }
+            else
+            {
+                /* skip past the last '0' in bits #13..#16 */
+                tmpvar >>= 1;
+                tmpvar &= 0xF;
+                PV_BitstreamFlushBits(stream, (int)(12 + firstZero[tmpvar]));
+            }
+        }
+        else
+        {
+            // 01/25/99 Get the first 16 bits
+            tmpvar >>= 1;
+            tmpvar2 = tmpvar & 0xF;
+
+            // 01/26/99 Check bits #13 ~ #16
+            if (tmpvar2)
+            {
+                PV_BitstreamFlushBits(stream, (int)(7 + lastOne[tmpvar2]));
+            }
+            else
+            {
+                tmpvar >>= 4;
+                tmpvar2 = tmpvar & 0xF;
+
+                // 01/26/99 Check bits #9 ~ #12
+                if (tmpvar2)
+                {
+                    PV_BitstreamFlushBits(stream, (int)(3 + lastOne[tmpvar2]));
+                }
+                else
+                {
+                    tmpvar >>= 4;
+                    tmpvar2 = tmpvar & 0xF;
+
+                    // 01/26/99 Check bits #5 ~ #8
+                    // We don't need to check further
+                    // for the first 5 bits should be all 1's
+                    if (lastOne[tmpvar2] < 2)
+                    {
+                        /* we already have too many consecutive 0's. */
+                        /* Go directly pass the last of the 17 bits. */
+                        PV_BitstreamFlushBits(stream, 17);
+                    }
+                    else
+                    {
+                        PV_BitstreamFlushBits(stream, (int)(lastOne[tmpvar2] - 1));
+                    }
+                }
+            }
+        }
+
+    }
+}
+
+/* ======================================================================== */
+/* Function : quickSearchDCM() */
+/* Date : 01/22/99 */
+/* Purpose : Quick search for DC Marker */
+/* We are not using the fastest algorithm possible. What this */
+/* function does is to locate 11 consecutive 0's and then */
+/* check if the 7 bits before them and the 1 bit after them */
+/* are correct. (actually the first part of it, i.e. 16 0's */
+/* and a 1. */
+/* In/out : */
+/* Return : Boolean true or false */
+/* Modified : 12/18/2000 : 32-bit version */
+/* Note : same table-driven skip scheme as quickSearchMotionMarker, */
+/* over a 19-bit window. */
+/* ======================================================================== */
+PV_STATUS quickSearchDCM(BitstreamDecVideo *stream)
+// DCM: (110 1011 0000 0000 0001)
+{
+    PV_STATUS status;
+    uint32 tmpvar, tmpvar2;
+
+    if (stream->searched_frame_boundary == 0)
+    {
+        /* first search in this frame: clamp data_end_pos to the boundary */
+        status = PVLocateM4VFrameBoundary(stream);
+        if (status != PV_SUCCESS) return status;
+    }
+
+    while (TRUE)
+    {
+        status = BitstreamCheckEndBuffer(stream);
+        if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
+        BitstreamShowBits32(stream, 19, &tmpvar);
+
+        if (tmpvar & 1) // Check if the 17th bit from the curr bit pos is a '1'
+        {
+            if (tmpvar == DC_MARKER)
+            {
+                return PV_SUCCESS; // Found
+            }
+            else
+            {
+                // 01/25/99 We treat the last of the 19 bits as its 7th bit (which is
+                // also a `1'
+                PV_BitstreamFlushBits(stream, 12);
+            }
+        }
+        else
+        {
+            tmpvar >>= 1;
+            tmpvar2 = tmpvar & 0xF;
+
+            /* check bits #15..#18 for the last '1' and skip past it */
+            if (tmpvar2)
+            {
+                PV_BitstreamFlushBits(stream, (int)(7 + lastOne[tmpvar2]));
+            }
+            else
+            {
+                tmpvar >>= 4;
+                tmpvar2 = tmpvar & 0xF;
+                if (tmpvar2)
+                {
+                    PV_BitstreamFlushBits(stream, (int)(3 + lastOne[tmpvar2]));
+                }
+                else
+                {
+                    tmpvar >>= 4;
+                    tmpvar2 = tmpvar & 0xF;
+                    if (lastOne[tmpvar2] < 2)
+                    {
+                        /* we already have too many consecutive 0's. */
+                        /* Go directly pass the last of the 17 bits. */
+                        PV_BitstreamFlushBits(stream, 19);
+                    }
+                    else
+                    {
+                        PV_BitstreamFlushBits(stream, (int)(lastOne[tmpvar2] - 1));
+                    }
+                }
+            }
+        }
+    }
+}
+
+/* ======================================================================== */
+/* Function : quickSearchGOBHeader() 0000 0000 0000 0000 1 */
+/* Date : 07/06/01 */
+/* Purpose : Quick search of GOBHeader (not byte aligned) */
+/* In/out : */
+/* Return : Integer value indicates type of marker found */
+/* Modified : */
+/* Note : examines three consecutive cached bytes per iteration; */
+/* byte1 must be 0x00 and the run of zeros preceding the */
+/* first '1' in byte2 must extend back through byte0. */
+/* ======================================================================== */
+PV_STATUS quickSearchGOBHeader(BitstreamDecVideo *stream)
+{
+    PV_STATUS status;
+    int byte0, byte1, byte2, shift, tmpvar;
+
+    BitstreamByteAlignNoForceStuffing(stream);
+
+    if (stream->searched_frame_boundary == 0)
+    {
+        /* first search in this frame: clamp data_end_pos to the boundary */
+        status = PVLocateH263FrameBoundary(stream);
+        if (status != PV_SUCCESS) return status;
+    }
+
+    while (TRUE)
+    {
+        status = BitstreamCheckEndBuffer(stream);
+        if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
+
+        if (stream->incnt < 24)
+        {
+            /* need three full bytes in the cache for this iteration */
+            status = BitstreamFillCache(stream);
+        }
+
+
+        byte1 = (stream->curr_word << 8) >> 24;   /* cache bits 8..15 */
+        if (byte1 == 0)
+        {
+            byte2 = (stream->curr_word << 16) >> 24;  /* cache bits 16..23 */
+            if (byte2)
+            {
+                tmpvar = byte2 >> 4;
+
+                if (tmpvar)
+                {
+                    shift = 9 - firstOne[tmpvar];
+                }
+                else
+                {
+                    shift = 5 - firstOne[byte2];
+                }
+                byte0 = stream->curr_word >> 24;  /* cache bits 0..7 */
+                if ((byte0 & msk[shift]) == 0)
+                {
+                    /* zeros extend back into byte0: GOB start code found */
+                    PV_BitstreamFlushBits(stream, 8 - shift);
+                    return PV_SUCCESS;
+                }
+                PV_BitstreamFlushBits(stream, 8); /* third_byte is not zero */
+            }
+        }
+
+        PV_BitstreamFlushBits(stream, 8);  /* advance one byte and retry */
+    }
+}
diff --git a/media/codecs/m4v_h263/dec/src/bitstream.h b/media/codecs/m4v_h263/dec/src/bitstream.h
new file mode 100644
index 0000000..0cf903d
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/bitstream.h
@@ -0,0 +1,174 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+
+#ifndef _BITSTREAM_D_H_
+#define _BITSTREAM_D_H_
+
+#include "mp4dec_lib.h" /* video decoder function prototypes */
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+#define PV_BS_INLINE /* support inline bitstream functions */
+
+#define PV_BitstreamFlushBits(A,B) {(A)->bitcnt += (B); (A)->incnt -= (B); (A)->curr_word <<= (B);}
+
+ PV_STATUS BitstreamFillBuffer(BitstreamDecVideo *stream);
+ PV_STATUS BitstreamFillCache(BitstreamDecVideo *stream);
+ void BitstreamReset(BitstreamDecVideo *stream, uint8 *buffer, int32 buffer_size);
+ int BitstreamOpen(BitstreamDecVideo *stream, int layer);
+ void BitstreamClose(BitstreamDecVideo *stream);
+
+ PV_STATUS BitstreamShowBits32(BitstreamDecVideo *stream, int nbits, uint32 *code);
+ uint32 BitstreamReadBits32(BitstreamDecVideo *stream, int nbits);
+
+ uint BitstreamReadBits16(BitstreamDecVideo *stream, int nbits);
+ uint BitstreamRead1Bits(BitstreamDecVideo *stream);
+#ifndef PV_BS_INLINE
+ PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code);
+ PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code);
+ PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code);
+ uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits);
+ uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream);
+#else
+    /* Inline twin of BitstreamShowBits16() in bitstream.cpp: peek the top
+     * nbits (nbits <= 16) of the cache without advancing, refilling first
+     * if needed. Keep both copies in sync. */
+    __inline PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code)
+    {
+        PV_STATUS status = PV_SUCCESS;
+
+
+        if (stream->incnt < nbits)
+        {
+            /* frame-based decoding */
+            status = BitstreamFillCache(stream);
+        }
+
+        *code = stream->curr_word >> (32 - nbits);
+        return status;
+    }
+
+
+
+    /* =========================================================================*/
+    /* Inline twin of BitstreamShow15Bits() in bitstream.cpp: peek the next
+     * 15 bits without advancing. Keep both copies in sync. */
+    __inline PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code)
+    {
+        PV_STATUS status = PV_SUCCESS;
+
+        if (stream->incnt < 15)
+        {
+            /* frame-based decoding */
+            status = BitstreamFillCache(stream);
+        }
+        *code = stream->curr_word >> 17;  /* 32 - 15 */
+        return status;
+    }
+
+
+    /* Inline twin of BitstreamShow13Bits() in bitstream.cpp: peek the next
+     * 13 bits (VlcTCOEFF decoding) without advancing. Keep in sync. */
+    __inline PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code)
+    {
+        PV_STATUS status = PV_SUCCESS;
+
+        if (stream->incnt < 13)
+        {
+            /* frame-based decoding */
+            status = BitstreamFillCache(stream);
+        }
+        *code = stream->curr_word >> 19;  /* 32 - 13 */
+        return status;
+    }
+    /* Inline read of nbits (nbits <= 16): peek the top nbits, then flush
+     * them. The BitstreamFillCache() status is intentionally ignored. */
+    __inline uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits)
+    {
+        uint code;
+
+        if (stream->incnt < nbits)
+        {
+            /* frame-based decoding */
+            BitstreamFillCache(stream);
+        }
+        code = stream->curr_word >> (32 - nbits);
+        PV_BitstreamFlushBits(stream, nbits);
+        return code;
+    }
+
+
+    /* Inline read of a single bit: peek the top bit, then flush it. The
+     * BitstreamFillCache() status is intentionally ignored. */
+    __inline uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
+    {
+        uint code;
+
+        if (stream->incnt < 1)
+        {
+            /* frame-based decoding */
+            BitstreamFillCache(stream);
+        }
+        code = stream->curr_word >> 31;
+        PV_BitstreamFlushBits(stream, 1);
+
+        return code;
+    }
+
+#endif
+
+
+
+
+
+
+
+    PV_STATUS PV_BitstreamFlushBitsCheck(BitstreamDecVideo *stream, int nbits);
+
+    uint32 BitstreamReadBits32HC(BitstreamDecVideo *stream);
+    PV_STATUS BitstreamShowBits32HC(BitstreamDecVideo *stream, uint32 *code);
+
+
+
+    PV_STATUS BitstreamCheckEndBuffer(BitstreamDecVideo *stream);
+
+    PV_STATUS PV_BitstreamShowBitsByteAlign(BitstreamDecVideo *stream, int nbits, uint32 *code);
+#ifdef PV_ANNEX_IJKT_SUPPORT
+    PV_STATUS PV_BitstreamShowBitsByteAlignNoForceStuffing(BitstreamDecVideo *stream, int nbits, uint32 *code);
+    Bool validStuffing_h263(BitstreamDecVideo *stream);
+    PV_STATUS quickSearchH263SliceHeader(BitstreamDecVideo *stream);
+#endif
+    PV_STATUS PV_BitstreamByteAlign(BitstreamDecVideo *stream);
+    PV_STATUS BitstreamByteAlignNoForceStuffing(BitstreamDecVideo *stream);
+    Bool validStuffing(BitstreamDecVideo *stream);
+
+    PV_STATUS movePointerTo(BitstreamDecVideo *stream, int32 pos);
+    PV_STATUS PVSearchNextM4VFrame(BitstreamDecVideo *stream);
+    PV_STATUS PVSearchNextH263Frame(BitstreamDecVideo *stream);
+    PV_STATUS quickSearchVideoPacketHeader(BitstreamDecVideo *stream, int marker_length);
+
+
+    /* for error concealment & soft-decoding */
+    PV_STATUS PVLocateM4VFrameBoundary(BitstreamDecVideo *stream);
+    PV_STATUS PVLocateH263FrameBoundary(BitstreamDecVideo *stream);
+    /* NOTE(review): the declaration below appears stale -- bitstream.cpp
+     * defines PVLocateH263FrameBoundary (declared above), not
+     * PVSearchH263FrameBoundary; confirm no other TU defines it, then
+     * remove. Kept for source compatibility. */
+    PV_STATUS PVSearchH263FrameBoundary(BitstreamDecVideo *stream);
+
+    PV_STATUS quickSearchMotionMarker(BitstreamDecVideo *stream);
+    PV_STATUS quickSearchDCM(BitstreamDecVideo *stream);
+    PV_STATUS quickSearchGOBHeader(BitstreamDecVideo *stream);
+    void BitstreamShowBuffer(BitstreamDecVideo *stream, int32 startbit, int32 endbit, uint8 *bitBfr);
+
+    /* 10/8/98 New prototypes. */
+    int32 getPointer(BitstreamDecVideo *stream);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* _BITSTREAM_D_H_ */
diff --git a/media/codecs/m4v_h263/dec/src/block_idct.cpp b/media/codecs/m4v_h263/dec/src/block_idct.cpp
new file mode 100644
index 0000000..bc708e2
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/block_idct.cpp
@@ -0,0 +1,914 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+ [input_variable_name] = [description of the input to module, its type
+ definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+ [local_store_name] = [description of the local store, its type
+ definition, and length (when applicable)]
+ [local_buffer_name] = [description of the local buffer, its type
+ definition, and length (when applicable)]
+ [local_ptr_name] = [description of the local pointer, its type
+ definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+ [global_store_name] = [description of the global store, its type
+ definition, and length (when applicable)]
+ [global_buffer_name] = [description of the global buffer, its type
+ definition, and length (when applicable)]
+ [global_ptr_name] = [description of the global pointer, its type
+ definition, and length (when applicable)]
+
+ Outputs:
+ [return_variable_name] = [description of data/pointer returned
+ by module, its type definition, and length
+ (when applicable)]
+
+ Pointers and Buffers Modified:
+ [variable_bfr_ptr] points to the [describe where the
+ variable_bfr_ptr points to, its type definition, and length
+ (when applicable)]
+ [variable_bfr] contents are [describe the new contents of
+ variable_bfr]
+
+ Local Stores Modified:
+ [local_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+ Global Stores Modified:
+ [global_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+ RESOURCES USED
+ When the code is written for a specific target processor the
+ the resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+ stack usage for each subroutine called]
+
+ where: [stack usage variable] = stack usage for [subroutine
+ name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+ used to represent cycle count for each subroutine
+ called]
+
+ where: [cycle count variable] = cycle count for [subroutine
+ name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "mp4dec_lib.h"
+#include "idct.h"
+#include "motion_comp.h"
+
+#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+/* private prototypes */
+static void idctrow(int16 *blk, uint8 *pred, uint8 *dst, int width);
+static void idctrow_intra(int16 *blk, PIXEL *, int width);
+static void idctcol(int16 *blk);
+
+#ifdef FAST_IDCT
+// mapping from nz_coefs to functions to be used
+
+
+// ARM4 does not allow global data when they are not constant hence
+// an array of function pointers cannot be considered as array of constants
+// (actual addresses are only known when the dll is loaded).
+// So instead of arrays of function pointers, we'll store here
+// arrays of rows or columns and then call the idct function
+// corresponding to such the row/column number:
+
+
+/* column-IDCT dispatch: row index selects a nonzero-coefficient class,
+ * the four entries choose the specialized idctcolN per column group */
+static void (*const idctcolVCA[10][4])(int16*) =
+{
+    {&idctcol1, &idctcol0, &idctcol0, &idctcol0},
+    {&idctcol1, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol2, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol1, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol1, &idctcol1},
+    {&idctcol3, &idctcol2, &idctcol2, &idctcol1},
+    {&idctcol3, &idctcol3, &idctcol2, &idctcol1},
+    {&idctcol4, &idctcol3, &idctcol2, &idctcol1}
+};
+
+
+/* row-IDCT dispatch for inter blocks (adds prediction) */
+static void (*const idctrowVCA[10])(int16*, uint8*, uint8*, int) =
+{
+    &idctrow1,
+    &idctrow2,
+    &idctrow2,
+    &idctrow2,
+    &idctrow2,
+    &idctrow3,
+    &idctrow4,
+    &idctrow4,
+    &idctrow4,
+    &idctrow4
+};
+
+
+/* alternate column dispatch indexed directly by a 4-bit occupancy mask */
+static void (*const idctcolVCA2[16])(int16*) =
+{
+    &idctcol0, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol2, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol1, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol2, &idctcol4, &idctcol3, &idctcol4
+};
+
+static void (*const idctrowVCA2[8])(int16*, uint8*, uint8*, int) =
+{
+    &idctrow1, &idctrow4, &idctrow3, &idctrow4,
+    &idctrow2, &idctrow4, &idctrow3, &idctrow4
+};
+
+/* intra-block counterparts (write pixels directly, no prediction) */
+static void (*const idctrowVCA_intra[10])(int16*, PIXEL *, int) =
+{
+    &idctrow1_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow3_intra,
+    &idctrow4_intra,
+    &idctrow4_intra,
+    &idctrow4_intra,
+    &idctrow4_intra
+};
+
+static void (*const idctrowVCA2_intra[8])(int16*, PIXEL *, int) =
+{
+    &idctrow1_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra,
+    &idctrow2_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra
+};
+#endif
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+/* Inverse-transform all six 8x8 blocks (four luma, two chroma) of the
+ * current intra macroblock and write the reconstructed pixels straight
+ * into the current VOP's frame buffers. */
+void MBlockIDCT(VideoDecData *video)
+{
+    Vop *vop = video->currVop;
+    MacroBlock *mb = video->mblock;
+    int col = video->mbnum_col;
+    int row = video->mbnum_row;
+    int lumaPitch = video->width;
+    int chromaPitch = lumaPitch >> 1;
+    /* top-left pixel of this 16x16 macroblock in the luma plane */
+    int32 lumaOffset = (int32)(row << 4) * lumaPitch + (col << 4);
+    /* corresponding origin in each half-resolution chroma plane */
+    int32 chromaOffset = (lumaOffset >> 2) + (col << 2);
+    PIXEL *luma = vop->yChan + lumaOffset;
+    PIXEL *cb = vop->uChan + chromaOffset;
+    PIXEL *cr = vop->vChan + chromaOffset;
+
+    BlockIDCT_intra(mb, luma, 0, lumaPitch);                         /* Y0 */
+    BlockIDCT_intra(mb, luma + 8, 1, lumaPitch);                     /* Y1 */
+    BlockIDCT_intra(mb, luma + (lumaPitch << 3), 2, lumaPitch);      /* Y2 */
+    BlockIDCT_intra(mb, luma + (lumaPitch << 3) + 8, 3, lumaPitch);  /* Y3 */
+    BlockIDCT_intra(mb, cb, 4, chromaPitch);                         /* U  */
+    BlockIDCT_intra(mb, cr, 5, chromaPitch);                         /* V  */
+}
+
+
+/* Inverse-transform one 8x8 intra block of the macroblock and write the
+ * reconstructed, clipped pixels directly into c_comp (pitch 'width').
+ *   mblock - source of the dequantized coefficients (block[comp]) and,
+ *            in the FAST_IDCT build, of the nonzero-coefficient count
+ *            and the per-column / per-row bitmaps used to pick reduced
+ *            IDCT variants
+ *   comp   - block index 0..5 within the macroblock
+ *   width  - pitch of the destination plane
+ * On the FAST_IDCT path the coefficient buffer is zeroed as it is
+ * consumed by the row/column routines. */
+void BlockIDCT_intra(
+    MacroBlock *mblock, PIXEL *c_comp, int comp, int width)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int16 *coeff_in = mblock->block[comp];
+#ifdef INTEGER_IDCT
+#ifdef FAST_IDCT /* VCA IDCT using nzcoefs and bitmaps*/
+    int i, bmapr;
+    int nz_coefs = mblock->no_coeff[comp];
+    uint8 *bitmapcol = mblock->bitmapcol[comp];
+    uint8 bitmaprow = mblock->bitmaprow[comp];
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    if (nz_coefs <= 10)
+    {
+        /* sparse block: variants chosen purely by the coefficient count;
+         * only columns 0..3 get a column pass on this path */
+        bmapr = (nz_coefs - 1);
+
+        (*(idctcolVCA[bmapr]))(coeff_in);        /* same as idctcolVCA[bmapr][0] */
+        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
+        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
+        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
+
+        (*idctrowVCA_intra[nz_coefs-1])(coeff_in, c_comp, width);
+    }
+    else
+    {
+        /* dense block: pick a column variant per column from its bitmap */
+        i = 8;
+        while (i--)
+        {
+            bmapr = (int)bitmapcol[i];
+            if (bmapr)
+            {
+                if ((bmapr&0xf) == 0)          /*  07/18/01, only high nibble set */
+                {
+                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
+                }
+                else
+                {
+                    idctcol(coeff_in + i);     /* full column IDCT */
+                }
+            }
+        }
+        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
+        {
+            /* columns 4..7 empty: reduced row pass.
+             * NOTE(review): bitmaprow>>4 is 0..15 but idctrowVCA2_intra has
+             * only 8 entries -- relies on the top bit never being set; verify. */
+            bitmaprow >>= 4;
+            (*(idctrowVCA2_intra[(int)bitmaprow]))(coeff_in, c_comp, width);
+        }
+        else
+        {
+            idctrow_intra(coeff_in, c_comp, width);   /* full row pass */
+        }
+    }
+#else
+    void idct_intra(int *block, uint8 *comp, int width);
+    idct_intra(coeff_in, c_comp, width);
+#endif
+#else
+    void idctref_intra(int *block, uint8 *comp, int width);
+    idctref_intra(coeff_in, c_comp, width);
+#endif
+
+
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+/* 08/04/05, no residue, just copy from pred to output */
+/* Copy an 8x8 block from pred (fixed pitch of 16 bytes) to dst (pitch
+ * 'width'), two aligned 32-bit words per row -- both pointers are
+ * expected to be 4-byte aligned. */
+void Copy_Blk_to_Vop(uint8 *dst, uint8 *pred, int width)
+{
+    int row = 8;
+
+    while (row--)
+    {
+        *((uint32*)dst) = *((uint32*)pred);
+        *((uint32*)(dst + 4)) = *((uint32*)(pred + 4));
+        dst += width;
+        pred += 16;
+    }
+
+    return;
+}
+
+/* 08/04/05 compute IDCT and add prediction at the end */
+/* Inverse-transform one 8x8 inter block: IDCT of coeff_in, add the
+ * prediction and store the clipped 8-bit result to dst.  In the
+ * FAST_IDCT build, nz_coefs and the column/row bitmaps select
+ * reduced-complexity IDCT variants; coeff_in is zeroed as it is
+ * consumed by those routines. */
+void BlockIDCT(
+    uint8 *dst,  /* destination */
+    uint8 *pred, /* prediction block, pitch 16 */
+    int16   *coeff_in,  /* DCT data, size 64 */
+    int width, /* width of dst */
+    int nz_coefs,
+    uint8 *bitmapcol,
+    uint8 bitmaprow
+)
+{
+#ifdef INTEGER_IDCT
+#ifdef FAST_IDCT /*  VCA IDCT using nzcoefs and bitmaps*/
+    int i, bmapr;
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    if (nz_coefs <= 10)
+    {
+        /* sparse block: variants chosen purely by the coefficient count;
+         * only columns 0..3 get a column pass on this path */
+        bmapr = (nz_coefs - 1);
+        (*(idctcolVCA[bmapr]))(coeff_in);        /* same as idctcolVCA[bmapr][0] */
+        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
+        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
+        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
+
+        (*idctrowVCA[nz_coefs-1])(coeff_in, pred, dst, width);
+        return ;
+    }
+    else
+    {
+        /* dense block: pick a column variant per column from its bitmap */
+        i = 8;
+
+        while (i--)
+        {
+            bmapr = (int)bitmapcol[i];
+            if (bmapr)
+            {
+                if ((bmapr&0xf) == 0)          /*  07/18/01, only high nibble set */
+                {
+                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
+                }
+                else
+                {
+                    idctcol(coeff_in + i);     /* full column IDCT */
+                }
+            }
+        }
+        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
+        {
+            /* columns 4..7 empty: reduced row pass.
+             * NOTE(review): bitmaprow>>4 is 0..15 but idctrowVCA2 has only
+             * 8 entries -- relies on the top bit never being set; verify. */
+            (*(idctrowVCA2[bitmaprow>>4]))(coeff_in, pred, dst, width);
+        }
+        else
+        {
+            idctrow(coeff_in, pred, dst, width);   /* full row pass */
+        }
+        return ;
+    }
+#else // FAST_IDCT
+    void idct(int *block, uint8 *pred, uint8 *dst, int width);
+    idct(coeff_in, pred, dst, width);
+    return;
+#endif // FAST_IDCT
+#else // INTEGER_IDCT
+    void idctref(int *block, uint8 *pred, uint8 *dst, int width);
+    idctref(coeff_in, pred, dst, width);
+    return;
+#endif // INTEGER_IDCT
+
+}
+/*----------------------------------------------------------------------------
+; End Function: block_idct
+----------------------------------------------------------------------------*/
+
+
+/****************************************************************************/
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: idctrow
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS FOR idctrow
+
+ Inputs:
+ [input_variable_name] = [description of the input to module, its type
+ definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+ [local_store_name] = [description of the local store, its type
+ definition, and length (when applicable)]
+ [local_buffer_name] = [description of the local buffer, its type
+ definition, and length (when applicable)]
+ [local_ptr_name] = [description of the local pointer, its type
+ definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+ [global_store_name] = [description of the global store, its type
+ definition, and length (when applicable)]
+ [global_buffer_name] = [description of the global buffer, its type
+ definition, and length (when applicable)]
+ [global_ptr_name] = [description of the global pointer, its type
+ definition, and length (when applicable)]
+
+ Outputs:
+ [return_variable_name] = [description of data/pointer returned
+ by module, its type definition, and length
+ (when applicable)]
+
+ Pointers and Buffers Modified:
+ [variable_bfr_ptr] points to the [describe where the
+ variable_bfr_ptr points to, its type definition, and length
+ (when applicable)]
+ [variable_bfr] contents are [describe the new contents of
+ variable_bfr]
+
+ Local Stores Modified:
+ [local_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+ Global Stores Modified:
+ [global_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION FOR idctrow
+
+------------------------------------------------------------------------------
+ REQUIREMENTS FOR idctrow
+
+------------------------------------------------------------------------------
+ REFERENCES FOR idctrow
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE FOR idctrow
+
+------------------------------------------------------------------------------
+ RESOURCES USED FOR idctrow
+    When the code is written for a specific target processor, the
+    resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+ stack usage for each subroutine called]
+
+ where: [stack usage variable] = stack usage for [subroutine
+ name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+ used to represent cycle count for each subroutine
+ called]
+
+ where: [cycle count variable] = cycle count for [subroutine
+ name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; Function Code FOR idctrow
+----------------------------------------------------------------------------*/
+/* Full row (horizontal) IDCT for an inter block: for each of the 8 rows
+ * of blk, inverse-transform the row, add the prediction (pred, fixed
+ * pitch of 16) and store the 8 clipped pixels to dst (pitch 'width'),
+ * packed four bytes per 32-bit store.  Every coefficient is zeroed as it
+ * is read, leaving the block buffer clean for the next block.  W1..W7
+ * are fixed-point cosine constants and ADD_AND_CLIPn adds byte n of the
+ * surrounding pred_word with clipping to 0..255 -- both come from
+ * project headers not visible in this file. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow(
+    int16 *blk, uint8 *pred, uint8 *dst, int width
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* row (horizontal) IDCT
+     *
+     * 7 pi 1 dst[k] = sum c[l] * src[l] * cos( -- *
+     * ( k + - ) * l ) l=0 8 2
+     *
+     * where: c[0] = 128 c[1..7] = 128*sqrt(2) */
+
+    /* preset the offset, such that we can take advantage pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        /* blk was pre-decremented by 8, so blk[8..15] is the current row;
+         * the scattered indices below pick up row coefficients 4,6,2,1,7,5,3 */
+        x1 = (int32)blk[12] << 8;
+        blk[12] = 0;
+        x2 = blk[14];
+        blk[14] = 0;
+        x3 = blk[10];
+        blk[10] = 0;
+        x4 = blk[9];
+        blk[9] = 0;
+        x5 = blk[15];
+        blk[15] = 0;
+        x6 = blk[13];
+        blk[13] = 0;
+        x7 = blk[11];
+        blk[11] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;   /* DC, plus rounding bias */
+        blk[0] = 0;  /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x8 = W7 * (x4 + x5) + 4;
+        x4 = (x8 + (W1 - W7) * x4) >> 3;
+        x5 = (x8 - (W1 + W7) * x5) >> 3;
+        x8 = W3 * (x6 + x7) + 4;
+        x6 = (x8 - (W3 - W5) * x6) >> 3;
+        x7 = (x8 - (W3 + W5) * x7) >> 3;
+
+        /* second stage */
+        x8 = x0 + x1;
+        x0 -= x1;
+        x1 = W6 * (x3 + x2) + 4;
+        x2 = (x1 - (W2 + W6) * x2) >> 3;
+        x3 = (x1 + (W2 - W6) * x3) >> 3;
+        x1 = x4 + x6;
+        x4 -= x6;
+        x6 = x5 + x7;
+        x5 -= x7;
+
+        /* third stage */
+        x7 = x8 + x3;
+        x8 -= x3;
+        x3 = x0 + x2;
+        x0 -= x2;
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x4 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage: add prediction and pack pixels 0..3 of the row */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+
+        res = (x7 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x3 + x2) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 + x4) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x8 + x6) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        /* pixels 4..7 of the row */
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+
+        res = (x8 - x6) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 - x4) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x3 - x2) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x7 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+/* Full row (horizontal) IDCT for an intra block: inverse-transform all
+ * 8 rows of blk, clip each result to 8 bits and store it directly to
+ * comp (pitch 'width'), four packed pixels per 32-bit store -- no
+ * prediction is added.  Coefficients are zeroed as they are consumed;
+ * blk advances by B_SIZE (row stride of the coefficient block, from a
+ * project header) per iteration.  CLIP_RESULT clamps to 0..255. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow_intra(
+    int16 *blk, PIXEL *comp, int width
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* row (horizontal) IDCT
+     *
+     * 7 pi 1 dst[k] = sum c[l] * src[l] * cos( -- *
+     * ( k + - ) * l ) l=0 8 2
+     *
+     * where: c[0] = 128 c[1..7] = 128*sqrt(2) */
+    while (i--)
+    {
+        /* row coefficients in butterfly order: 4,6,2,1,7,5,3 */
+        x1 = (int32)blk[4] << 8;
+        blk[4] = 0;
+        x2 = blk[6];
+        blk[6] = 0;
+        x3 = blk[2];
+        blk[2] = 0;
+        x4 = blk[1];
+        blk[1] = 0;
+        x5 = blk[7];
+        blk[7] = 0;
+        x6 = blk[5];
+        blk[5] = 0;
+        x7 = blk[3];
+        blk[3] = 0;
+#ifndef FAST_IDCT
+        /* shortcut */  /* covered by idctrow1  01/9/2001 */
+        /* NOTE(review): this non-FAST shortcut writes the DC result back
+         * into blk[] (not comp) and returns after the first row -- it
+         * looks stale, but it is compiled out whenever FAST_IDCT is
+         * defined; verify before relying on the non-FAST build. */
+        if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
+        {
+            blk[0] = blk[1] = blk[2] = blk[3] = blk[4] = blk[5] = blk[6] = blk[7] = (blk[0] + 32) >> 6;
+            return;
+        }
+#endif
+        x0 = ((int32)blk[0] << 8) + 8192;   /* DC, plus rounding bias */
+        blk[0] = 0;  /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x8 = W7 * (x4 + x5) + 4;
+        x4 = (x8 + (W1 - W7) * x4) >> 3;
+        x5 = (x8 - (W1 + W7) * x5) >> 3;
+        x8 = W3 * (x6 + x7) + 4;
+        x6 = (x8 - (W3 - W5) * x6) >> 3;
+        x7 = (x8 - (W3 + W5) * x7) >> 3;
+
+        /* second stage */
+        x8 = x0 + x1;
+        x0 -= x1;
+        x1 = W6 * (x3 + x2) + 4;
+        x2 = (x1 - (W2 + W6) * x2) >> 3;
+        x3 = (x1 + (W2 - W6) * x3) >> 3;
+        x1 = x4 + x6;
+        x4 -= x6;
+        x6 = x5 + x7;
+        x5 -= x7;
+
+        /* third stage */
+        x7 = x8 + x3;
+        x8 -= x3;
+        x3 = x0 + x2;
+        x0 -= x2;
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x4 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage: clip and pack pixels 0..3 of the row */
+        word = ((x7 + x1) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x3 + x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x0 + x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x8 + x6) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp)) = word;
+
+        /* pixels 4..7 of the row */
+        word = ((x8 - x6) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x0 - x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x3 - x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x7 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+        comp += offset;     /* next output row */
+
+        blk += B_SIZE;      /* next coefficient row */
+    }
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+/*----------------------------------------------------------------------------
+; End Function: idctrow
+----------------------------------------------------------------------------*/
+
+
+/****************************************************************************/
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: idctcol
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS FOR idctcol
+
+ Inputs:
+ [input_variable_name] = [description of the input to module, its type
+ definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+ [local_store_name] = [description of the local store, its type
+ definition, and length (when applicable)]
+ [local_buffer_name] = [description of the local buffer, its type
+ definition, and length (when applicable)]
+ [local_ptr_name] = [description of the local pointer, its type
+ definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+ [global_store_name] = [description of the global store, its type
+ definition, and length (when applicable)]
+ [global_buffer_name] = [description of the global buffer, its type
+ definition, and length (when applicable)]
+ [global_ptr_name] = [description of the global pointer, its type
+ definition, and length (when applicable)]
+
+ Outputs:
+ [return_variable_name] = [description of data/pointer returned
+ by module, its type definition, and length
+ (when applicable)]
+
+ Pointers and Buffers Modified:
+ [variable_bfr_ptr] points to the [describe where the
+ variable_bfr_ptr points to, its type definition, and length
+ (when applicable)]
+ [variable_bfr] contents are [describe the new contents of
+ variable_bfr]
+
+ Local Stores Modified:
+ [local_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+ Global Stores Modified:
+ [global_store_name] = [describe new contents, its type
+ definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION FOR idctcol
+
+------------------------------------------------------------------------------
+ REQUIREMENTS FOR idctcol
+
+------------------------------------------------------------------------------
+ REFERENCES FOR idctcol
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE FOR idctcol
+
+------------------------------------------------------------------------------
+ RESOURCES USED FOR idctcol
+    When the code is written for a specific target processor, the
+    resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+ stack usage for each subroutine called]
+
+ where: [stack usage variable] = stack usage for [subroutine
+ name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+ used to represent cycle count for each subroutine
+ called]
+
+ where: [cycle count variable] = cycle count for [subroutine
+ name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; Function Code FOR idctcol
+----------------------------------------------------------------------------*/
+/* Full column (vertical) IDCT for one column of an 8x8 block, in place.
+ * blk points at the top of the column; successive rows are 8 samples
+ * apart.  The output keeps a factor-of-8 scale (<< 11 pre-shift, >> 8
+ * post-shift) which the subsequent row pass removes -- this matches the
+ * blk[0] << 3 shortcut in idctcol1. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol(
+    int16 *blk
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* column (vertical) IDCT
+     *
+     * 7 pi 1 dst[8*k] = sum c[l] * src[8*l] *
+     * cos( -- * ( k + - ) * l ) l=0 8 2
+     *
+     * where: c[0] = 1/1024 c[1..7] = (1/1024)*sqrt(2) */
+    x1 = (int32)blk[32] << 11;
+    x2 = blk[48];
+    x3 = blk[16];
+    x4 = blk[8];
+    x5 = blk[56];
+    x6 = blk[40];
+    x7 = blk[24];
+#ifndef FAST_IDCT
+    /* shortcut */  /* covered by idctcolumn1  01/9/2001 */
+    if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
+    {
+        blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56]
+                                              = blk[0] << 3;
+        return;
+    }
+#endif
+
+    x0 = ((int32)blk[0] << 11) + 128;   /* DC, plus rounding bias */
+
+    /* first stage */
+    x8 = W7 * (x4 + x5);
+    x4 = x8 + (W1 - W7) * x4;
+    x5 = x8 - (W1 + W7) * x5;
+    x8 = W3 * (x6 + x7);
+    x6 = x8 - (W3 - W5) * x6;
+    x7 = x8 - (W3 + W5) * x7;
+
+    /* second stage */
+    x8 = x0 + x1;
+    x0 -= x1;
+    x1 = W6 * (x3 + x2);
+    x2 = x1 - (W2 + W6) * x2;
+    x3 = x1 + (W2 - W6) * x3;
+    x1 = x4 + x6;
+    x4 -= x6;
+    x6 = x5 + x7;
+    x5 -= x7;
+
+    /* third stage */
+    x7 = x8 + x3;
+    x8 -= x3;
+    x3 = x0 + x2;
+    x0 -= x2;
+    x2 = (181 * (x4 + x5) + 128) >> 8;
+    x4 = (181 * (x4 - x5) + 128) >> 8;
+
+    /* fourth stage: write the transformed column back in place */
+    blk[0] = (x7 + x1) >> 8;
+    blk[8] = (x3 + x2) >> 8;
+    blk[16] = (x0 + x4) >> 8;
+    blk[24] = (x8 + x6) >> 8;
+    blk[32] = (x8 - x6) >> 8;
+    blk[40] = (x0 - x4) >> 8;
+    blk[48] = (x3 - x2) >> 8;
+    blk[56] = (x7 - x1) >> 8;
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+/*----------------------------------------------------------------------------
+; End Function: idctcol
+----------------------------------------------------------------------------*/
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp b/media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
rename to media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp b/media/codecs/m4v_h263/dec/src/combined_decode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
rename to media/codecs/m4v_h263/dec/src/combined_decode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp b/media/codecs/m4v_h263/dec/src/conceal.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp
rename to media/codecs/m4v_h263/dec/src/conceal.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp b/media/codecs/m4v_h263/dec/src/datapart_decode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
rename to media/codecs/m4v_h263/dec/src/datapart_decode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/dcac_prediction.cpp b/media/codecs/m4v_h263/dec/src/dcac_prediction.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/dcac_prediction.cpp
rename to media/codecs/m4v_h263/dec/src/dcac_prediction.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp b/media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
rename to media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp b/media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
rename to media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/get_pred_outside.cpp b/media/codecs/m4v_h263/dec/src/get_pred_outside.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/get_pred_outside.cpp
rename to media/codecs/m4v_h263/dec/src/get_pred_outside.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct.cpp b/media/codecs/m4v_h263/dec/src/idct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/idct.cpp
rename to media/codecs/m4v_h263/dec/src/idct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct.h b/media/codecs/m4v_h263/dec/src/idct.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/idct.h
rename to media/codecs/m4v_h263/dec/src/idct.h
diff --git a/media/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/codecs/m4v_h263/dec/src/idct_vca.cpp
new file mode 100644
index 0000000..dbaf5d1
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/idct_vca.cpp
@@ -0,0 +1,670 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "mp4def.h"
+#include "idct.h"
+#include "motion_comp.h"
+
+#ifdef FAST_IDCT
+
+/****************************************************************
+* vca_idct.c : created 6/1/99 for several options
+* of hard-coded reduced idct function (using nz_coefs)
+******************************************************************/
+
+/*****************************************************/
+//pretested version
+/* Row variant for zero nonzero coefficients: intentionally a no-op.
+ * The unnamed parameters exist only to match the dispatch-table
+ * signature. */
+void idctrow0(int16 *, uint8 *, uint8 *, int)
+{
+    return ;
+}
+/* Column variant for an all-zero column: nothing to do. */
+void idctcol0(int16 *)
+{
+    return ;
+}
+
+/* Row pass when each row reduces to its (rounded) DC term: every output
+ * pixel of a row is the same constant added to the corresponding
+ * prediction byte, clipped to 0..255.  pred has a fixed pitch of 16,
+ * dst has pitch 'width'; pixels are packed four per 32-bit store.
+ * The consumed coefficient is zeroed. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    /* shortcut */
+    int tmp;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        tmp = (*(blk += 8) + 32) >> 6;   /* rounded DC of this row */
+        *blk = 0;
+
+        /* pixels 0..3 of the row */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = tmp + (pred_word & 0xFF);
+        CLIP_RESULT(res);
+        res2 = tmp + ((pred_word >> 8) & 0xFF);
+        CLIP_RESULT(res2);
+        dst_word = (res2 << 8) | res;
+        res = tmp + ((pred_word >> 16) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 16);
+        res = tmp + ((pred_word >> 24) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        /* pixels 4..7 of the row */
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = tmp + (pred_word & 0xFF);
+        CLIP_RESULT(res);
+        res2 = tmp + ((pred_word >> 8) & 0xFF);
+        CLIP_RESULT(res2);
+        dst_word = (res2 << 8) | res;
+        res = tmp + ((pred_word >> 16) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 16);
+        res = tmp + ((pred_word >> 24) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return;
+}
+
+/* DC-only column: every sample of the output column equals the
+ * pre-scaled DC term, so broadcast blk[0] << 3 down the column
+ * (successive rows of the 8x8 block are 8 samples apart). */
+void idctcol1(int16 *blk)
+{
+    int k;
+    int16 dc = blk[0] << 3;
+
+    for (k = 0; k < 64; k += 8)
+    {
+        blk[k] = dc;
+    }
+    return;
+}
+
+/* Row pass when each row has nonzero coefficients only at positions 0
+ * and 1: the butterfly collapses so every output pixel is x0 plus or
+ * minus one of four AC contributions.  Prediction (pred, pitch 16) is
+ * added via the ADD_AND_CLIPn macros; consumed coefficients are zeroed. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x4, x5;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        /* shortcut */
+        x4 = blk[9];            /* coefficient 1 of the row */
+        blk[9] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;   /* DC, plus rounding bias */
+        *blk = 0;  /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x5 = (W7 * x4 + 4) >> 3;
+        x4 = (W1 * x4 + 4) >> 3;
+
+        /* third stage */
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x1 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage: add prediction and pack pixels 0..3 of the row */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x4) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 + x2) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 + x5) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        /* pixels 4..7 of the row */
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x0 - x5) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 - x1) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 - x2) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x4) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return ;
+}
+
+/* Column pass when only rows 0 and 1 of the column are nonzero
+ * (blk[0] and blk[8]): computes the column in place with the reduced
+ * butterfly.  Same factor-of-8 output scale as idctcol. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol2(int16 *blk)
+{
+    int32 x0, x1, x3, x5, x7;//, x8;
+
+    x1 = blk[8];
+    x0 = ((int32)blk[0] << 11) + 128;   /* DC, plus rounding bias */
+    /* both upper and lower*/
+
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+
+    x3 = x7;
+    x5 = (181 * (x1 - x7) + 128) >> 8;
+    x7 = (181 * (x1 + x7) + 128) >> 8;
+
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x0 + x7) >> 8;
+    blk[16] = (x0 + x5) >> 8;
+    blk[24] = (x0 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x0 - x7) >> 8;
+    blk[40] = (x0 - x5) >> 8;
+    blk[32] = (x0 - x3) >> 8;
+
+    return ;
+}
+
+/* Row pass when each row has nonzero coefficients only at positions
+ * 0..2 (blk[8k], blk[8k+1], blk[8k+2]).  Prediction (pred, pitch 16)
+ * is added via ADD_AND_CLIPn; consumed coefficients are zeroed. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        x2 = blk[10];           /* coefficient 2 of the row */
+        blk[10] = 0;
+        x1 = blk[9];            /* coefficient 1 of the row */
+        blk[9] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;   /* DC, plus rounding bias */
+        *blk = 0;  /* for proper rounding in the fourth stage */
+        /* both upper and lower*/
+        /* both x2orx6 and x0orx4 */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x3 = x7;
+        x5 = (181 * (x1 - x7) + 128) >> 8;
+        x7 = (181 * (x1 + x7) + 128) >> 8;
+
+        /* add prediction and pack pixels 0..3 of the row */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x4 + x7) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x6 + x5) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x2 + x3) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        /* pixels 4..7 of the row */
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x2 - x3) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x6 - x5) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x4 - x7) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+
+    return ;
+}
+
+/* Column pass when only rows 0..2 of the column are nonzero
+ * (blk[0], blk[8], blk[16]); in-place, same output scale as idctcol. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol3(int16 *blk)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+
+    x2 = blk[16];
+    x1 = blk[8];
+    x0 = ((int32)blk[0] << 11) + 128;   /* DC, plus rounding bias */
+
+    x4 = x0;
+    x6 = W6 * x2;
+    x2 = W2 * x2;
+    x8 = x0 - x2;
+    x0 += x2;
+    x2 = x8;
+    x8 = x4 - x6;
+    x4 += x6;
+    x6 = x8;
+
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+    x3 = x7;
+    x5 = (181 * (x1 - x7) + 128) >> 8;
+    x7 = (181 * (x1 + x7) + 128) >> 8;
+
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x4 + x7) >> 8;
+    blk[16] = (x6 + x5) >> 8;
+    blk[24] = (x2 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x4 - x7) >> 8;
+    blk[40] = (x6 - x5) >> 8;
+    blk[32] = (x2 - x3) >> 8;
+
+    return;
+}
+
+
+/* Row pass when each row has nonzero coefficients only at positions
+ * 0..3 (blk[8k] .. blk[8k+3]).  Prediction (pred, pitch 16) is added
+ * via ADD_AND_CLIPn; consumed coefficients are zeroed. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        x2 = blk[10];           /* coefficient 2 of the row */
+        blk[10] = 0;
+        x1 = blk[9];            /* coefficient 1 of the row */
+        blk[9] = 0;
+        x3 = blk[11];           /* coefficient 3 of the row */
+        blk[11] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;   /* DC, plus rounding bias */
+        *blk = 0;  /* for proper rounding in the fourth stage */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x5 = (W3 * x3 + 4) >> 3;
+        x3 = (- W5 * x3 + 4) >> 3;
+        x8 = x1 - x5;
+        x1 += x5;
+        x5 = x8;
+        x8 = x7 - x3;
+        x3 += x7;
+        x7 = (181 * (x5 + x8) + 128) >> 8;
+        x5 = (181 * (x5 - x8) + 128) >> 8;
+
+        /* add prediction and pack pixels 0..3 of the row */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x4 + x7) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x6 + x5) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x2 + x3) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        /* pixels 4..7 of the row */
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x2 - x3) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x6 - x5) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x4 - x7) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return ;
+}
+
+/* Column-pass inverse DCT specialised for columns whose only non-zero
+ * coefficients are rows 0..3 (blk[0], blk[8], blk[16], blk[24]).
+ * Operates in place on one column of an 8x8 int16 block (row stride 8);
+ * results are left at reduced precision (>> 8) for the row pass. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol4(int16 *blk)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    x2 = blk[16];
+    x1 = blk[8];
+    x3 = blk[24];
+    x0 = ((int32)blk[0] << 11) + 128; /* DC pre-scaled, +128 for rounding */
+
+    /* even part */
+    x4 = x0;
+    x6 = W6 * x2;
+    x2 = W2 * x2;
+    x8 = x0 - x2;
+    x0 += x2;
+    x2 = x8;
+    x8 = x4 - x6;
+    x4 += x6;
+    x6 = x8;
+
+    /* odd part; 181/256 approximates 1/sqrt(2) */
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+    x5 = W3 * x3;
+    x3 = -W5 * x3;
+    x8 = x1 - x5;
+    x1 += x5;
+    x5 = x8;
+    x8 = x7 - x3;
+    x3 += x7;
+    x7 = (181 * (x5 + x8) + 128) >> 8;
+    x5 = (181 * (x5 - x8) + 128) >> 8;
+
+
+    /* output butterflies: top and bottom halves are mirror images */
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x4 + x7) >> 8;
+    blk[16] = (x6 + x5) >> 8;
+    blk[24] = (x2 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x4 - x7) >> 8;
+    blk[40] = (x6 - x5) >> 8;
+    blk[32] = (x2 - x3) >> 8;
+
+    return ;
+}
+
+/* Degenerate row pass: the block has no coefficients to process, so this
+ * is a no-op.  Presumably kept so the idctrow*_intra variants share a
+ * uniform signature for table-based dispatch -- confirm at the call site. */
+void idctrow0_intra(int16 *, PIXEL *, int)
+{
+    return ;
+}
+
+/* Row-pass IDCT shortcut for blocks that contain only a DC coefficient:
+ * the descaled, clipped DC value is replicated across all 8 pixels of each
+ * row and written out as two packed 4-byte words.
+ * NOTE(review): the uint32 stores assume comp is 4-byte aligned and PIXEL
+ * is one byte wide -- confirm against the typedefs. */
+void idctrow1_intra(int16 *blk, PIXEL *comp, int width)
+{
+    /* shortcut */
+    int32 tmp;
+    int i = 8;
+    int offset = width;
+    uint32 word;
+
+    comp -= offset; /* pre-offset so the store below can pre-increment */
+    while (i--)
+    {
+        tmp = ((blk[0] + 32) >> 6); /* descale DC with rounding */
+        blk[0] = 0;                 /* clear for the next block */
+        CLIP_RESULT(tmp)
+
+        /* replicate the clipped byte into all four lanes of a word */
+        word = (tmp << 8) | tmp;
+        word = (word << 16) | word;
+
+        *((uint32*)(comp += offset)) = word;
+        *((uint32*)(comp + 4)) = word;
+
+
+
+
+        blk += B_SIZE;
+    }
+    return;
+}
+
+/* Row-pass IDCT for intra rows whose only non-zero coefficients are the
+ * first two (DC plus one AC).  Reconstructs 8 pixels per row, clips each
+ * to [0,255] with CLIP_RESULT, and stores the row as two packed 4-byte
+ * words (no prediction is added in the intra variants).
+ * NOTE(review): the int32 stores assume comp is 4-byte aligned. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x4, x5, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+    while (i--)
+    {
+        /* shortcut */
+        x4 = blk[1];
+        blk[1] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0; /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x5 = (W7 * x4 + 4) >> 3;
+        x4 = (W1 * x4 + 4) >> 3;
+
+        /* third stage; 181/256 approximates 1/sqrt(2) */
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x1 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage: first output word holds pixels 0..3 */
+        word = ((x0 + x4) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x0 + x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+        temp = ((x0 + x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+        temp = ((x0 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        /* second output word: pixels 4..7 (mirrored contributions) */
+        word = ((x0 - x5) >> 14);
+        CLIP_RESULT(word)
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+        temp = ((x0 - x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+        temp = ((x0 - x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+    return ;
+}
+
+/* Row-pass IDCT for intra rows whose only non-zero coefficients are the
+ * first three (columns 0..2).  Reconstructs 8 pixels per row, clips each
+ * to [0,255] with CLIP_RESULT, and stores the row as two packed 4-byte
+ * words (no prediction is added in the intra variants).
+ * NOTE(review): the int32 stores assume comp is 4-byte aligned. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+
+    while (i--)
+    {
+        /* consume the three coefficients and zero them for the next block */
+        x2 = blk[2];
+        blk[2] = 0;
+        x1 = blk[1];
+        blk[1] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;/* for proper rounding in the fourth stage */
+        /* both upper and lower*/
+        /* both x2orx6 and x0orx4 */
+
+        /* even part */
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        /* odd part; 181/256 approximates 1/sqrt(2) */
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x3 = x7;
+        x5 = (181 * (x1 - x7) + 128) >> 8;
+        x7 = (181 * (x1 + x7) + 128) >> 8;
+
+        /* first output word: pixels 0..3 of the row */
+        word = ((x0 + x1) >> 14);
+        CLIP_RESULT(word)
+        temp = ((x4 + x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+
+        temp = ((x6 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x2 + x3) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        /* second output word: pixels 4..7 (mirrored butterflies) */
+        word = ((x2 - x3) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x6 - x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x4 - x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+    return ;
+}
+
+/* Row-pass IDCT for intra rows whose only non-zero coefficients are the
+ * first four (columns 0..3).  Reconstructs 8 pixels per row, clips each
+ * to [0,255] with CLIP_RESULT, and stores the row as two packed 4-byte
+ * words (no prediction is added in the intra variants).
+ * NOTE(review): the int32 stores assume comp is 4-byte aligned. */
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+
+    while (i--)
+    {
+        /* consume the four coefficients and zero them for the next block */
+        x2 = blk[2];
+        blk[2] = 0;
+        x1 = blk[1];
+        blk[1] = 0;
+        x3 = blk[3];
+        blk[3] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;/* for proper rounding in the fourth stage */
+
+        /* even part */
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        /* odd part; 181/256 approximates 1/sqrt(2) */
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x5 = (W3 * x3 + 4) >> 3;
+        x3 = (- W5 * x3 + 4) >> 3;
+        x8 = x1 - x5;
+        x1 += x5;
+        x5 = x8;
+        x8 = x7 - x3;
+        x3 += x7;
+        x7 = (181 * (x5 + x8) + 128) >> 8;
+        x5 = (181 * (x5 - x8) + 128) >> 8;
+
+        /* first output word: pixels 0..3 of the row */
+        word = ((x0 + x1) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x4 + x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+
+        temp = ((x6 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x2 + x3) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        /* second output word: pixels 4..7 (mirrored butterflies) */
+        word = ((x2 - x3) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x6 - x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x4 - x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+
+    return ;
+}
+
+#endif
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/max_level.h b/media/codecs/m4v_h263/dec/src/max_level.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/max_level.h
rename to media/codecs/m4v_h263/dec/src/max_level.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp b/media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
rename to media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mb_utils.cpp b/media/codecs/m4v_h263/dec/src/mb_utils.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mb_utils.cpp
rename to media/codecs/m4v_h263/dec/src/mb_utils.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mbtype_mode.h b/media/codecs/m4v_h263/dec/src/mbtype_mode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mbtype_mode.h
rename to media/codecs/m4v_h263/dec/src/mbtype_mode.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/motion_comp.h b/media/codecs/m4v_h263/dec/src/motion_comp.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/motion_comp.h
rename to media/codecs/m4v_h263/dec/src/motion_comp.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h b/media/codecs/m4v_h263/dec/src/mp4dec_lib.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
rename to media/codecs/m4v_h263/dec/src/mp4dec_lib.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4def.h b/media/codecs/m4v_h263/dec/src/mp4def.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mp4def.h
rename to media/codecs/m4v_h263/dec/src/mp4def.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4lib_int.h b/media/codecs/m4v_h263/dec/src/mp4lib_int.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mp4lib_int.h
rename to media/codecs/m4v_h263/dec/src/mp4lib_int.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/packet_util.cpp b/media/codecs/m4v_h263/dec/src/packet_util.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/packet_util.cpp
rename to media/codecs/m4v_h263/dec/src/packet_util.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp b/media/codecs/m4v_h263/dec/src/post_filter.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
rename to media/codecs/m4v_h263/dec/src/post_filter.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc.h b/media/codecs/m4v_h263/dec/src/post_proc.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/post_proc.h
rename to media/codecs/m4v_h263/dec/src/post_proc.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/codecs/m4v_h263/dec/src/pvdec_api.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
rename to media/codecs/m4v_h263/dec/src/pvdec_api.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/scaling.h b/media/codecs/m4v_h263/dec/src/scaling.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/scaling.h
rename to media/codecs/m4v_h263/dec/src/scaling.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/scaling_tab.cpp b/media/codecs/m4v_h263/dec/src/scaling_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/scaling_tab.cpp
rename to media/codecs/m4v_h263/dec/src/scaling_tab.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_dec_tab.h b/media/codecs/m4v_h263/dec/src/vlc_dec_tab.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_dec_tab.h
rename to media/codecs/m4v_h263/dec/src/vlc_dec_tab.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.cpp b/media/codecs/m4v_h263/dec/src/vlc_decode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_decode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.h b/media/codecs/m4v_h263/dec/src/vlc_decode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.h
rename to media/codecs/m4v_h263/dec/src/vlc_decode.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_dequant.cpp b/media/codecs/m4v_h263/dec/src/vlc_dequant.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_dequant.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_dequant.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_tab.cpp b/media/codecs/m4v_h263/dec/src/vlc_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_tab.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_tab.cpp
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
new file mode 100644
index 0000000..7b32498
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/vop.cpp
@@ -0,0 +1,1674 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "log/log.h"
+
+#include "mp4dec_lib.h"
+#include "bitstream.h"
+#include "vlc_decode.h"
+#include "zigzag.h"
+
+#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
+
+/* INTRA */
+/* Default MPEG-4 intra quantisation matrix in raster order; copied into
+ * currVol->iqmat by DecodeVOLHeader() when quant_type is set but no
+ * custom intra matrix is loaded from the bitstream. */
+const static int mpeg_iqmat_def[NCOEFF_BLOCK] =
+{
+    8, 17, 18, 19, 21, 23, 25, 27,
+    17, 18, 19, 21, 23, 25, 27, 28,
+    20, 21, 22, 23, 24, 26, 28, 30,
+    21, 22, 23, 24, 26, 28, 30, 32,
+    22, 23, 24, 26, 28, 30, 32, 35,
+    23, 24, 26, 28, 30, 32, 35, 38,
+    25, 26, 28, 30, 32, 35, 38, 41,
+    27, 28, 30, 32, 35, 38, 41, 45
+};
+
+/* INTER */
+/* Default MPEG-4 non-intra (inter) quantisation matrix in raster order;
+ * copied into currVol->niqmat by DecodeVOLHeader() when quant_type is set
+ * but no custom non-intra matrix is loaded from the bitstream. */
+const static int mpeg_nqmat_def[64]  =
+{
+    16, 17, 18, 19, 20, 21, 22, 23,
+    17, 18, 19, 20, 21, 22, 23, 24,
+    18, 19, 20, 21, 22, 23, 24, 25,
+    19, 20, 21, 22, 23, 24, 26, 27,
+    20, 21, 22, 23, 25, 26, 27, 28,
+    21, 22, 23, 24, 26, 27, 28, 30,
+    22, 23, 24, 26, 27, 28, 30, 31,
+    23, 24, 25, 27, 28, 30, 31, 33
+};
+
+/* ======================================================================== */
+/* Function : CalcNumBits() */
+/* Purpose : */
+/* In/out : */
+/* Return : Calculate the minimum number of bits required to */
+/* represent x. */
+/* Note : This is an equivalent implementation of */
+/* (long)ceil(log((double)x)/log(2.0)) */
+/* Modified : */
+/* ======================================================================== */
+int CalcNumBits(uint x)
+{
+    /* Returns the number of bits needed to hold x: floor(log2(x)) + 1 for
+     * x >= 1, and 1 for x == 0.  Callers pass (range - 1), which makes the
+     * result equal ceil(log2(range)) for power-of-two ranges (the banner
+     * comment's "ceil" description holds only under that calling pattern). */
+    int i = 1;
+    while (x >>= 1) i++;
+    return i;
+}
+
+
+
+/***********************************************************CommentBegin******
+*
+* -- DecodeVolHeader -- Decode the header of a VOL
+*
+* 04/10/2000 : initial modification to the new PV-Decoder Lib format.
+* 10/12/2001 : reject non compliant bitstreams
+*
+***********************************************************CommentEnd********/
+/* Parses a VOL (Video Object Layer) header -- optionally preceded by
+ * visual_object_sequence / visual_object headers -- from the bitstream of
+ * video->vol[layer], filling in the Vol and VideoDecData fields it
+ * describes (dimensions, time resolution, quantisation matrices, VLC
+ * tables, scalability parameters, ...).
+ * Returns PV_SUCCESS on success and PV_FAIL on non-compliant or
+ * unsupported streams; if a short-video-header (H.263) stream is detected
+ * instead, sets video->shortVideoHeader and returns PV_SUCCESS.  With
+ * PV_TOLERATE_VOL_ERRORS it may also return PV_BAD_VOLHEADER. */
+PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer)
+{
+    PV_STATUS status;
+    Vol *currVol;
+    BitstreamDecVideo *stream;
+    uint32 tmpvar, vol_shape;
+    uint32 startCode;
+    int *qmat, i, j;
+    int version_id = 1;
+#ifdef PV_TOLERATE_VOL_ERRORS
+    uint32 profile = 0x01;
+#endif
+    /* There's a "currLayer" variable inside videoDecData. */
+    /* However, we don't maintain it until we decode frame data. 04/05/2000 */
+    currVol = video->vol[layer];
+    stream = currVol->bitstream;
+    currVol->moduloTimeBase = 0;
+
+    /* Determine which start code for the decoder to begin with */
+    status = BitstreamShowBits32HC(stream, &startCode);
+
+    if (startCode == VISUAL_OBJECT_SEQUENCE_START_CODE)
+    {   /* Bitstream Exhchange Fix 9/99 */
+        /* Bitstream Exchange requires we allow start with Video Object Sequence */
+        /* visual_object_sequence_start_code */
+        (void) BitstreamReadBits32HC(stream);
+        tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* profile */
+#ifndef PV_TOLERATE_VOL_ERRORS
+        if (layer) /* enhancement layer: only a subset of profiles allowed */
+        {
+            /* support SSPL0-2 */
+            if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
+                tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3/* Core SP@L1-L3 */)
+                return PV_FAIL;
+        }
+        else
+        {
+            /* support SPL0-3 & SSPL0-2 */
+            if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
+                /* While not technically supported, try to decode SPL4&SPL5 files as well. */
+                /* We'll fail later if the size is too large. This is to allow playback of */
+                /* some <=CIF files generated by other encoders. */
+                tmpvar != 0x04 && tmpvar != 0x05 &&
+                tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
+                tmpvar != 0x21 && tmpvar != 0x22 && /* Core Profile Levels */
+                tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
+                tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
+                tmpvar != 0xF2 && tmpvar != 0xF3 &&
+                tmpvar != 0xF4 && tmpvar != 0xF5)
+                return PV_FAIL;
+        }
+#else
+        profile = tmpvar;
+#endif
+
+        // save the profile and level for the query
+        currVol->profile_level_id = (uint)tmpvar; // 6/10/04
+
+
+
+        status = BitstreamShowBits32HC(stream, &tmpvar);
+        if (tmpvar == USER_DATA_START_CODE)
+        {
+            /* Something has to be done with user data 11/11/99 */
+            status = DecodeUserData(stream);
+            if (status != PV_SUCCESS) return PV_FAIL;
+        }
+        /* visual_object_start_code */
+        BitstreamShowBits32HC(stream, &tmpvar);
+        if (tmpvar != VISUAL_OBJECT_START_CODE)
+        {
+            do
+            {
+                /* Search for VOL_HEADER */
+                status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
+                if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
+                BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
+                PV_BitstreamFlushBits(stream, 8);
+            }
+            while (tmpvar != VOL_START_CODE);
+            goto decode_vol;
+        }
+        else
+        {
+            BitstreamReadBits32HC(stream);
+        }
+
+        /* is_visual_object_identifier */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+        if (tmpvar)
+        {
+            /* visual_object_verid */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 4);
+            /* visual_object_priority */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 3);
+        }
+        /* visual_object_type */
+        BitstreamShowBits32(stream, 4, &tmpvar);
+        if (tmpvar == 1)
+        {   /* video_signal_type */
+            PV_BitstreamFlushBits(stream, 4);
+            tmpvar = (uint32) BitstreamRead1Bits(stream);
+            if (tmpvar == 1)
+            {
+                /* video_format */
+                tmpvar = (uint32) BitstreamReadBits16(stream, 3);
+                /* video_range */
+                tmpvar = (uint32) BitstreamRead1Bits(stream);
+                /* color_description */
+                tmpvar = (uint32) BitstreamRead1Bits(stream);
+                if (tmpvar == 1)
+                {
+                    /* color_primaries */
+                    tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+                    /* transfer_characteristics */
+                    tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+                    /* matrix_coefficients */
+                    tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+                }
+            }
+        }
+        else
+        {
+            do
+            {
+                /* Search for VOL_HEADER */
+                status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
+                if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
+                BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
+                PV_BitstreamFlushBits(stream, 8);
+            }
+            while (tmpvar != VOL_START_CODE);
+            goto decode_vol;
+        }
+
+        /* next_start_code() */
+        status = PV_BitstreamByteAlign(stream); /* 10/12/01 */
+        status = BitstreamShowBits32HC(stream, &tmpvar);
+
+        if (tmpvar == USER_DATA_START_CODE)
+        {
+            /* Something has to be done to deal with user data (parse it) 11/11/99 */
+            status = DecodeUserData(stream);
+            if (status != PV_SUCCESS) return PV_FAIL;
+        }
+        status = BitstreamShowBits32(stream, 27, &tmpvar); /* 10/12/01 */
+    }
+    else
+    {
+        /* tmpvar = 0; */ /* 10/12/01 */
+        status = BitstreamShowBits32(stream, 27, &tmpvar); /* uncomment this line if you want
+                                                           to start decoding with a
+                                                           video_object_start_code */
+    }
+
+    if (tmpvar == VO_START_CODE)
+    {
+        /*****
+        *
+        *   Read the VOL header entries from the bitstream
+        *
+        *****/
+        /* video_object_start_code */
+        tmpvar = BitstreamReadBits32(stream, 27);
+        tmpvar = (uint32) BitstreamReadBits16(stream, 5);
+
+
+        /* video_object_layer_start_code */
+        BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
+        if (tmpvar != VOL_START_CODE)
+        {
+            status = BitstreamCheckEndBuffer(stream);
+            if (status == PV_END_OF_VOP)
+            {
+                video->shortVideoHeader = TRUE;
+                return PV_SUCCESS;
+            }
+            else
+            {
+                do
+                {
+                    /* Search for VOL_HEADER */
+                    status = PVSearchNextM4VFrame(stream);/* search 0x00 0x00 0x01 */
+                    if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
+                    BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
+                    PV_BitstreamFlushBits(stream, 8); /* advance the byte ptr */
+                }
+                while (tmpvar != VOL_START_CODE);
+            }
+        }
+        else
+        {
+            PV_BitstreamFlushBits(stream, 8);
+        }
+
+decode_vol:
+        /* common entry point once a VOL start code has been located,
+         * whether found directly or via one of the resync searches above */
+        PV_BitstreamFlushBits(stream, VOL_START_CODE_LENGTH - 8);
+        video->shortVideoHeader = 0;
+
+        /* vol_id (4 bits) */
+        currVol->volID = (int) BitstreamReadBits16(stream, 4);
+
+        /* RandomAccessible flag */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+
+        /* object type */
+        tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* */
+
+#ifdef PV_TOLERATE_VOL_ERRORS
+        if (tmpvar == 0)
+        {
+            /* object type missing: infer it from the profile read earlier */
+            if (layer) /* */
+            {
+                /* support SSPL0-2 */
+                if (profile != 0x10 && profile != 0x11 && profile != 0x12)
+                    return PV_FAIL;
+                tmpvar = 0x02;
+            }
+            else
+            {
+                /* support SPL0-3 & SSPL0-2 */
+                if (profile != 0x01 && profile != 0x02 && profile != 0x03 && profile != 0x08 &&
+                    profile != 0x10 && profile != 0x11 && profile != 0x12)
+                    return PV_FAIL;
+                tmpvar = 0x01;
+            }
+            profile |= 0x0100;
+        }
+#endif
+
+        if (layer)
+        {
+            if (tmpvar != 0x02) return PV_FAIL;
+        }
+        else
+        {
+            // Simple and advanced simple (for quant-type 1)
+            if (tmpvar != 0x01 && tmpvar != 0x11) return PV_FAIL;
+        }
+
+        /* version id specified? */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+        if (tmpvar == 1)
+        {
+            /* version ID */
+            version_id = (uint32) BitstreamReadBits16(stream, 4);
+            /* priority */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 3);
+
+        }
+
+        /* aspect ratio info */
+        tmpvar = (uint32) BitstreamReadBits16(stream, 4);
+        if (tmpvar == 0) return PV_FAIL;
+        if (tmpvar == 0xf /* extended_par */)
+        {
+            /* width */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+            /* height */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+        }
+
+
+        /* control parameters present? */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+
+        /* Get the parameters (skipped) */
+        /* 03/10/99 */
+        if (tmpvar)
+        {
+            /* chroma_format */
+            tmpvar = BitstreamReadBits16(stream, 2);
+            if (tmpvar != 1) return PV_FAIL;
+            /* low_delay */
+            tmpvar = BitstreamRead1Bits(stream);
+
+            /* vbv_parameters present? */
+            tmpvar = (uint32) BitstreamRead1Bits(stream);
+            if (tmpvar)
+            {
+                /* VBV fields are read and discarded; the interleaved
+                 * single-bit reads are the syntax's marker bits */
+                /* first_half_bit_rate */
+                BitstreamReadBits16(stream, 15);
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+                /* latter_half_bit_rate */
+                BitstreamReadBits16(stream, 15);
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+                /* first_half_vbv_buffer_size */
+                BitstreamReadBits16(stream, 15);
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+                /* latter_half_vbv_buffer_size */
+                BitstreamReadBits16(stream, 3);
+                /* first_half_vbv_occupancy */
+                BitstreamReadBits16(stream, 11);
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+                /* latter_half_vbv_occupancy */
+                BitstreamReadBits16(stream, 15);
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+            }
+        }
+
+        /* video_object_layer_shape (2 bits), only 00 (rect) is supported for now */
+        vol_shape = (uint32) BitstreamReadBits16(stream, 2);
+        if (vol_shape) return PV_FAIL;
+
+        /* marker bit, 03/10/99 */
+        if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+        /* vop_time_increment_resolution */
+        currVol->timeIncrementResolution = BitstreamReadBits16(stream, 16);
+        if (currVol->timeIncrementResolution == 0) return PV_FAIL;
+
+        /* . since nbitsTimeIncRes will be used over and over again, */
+        /* we should put it in Vol structure. 04/12/2000. */
+        currVol->nbitsTimeIncRes = CalcNumBits((uint)currVol->timeIncrementResolution - 1);
+
+        if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+        /* fixed_vop_rate */
+        currVol->fixedVopRate = (int) BitstreamRead1Bits(stream);
+        if (currVol->fixedVopRate)
+        {
+            /* fixed_vop_time_increment */
+            tmpvar = BitstreamReadBits16(stream, currVol->nbitsTimeIncRes);
+        }
+
+        /* marker bit */
+        if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+        /* video_object_layer_width (13 bits) */
+        tmpvar = BitstreamReadBits16(stream, 13);
+        if (!tmpvar) return PV_FAIL;
+        video->displayWidth = video->width = tmpvar;
+
+        /* round up to a multiple of MB_SIZE. 08/09/2000 */
+        video->width = (video->width + 15) & -16;
+// video->displayWidth += (video->displayWidth & 0x1); /* displayed image should be even size */
+
+        /* marker bit */
+        if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+        /* video_object_layer_height (13 bits) */
+        tmpvar = BitstreamReadBits16(stream, 13);
+        if (!tmpvar) return PV_FAIL;
+        video->displayHeight = video->height = tmpvar;
+
+        /* round up to a multiple of MB_SIZE. 08/09/2000 */
+        video->height = (video->height + 15) & -16;
+// video->displayHeight += (video->displayHeight & 0x1); /* displayed image should be even size */
+        if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+        /* 03/10/99 */
+        /* interlaced */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+        if (tmpvar != 0)
+        {
+            mp4dec_log("DecodeVOLHeader(): Interlaced video is not supported.\n");
+            return PV_FAIL;
+        }
+
+        /* obmc_disable */
+        tmpvar = (uint32) BitstreamRead1Bits(stream);
+        if (tmpvar == 0) return PV_FAIL;
+
+        if (version_id == 1)
+        {
+            /* sprite_enable (1 bits) */
+            tmpvar = (uint32) BitstreamRead1Bits(stream);
+            if (tmpvar)
+            {
+                mp4dec_log("DecodeVOLHeader(): Sprite is not supported.\n");
+                return PV_FAIL;
+            }
+        }
+        else
+        {
+            /* For version 2, vol_sprite_usage has two bits. */
+            /* sprite_enable */
+            tmpvar = (uint32) BitstreamReadBits16(stream, 2);
+            if (tmpvar)
+            {
+                mp4dec_log("DecodeVOLHeader(): Sprite is not supported.\n");
+                return PV_FAIL;
+            }
+        }
+
+        /* not_8_bit */
+        if (BitstreamRead1Bits(stream))
+        {
+            /* quant_precision */
+            currVol->quantPrecision = BitstreamReadBits16(stream, 4);
+            /* bits_per_pixel */
+            currVol->bitsPerPixel = BitstreamReadBits16(stream, 4);
+            mp4dec_log("DecodeVOLHeader(): not an 8-bit stream.\n"); // For the time being we do not support != 8 bits
+
+            return PV_FAIL;
+        }
+        else
+        {
+            currVol->quantPrecision = 5;
+            currVol->bitsPerPixel = 8;
+        }
+
+        /* quant_type (1 bit) */
+        currVol->quantType = BitstreamRead1Bits(stream);
+        if (currVol->quantType)
+        {
+            /* load quantization matrices. 5/22/2000 */
+            /* load_intra_quant_mat (1 bit) */
+            qmat = currVol->iqmat;
+            currVol->loadIntraQuantMat = BitstreamRead1Bits(stream);
+            if (currVol->loadIntraQuantMat)
+            {
+                /* intra_quant_mat (8*64 bits) */
+                i = 0;
+                do
+                {
+                    qmat[*(zigzag_inv+i)] = (int) BitstreamReadBits16(stream, 8);
+                }
+                while ((qmat[*(zigzag_inv+i)] != 0) && (++i < 64));
+
+                /* qmatrix must have at least one non-zero value, which means
+                   i would be non-zero in valid cases */
+                if (i == 0)
+                {
+                    return PV_FAIL;
+                }
+
+                /* a zero entry terminates the list; replicate the last
+                 * non-zero value into the remaining zig-zag positions */
+                for (j = i; j < 64; j++)
+                    qmat[*(zigzag_inv+j)] = qmat[*(zigzag_inv+i-1)];
+            }
+            else
+            {
+                oscl_memcpy(qmat, mpeg_iqmat_def, 64*sizeof(int));
+            }
+
+            qmat[0] = 0; /* necessary for switched && MPEG quant 07/09/01 */
+
+            /* load_nonintra_quant_mat (1 bit) */
+            qmat = currVol->niqmat;
+            currVol->loadNonIntraQuantMat = BitstreamRead1Bits(stream);
+            if (currVol->loadNonIntraQuantMat)
+            {
+                /* nonintra_quant_mat (8*64 bits) */
+                i = 0;
+                do
+                {
+                    qmat[*(zigzag_inv+i)] = (int) BitstreamReadBits16(stream, 8);
+                }
+                while ((qmat[*(zigzag_inv+i)] != 0) && (++i < 64));
+
+                /* qmatrix must have at least one non-zero value, which means
+                   i would be non-zero in valid cases */
+                if (i == 0)
+                {
+                    return PV_FAIL;
+                }
+
+                /* replicate the last non-zero value into the remaining
+                 * zig-zag positions, as for the intra matrix above */
+                for (j = i; j < 64; j++)
+                    qmat[*(zigzag_inv+j)] = qmat[*(zigzag_inv+i-1)];
+            }
+            else
+            {
+                oscl_memcpy(qmat, mpeg_nqmat_def, 64*sizeof(int));
+            }
+        }
+
+        if (version_id != 1)
+        {
+            /* quarter_sample enabled */
+            tmpvar = BitstreamRead1Bits(stream);
+            if (tmpvar) return PV_FAIL;
+        }
+
+        /* complexity_estimation_disable */
+        currVol->complexity_estDisable = BitstreamRead1Bits(stream);
+        if (currVol->complexity_estDisable == 0)
+        {
+            currVol->complexity_estMethod = BitstreamReadBits16(stream, 2);
+
+            if (currVol->complexity_estMethod < 2)
+            {
+                /* shape_complexity_estimation_disable */
+                tmpvar = BitstreamRead1Bits(stream);
+                if (tmpvar == 0)
+                {
+                    mp4dec_log("DecodeVOLHeader(): Shape Complexity estimation is not supported.\n");
+                    return PV_FAIL;
+                }
+                /* texture_complexity_estimation_set_1_disable */
+                tmpvar = BitstreamRead1Bits(stream);
+                if (tmpvar == 0)
+                {
+                    currVol->complexity.text_1 = BitstreamReadBits16(stream, 4);
+                }
+                /* marker bit */
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+                /* texture_complexity_estimation_set_2_disable */
+                tmpvar = BitstreamRead1Bits(stream);
+                if (tmpvar == 0)
+                {
+                    currVol->complexity.text_2 = BitstreamReadBits16(stream, 4);
+                }
+                /* motion_compensation_complexity_disable */
+                tmpvar = BitstreamRead1Bits(stream);
+                if (tmpvar == 0)
+                {
+                    currVol->complexity.mc = BitstreamReadBits16(stream, 6);
+                }
+                /* marker bit */
+                if (!BitstreamRead1Bits(stream)) return PV_FAIL;
+
+                if (currVol->complexity_estMethod == 1)
+                {   /* version2_complexity_estimation_disable */
+                    tmpvar = BitstreamRead1Bits(stream);
+                    if (tmpvar == 0)
+                    {
+                        mp4dec_log("DecodeVOLHeader(): sadct, quarter pel not supported.\n");
+                        return PV_FAIL;
+                    }
+                }
+            }
+        }
+
+        /* 03/10/99 */
+        /* resync_marker_disable */
+        currVol->errorResDisable = (int) BitstreamRead1Bits(stream);
+        /* data_partititioned */
+        currVol->dataPartitioning = (int) BitstreamRead1Bits(stream);
+
+        /* default coefficient decoders; may be swapped for the RVLC
+         * variants below when reversible VLCs are in use */
+        video->vlcDecCoeffIntra = &VlcDecTCOEFIntra;
+        video->vlcDecCoeffInter = &VlcDecTCOEFInter;
+
+        if (currVol->dataPartitioning)
+        {
+            if (layer) return PV_FAIL; /* */
+            /* reversible_vlc */
+            currVol->useReverseVLC = (int)BitstreamRead1Bits(stream);
+            if (currVol->useReverseVLC)
+            {
+                video->vlcDecCoeffIntra = &RvlcDecTCOEFIntra;
+                video->vlcDecCoeffInter = &RvlcDecTCOEFInter;
+            }
+            currVol->errorResDisable = 0;
+        }
+        else
+        {
+            currVol->useReverseVLC = 0;
+        }
+
+        if (version_id != 1)
+        {
+            /* newpred_enable */
+            tmpvar = BitstreamRead1Bits(stream);
+            if (tmpvar) return PV_FAIL;
+
+            /* reduced_resolution_vop */
+            tmpvar = BitstreamRead1Bits(stream);
+            if (tmpvar) return PV_FAIL;
+
+        }
+
+        /* Intra AC/DC prediction is always true */
+        video->intra_acdcPredDisable = 0;
+        /* scalability */
+        currVol->scalability = (int) BitstreamRead1Bits(stream);
+
+        if (currVol->scalability)
+        {
+            if (layer == 0) return PV_FAIL; /* */
+            /* hierarchy_type: 1 : temporal, 0 : spatial */
+            /* 03/10/99 */
+            currVol->scalType = (int) BitstreamRead1Bits(stream); /* */
+            if (!currVol->scalType) return PV_FAIL;
+
+            /* ref_layer_id (4 bits) */
+            currVol->refVolID = (int) BitstreamReadBits16(stream, 4);
+            if (layer) /* */
+            {
+                if (currVol->refVolID != video->vol[0]->volID) return PV_FAIL;
+            }
+            /* ref_layer_sampling_direc (1 bits) */
+            /* 1 : ref. layer has higher resolution */
+            /* 0 : ref. layer has equal or lower resolution */
+            currVol->refSampDir = (int) BitstreamRead1Bits(stream);
+            if (currVol->refSampDir) return PV_FAIL;
+
+            /* hor_sampling_factor_n (5 bits) */
+            currVol->horSamp_n = (int) BitstreamReadBits16(stream, 5);
+
+            /* hor_sampling_factor_m (5 bits) */
+            currVol->horSamp_m = (int) BitstreamReadBits16(stream, 5);
+
+            if (currVol->horSamp_m == 0) return PV_FAIL;
+            if (currVol->horSamp_n != currVol->horSamp_m) return PV_FAIL;
+
+            /* ver_sampling_factor_n (5 bits) */
+            currVol->verSamp_n = (int) BitstreamReadBits16(stream, 5);
+
+            /* ver_sampling_factor_m (5 bits) */
+            currVol->verSamp_m = (int) BitstreamReadBits16(stream, 5);
+
+            if (currVol->verSamp_m == 0) return PV_FAIL;
+            if (currVol->verSamp_n != currVol->verSamp_m) return PV_FAIL;
+
+
+            /* enhancement_type: 1 : partial region, 0 : full region */
+            /* 04/10/2000: we only support full region enhancement layer. */
+            if (BitstreamRead1Bits(stream)) return PV_FAIL;
+        }
+
+        PV_BitstreamByteAlign(stream);
+
+        status = BitstreamShowBits32HC(stream, &tmpvar);
+
+        /* if we hit the end of buffer, tmpvar == 0. 08/30/2000 */
+        if (tmpvar == USER_DATA_START_CODE)
+        {
+            status = DecodeUserData(stream);
+            /* you should not check for status here 03/19/2002 */
+            status = PV_SUCCESS;
+        }
+
+        /* Compute some convenience variables: 04/13/2000 */
+        video->nMBPerRow = video->width / MB_SIZE;
+        video->nMBPerCol = video->height / MB_SIZE;
+        video->nTotalMB = video->nMBPerRow * video->nMBPerCol;
+        video->nBitsForMBID = CalcNumBits((uint)video->nTotalMB - 1);
+#ifdef PV_ANNEX_IJKT_SUPPORT
+        video->modified_quant = 0;
+        video->advanced_INTRA = 0;
+        video->deblocking = 0;
+        video->slice_structure = 0;
+#endif
+    }
+    else
+    {
+        /* SHORT_HEADER */
+        status = BitstreamShowBits32(stream, SHORT_VIDEO_START_MARKER_LENGTH, &tmpvar);
+
+        if (tmpvar == SHORT_VIDEO_START_MARKER)
+        {
+            video->shortVideoHeader = TRUE;
+        }
+        else
+        {
+            do
+            {
+                /* Search for VOL_HEADER */
+                status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
+                if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
+                BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
+                PV_BitstreamFlushBits(stream, 8);
+            }
+            while (tmpvar != VOL_START_CODE);
+            goto decode_vol;
+        }
+    }
+#ifdef PV_TOLERATE_VOL_ERRORS
+    if (profile > 0xFF || profile == 0)
+    {
+        return PV_BAD_VOLHEADER;
+    }
+#endif
+
+    return status;
+}
+
+
+/***********************************************************CommentBegin******
+*
+* -- DecodeGOVHeader -- Decodes the Group of VOPs header from the bitstream
+*
+* 04/20/2000 initial modification to the new PV-Decoder Lib format.
+*
+***********************************************************CommentEnd********/
+/* Parse a Group-of-VOPs (GOV) header from `stream` and return its time
+ * code, converted to whole seconds, through *time_base.
+ * The 32-bit group_start_code itself is assumed to have already been
+ * consumed by the caller (see the commented-out read below).
+ * Always returns PV_SUCCESS; a GOV with closed_gov == 0 and
+ * broken_link == 1 returns early, before any trailing user data is
+ * skipped. */
+PV_STATUS DecodeGOVHeader(BitstreamDecVideo *stream, uint32 *time_base)
+{
+ uint32 tmpvar, time_s;
+ int closed_gov, broken_link;
+
+ /* group_start_code (32 bits) -- consumed by the caller */
+// tmpvar = BitstreamReadBits32(stream, 32);
+
+ /* time_code: hours (5 bits) */
+ tmpvar = (uint32) BitstreamReadBits16(stream, 5);
+ time_s = tmpvar * 3600;
+
+ /* time_code: minutes (6 bits) */
+ tmpvar = (uint32) BitstreamReadBits16(stream, 6);
+ time_s += tmpvar * 60;
+
+ /* marker bit (value not checked) */
+ tmpvar = (uint32) BitstreamRead1Bits(stream);
+
+ /* time_code: seconds (6 bits) */
+ tmpvar = (uint32) BitstreamReadBits16(stream, 6);
+ time_s += tmpvar;
+
+ /* We have to check the timestamp here. If the sync timestamp is */
+ /* earlier than the previous timestamp or longer than 60 sec. */
+ /* after the previous timestamp, assume the GOV header is */
+ /* corrupted. 05/12/2000 */
+ *time_base = time_s; /* 02/27/2002 */
+// *time_base = *time_base/1000;
+// tmpvar = time_s - *time_base;
+// if (tmpvar <= 60) *time_base = time_s;
+// else return PV_FAIL;
+
+ /* closed_gov (1 bit) then broken_link (1 bit) */
+ tmpvar = (uint32) BitstreamRead1Bits(stream);
+ closed_gov = tmpvar;
+ tmpvar = (uint32) BitstreamRead1Bits(stream);
+ broken_link = tmpvar;
+
+ /* An open GOV with a broken link: bail out early (still treated as
+ * success so decoding can continue). */
+ if ((closed_gov == 0) && (broken_link == 1))
+ {
+ return PV_SUCCESS; /* 03/15/2002 you can also return PV_FAIL */
+ }
+
+ PV_BitstreamByteAlign(stream);
+
+ BitstreamShowBits32HC(stream, &tmpvar);
+
+ /* Skip any user_data sections that follow the GOV header. */
+ while (tmpvar == USER_DATA_START_CODE) /* 03/15/2002 */
+ {
+ DecodeUserData(stream);
+ BitstreamShowBits32HC(stream, &tmpvar);
+ }
+
+ return PV_SUCCESS;
+}
+
+/***********************************************************CommentBegin******
+*
+* -- DecodeVopHeader -- Decodes the VOPheader information from the bitstream
+*
+* 04/12/2000 Initial port to the new PV decoder library format.
+* 05/10/2000 Error resilient decoding of vop header.
+*
+***********************************************************CommentEnd********/
+/* Parse one MPEG-4 VOP header (optionally preceded by a GOV header)
+ * from the current layer's bitstream and fill in `currVop`:
+ * prediction type, time increment, rounding type, intra DC VLC
+ * threshold, quantizer, forward/backward f-codes and (for scalable
+ * streams) the reference selection code.
+ * Returns PV_SUCCESS on success. Returns PV_FAIL on a missing VOP
+ * start code (after flushing one byte so the caller can resync), a
+ * wrong marker bit, a zero quantizer or a zero f-code.
+ * `use_ext_timestamp` suppresses the modulo_time_base accumulation
+ * into currVol->moduloTimeBase. */
+PV_STATUS DecodeVOPHeader(VideoDecData *video, Vop *currVop, Bool use_ext_timestamp)
+{
+ PV_STATUS status = PV_SUCCESS;
+ Vol *currVol = video->vol[video->currLayer];
+ BitstreamDecVideo *stream = currVol->bitstream;
+ uint32 tmpvar;
+ int time_base;
+
+ /*****
+ * Read the VOP header from the bitstream (No shortVideoHeader Mode here!)
+ *****/
+ BitstreamShowBits32HC(stream, &tmpvar);
+
+ /* check if we have a GOV header here. 08/30/2000 */
+ if (tmpvar == GROUP_START_CODE)
+ {
+ tmpvar = BitstreamReadBits32HC(stream);
+// rewindBitstream(stream, START_CODE_LENGTH); /* for backward compatibility */
+ /* NOTE(review): the GOV time base written into tmpvar here is
+ * overwritten by the ShowBits below, i.e. the GOV timestamp is
+ * effectively ignored by this function. */
+ status = DecodeGOVHeader(stream, &tmpvar);
+ if (status != PV_SUCCESS)
+ {
+ return status;
+ }
+// use_ext_timestamp = TRUE; /* 02/08/2002 */
+ /* We should have a VOP header following the GOV header. 03/15/2001 */
+ BitstreamShowBits32HC(stream, &tmpvar);
+ }
+#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
+ currVop->timeStamp = -1;
+#endif
+ if (tmpvar == VOP_START_CODE)
+ {
+ tmpvar = BitstreamReadBits32HC(stream);
+ }
+ else
+ {
+ PV_BitstreamFlushBits(stream, 8); // advance by a byte
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+
+
+ /* vop_prediction_type (2 bits) */
+ currVop->predictionType = (int) BitstreamReadBits16(stream, 2);
+
+ /* modulo_time_base (? bits): count the run of '1' bits before the
+ * '0' terminator; each '1' represents one full second elapsed. */
+ time_base = -1;
+ do
+ {
+ time_base++;
+ tmpvar = (uint32) BitstreamRead1Bits(stream);
+ }
+ while (tmpvar == 1);
+
+
+
+ if (!use_ext_timestamp)
+ {
+ currVol->moduloTimeBase += 1000 * time_base; /* milliseconds based MTB 11/12/01 */
+ }
+
+ /* marker_bit (1 bit) */
+ if (!BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /* vop_time_increment (1-15 bits) in Nov_Compliant (1-16 bits) */
+ /* we always assumes fixed vop rate here */
+ currVop->timeInc = BitstreamReadBits16(stream, currVol->nbitsTimeIncRes);
+
+
+ /* marker_bit (1 bit) */
+ if (!BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /* vop_coded */
+ currVop->vopCoded = (int) BitstreamRead1Bits(stream);
+
+
+ /* An uncoded VOP carries no macroblock data; nothing more to parse. */
+ if (currVop->vopCoded == 0)
+ {
+ status = PV_SUCCESS;
+ goto return_point;
+ }
+
+
+ /* read vop_rounding_type */
+ if (currVop->predictionType == P_VOP)
+ {
+ currVop->roundingType = (int) BitstreamRead1Bits(stream);
+ }
+ else
+ {
+ currVop->roundingType = 0;
+ }
+
+ /* Complexity estimation header: the fields below are read only to
+ * keep the bitstream position correct; their values are unused. */
+ if (currVol->complexity_estDisable == 0)
+ {
+ if (currVol->complexity_estMethod < 2) /* OCT 2002 */
+ {
+ if ((currVol->complexity.text_1 >> 3) & 0x1) /* intra */
+ BitstreamReadBits16(stream, 8);
+ if (currVol->complexity.text_1 & 0x1) /* not_coded */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.text_2 >> 3) & 0x1) /* dct_coefs */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.text_2 >> 2) & 0x1) /* dct_lines */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.text_2 >> 1) & 0x1) /* vlc_symbols */
+ BitstreamReadBits16(stream, 8);
+ if (currVol->complexity.text_2 & 0x1) /* vlc_bits */
+ BitstreamReadBits16(stream, 4);
+
+ if (currVop->predictionType != I_VOP)
+ {
+ if ((currVol->complexity.text_1 >> 2) & 0x1) /* inter */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.text_1 >> 1) & 0x1) /* inter_4v */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.mc >> 5) & 0x1) /* apm */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.mc >> 4) & 0x1) /* npm */
+ BitstreamReadBits16(stream, 8);
+ /* interpolate_mc_q */
+ if ((currVol->complexity.mc >> 2) & 0x1) /* forw_back_mc_q */
+ BitstreamReadBits16(stream, 8);
+ if ((currVol->complexity.mc >> 1) & 0x1) /* halfpel2 */
+ BitstreamReadBits16(stream, 8);
+ if (currVol->complexity.mc & 0x1) /* halfpel4 */
+ BitstreamReadBits16(stream, 8);
+ }
+ if (currVop->predictionType == B_VOP)
+ {
+ if ((currVol->complexity.mc >> 3) & 0x1) /* interpolate_mc_q */
+ BitstreamReadBits16(stream, 8);
+ }
+ }
+ }
+
+ /* read intra_dc_vlc_thr */
+ currVop->intraDCVlcThr = (int) BitstreamReadBits16(stream, 3);
+
+ /* read vop_quant (currVol->quantPrecision bits); zero is illegal and
+ * falls back to the previous VOP's quantizer. */
+ currVop->quantizer = (int16) BitstreamReadBits16(stream, currVol->quantPrecision);
+ if (currVop->quantizer == 0)
+ {
+ currVop->quantizer = video->prevVop->quantizer;
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+
+ /* read vop_fcode_forward (3 bits); zero is illegal */
+ if (currVop->predictionType != I_VOP)
+ {
+ tmpvar = (uint32) BitstreamReadBits16(stream, 3);
+ if (tmpvar < 1)
+ {
+ currVop->fcodeForward = 1;
+ status = PV_FAIL;
+ goto return_point;
+ }
+ currVop->fcodeForward = tmpvar;
+ }
+ else
+ {
+ currVop->fcodeForward = 0;
+ }
+
+ /* read vop_fcode_backward (3 bits, B-VOP only); zero is illegal */
+ if (currVop->predictionType == B_VOP)
+ {
+ tmpvar = (uint32) BitstreamReadBits16(stream, 3);
+ if (tmpvar < 1)
+ {
+ currVop->fcodeBackward = 1;
+ status = PV_FAIL;
+ goto return_point;
+ }
+ currVop->fcodeBackward = tmpvar;
+ }
+ else
+ {
+ currVop->fcodeBackward = 0;
+ }
+
+ /* ref_select_code (2 bits) for scalable streams */
+ if (currVol->scalability)
+ {
+ currVop->refSelectCode = (int) BitstreamReadBits16(stream, 2);
+ }
+
+return_point:
+ return status;
+}
+
+
+/***********************************************************CommentBegin******
+*
+* -- DecodeShortHeader -- Decodes the short_video_header information from the bitstream
+* Modified :
+ 04/23/2001. Remove the codes related to the
+ "first pass" decoding. We use a different function
+ to set up the decoder now.
+***********************************************************CommentEnd********/
+/* Decode an H.263 ("short video header") picture header and fill in
+ * `currVop` and the base-layer VOL state in `video`. Supports the
+ * baseline PTYPE header as well as the extended PTYPE (PLUSPTYPE)
+ * form with UFEP, custom picture formats (CPFMT) and custom picture
+ * clock frequency. Unsupported or illegal syntax elements return
+ * PV_FAIL; split screen / freeze modes are reported and ignored.
+ * On success the per-frame fields (quantizer, prediction type,
+ * rounding type, fcode, GOB state) are initialized. */
+PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop)
+{
+ PV_STATUS status = PV_SUCCESS;
+ Vol *currVol = video->vol[0];
+ BitstreamDecVideo *stream = currVol->bitstream;
+ uint32 tmpvar;
+ int32 size;
+
+ int extended_PTYPE = FALSE;
+ int UFEP = 0, custom_PFMT = 0, custom_PCF = 0;
+
+ status = BitstreamShowBits32(stream, SHORT_VIDEO_START_MARKER_LENGTH, &tmpvar);
+
+ if (tmpvar != SHORT_VIDEO_START_MARKER)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+
+ PV_BitstreamFlushBits(stream, SHORT_VIDEO_START_MARKER_LENGTH);
+
+ /* Temporal reference. Using vop_time_increment_resolution = 30000 */
+ tmpvar = (uint32) BitstreamReadBits16(stream, 8);
+ currVop->temporalRef = (int) tmpvar;
+
+
+ currVop->timeInc = 0xff & (256 + currVop->temporalRef - video->prevVop->temporalRef);
+ currVol->moduloTimeBase += currVop->timeInc; /* mseconds 11/12/01 */
+ /* Marker Bit */
+ if (!BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Marker bit wrong.\n");
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /* Zero Bit */
+ if (BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Zero bit wrong.\n");
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /*split_screen_indicator*/
+ if (BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Split Screen not supported.\n");
+ VideoDecoderErrorDetected(video);
+ }
+
+ /*document_freeze_camera*/
+ if (BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Freeze Camera not supported.\n");
+ VideoDecoderErrorDetected(video);
+ }
+
+ /*freeze_picture_release*/
+ if (BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Freeze Release not supported.\n");
+ VideoDecoderErrorDetected(video);
+ }
+ /* source format: 1=SQCIF 2=QCIF 3=CIF 4=4CIF 5=16CIF 7=extended PTYPE */
+ switch (BitstreamReadBits16(stream, 3))
+ {
+ case 1:
+ if (video->size < 128*96)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 128;
+ video->displayHeight = video->height = 96;
+ break;
+
+ case 2:
+ if (video->size < 176*144)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 176;
+ video->displayHeight = video->height = 144;
+ break;
+
+ case 3:
+ if (video->size < 352*288)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 352;
+ video->displayHeight = video->height = 288;
+ break;
+
+ case 4:
+ if (video->size < 704*576)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 704;
+ video->displayHeight = video->height = 576;
+ break;
+
+ case 5:
+ if (video->size < 1408*1152)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 1408;
+ video->displayHeight = video->height = 1152;
+ break;
+
+ case 7:
+ extended_PTYPE = TRUE;
+ break;
+
+ default:
+ /* Msg("H.263 source format not legal\n"); */
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+
+ currVop->roundingType = 0;
+
+ if (extended_PTYPE == FALSE)
+ {
+ currVop->predictionType = (int) BitstreamRead1Bits(stream);
+
+ /* four_reserved_zero_bits */
+ if (BitstreamReadBits16(stream, 4))
+ {
+ mp4dec_log("DecodeShortHeader(): Reserved bits wrong.\n");
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+ else
+ {
+ /* PLUSPTYPE: Update Full Extended PTYPE (UFEP) then OPPTYPE/MPPTYPE */
+ UFEP = BitstreamReadBits16(stream, 3);
+ if (UFEP == 1)
+ {
+ /* source format */
+ switch (BitstreamReadBits16(stream, 3))
+ {
+ case 1:
+ if (video->size < 128*96)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 128;
+ video->displayHeight = video->height = 96;
+ break;
+
+ case 2:
+ if (video->size < 176*144)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 176;
+ video->displayHeight = video->height = 144;
+ break;
+
+ case 3:
+ if (video->size < 352*288)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 352;
+ video->displayHeight = video->height = 288;
+ break;
+
+ case 4:
+ if (video->size < 704*576)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 704;
+ video->displayHeight = video->height = 576;
+ break;
+
+ case 5:
+ if (video->size < 1408*1152)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = video->width = 1408;
+ video->displayHeight = video->height = 1152;
+ break;
+
+ case 6:
+ custom_PFMT = TRUE;
+ break;
+
+ default:
+ /* Msg("H.263 source format not legal\n"); */
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ custom_PCF = BitstreamRead1Bits(stream);
+ /* unrestricted MV */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ /* SAC */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /* AP */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ video->advanced_INTRA = BitstreamRead1Bits(stream); /* Annex I */
+
+ video->deblocking = BitstreamRead1Bits(stream); /* Annex J */
+
+ video->slice_structure = BitstreamRead1Bits(stream); /* Annex K */
+
+ /* RPS, ISD, AIV */
+ if (BitstreamReadBits16(stream, 3))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->modified_quant = BitstreamRead1Bits(stream); /* Annex T */
+
+ /* Marker Bit and reserved*/
+ if (BitstreamReadBits16(stream, 4) != 8)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+#ifndef PV_ANNEX_IJKT_SUPPORT
+ /* Annex I/J/K/T tools cannot be decoded in this build: reject the
+ * stream if any of them is signalled. (Fixed: the original check
+ * OR'ed modified_quant twice and never tested slice_structure.) */
+ if (video->advanced_INTRA | video->deblocking | video->modified_quant | video->slice_structure)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+#endif
+
+ if (UFEP == 0 || UFEP == 1)
+ {
+ /* MPPTYPE: picture type code (only I=0 / P=1 supported) */
+ tmpvar = BitstreamReadBits16(stream, 3);
+ if (tmpvar > 1)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ currVop->predictionType = tmpvar;
+ /* RPR */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ /* RRU */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ currVop->roundingType = (int) BitstreamRead1Bits(stream);
+ /* 2 reserved zero bits + marker: pattern must be '001' */
+ if (BitstreamReadBits16(stream, 3) != 1)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+ else
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ /* CPM */
+ if (BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ /* CPFMT: custom picture format (only with UFEP) */
+ if (custom_PFMT == 1 && UFEP == 1)
+ {
+ /* aspect ratio */
+ tmpvar = BitstreamReadBits16(stream, 4);
+ if (tmpvar == 0)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ /* Extended PAR */
+ if (tmpvar == 0xF)
+ {
+ /* Read par_width and par_height but do nothing */
+ /* par_width */
+ tmpvar = BitstreamReadBits16(stream, 8);
+
+ /* par_height */
+ tmpvar = BitstreamReadBits16(stream, 8);
+ }
+ /* picture width indication: width = (PWI + 1) * 4 */
+ tmpvar = BitstreamReadBits16(stream, 9);
+
+ int tmpDisplayWidth = (tmpvar + 1) << 2;
+ /* marker bit */
+ if (!BitstreamRead1Bits(stream))
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ /* picture height indication: height = PHI * 4, zero illegal */
+ tmpvar = BitstreamReadBits16(stream, 9);
+ if (tmpvar == 0)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ int tmpDisplayHeight = tmpvar << 2;
+ /* round the coded size up to whole 16x16 macroblocks */
+ int tmpHeight = (tmpDisplayHeight + 15) & -16;
+ int tmpWidth = (tmpDisplayWidth + 15) & -16;
+
+ if (tmpWidth > video->width)
+ {
+ // while allowed by the spec, this decoder does not actually
+ // support an increase in size.
+ ALOGE("width increase not supported");
+ status = PV_FAIL;
+ goto return_point;
+ }
+ if (tmpHeight * tmpWidth > video->size)
+ {
+ // This is just possibly "b/37079296".
+ ALOGE("b/37079296");
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->displayWidth = tmpDisplayWidth;
+ video->width = tmpWidth;
+ video->displayHeight = tmpDisplayHeight;
+ video->height = tmpHeight;
+
+ video->nTotalMB = video->width / MB_SIZE * video->height / MB_SIZE;
+
+ /* macroblock ID field width per H.263 Annex K table */
+ if (video->nTotalMB <= 48)
+ {
+ video->nBitsForMBID = 6;
+ }
+ else if (video->nTotalMB <= 99)
+ {
+ video->nBitsForMBID = 7;
+ }
+ else if (video->nTotalMB <= 396)
+ {
+ video->nBitsForMBID = 9;
+ }
+ else if (video->nTotalMB <= 1584)
+ {
+ video->nBitsForMBID = 11;
+ }
+ else if (video->nTotalMB <= 6336)
+ {
+ video->nBitsForMBID = 13 ;
+ }
+ else if (video->nTotalMB <= 9216)
+ {
+ video->nBitsForMBID = 14 ;
+ }
+ else
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+ /* custom picture clock frequency: clock divisor must be nonzero */
+ if (UFEP == 1 && custom_PCF == 1)
+ {
+ BitstreamRead1Bits(stream);
+
+ tmpvar = BitstreamReadBits16(stream, 7);
+ if (tmpvar == 0)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+
+ /* extended temporal reference (2 bits) */
+ if (custom_PCF == 1)
+ {
+ currVop->ETR = BitstreamReadBits16(stream, 2);
+ }
+
+ if (UFEP == 1 && video->slice_structure == 1)
+ {
+ /* SSS */
+ tmpvar = BitstreamReadBits16(stream, 2);
+ if (tmpvar != 0)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+ }
+
+ /* Recalculate number of macroblocks per row & col since */
+ /* the frame size can change. 04/23/2001. */
+ video->nMBinGOB = video->nMBPerRow = video->width / MB_SIZE;
+ video->nGOBinVop = video->nMBPerCol = video->height / MB_SIZE;
+ video->nTotalMB = video->nMBPerRow * video->nMBPerCol;
+ if (custom_PFMT == 0 || UFEP == 0)
+ {
+ video->nBitsForMBID = CalcNumBits((uint)video->nTotalMB - 1); /* otherwise calculate above */
+ }
+ size = (int32)video->width * video->height;
+ /* an inter frame may not grow past the allocated frame size */
+ if (currVop->predictionType == P_VOP && size > video->videoDecControls->size)
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+ video->videoDecControls->size = size;
+ /* re-derive the chroma plane pointers from the new luma size */
+ video->currVop->uChan = video->currVop->yChan + size;
+ video->currVop->vChan = video->currVop->uChan + (size >> 2);
+ video->prevVop->uChan = video->prevVop->yChan + size;
+ video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
+
+
+ /* PQUANT (5 bits); zero is illegal and falls back to the previous
+ * VOP's quantizer */
+ currVop->quantizer = (int16) BitstreamReadBits16(stream, 5);
+
+ if (currVop->quantizer == 0) /* 04/03/01 */
+ {
+ currVop->quantizer = video->prevVop->quantizer;
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+
+ /* Zero bit */
+ if (extended_PTYPE == FALSE)
+ {
+ if (BitstreamRead1Bits(stream))
+ {
+ mp4dec_log("DecodeShortHeader(): Zero bit wrong.\n");
+ status = PV_FAIL;
+ goto return_point;
+ }
+ }
+ /* pei: skip extra insertion information bytes while PEI is set */
+ tmpvar = (uint32) BitstreamRead1Bits(stream);
+
+ while (tmpvar)
+ {
+ tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* "PSPARE" */
+ tmpvar = (uint32) BitstreamRead1Bits(stream); /* "PEI" */
+ }
+
+ if (video->slice_structure) /* ANNEX_K */
+ {
+ if (!BitstreamRead1Bits(stream)) /* SEPB1 */
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ // if (currVol->nBitsForMBID //
+ if (BitstreamReadBits16(stream, video->nBitsForMBID))
+ {
+ status = PV_FAIL; /* no ASO, RS support for Annex K */
+ goto return_point;
+ }
+
+ if (!BitstreamRead1Bits(stream)) /*SEPB3 */
+ {
+ status = PV_FAIL;
+ goto return_point;
+ }
+
+ }
+ /* Setting of other VOP-header parameters */
+ currVop->gobNumber = 0;
+ currVop->vopCoded = 1;
+
+ currVop->intraDCVlcThr = 0;
+ currVop->gobFrameID = 0; /* initial value, 05/22/00 */
+ currVol->errorResDisable = 0;
+ /*PutVopInterlaced(0,curr_vop); no implemented yet */
+ if (currVop->predictionType != I_VOP)
+ currVop->fcodeForward = 1;
+ else
+ currVop->fcodeForward = 0;
+
+return_point:
+
+ return status;
+}
+/***********************************************************CommentBegin******
+*
+* -- PV_DecodeVop -- Decodes the VOP information from the bitstream
+*
+* 04/12/2000
+* Initial port to the new PV decoder library format.
+* This function is different from the one in MoMuSys MPEG-4
+* visual decoder. We handle combined mode with or without
+* error resilience and H.263 mode through the same path now.
+*
+* 05/04/2000
+* Added temporal scalability to the decoder.
+*
+***********************************************************CommentEnd********/
+/* Decode the macroblock data of the current VOP, dispatching to the
+ * data-partitioning or combined-mode decoder, then consume any
+ * trailing end-of-sequence code: the VOS end code for MPEG-4 streams
+ * or the 22-bit EOS marker for short video header streams.
+ * Returns the status of the last decode/bitstream operation. */
+PV_STATUS PV_DecodeVop(VideoDecData *video)
+{
+ Vol *currVol = video->vol[video->currLayer];
+ PV_STATUS status;
+ uint32 tmpvar;
+
+ /*****
+ * Do scalable or non-scalable decoding of the current VOP
+ *****/
+
+ if (!currVol->scalability)
+ {
+ if (currVol->dataPartitioning)
+ {
+ /* Data partitioning mode comes here */
+ status = DecodeFrameDataPartMode(video);
+ }
+ else
+ {
+ /* Combined mode with or without error resilience */
+ /* and short video header comes here. */
+ status = DecodeFrameCombinedMode(video);
+ }
+ }
+ else
+ {
+#ifdef DO_NOT_FOLLOW_STANDARD
+ /* according to the standard, only combined mode is allowed */
+ /* in the enhancement layer. 06/01/2000. */
+ if (currVol->dataPartitioning)
+ {
+ /* Data partitioning mode comes here */
+ status = DecodeFrameDataPartMode(video);
+ }
+ else
+ {
+ /* Combined mode with or without error resilience */
+ /* and short video header comes here. */
+ status = DecodeFrameCombinedMode(video);
+ }
+#else
+ status = DecodeFrameCombinedMode(video);
+#endif
+ }
+
+ /* This part is for consuming Visual_object_sequence_end_code and EOS Code */ /* 10/15/01 */
+ if (!video->shortVideoHeader)
+ {
+ /* at this point bitstream is expected to be byte aligned */
+ BitstreamByteAlignNoForceStuffing(currVol->bitstream);
+
+ status = BitstreamShowBits32HC(currVol->bitstream, &tmpvar); /* 07/07/01 */
+ if (tmpvar == VISUAL_OBJECT_SEQUENCE_END_CODE)/* VOS_END_CODE */
+ {
+ /* flush the 32-bit end code in two 16-bit steps */
+ PV_BitstreamFlushBits(currVol->bitstream, 16);
+ PV_BitstreamFlushBits(currVol->bitstream, 16);
+ }
+
+ }
+ else
+ {
+#ifdef PV_ANNEX_IJKT_SUPPORT
+ /* Annex J deblocking filter applied to the reconstructed planes
+ * (chroma uses half dimensions). */
+ if (video->deblocking)
+ {
+ H263_Deblock(video->currVop->yChan, video->width, video->height, video->QPMB, video->headerInfo.Mode, 0, 0);
+ H263_Deblock(video->currVop->uChan, video->width >> 1, video->height >> 1, video->QPMB, video->headerInfo.Mode, 1, video->modified_quant);
+ H263_Deblock(video->currVop->vChan, video->width >> 1, video->height >> 1, video->QPMB, video->headerInfo.Mode, 1, video->modified_quant);
+ }
+#endif
+ /* Read EOS code for shortheader bitstreams */
+ status = BitstreamShowBits32(currVol->bitstream, 22, &tmpvar);
+ if (tmpvar == SHORT_VIDEO_END_MARKER)
+ {
+ PV_BitstreamFlushBits(currVol->bitstream, 22);
+ }
+ else
+ {
+ /* retry with byte alignment in case stuffing precedes EOS */
+ status = PV_BitstreamShowBitsByteAlign(currVol->bitstream, 22, &tmpvar);
+ if (tmpvar == SHORT_VIDEO_END_MARKER)
+ {
+ PV_BitstreamByteAlign(currVol->bitstream);
+ PV_BitstreamFlushBits(currVol->bitstream, 22);
+ }
+ }
+ }
+ return status;
+}
+
+
+/***********************************************************CommentBegin******
+*
+* -- CalcVopDisplayTime -- calculate absolute time when VOP is to be displayed
+*
+* 04/12/2000 Initial port to the new PV decoder library format.
+*
+***********************************************************CommentEnd********/
+/* Compute the absolute display time of `currVop` in milliseconds.
+ * MPEG-4 streams: moduloTimeBase plus the VOP time increment (offset
+ * by timeInc_offset) scaled to ms by the VOL's time increment
+ * resolution; if the result does not advance past the current
+ * timestamp, a full second is added (valid when the GOV header
+ * timestamp is ignored).
+ * Short video header streams: the accumulated temporal reference
+ * count is converted at 33 + 11/30 ms per tick (i.e. ~29.97 fps). */
+uint32 CalcVopDisplayTime(Vol *currVol, Vop *currVop, int shortVideoHeader)
+{
+ uint32 display_time;
+
+
+ /*****
+ * Calculate the time when the VOP is to be displayed next
+ *****/
+
+ if (!shortVideoHeader)
+ {
+ display_time = (uint32)(currVol->moduloTimeBase + (((int32)currVop->timeInc - (int32)currVol->timeInc_offset) * 1000) / ((int32)currVol->timeIncrementResolution)); /* 11/12/2001 */
+ if (currVop->timeStamp >= display_time)
+ {
+ display_time += 1000; /* this case is valid if GOVHeader timestamp is ignored */
+ }
+ }
+ else
+ {
+ display_time = (uint32)(currVol->moduloTimeBase * 33 + (currVol->moduloTimeBase * 11) / 30); /* 11/12/2001 */
+ }
+
+ return(display_time);
+}
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/zigzag.h b/media/codecs/m4v_h263/dec/src/zigzag.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/zigzag.h
rename to media/codecs/m4v_h263/dec/src/zigzag.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/zigzag_tab.cpp b/media/codecs/m4v_h263/dec/src/zigzag_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/zigzag_tab.cpp
rename to media/codecs/m4v_h263/dec/src/zigzag_tab.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp b/media/codecs/m4v_h263/dec/test/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
rename to media/codecs/m4v_h263/dec/test/Android.bp
diff --git a/media/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
new file mode 100755
index 0000000..f572b0c
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Test module config for Mpeg4H263 Decoder unit tests">
+ <option name="test-suite-tag" value="Mpeg4H263DecoderTest" />
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
+ <option name="push-file"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
+ value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="Mpeg4H263DecoderTest" />
+ <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263DecoderTestRes/" />
+ </test>
+</configuration>
diff --git a/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp b/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
new file mode 100644
index 0000000..53d66ea
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
@@ -0,0 +1,426 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Mpeg4H263DecoderTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <utils/String8.h>
+#include <fstream>
+#include <new>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include "mp4dec_api.h"
+
+#include "Mpeg4H263DecoderTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_FILE_NAME "/data/local/tmp/Output.yuv"
+#define CODEC_CONFIG_FLAG 32
+#define SYNC_FRAME 1
+#define MPEG4_MAX_WIDTH 1920
+#define MPEG4_MAX_HEIGHT 1080
+#define H263_MAX_WIDTH 352
+#define H263_MAX_HEIGHT 288
+
+constexpr uint32_t kNumOutputBuffers = 2;
+
+struct FrameInfo {
+ int32_t bytesCount;
+ uint32_t flags;
+ int64_t timestamp;
+};
+
+struct tagvideoDecControls;
+
+static Mpeg4H263DecoderTestEnvironment *gEnv = nullptr;
+
+/* GTest fixture for the MPEG-4 / H.263 software decoder unit tests.
+ * Parameterized by a <string, string, bool> tuple; the parameters are
+ * consumed by test bodies outside this chunk (presumably input file,
+ * info file and decoding mode -- confirm against the test
+ * instantiations). */
+class Mpeg4H263DecoderTest : public ::testing::TestWithParam<tuple<string, string, bool>> {
+ public:
+ Mpeg4H263DecoderTest()
+ : mDecHandle(nullptr),
+ mInputBuffer(nullptr),
+ mInitialized(false),
+ mFramesConfigured(false),
+ mNumSamplesOutput(0),
+ mWidth(352),
+ mHeight(288) {
+ memset(mOutputBuffer, 0x0, sizeof(mOutputBuffer));
+ }
+
+ /* Release every resource the test may have acquired: the elementary
+ * stream, the decoder handle and all input/output buffers. */
+ ~Mpeg4H263DecoderTest() {
+ if (mEleStream.is_open()) mEleStream.close();
+ if (mDecHandle) {
+ delete mDecHandle;
+ mDecHandle = nullptr;
+ }
+ if (mInputBuffer) {
+ free(mInputBuffer);
+ mInputBuffer = nullptr;
+ }
+ freeOutputBuffer();
+ }
+
+ status_t initDecoder();
+ void allocOutputBuffer(size_t outputBufferSize);
+ void dumpOutput(ofstream &ostrm);
+ void freeOutputBuffer();
+ void processMpeg4H263Decoder(vector<FrameInfo> Info, int32_t offset, int32_t range,
+ ifstream &mEleStream, ofstream &ostrm, MP4DecodingMode inputMode);
+ void deInitDecoder();
+
+ ifstream mEleStream; // elementary stream under test
+ tagvideoDecControls *mDecHandle; // decoder control handle, allocated in initDecoder()
+ char *mInputBuffer; // one compressed frame, malloc'd per frame
+ uint8_t *mOutputBuffer[kNumOutputBuffers]; // double-buffered YUV output
+ bool mInitialized; // PVInitVideoDecoder() has succeeded
+ bool mFramesConfigured; // reference YUV buffer has been registered
+ uint32_t mNumSamplesOutput; // number of frames decoded so far
+ uint32_t mWidth; // current display width (defaults to CIF 352x288)
+ uint32_t mHeight; // current display height
+};
+
+/* Allocate (on first use) and zero-initialize the decoder control
+ * structure. Returns OK on success or NO_MEMORY if the allocation
+ * fails. Safe to call repeatedly: an existing handle is reused. */
+status_t Mpeg4H263DecoderTest::initDecoder() {
+ if (!mDecHandle) {
+ /* Use the non-throwing form so the NO_MEMORY path below is
+ * actually reachable (plain `new` throws std::bad_alloc instead
+ * of returning nullptr). */
+ mDecHandle = new (std::nothrow) tagvideoDecControls;
+ }
+ if (!mDecHandle) {
+ return NO_MEMORY;
+ }
+ memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+ return OK;
+}
+
+/* Ensure all kNumOutputBuffers output buffers are allocated with
+ * outputBufferSize bytes each. Already-allocated buffers are left
+ * untouched, so repeated calls with the same size are harmless.
+ * Fails the test (fatal assertion) if malloc returns nullptr. */
+void Mpeg4H263DecoderTest::allocOutputBuffer(size_t outputBufferSize) {
+ /* uint32_t loop index matches kNumOutputBuffers and avoids a
+ * signed/unsigned comparison (original used int32_t). */
+ for (uint32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (!mOutputBuffer[i]) {
+ mOutputBuffer[i] = (uint8_t *)malloc(outputBufferSize);
+ ASSERT_NE(mOutputBuffer[i], nullptr) << "Output buffer allocation failed";
+ }
+ }
+}
+
+/* Write the latest decoded frame to `ostrm` as planar YUV 4:2:0,
+ * cropping the decoder's 16-aligned buffer strides down to the
+ * display size mWidth x mHeight. mNumSamplesOutput & 1 selects which
+ * of the two output buffers holds the current frame. */
+void Mpeg4H263DecoderTest::dumpOutput(ofstream &ostrm) {
+ uint8_t *src = mOutputBuffer[mNumSamplesOutput & 1];
+ size_t vStride = align(mHeight, 16); // buffer height, 16-aligned
+ size_t srcYStride = align(mWidth, 16); // luma row stride
+ size_t srcUVStride = srcYStride / 2; // chroma row stride
+ uint8_t *srcStart = src;
+
+ /* Y buffer: mHeight rows of mWidth pixels */
+ for (size_t i = 0; i < mHeight; ++i) {
+ ostrm.write(reinterpret_cast<char *>(src), mWidth);
+ src += srcYStride;
+ }
+ /* U buffer: starts right after the full 16-aligned Y plane */
+ src = srcStart + vStride * srcYStride;
+ for (size_t i = 0; i < mHeight / 2; ++i) {
+ ostrm.write(reinterpret_cast<char *>(src), mWidth / 2);
+ src += srcUVStride;
+ }
+ /* V buffer: after the Y plane plus the quarter-size U plane (5/4) */
+ src = srcStart + vStride * srcYStride * 5 / 4;
+ for (size_t i = 0; i < mHeight / 2; ++i) {
+ ostrm.write(reinterpret_cast<char *>(src), mWidth / 2);
+ src += srcUVStride;
+ }
+}
+
+/* Free all output buffers and reset their pointers so that
+ * allocOutputBuffer() can repopulate them (e.g. after a resolution
+ * change). */
+void Mpeg4H263DecoderTest::freeOutputBuffer() {
+ /* uint32_t loop index matches kNumOutputBuffers and avoids a
+ * signed/unsigned comparison (original used int32_t). */
+ for (uint32_t i = 0; i < kNumOutputBuffers; ++i) {
+ if (mOutputBuffer[i]) {
+ free(mOutputBuffer[i]);
+ mOutputBuffer[i] = nullptr;
+ }
+ }
+}
+
+void Mpeg4H263DecoderTest::processMpeg4H263Decoder(vector<FrameInfo> Info, int32_t offset,
+ int32_t range, ifstream &mEleStream,
+ ofstream &ostrm, MP4DecodingMode inputMode) {
+ size_t maxWidth = (inputMode == MPEG4_MODE) ? MPEG4_MAX_WIDTH : H263_MAX_WIDTH;
+ size_t maxHeight = (inputMode == MPEG4_MODE) ? MPEG4_MAX_HEIGHT : H263_MAX_HEIGHT;
+ size_t outputBufferSize = align(maxWidth, 16) * align(maxHeight, 16) * 3 / 2;
+ uint32_t frameIndex = offset;
+ bool status = true;
+ ASSERT_GE(range, 0) << "Invalid range";
+ ASSERT_TRUE(offset >= 0 && offset <= Info.size() - 1) << "Invalid offset";
+ ASSERT_LE(range + offset, Info.size()) << "range+offset can't be greater than the no of frames";
+
+ while (1) {
+ if (frameIndex == Info.size() || frameIndex == (offset + range)) break;
+
+ int32_t bytesCount = Info[frameIndex].bytesCount;
+ ASSERT_GT(bytesCount, 0) << "Size for the memory allocation is negative";
+ mInputBuffer = (char *)malloc(bytesCount);
+ ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory to read frame";
+ mEleStream.read(mInputBuffer, bytesCount);
+ ASSERT_EQ(mEleStream.gcount(), bytesCount) << "mEleStream.gcount() != bytesCount";
+ static const uint8_t volInfo[] = {0x00, 0x00, 0x01, 0xB0};
+ bool volHeader = memcmp(mInputBuffer, volInfo, 4) == 0;
+ if (volHeader) {
+ PVCleanUpVideoDecoder(mDecHandle);
+ mInitialized = false;
+ }
+
+ if (!mInitialized) {
+ uint8_t *volData[1]{};
+ int32_t volSize = 0;
+
+ uint32_t flags = Info[frameIndex].flags;
+ bool codecConfig = flags == CODEC_CONFIG_FLAG;
+ if (codecConfig || volHeader) {
+ volData[0] = reinterpret_cast<uint8_t *>(mInputBuffer);
+ volSize = bytesCount;
+ }
+
+ status = PVInitVideoDecoder(mDecHandle, volData, &volSize, 1, maxWidth, maxHeight,
+ inputMode);
+ ASSERT_TRUE(status) << "PVInitVideoDecoder failed. Unsupported content";
+
+ mInitialized = true;
+ MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+ ASSERT_EQ(inputMode, actualMode)
+ << "Decoded mode not same as actual mode of the decoder";
+
+ PVSetPostProcType(mDecHandle, 0);
+
+ int32_t dispWidth, dispHeight;
+ PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
+
+ int32_t bufWidth, bufHeight;
+ PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
+
+ ASSERT_LE(dispWidth, bufWidth) << "Display width is greater than buffer width";
+ ASSERT_LE(dispHeight, bufHeight) << "Display height is greater than buffer height";
+
+ if (dispWidth != mWidth || dispHeight != mHeight) {
+ mWidth = dispWidth;
+ mHeight = dispHeight;
+ freeOutputBuffer();
+ if (inputMode == H263_MODE) {
+ PVCleanUpVideoDecoder(mDecHandle);
+
+ uint8_t *volData[1]{};
+ int32_t volSize = 0;
+
+ status = PVInitVideoDecoder(mDecHandle, volData, &volSize, 1, maxWidth,
+ maxHeight, H263_MODE);
+ ASSERT_TRUE(status) << "PVInitVideoDecoder failed for H263";
+ }
+ mFramesConfigured = false;
+ }
+
+ if (codecConfig) {
+ frameIndex++;
+ continue;
+ }
+ }
+
+ uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+ ASSERT_GE(outputBufferSize, yFrameSize * 3 / 2)
+ << "Too small output buffer: " << outputBufferSize << " bytes";
+ ASSERT_NO_FATAL_FAILURE(allocOutputBuffer(outputBufferSize));
+
+ if (!mFramesConfigured) {
+ PVSetReferenceYUV(mDecHandle, mOutputBuffer[1]);
+ mFramesConfigured = true;
+ }
+
+ // Need to check if header contains new info, e.g., width/height, etc.
+ VopHeaderInfo headerInfo;
+ uint32_t useExtTimestamp = 1;
+ int32_t inputSize = (Info)[frameIndex].bytesCount;
+ uint32_t timestamp = frameIndex;
+
+ uint8_t *bitstreamTmp = reinterpret_cast<uint8_t *>(mInputBuffer);
+
+ status = PVDecodeVopHeader(mDecHandle, &bitstreamTmp, ×tamp, &inputSize, &headerInfo,
+ &useExtTimestamp, mOutputBuffer[mNumSamplesOutput & 1]);
+ ASSERT_EQ(status, PV_TRUE) << "failed to decode vop header";
+
+ // H263 doesn't have VOL header, the frame size information is in short header, i.e. the
+ // decoder may detect size change after PVDecodeVopHeader.
+ int32_t dispWidth, dispHeight;
+ PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
+
+ int32_t bufWidth, bufHeight;
+ PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
+
+ ASSERT_LE(dispWidth, bufWidth) << "Display width is greater than buffer width";
+ ASSERT_LE(dispHeight, bufHeight) << "Display height is greater than buffer height";
+ if (dispWidth != mWidth || dispHeight != mHeight) {
+ mWidth = dispWidth;
+ mHeight = dispHeight;
+ }
+
+ status = PVDecodeVopBody(mDecHandle, &inputSize);
+ ASSERT_EQ(status, PV_TRUE) << "failed to decode video frame No = %d" << frameIndex;
+
+ dumpOutput(ostrm);
+
+ ++mNumSamplesOutput;
+ ++frameIndex;
+ }
+ freeOutputBuffer();
+}
+
+void Mpeg4H263DecoderTest::deInitDecoder() {
+ if (mInitialized) {
+ if (mDecHandle) {
+ PVCleanUpVideoDecoder(mDecHandle);
+ delete mDecHandle;
+ mDecHandle = nullptr;
+ }
+ mInitialized = false;
+ }
+ freeOutputBuffer();
+}
+
+void getInfo(string infoFileName, vector<FrameInfo> &Info) {
+ ifstream eleInfo;
+ eleInfo.open(infoFileName);
+ ASSERT_EQ(eleInfo.is_open(), true) << "Failed to open " << infoFileName;
+ int32_t bytesCount = 0;
+ uint32_t flags = 0;
+ uint32_t timestamp = 0;
+ while (1) {
+ if (!(eleInfo >> bytesCount)) {
+ break;
+ }
+ eleInfo >> flags;
+ eleInfo >> timestamp;
+ Info.push_back({bytesCount, flags, timestamp});
+ }
+ if (eleInfo.is_open()) eleInfo.close();
+}
+
+TEST_P(Mpeg4H263DecoderTest, DecodeTest) {
+ tuple<string /* InputFileName */, string /* InfoFileName */, bool /* mode */> params =
+ GetParam();
+
+ string inputFileName = gEnv->getRes() + get<0>(params);
+ mEleStream.open(inputFileName, ifstream::binary);
+ ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << get<0>(params);
+
+ string infoFileName = gEnv->getRes() + get<1>(params);
+ vector<FrameInfo> Info;
+ ASSERT_NO_FATAL_FAILURE(getInfo(infoFileName, Info));
+ ASSERT_NE(Info.empty(), true) << "Invalid Info file";
+
+ ofstream ostrm;
+ ostrm.open(OUTPUT_FILE_NAME, std::ofstream::binary);
+ ASSERT_EQ(ostrm.is_open(), true) << "Failed to open output stream for " << get<0>(params);
+
+ status_t err = initDecoder();
+ ASSERT_EQ(err, OK) << "initDecoder: failed to create decoder " << err;
+
+ bool isMpeg4 = get<2>(params);
+ MP4DecodingMode inputMode = isMpeg4 ? MPEG4_MODE : H263_MODE;
+ ASSERT_NO_FATAL_FAILURE(
+ processMpeg4H263Decoder(Info, 0, Info.size(), mEleStream, ostrm, inputMode));
+ deInitDecoder();
+ ostrm.close();
+ Info.clear();
+}
+
+TEST_P(Mpeg4H263DecoderTest, FlushTest) {
+ tuple<string /* InputFileName */, string /* InfoFileName */, bool /* mode */> params =
+ GetParam();
+
+ string inputFileName = gEnv->getRes() + get<0>(params);
+ mEleStream.open(inputFileName, ifstream::binary);
+ ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << get<0>(params);
+
+ string infoFileName = gEnv->getRes() + get<1>(params);
+ vector<FrameInfo> Info;
+ ASSERT_NO_FATAL_FAILURE(getInfo(infoFileName, Info));
+ ASSERT_NE(Info.empty(), true) << "Invalid Info file";
+
+ ofstream ostrm;
+ ostrm.open(OUTPUT_FILE_NAME, std::ofstream::binary);
+ ASSERT_EQ(ostrm.is_open(), true) << "Failed to open output stream for " << get<0>(params);
+
+ status_t err = initDecoder();
+ ASSERT_EQ(err, OK) << "initDecoder: failed to create decoder " << err;
+
+ bool isMpeg4 = get<2>(params);
+ MP4DecodingMode inputMode = isMpeg4 ? MPEG4_MODE : H263_MODE;
+ // Number of frames to be decoded before flush
+ int32_t numFrames = Info.size() / 3;
+ ASSERT_NO_FATAL_FAILURE(
+ processMpeg4H263Decoder(Info, 0, numFrames, mEleStream, ostrm, inputMode));
+
+ if (mInitialized) {
+ int32_t status = PVResetVideoDecoder(mDecHandle);
+ ASSERT_EQ(status, PV_TRUE);
+ }
+
+ // Seek to next key frame and start decoding till the end
+ int32_t index = numFrames;
+ bool keyFrame = false;
+ uint32_t flags = 0;
+ while (index < (int32_t)Info.size()) {
+ if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
+ if ((flags & SYNC_FRAME) == SYNC_FRAME) {
+ keyFrame = true;
+ break;
+ }
+ flags = 0;
+ mEleStream.ignore(Info[index].bytesCount);
+ index++;
+ }
+ ALOGV("Index= %d", index);
+ if (keyFrame) {
+ mNumSamplesOutput = 0;
+ ASSERT_NO_FATAL_FAILURE(processMpeg4H263Decoder(Info, index, (int32_t)Info.size() - index,
+ mEleStream, ostrm, inputMode));
+ }
+ deInitDecoder();
+ ostrm.close();
+ Info.clear();
+}
+
// Test matrix: each tuple is (clip, side-info file, isMpeg4). The first four
// clips are H263 (false); the rest exercise the MPEG4 path, including
// non-multiple-of-16 dimensions and custom quantization matrices.
INSTANTIATE_TEST_SUITE_P(
        Mpeg4H263DecoderTestAll, Mpeg4H263DecoderTest,
        ::testing::Values(make_tuple("swirl_128x96_h263.h263", "swirl_128x96_h263.info", false),
                          make_tuple("swirl_176x144_h263.h263", "swirl_176x144_h263.info", false),
                          make_tuple("swirl_352x288_h263.h263", "swirl_352x288_h263.info", false),
                          make_tuple("bbb_352x288_h263.h263", "bbb_352x288_h263.info", false),
                          make_tuple("bbb_352x288_mpeg4.m4v", "bbb_352x288_mpeg4.info", true),
                          make_tuple("qtype0_mpeg4.m4v", "qtype0_mpeg4.info", true),
                          make_tuple("qtype1_mpeg4.m4v", "qtype1_mpeg4.info", true),
                          make_tuple("qtype1_qmatrix_mpeg4.m4v", "qtype1_qmatrix_mpeg4.info", true),
                          make_tuple("swirl_128x128_mpeg4.m4v", "swirl_128x128_mpeg4.info", true),
                          make_tuple("swirl_130x132_mpeg4.m4v", "swirl_130x132_mpeg4.info", true),
                          make_tuple("swirl_132x130_mpeg4.m4v", "swirl_132x130_mpeg4.info", true),
                          make_tuple("swirl_136x144_mpeg4.m4v", "swirl_136x144_mpeg4.info", true),
                          make_tuple("swirl_144x136_mpeg4.m4v", "swirl_144x136_mpeg4.info", true)));
+
+int main(int argc, char **argv) {
+ gEnv = new Mpeg4H263DecoderTestEnvironment();
+ ::testing::AddGlobalTestEnvironment(gEnv);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = gEnv->initFromOptions(argc, argv);
+ if (status == 0) {
+ status = RUN_ALL_TESTS();
+ ALOGD("Decoder Test Result = %d\n", status);
+ }
+ return status;
+}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h b/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
rename to media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
diff --git a/media/codecs/m4v_h263/dec/test/README.md b/media/codecs/m4v_h263/dec/test/README.md
new file mode 100644
index 0000000..38ac567
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/test/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### Mpeg4H263Decoder :
+The Mpeg4H263Decoder Test Suite validates the Mpeg4 and H263 decoder available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m Mpeg4H263DecoderTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test 64-bit binary push binaries from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/Mpeg4H263DecoderTest/Mpeg4H263DecoderTest /data/local/tmp/
+```
+
+To test 32-bit binary push binaries from nativetest.
+```
+adb push ${OUT}/data/nativetest/Mpeg4H263DecoderTest/Mpeg4H263DecoderTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip).
+Download the zip file, unzip it, and push the extracted files to the device for testing.
+
+```
+adb push Mpeg4H263Decoder /data/local/tmp/
+```
+
+usage: Mpeg4H263DecoderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/Mpeg4H263DecoderTest -P /data/local/tmp/Mpeg4H263Decoder/
+```
+Alternatively, the test can also be run using atest command.
+
+```
+atest Mpeg4H263DecoderTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/codecs/m4v_h263/enc/Android.bp b/media/codecs/m4v_h263/enc/Android.bp
new file mode 100644
index 0000000..dd7f005
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/Android.bp
@@ -0,0 +1,75 @@
// Static library build of the PacketVideo MPEG4/H.263 software encoder.
cc_library_static {
    name: "libstagefright_m4vh263enc",
    vendor_available: true,
    apex_available: [
        "//apex_available:platform",
        "com.android.media.swcodec",
    ],
    min_sdk_version: "29",
    host_supported: true,
    target: {
        darwin: {
            enabled: false,
        },
    },

    srcs: [
        "src/bitstream_io.cpp",
        // Fixed: combined_encode.cpp and datapart_encode.cpp were crammed onto
        // one line, breaking the one-entry-per-line list style.
        "src/combined_encode.cpp",
        "src/datapart_encode.cpp",
        "src/dct.cpp",
        "src/findhalfpel.cpp",
        "src/fastcodemb.cpp",
        "src/fastidct.cpp",
        "src/fastquant.cpp",
        "src/me_utils.cpp",
        "src/mp4enc_api.cpp",
        "src/rate_control.cpp",
        "src/motion_est.cpp",
        "src/motion_comp.cpp",
        "src/sad.cpp",
        "src/sad_halfpel.cpp",
        "src/vlc_encode.cpp",
        "src/vop.cpp",
    ],

    cflags: [
        "-DBX_RC",
        "-Werror",
    ],

    local_include_dirs: ["src"],
    export_include_dirs: ["include"],

    sanitize: {
        misc_undefined: [
            "signed-integer-overflow",
        ],
        cfi: true,
    },
}
+
+//###############################################################################
+
// Legacy sanity-test binary for the encoder library.
cc_test {
    name: "libstagefright_m4vh263enc_test",
    // The test provides its own main() and does not link against GoogleTest.
    gtest: false,

    srcs: ["test/m4v_h263_enc_test.cpp"],

    local_include_dirs: ["src"],

    cflags: [
        "-DBX_RC",
        "-Wall",
        "-Werror",
    ],

    // Match the library's sanitizer configuration so the test exercises the
    // same instrumented code paths.
    sanitize: {
        misc_undefined: [
            "signed-integer-overflow",
        ],
        cfi: true,
    },

    static_libs: ["libstagefright_m4vh263enc"],
}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2 b/media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
rename to media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/m4v_h263/enc/NOTICE b/media/codecs/m4v_h263/enc/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/NOTICE
rename to media/codecs/m4v_h263/enc/NOTICE
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/cvei.h b/media/codecs/m4v_h263/enc/include/cvei.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/include/cvei.h
rename to media/codecs/m4v_h263/enc/include/cvei.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h b/media/codecs/m4v_h263/enc/include/mp4enc_api.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
rename to media/codecs/m4v_h263/enc/include/mp4enc_api.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.cpp b/media/codecs/m4v_h263/enc/src/bitstream_io.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.cpp
rename to media/codecs/m4v_h263/enc/src/bitstream_io.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.h b/media/codecs/m4v_h263/enc/src/bitstream_io.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.h
rename to media/codecs/m4v_h263/enc/src/bitstream_io.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/combined_encode.cpp b/media/codecs/m4v_h263/enc/src/combined_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/combined_encode.cpp
rename to media/codecs/m4v_h263/enc/src/combined_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/datapart_encode.cpp b/media/codecs/m4v_h263/enc/src/datapart_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/datapart_encode.cpp
rename to media/codecs/m4v_h263/enc/src/datapart_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp b/media/codecs/m4v_h263/enc/src/dct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp
rename to media/codecs/m4v_h263/enc/src/dct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct.h b/media/codecs/m4v_h263/enc/src/dct.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct.h
rename to media/codecs/m4v_h263/enc/src/dct.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct_inline.h b/media/codecs/m4v_h263/enc/src/dct_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct_inline.h
rename to media/codecs/m4v_h263/enc/src/dct_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.cpp b/media/codecs/m4v_h263/enc/src/fastcodemb.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.cpp
rename to media/codecs/m4v_h263/enc/src/fastcodemb.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.h b/media/codecs/m4v_h263/enc/src/fastcodemb.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.h
rename to media/codecs/m4v_h263/enc/src/fastcodemb.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastidct.cpp b/media/codecs/m4v_h263/enc/src/fastidct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastidct.cpp
rename to media/codecs/m4v_h263/enc/src/fastidct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastquant.cpp b/media/codecs/m4v_h263/enc/src/fastquant.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastquant.cpp
rename to media/codecs/m4v_h263/enc/src/fastquant.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastquant_inline.h b/media/codecs/m4v_h263/enc/src/fastquant_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastquant_inline.h
rename to media/codecs/m4v_h263/enc/src/fastquant_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/findhalfpel.cpp b/media/codecs/m4v_h263/enc/src/findhalfpel.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/findhalfpel.cpp
rename to media/codecs/m4v_h263/enc/src/findhalfpel.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/m4venc_oscl.h b/media/codecs/m4v_h263/enc/src/m4venc_oscl.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/m4venc_oscl.h
rename to media/codecs/m4v_h263/enc/src/m4venc_oscl.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/me_utils.cpp b/media/codecs/m4v_h263/enc/src/me_utils.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/me_utils.cpp
rename to media/codecs/m4v_h263/enc/src/me_utils.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp b/media/codecs/m4v_h263/enc/src/motion_comp.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp
rename to media/codecs/m4v_h263/enc/src/motion_comp.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/motion_est.cpp b/media/codecs/m4v_h263/enc/src/motion_est.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/motion_est.cpp
rename to media/codecs/m4v_h263/enc/src/motion_est.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h b/media/codecs/m4v_h263/enc/src/mp4def.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
rename to media/codecs/m4v_h263/enc/src/mp4def.h
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
new file mode 100644
index 0000000..30e4fda
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -0,0 +1,3310 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+
+#include "mp4enc_lib.h"
+#include "bitstream_io.h"
+#include "rate_control.h"
+#include "m4venc_oscl.h"
+
+#ifndef INT32_MAX
+#define INT32_MAX 0x7fffffff
+#endif
+
+#ifndef SIZE_MAX
+#define SIZE_MAX ((size_t) -1)
+#endif
+
/* Inverse normal zigzag: maps scan position -> raster index in an 8x8 block */
const static Int zigzag_i[NCOEFF_BLOCK] =
{
    0, 1, 8, 16, 9, 2, 3, 10,
    17, 24, 32, 25, 18, 11, 4, 5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13, 6, 7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};

/* INTRA: default MPEG quantization matrix for intra-coded blocks */
const static Int mpeg_iqmat_def[NCOEFF_BLOCK] =
    { 8, 17, 18, 19, 21, 23, 25, 27,
      17, 18, 19, 21, 23, 25, 27, 28,
      20, 21, 22, 23, 24, 26, 28, 30,
      21, 22, 23, 24, 26, 28, 30, 32,
      22, 23, 24, 26, 28, 30, 32, 35,
      23, 24, 26, 28, 30, 32, 35, 38,
      25, 26, 28, 30, 32, 35, 38, 41,
      27, 28, 30, 32, 35, 38, 41, 45
    };

/* INTER: default MPEG quantization matrix for inter-coded blocks */
const static Int mpeg_nqmat_def[64] =
    { 16, 17, 18, 19, 20, 21, 22, 23,
      17, 18, 19, 20, 21, 22, 23, 24,
      18, 19, 20, 21, 22, 23, 24, 25,
      19, 20, 21, 22, 23, 24, 26, 27,
      20, 21, 22, 23, 25, 26, 27, 28,
      21, 22, 23, 24, 26, 27, 28, 30,
      22, 23, 24, 26, 27, 28, 30, 31,
      23, 24, 25, 27, 28, 30, 31, 33
    };

/* Profiles and levels */
/* Simple profile(level 0-3) and Core profile (level 1-2) */
/* {SPL0, SPL1, SPL2, SPL3, CPL1, CPL2, CPL2, CPL2} , SPL0: Simple Profile@Level0, CPL1: Core Profile@Level1, the last two are redundant for easy table manipulation */
const static Int profile_level_code[8] =
{
    0x08, 0x01, 0x02, 0x03, 0x21, 0x22, 0x22, 0x22
};

const static Int profile_level_max_bitrate[8] =
{
    64000, 64000, 128000, 384000, 384000, 2000000, 2000000, 2000000
};

/* NOTE: packet sizes in this table appear to be in bits (cf. the explicit
   "in bits" note on the scalable table below) — TODO confirm */
const static Int profile_level_max_packet_size[8] =
{
    2048, 2048, 4096, 8192, 4096, 8192, 8192, 8192
};

const static Int profile_level_max_mbsPerSec[8] =
{
    1485, 1485, 5940, 11880, 5940, 23760, 23760, 23760
};

const static Int profile_level_max_VBV_size[8] =
{
    163840, 163840, 655360, 655360, 262144, 1310720, 1310720, 1310720
};


/* Simple scalable profile (level 0-2) and Core scalable profile (level 1-3) */
/* {SSPL0, SSPL1, SSPL2, SSPL2, CSPL1, CSPL2, CSPL3, CSPL3} , SSPL0: Simple Scalable Profile@Level0, CSPL1: Core Scalable Profile@Level1, the fourth is redundant for easy table manipulation */

const static Int scalable_profile_level_code[8] =
{
    0x10, 0x11, 0x12, 0x12, 0xA1, 0xA2, 0xA3, 0xA3
};

const static Int scalable_profile_level_max_bitrate[8] =
{
    128000, 128000, 256000, 256000, 768000, 1500000, 4000000, 4000000
};

/* in bits */
const static Int scalable_profile_level_max_packet_size[8] =
{
    2048, 2048, 4096, 4096, 4096, 4096, 16384, 16384
};

const static Int scalable_profile_level_max_mbsPerSec[8] =
{
    1485, 7425, 23760, 23760, 14850, 29700, 120960, 120960
};

const static Int scalable_profile_level_max_VBV_size[8] =
{
    163840, 655360, 655360, 655360, 1048576, 1310720, 1310720, 1310720
};


/* H263 profile 0 @ level 10-70 */
const static Int h263Level[8] = {0, 10, 20, 30, 40, 50, 60, 70};
const static float rBR_bound[8] = {0, 1, 2, 6, 32, 64, 128, 256};
/* Index 0: levels using the 30000/2002 clock; index 1: 30000/1001 (29.97 fps) */
const static float max_h263_framerate[2] = {(float)30000 / (float)2002,
        (float)30000 / (float)1001
                                           };
const static Int max_h263_width[2] = {176, 352};
const static Int max_h263_height[2] = {144, 288};

/* 6/2/2001, newly added functions to make PVEncodeVop more readable. */
Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime);
void DetermineVopType(VideoEncData *video, Int currLayer);
Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status);
Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized);

/* Debug/bookkeeping globals, only present in instrumented builds. */
#ifdef PRINT_RC_INFO
extern FILE *facct;
extern int tiTotalNumBitsGenerated;
extern int iStuffBits;
#endif

#ifdef PRINT_EC
extern FILE *fec;
#endif
+
+
/* ======================================================================== */
/*  Function : PVGetDefaultEncOption()                                      */
/*  Date     : 12/12/2005                                                   */
/*  Purpose  : Fill |encOption| with a default H263-mode configuration.     */
/*  In/out   : encOption - destination struct; encUseCase - currently       */
/*             ignored (reserved for selecting among future presets).       */
/*  Return   : PV_TRUE if successed, PV_FALSE if failed.                    */
/*  Modified :                                                              */
/*                                                                          */
/* ======================================================================== */

OSCL_EXPORT_REF Bool PVGetDefaultEncOption(VideoEncOptions *encOption, Int encUseCase)
{
    /* Positional aggregate initializer: the value order must match the field
       order of VideoEncOptions exactly (declared elsewhere) — keep in sync. */
    VideoEncOptions defaultUseCase = {H263_MODE, profile_level_max_packet_size[SIMPLE_PROFILE_LEVEL0] >> 3,
        SIMPLE_PROFILE_LEVEL0, PV_OFF, 0, 1, 1000, 33, {144, 144}, {176, 176}, {15, 30}, {64000, 128000},
        {10, 10}, {12, 12}, {0, 0}, CBR_1, 0.0, PV_OFF, -1, 0, PV_OFF, 16, PV_OFF, 0, PV_ON
    };

    OSCL_UNUSED_ARG(encUseCase); // unused for now. Later we can add more defaults setting and use this
    // argument to select the right one.
    /* in the future we can create more meaningful use-cases */
    if (encOption == NULL)
    {
        return PV_FALSE;
    }

    M4VENC_MEMCPY(encOption, &defaultUseCase, sizeof(VideoEncOptions));

    return PV_TRUE;
}
+
+/* ======================================================================== */
+/* Function : PVInitVideoEncoder() */
+/* Date : 08/22/2000 */
+/* Purpose : Initialization of MP4 Encoder and VO bitstream */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : 5/21/01, allocate only yChan and assign uChan & vChan */
+/* 12/12/05, add encoding option as input argument */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVInitVideoEncoder(VideoEncControls *encoderControl, VideoEncOptions *encOption)
+{
+
+ Bool status = PV_TRUE;
+ Int nLayers, idx, i, j;
+ Int max = 0, max_width = 0, max_height = 0, pitch, offset;
+ Int size = 0, nTotalMB = 0;
+ VideoEncData *video;
+ Vol *pVol;
+ VideoEncParams *pEncParams;
+ Int temp_w, temp_h, mbsPerSec;
+
+ /******************************************/
+ /* this part use to be PVSetEncode() */
+ Int profile_table_index, *profile_level_table;
+ Int profile_level = encOption->profile_level;
+ Int PacketSize = encOption->packetSize << 3;
+ Int timeInc, timeIncRes;
+ float profile_max_framerate;
+ VideoEncParams *encParams;
+
+ if (encoderControl->videoEncoderData) /* this has been called */
+ {
+ if (encoderControl->videoEncoderInit) /* check if PVInitVideoEncoder() has been called */
+ {
+ PVCleanUpVideoEncoder(encoderControl);
+ encoderControl->videoEncoderInit = 0;
+ }
+
+ M4VENC_FREE(encoderControl->videoEncoderData);
+ encoderControl->videoEncoderData = NULL;
+ }
+ encoderControl->videoEncoderInit = 0; /* reset this value */
+
+ video = (VideoEncData *)M4VENC_MALLOC(sizeof(VideoEncData)); /* allocate memory for encData */
+
+ if (video == NULL)
+ return PV_FALSE;
+
+ M4VENC_MEMSET(video, 0, sizeof(VideoEncData));
+
+ encoderControl->videoEncoderData = (void *) video; /* set up pointer in VideoEncData structure */
+
+ video->encParams = (VideoEncParams *)M4VENC_MALLOC(sizeof(VideoEncParams));
+ if (video->encParams == NULL)
+ goto CLEAN_UP;
+
+ M4VENC_MEMSET(video->encParams, 0, sizeof(VideoEncParams));
+
+ encParams = video->encParams;
+ encParams->nLayers = encOption->numLayers;
+
+ /* Check whether the input packetsize is valid (Note: put code here (before any memory allocation) in order to avoid memory leak */
+ if ((Int)profile_level < (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0)) /* non-scalable profile */
+ {
+ profile_level_table = (Int *)profile_level_max_packet_size;
+ profile_table_index = (Int)profile_level;
+ if (encParams->nLayers != 1)
+ {
+ goto CLEAN_UP;
+ }
+
+ encParams->LayerMaxMbsPerSec[0] = profile_level_max_mbsPerSec[profile_table_index];
+
+ }
+ else /* scalable profile */
+ {
+ profile_level_table = (Int *)scalable_profile_level_max_packet_size;
+ profile_table_index = (Int)profile_level - (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0);
+ if (encParams->nLayers < 2)
+ {
+ goto CLEAN_UP;
+ }
+ for (i = 0; i < encParams->nLayers; i++)
+ {
+ encParams->LayerMaxMbsPerSec[i] = scalable_profile_level_max_mbsPerSec[profile_table_index];
+ }
+
+ }
+
+ /* cannot have zero size packet with these modes */
+ if (PacketSize == 0)
+ {
+ if (encOption->encMode == DATA_PARTITIONING_MODE)
+ {
+ goto CLEAN_UP;
+ }
+ if (encOption->encMode == COMBINE_MODE_WITH_ERR_RES)
+ {
+ encOption->encMode = COMBINE_MODE_NO_ERR_RES;
+ }
+ }
+
+ if (encOption->gobHeaderInterval == 0)
+ {
+ if (encOption->encMode == H263_MODE_WITH_ERR_RES)
+ {
+ encOption->encMode = H263_MODE;
+ }
+
+ if (encOption->encMode == SHORT_HEADER_WITH_ERR_RES)
+ {
+ encOption->encMode = SHORT_HEADER;
+ }
+ }
+
+ if (PacketSize > profile_level_table[profile_table_index])
+ goto CLEAN_UP;
+
+ /* Initial Defaults for all Modes */
+
+ encParams->SequenceStartCode = 1;
+ encParams->GOV_Enabled = 0;
+ encParams->RoundingType = 0;
+ encParams->IntraDCVlcThr = PV_MAX(PV_MIN(encOption->intraDCVlcTh, 7), 0);
+ encParams->ACDCPrediction = ((encOption->useACPred == PV_ON) ? TRUE : FALSE);
+ encParams->RC_Type = encOption->rcType;
+ encParams->Refresh = encOption->numIntraMB;
+ encParams->ResyncMarkerDisable = 0; /* Enable Resync Marker */
+
+ for (i = 0; i < encOption->numLayers; i++)
+ {
+#ifdef NO_MPEG_QUANT
+ encParams->QuantType[i] = 0;
+#else
+ encParams->QuantType[i] = encOption->quantType[i]; /* H263 */
+#endif
+ if (encOption->pQuant[i] >= 1 && encOption->pQuant[i] <= 31)
+ {
+ encParams->InitQuantPvop[i] = encOption->pQuant[i];
+ }
+ else
+ {
+ goto CLEAN_UP;
+ }
+ if (encOption->iQuant[i] >= 1 && encOption->iQuant[i] <= 31)
+ {
+ encParams->InitQuantIvop[i] = encOption->iQuant[i];
+ }
+ else
+ {
+ goto CLEAN_UP;
+ }
+ }
+
+ encParams->HalfPel_Enabled = 1;
+ encParams->SearchRange = encOption->searchRange; /* 4/16/2001 */
+ encParams->FullSearch_Enabled = 0;
+#ifdef NO_INTER4V
+ encParams->MV8x8_Enabled = 0;
+#else
+ encParams->MV8x8_Enabled = 0;// comment out for now!! encOption->mv8x8Enable;
+#endif
+ encParams->H263_Enabled = 0;
+ encParams->GOB_Header_Interval = 0; // need to be reset to 0
+ encParams->IntraPeriod = encOption->intraPeriod; /* Intra update period update default*/
+ encParams->SceneChange_Det = encOption->sceneDetect;
+ encParams->FineFrameSkip_Enabled = 0;
+ encParams->NoFrameSkip_Enabled = encOption->noFrameSkipped;
+ encParams->NoPreSkip_Enabled = encOption->noFrameSkipped;
+ encParams->GetVolHeader[0] = 0;
+ encParams->GetVolHeader[1] = 0;
+ encParams->ResyncPacketsize = encOption->packetSize << 3;
+ encParams->LayerMaxBitRate[0] = 0;
+ encParams->LayerMaxBitRate[1] = 0;
+ encParams->LayerMaxFrameRate[0] = (float)0.0;
+ encParams->LayerMaxFrameRate[1] = (float)0.0;
+ encParams->VBV_delay = encOption->vbvDelay; /* 2sec VBV buffer size */
+
+ switch (encOption->encMode)
+ {
+
+ case SHORT_HEADER:
+ case SHORT_HEADER_WITH_ERR_RES:
+
+ /* From Table 6-26 */
+ encParams->nLayers = 1;
+ encParams->QuantType[0] = 0; /*H263 */
+ encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+ encParams->DataPartitioning = 0; /* Combined Mode */
+ encParams->ReversibleVLC = 0; /* Disable RVLC */
+ encParams->RoundingType = 0;
+ encParams->IntraDCVlcThr = 7; /* use_intra_dc_vlc = 0 */
+ encParams->MV8x8_Enabled = 0;
+
+ encParams->GOB_Header_Interval = encOption->gobHeaderInterval;
+ encParams->H263_Enabled = 2;
+ encParams->GOV_Enabled = 0;
+ encParams->TimeIncrementRes = 30000; /* timeIncrementRes for H263 */
+ break;
+
+ case H263_MODE:
+ case H263_MODE_WITH_ERR_RES:
+
+ /* From Table 6-26 */
+ encParams->nLayers = 1;
+ encParams->QuantType[0] = 0; /*H263 */
+ encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+ encParams->DataPartitioning = 0; /* Combined Mode */
+ encParams->ReversibleVLC = 0; /* Disable RVLC */
+ encParams->RoundingType = 0;
+ encParams->IntraDCVlcThr = 7; /* use_intra_dc_vlc = 0 */
+ encParams->MV8x8_Enabled = 0;
+
+ encParams->H263_Enabled = 1;
+ encParams->GOV_Enabled = 0;
+ encParams->TimeIncrementRes = 30000; /* timeIncrementRes for H263 */
+
+ break;
+#ifndef H263_ONLY
+ case DATA_PARTITIONING_MODE:
+
+ encParams->DataPartitioning = 1; /* Base Layer Data Partitioning */
+ encParams->ResyncMarkerDisable = 0; /* Resync Marker */
+#ifdef NO_RVLC
+ encParams->ReversibleVLC = 0;
+#else
+ encParams->ReversibleVLC = (encOption->rvlcEnable == PV_ON); /* RVLC when Data Partitioning */
+#endif
+ encParams->ResyncPacketsize = PacketSize;
+ break;
+
+ case COMBINE_MODE_WITH_ERR_RES:
+
+ encParams->DataPartitioning = 0; /* Combined Mode */
+ encParams->ResyncMarkerDisable = 0; /* Resync Marker */
+ encParams->ReversibleVLC = 0; /* No RVLC */
+ encParams->ResyncPacketsize = PacketSize;
+ break;
+
+ case COMBINE_MODE_NO_ERR_RES:
+
+ encParams->DataPartitioning = 0; /* Combined Mode */
+ encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+ encParams->ReversibleVLC = 0; /* No RVLC */
+ break;
+#endif
+ default:
+ goto CLEAN_UP;
+ }
+ /* Set the constraints (maximum values) according to the input profile and level */
+ /* Note that profile_table_index is already figured out above */
+
+ /* base layer */
+ encParams->profile_table_index = profile_table_index; /* Used to limit the profile and level in SetProfile_BufferSize() */
+
+ /* check timeIncRes */
+ timeIncRes = encOption->timeIncRes;
+ timeInc = encOption->tickPerSrc;
+
+ if ((timeIncRes >= 1) && (timeIncRes <= 65536) && (timeInc < timeIncRes) && (timeInc != 0))
+ {
+ if (!encParams->H263_Enabled)
+ {
+ encParams->TimeIncrementRes = timeIncRes;
+ }
+ else
+ {
+ encParams->TimeIncrementRes = 30000;
+// video->FrameRate = 30000/(float)1001; /* fix it to 29.97 fps */
+ }
+ video->FrameRate = timeIncRes / ((float)timeInc);
+ }
+ else
+ {
+ goto CLEAN_UP;
+ }
+
+ /* check frame dimension */
+ if (encParams->H263_Enabled)
+ {
+ switch (encOption->encWidth[0])
+ {
+ case 128:
+ if (encOption->encHeight[0] != 96) /* source_format = 1 */
+ goto CLEAN_UP;
+ break;
+ case 176:
+ if (encOption->encHeight[0] != 144) /* source_format = 2 */
+ goto CLEAN_UP;
+ break;
+ case 352:
+ if (encOption->encHeight[0] != 288) /* source_format = 2 */
+ goto CLEAN_UP;
+ break;
+
+ case 704:
+ if (encOption->encHeight[0] != 576) /* source_format = 2 */
+ goto CLEAN_UP;
+ break;
+ case 1408:
+ if (encOption->encHeight[0] != 1152) /* source_format = 2 */
+ goto CLEAN_UP;
+ break;
+
+ default:
+ goto CLEAN_UP;
+ }
+ }
+ for (i = 0; i < encParams->nLayers; i++)
+ {
+ if (encOption->encHeight[i] == 0 || encOption->encWidth[i] == 0 ||
+ encOption->encHeight[i] % 16 != 0 || encOption->encWidth[i] % 16 != 0)
+ goto CLEAN_UP;
+ encParams->LayerHeight[i] = encOption->encHeight[i];
+ encParams->LayerWidth[i] = encOption->encWidth[i];
+ }
+
+ /* check frame rate */
+ for (i = 0; i < encParams->nLayers; i++)
+ {
+ encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
+ }
+
+ if (encParams->nLayers > 1)
+ {
+ if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
+ encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+ goto CLEAN_UP;
+ }
+ /* set max frame rate */
+ for (i = 0; i < encParams->nLayers; i++)
+ {
+
+ /* Make sure the maximum framerate is consistent with the given profile and level */
+ nTotalMB = ((encParams->LayerWidth[i] + 15) / 16) * ((encParams->LayerHeight[i] + 15) / 16);
+
+ if (nTotalMB > 0)
+ profile_max_framerate = (float)encParams->LayerMaxMbsPerSec[i] / (float)nTotalMB;
+
+ else
+ profile_max_framerate = (float)30.0;
+
+ encParams->LayerMaxFrameRate[i] = PV_MIN(profile_max_framerate, encParams->LayerFrameRate[i]);
+ }
+
+ /* check bit rate */
+ /* set max bit rate */
+ for (i = 0; i < encParams->nLayers; i++)
+ {
+ encParams->LayerBitRate[i] = encOption->bitRate[i];
+ encParams->LayerMaxBitRate[i] = encOption->bitRate[i];
+ }
+ if (encParams->nLayers > 1)
+ {
+ if (encOption->bitRate[0] == encOption->bitRate[1] ||
+ encOption->bitRate[0] == 0 || encOption->bitRate[1] == 0) /* 7/31/03 */
+ goto CLEAN_UP;
+ }
+ /* check rate control and vbv delay*/
+ encParams->RC_Type = encOption->rcType;
+
+ if (encOption->vbvDelay == 0.0) /* set to default */
+ {
+ switch (encOption->rcType)
+ {
+ case CBR_1:
+ case CBR_2:
+ encParams->VBV_delay = (float)2.0; /* default 2sec VBV buffer size */
+ break;
+
+ case CBR_LOWDELAY:
+ encParams->VBV_delay = (float)0.5; /* default 0.5sec VBV buffer size */
+ break;
+
+ case VBR_1:
+ case VBR_2:
+ encParams->VBV_delay = (float)10.0; /* default 10sec VBV buffer size */
+ break;
+ default:
+ break;
+ }
+ }
+ else /* force this value */
+ {
+ encParams->VBV_delay = encOption->vbvDelay;
+ }
+
+ /* check search range */
+ if (encParams->H263_Enabled && encOption->searchRange > 16)
+ {
+ encParams->SearchRange = 16; /* 4/16/2001 */
+ }
+
+ /*****************************************/
+ /* checking for conflict between options */
+ /*****************************************/
+
+ if (video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2 || video->encParams->RC_Type == CBR_LOWDELAY) /* if CBR */
+ {
+#ifdef _PRINT_STAT
+ if (video->encParams->NoFrameSkip_Enabled == PV_ON ||
+ video->encParams->NoPreSkip_Enabled == PV_ON) /* don't allow frame skip*/
+ printf("WARNING!!!! CBR with NoFrameSkip\n");
+#endif
+ }
+ else if (video->encParams->RC_Type == CONSTANT_Q) /* constant_Q */
+ {
+ video->encParams->NoFrameSkip_Enabled = PV_ON; /* no frame skip */
+ video->encParams->NoPreSkip_Enabled = PV_ON; /* no frame skip */
+#ifdef _PRINT_STAT
+ printf("Turn on NoFrameSkip\n");
+#endif
+ }
+
+ if (video->encParams->NoFrameSkip_Enabled == PV_ON) /* if no frame skip */
+ {
+ video->encParams->FineFrameSkip_Enabled = PV_OFF;
+#ifdef _PRINT_STAT
+ printf("NoFrameSkip !!! may violate VBV_BUFFER constraint.\n");
+ printf("Turn off FineFrameSkip\n");
+#endif
+ }
+
+ /******************************************/
+ /******************************************/
+
+ nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
+
+ /* Find the maximum width*height for memory allocation of the VOPs */
+ for (idx = 0; idx < nLayers; idx++)
+ {
+ temp_w = video->encParams->LayerWidth[idx];
+ temp_h = video->encParams->LayerHeight[idx];
+
+ if ((temp_w*temp_h) > max)
+ {
+ max = temp_w * temp_h;
+ max_width = ((temp_w + 15) >> 4) << 4;
+ max_height = ((temp_h + 15) >> 4) << 4;
+ if (((uint64_t)max_width * max_height) > (uint64_t)INT32_MAX
+ || temp_w > INT32_MAX - 15 || temp_h > INT32_MAX - 15) {
+ goto CLEAN_UP;
+ }
+ nTotalMB = ((max_width * max_height) >> 8);
+ }
+
+ /* Check if the video size and framerate(MBsPerSec) are vald */
+ mbsPerSec = (Int)(nTotalMB * video->encParams->LayerFrameRate[idx]);
+ if (mbsPerSec > video->encParams->LayerMaxMbsPerSec[idx]) status = PV_FALSE;
+ }
+
+ /****************************************************/
+ /* Set Profile and Video Buffer Size for each layer */
+ /****************************************************/
+ if (video->encParams->RC_Type == CBR_LOWDELAY) video->encParams->VBV_delay = 0.5; /* For CBR_LOWDELAY, we set 0.5sec buffer */
+ status = SetProfile_BufferSize(video, video->encParams->VBV_delay, 1);
+ if (status != PV_TRUE)
+ goto CLEAN_UP;
+
+ /****************************************/
+ /* memory allocation and initialization */
+ /****************************************/
+
+ if (video == NULL) goto CLEAN_UP;
+
+ /* cyclic reference for passing through both structures */
+ video->videoEncControls = encoderControl;
+
+ //video->currLayer = 0; /* Set current Layer to 0 */
+ //video->currFrameNo = 0; /* Set current frame Number to 0 */
+ video->nextModTime = 0;
+ video->nextEncIVop = 0; /* Sets up very first frame to be I-VOP! */
+ video->numVopsInGOP = 0; /* counter for Vops in Gop, 2/8/01 */
+
+ //video->frameRate = video->encParams->LayerFrameRate[0]; /* Set current layer frame rate */
+
+ video->QPMB = (UChar *) M4VENC_MALLOC(nTotalMB * sizeof(UChar)); /* Memory for MB quantizers */
+ if (video->QPMB == NULL) goto CLEAN_UP;
+
+
+ video->headerInfo.Mode = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for MB Modes */
+ if (video->headerInfo.Mode == NULL) goto CLEAN_UP;
+ video->headerInfo.CBP = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for CBP (Y and C) of each MB */
+ if (video->headerInfo.CBP == NULL) goto CLEAN_UP;
+
+ /* Allocating motion vector space and interpolation memory*/
+
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(MOT *)) {
+ goto CLEAN_UP;
+ }
+ video->mot = (MOT **)M4VENC_MALLOC(sizeof(MOT *) * nTotalMB);
+ if (video->mot == NULL) goto CLEAN_UP;
+
+ for (idx = 0; idx < nTotalMB; idx++)
+ {
+ video->mot[idx] = (MOT *)M4VENC_MALLOC(sizeof(MOT) * 8);
+ if (video->mot[idx] == NULL)
+ {
+ goto CLEAN_UP;
+ }
+ }
+
+ video->intraArray = (UChar *)M4VENC_MALLOC(sizeof(UChar) * nTotalMB);
+ if (video->intraArray == NULL) goto CLEAN_UP;
+
+ video->sliceNo = (UChar *) M4VENC_MALLOC(nTotalMB); /* Memory for Slice Numbers */
+ if (video->sliceNo == NULL) goto CLEAN_UP;
+ /* Allocating space for predDCAC[][8][16], Not that I intentionally */
+ /* increase the dimension of predDCAC from [][6][15] to [][8][16] */
+ /* so that compilers can generate faster code to indexing the */
+ /* data inside (by using << instead of *). 04/14/2000. */
+ /* 5/29/01, use decoder lib ACDC prediction memory scheme. */
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
+ goto CLEAN_UP;
+ }
+ video->predDC = (typeDCStore *) M4VENC_MALLOC(nTotalMB * sizeof(typeDCStore));
+ if (video->predDC == NULL) goto CLEAN_UP;
+
+ if (!video->encParams->H263_Enabled)
+ {
+ if ((size_t)((max_width >> 4) + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
+ goto CLEAN_UP;
+ }
+ video->predDCAC_col = (typeDCACStore *) M4VENC_MALLOC(((max_width >> 4) + 1) * sizeof(typeDCACStore));
+ if (video->predDCAC_col == NULL) goto CLEAN_UP;
+
+ /* element zero will be used for storing vertical (col) AC coefficients */
+ /* the rest will be used for storing horizontal (row) AC coefficients */
+ video->predDCAC_row = video->predDCAC_col + 1; /* ACDC */
+
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(Int)) {
+ goto CLEAN_UP;
+ }
+ video->acPredFlag = (Int *) M4VENC_MALLOC(nTotalMB * sizeof(Int)); /* Memory for acPredFlag */
+ if (video->acPredFlag == NULL) goto CLEAN_UP;
+ }
+
+ video->outputMB = (MacroBlock *) M4VENC_MALLOC(sizeof(MacroBlock)); /* Allocating macroblock space */
+ if (video->outputMB == NULL) goto CLEAN_UP;
+ M4VENC_MEMSET(video->outputMB->block[0], 0, (sizeof(Short) << 6)*6);
+
+ M4VENC_MEMSET(video->dataBlock, 0, sizeof(Short) << 7);
+ /* Allocate (2*packetsize) working bitstreams */
+
+ video->bitstream1 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 1*/
+ if (video->bitstream1 == NULL) goto CLEAN_UP;
+ video->bitstream2 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 2*/
+ if (video->bitstream2 == NULL) goto CLEAN_UP;
+ video->bitstream3 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 3*/
+ if (video->bitstream3 == NULL) goto CLEAN_UP;
+
+ /* allocate overrun buffer */
+ // this buffer is used when user's buffer is too small to hold one frame.
+ // It is not needed for slice-based encoding.
+ if (nLayers == 1)
+ {
+ video->oBSize = encParams->BufferSize[0] >> 3;
+ }
+ else
+ {
+ video->oBSize = PV_MAX((encParams->BufferSize[0] >> 3), (encParams->BufferSize[1] >> 3));
+ }
+
+ if (video->oBSize > DEFAULT_OVERRUN_BUFFER_SIZE || encParams->RC_Type == CONSTANT_Q) // set limit
+ {
+ video->oBSize = DEFAULT_OVERRUN_BUFFER_SIZE;
+ }
+ video->overrunBuffer = (UChar*) M4VENC_MALLOC(sizeof(UChar) * video->oBSize);
+ if (video->overrunBuffer == NULL) goto CLEAN_UP;
+
+
+ video->currVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Current VOP */
+ if (video->currVop == NULL) goto CLEAN_UP;
+
+ /* add padding, 09/19/05 */
+ if (video->encParams->H263_Enabled) /* make it conditional 11/28/05 */
+ {
+ pitch = max_width;
+ offset = 0;
+ }
+ else
+ {
+ pitch = max_width + 32;
+ offset = (pitch << 4) + 16;
+ max_height += 32;
+ }
+ if (((uint64_t)pitch * max_height) > (uint64_t)INT32_MAX) {
+ goto CLEAN_UP;
+ }
+ size = pitch * max_height;
+
+ if (size > INT32_MAX - (size >> 1)
+ || (size_t)(size + (size >> 1)) > SIZE_MAX / sizeof(PIXEL)) {
+ goto CLEAN_UP;
+ }
+ video->currVop->allChan = video->currVop->yChan = (PIXEL *)M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for currVop Y */
+ if (video->currVop->yChan == NULL) goto CLEAN_UP;
+ video->currVop->uChan = video->currVop->yChan + size;/* Memory for currVop U */
+ video->currVop->vChan = video->currVop->uChan + (size >> 2);/* Memory for currVop V */
+
+ /* shift for the offset */
+ if (offset)
+ {
+ video->currVop->yChan += offset; /* offset to the origin.*/
+ video->currVop->uChan += (offset >> 2) + 4;
+ video->currVop->vChan += (offset >> 2) + 4;
+ }
+
+ video->forwardRefVop = video->currVop; /* Initialize forwardRefVop */
+ video->backwardRefVop = video->currVop; /* Initialize backwardRefVop */
+
+ video->prevBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Previous Base Vop */
+ if (video->prevBaseVop == NULL) goto CLEAN_UP;
+ video->prevBaseVop->allChan = video->prevBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for prevBaseVop Y */
+ if (video->prevBaseVop->yChan == NULL) goto CLEAN_UP;
+ video->prevBaseVop->uChan = video->prevBaseVop->yChan + size; /* Memory for prevBaseVop U */
+ video->prevBaseVop->vChan = video->prevBaseVop->uChan + (size >> 2); /* Memory for prevBaseVop V */
+
+ if (offset)
+ {
+ video->prevBaseVop->yChan += offset; /* offset to the origin.*/
+ video->prevBaseVop->uChan += (offset >> 2) + 4;
+ video->prevBaseVop->vChan += (offset >> 2) + 4;
+ }
+
+
+ if (0) /* If B Frames */
+ {
+ video->nextBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Next Base Vop */
+ if (video->nextBaseVop == NULL) goto CLEAN_UP;
+ video->nextBaseVop->allChan = video->nextBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for nextBaseVop Y */
+ if (video->nextBaseVop->yChan == NULL) goto CLEAN_UP;
+ video->nextBaseVop->uChan = video->nextBaseVop->yChan + size; /* Memory for nextBaseVop U */
+ video->nextBaseVop->vChan = video->nextBaseVop->uChan + (size >> 2); /* Memory for nextBaseVop V */
+
+ if (offset)
+ {
+ video->nextBaseVop->yChan += offset; /* offset to the origin.*/
+ video->nextBaseVop->uChan += (offset >> 2) + 4;
+ video->nextBaseVop->vChan += (offset >> 2) + 4;
+ }
+ }
+
+ if (nLayers > 1) /* If enhancement layers */
+ {
+ video->prevEnhanceVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Previous Enhancement Vop */
+ if (video->prevEnhanceVop == NULL) goto CLEAN_UP;
+ video->prevEnhanceVop->allChan = video->prevEnhanceVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for Previous Ehancement Y */
+ if (video->prevEnhanceVop->yChan == NULL) goto CLEAN_UP;
+ video->prevEnhanceVop->uChan = video->prevEnhanceVop->yChan + size; /* Memory for Previous Enhancement U */
+ video->prevEnhanceVop->vChan = video->prevEnhanceVop->uChan + (size >> 2); /* Memory for Previous Enhancement V */
+
+ if (offset)
+ {
+ video->prevEnhanceVop->yChan += offset; /* offset to the origin.*/
+ video->prevEnhanceVop->uChan += (offset >> 2) + 4;
+ video->prevEnhanceVop->vChan += (offset >> 2) + 4;
+ }
+ }
+
+ video->numberOfLayers = nLayers; /* Number of Layers */
+ video->sumMAD = 0;
+
+
+ /* 04/09/01, for Vops in the use multipass processing */
+ for (idx = 0; idx < nLayers; idx++)
+ {
+ video->pMP[idx] = (MultiPass *)M4VENC_MALLOC(sizeof(MultiPass));
+ if (video->pMP[idx] == NULL) goto CLEAN_UP;
+ M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
+
+ video->pMP[idx]->encoded_frames = -1; /* forget about the very first I frame */
+
+
+ /* RDInfo **pRDSamples */
+ video->pMP[idx]->pRDSamples = (RDInfo **)M4VENC_MALLOC(30 * sizeof(RDInfo *));
+ if (video->pMP[idx]->pRDSamples == NULL) goto CLEAN_UP;
+ for (i = 0; i < 30; i++)
+ {
+ video->pMP[idx]->pRDSamples[i] = (RDInfo *)M4VENC_MALLOC(32 * sizeof(RDInfo));
+ if (video->pMP[idx]->pRDSamples[i] == NULL) goto CLEAN_UP;
+ for (j = 0; j < 32; j++) M4VENC_MEMSET(&(video->pMP[idx]->pRDSamples[i][j]), 0, sizeof(RDInfo));
+ }
+ video->pMP[idx]->frameRange = (Int)(video->encParams->LayerFrameRate[idx] * 1.0); /* 1.0s time frame*/
+ video->pMP[idx]->frameRange = PV_MAX(video->pMP[idx]->frameRange, 5);
+ video->pMP[idx]->frameRange = PV_MIN(video->pMP[idx]->frameRange, 30);
+
+ video->pMP[idx]->framePos = -1;
+
+ }
+ /* /// End /////////////////////////////////////// */
+
+
+ if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
+ goto CLEAN_UP;
+ }
+ video->vol = (Vol **)M4VENC_MALLOC(nLayers * sizeof(Vol *)); /* Memory for VOL pointers */
+
+ /* Memory allocation and Initialization of Vols and writing of headers */
+ if (video->vol == NULL) goto CLEAN_UP;
+
+ for (idx = 0; idx < nLayers; idx++)
+ {
+ video->volInitialize[idx] = 1;
+ video->refTick[idx] = 0;
+ video->relLayerCodeTime[idx] = 1000;
+ video->vol[idx] = (Vol *)M4VENC_MALLOC(sizeof(Vol));
+ if (video->vol[idx] == NULL) goto CLEAN_UP;
+
+ pVol = video->vol[idx];
+ pEncParams = video->encParams;
+
+ M4VENC_MEMSET(video->vol[idx], 0, sizeof(Vol));
+ /* Initialize some VOL parameters */
+ pVol->volID = idx; /* Set VOL ID */
+ pVol->shortVideoHeader = pEncParams->H263_Enabled; /*Short Header */
+ pVol->GOVStart = pEncParams->GOV_Enabled; /* GOV Header */
+ pVol->timeIncrementResolution = video->encParams->TimeIncrementRes;
+ pVol->nbitsTimeIncRes = 1;
+ while (pVol->timeIncrementResolution > (1 << pVol->nbitsTimeIncRes))
+ {
+ pVol->nbitsTimeIncRes++;
+ }
+
+ /* timing stuff */
+ pVol->timeIncrement = 0;
+ pVol->moduloTimeBase = 0;
+ pVol->fixedVopRate = 0; /* No fixed VOP rate */
+ pVol->stream = (BitstreamEncVideo *)M4VENC_MALLOC(sizeof(BitstreamEncVideo)); /* allocate BitstreamEncVideo Instance */
+ if (pVol->stream == NULL) goto CLEAN_UP;
+
+ pVol->width = pEncParams->LayerWidth[idx]; /* Layer Width */
+ pVol->height = pEncParams->LayerHeight[idx]; /* Layer Height */
+ // pVol->intra_acdcPredDisable = pEncParams->ACDCPrediction; /* ACDC Prediction */
+ pVol->ResyncMarkerDisable = pEncParams->ResyncMarkerDisable; /* Resync Marker Mode */
+ pVol->dataPartitioning = pEncParams->DataPartitioning; /* Data Partitioning */
+ pVol->useReverseVLC = pEncParams->ReversibleVLC; /* RVLC */
+ if (idx > 0) /* Scalability layers */
+ {
+ pVol->ResyncMarkerDisable = 1;
+ pVol->dataPartitioning = 0;
+ pVol->useReverseVLC = 0; /* No RVLC */
+ }
+ pVol->quantType = pEncParams->QuantType[idx]; /* Quantizer Type */
+
+ /* no need to init Quant Matrices */
+
+ pVol->scalability = 0; /* Vol Scalability */
+ if (idx > 0)
+ pVol->scalability = 1; /* Multiple layers => Scalability */
+
+ /* Initialize Vol to Temporal scalability. It can change during encoding */
+ pVol->scalType = 1;
+ /* Initialize reference Vol ID to the base layer = 0 */
+ pVol->refVolID = 0;
+ /* Initialize layer resolution to same as the reference */
+ pVol->refSampDir = 0;
+ pVol->horSamp_m = 1;
+ pVol->horSamp_n = 1;
+ pVol->verSamp_m = 1;
+ pVol->verSamp_n = 1;
+ pVol->enhancementType = 0; /* We always enhance the entire region */
+
+ pVol->nMBPerRow = (pVol->width + 15) / 16;
+ pVol->nMBPerCol = (pVol->height + 15) / 16;
+ pVol->nTotalMB = pVol->nMBPerRow * pVol->nMBPerCol;
+
+ if (pVol->nTotalMB >= 1)
+ pVol->nBitsForMBID = 1;
+ if (pVol->nTotalMB >= 3)
+ pVol->nBitsForMBID = 2;
+ if (pVol->nTotalMB >= 5)
+ pVol->nBitsForMBID = 3;
+ if (pVol->nTotalMB >= 9)
+ pVol->nBitsForMBID = 4;
+ if (pVol->nTotalMB >= 17)
+ pVol->nBitsForMBID = 5;
+ if (pVol->nTotalMB >= 33)
+ pVol->nBitsForMBID = 6;
+ if (pVol->nTotalMB >= 65)
+ pVol->nBitsForMBID = 7;
+ if (pVol->nTotalMB >= 129)
+ pVol->nBitsForMBID = 8;
+ if (pVol->nTotalMB >= 257)
+ pVol->nBitsForMBID = 9;
+ if (pVol->nTotalMB >= 513)
+ pVol->nBitsForMBID = 10;
+ if (pVol->nTotalMB >= 1025)
+ pVol->nBitsForMBID = 11;
+ if (pVol->nTotalMB >= 2049)
+ pVol->nBitsForMBID = 12;
+ if (pVol->nTotalMB >= 4097)
+ pVol->nBitsForMBID = 13;
+ if (pVol->nTotalMB >= 8193)
+ pVol->nBitsForMBID = 14;
+ if (pVol->nTotalMB >= 16385)
+ pVol->nBitsForMBID = 15;
+ if (pVol->nTotalMB >= 32769)
+ pVol->nBitsForMBID = 16;
+ if (pVol->nTotalMB >= 65537)
+ pVol->nBitsForMBID = 17;
+ if (pVol->nTotalMB >= 131073)
+ pVol->nBitsForMBID = 18;
+
+ if (pVol->shortVideoHeader)
+ {
+ switch (pVol->width)
+ {
+ case 128:
+ if (pVol->height == 96) /* source_format = 1 */
+ {
+ pVol->nGOBinVop = 6;
+ pVol->nMBinGOB = 8;
+ }
+ else
+ status = PV_FALSE;
+ break;
+
+ case 176:
+ if (pVol->height == 144) /* source_format = 2 */
+ {
+ pVol->nGOBinVop = 9;
+ pVol->nMBinGOB = 11;
+ }
+ else
+ status = PV_FALSE;
+ break;
+ case 352:
+ if (pVol->height == 288) /* source_format = 2 */
+ {
+ pVol->nGOBinVop = 18;
+ pVol->nMBinGOB = 22;
+ }
+ else
+ status = PV_FALSE;
+ break;
+
+ case 704:
+ if (pVol->height == 576) /* source_format = 2 */
+ {
+ pVol->nGOBinVop = 18;
+ pVol->nMBinGOB = 88;
+ }
+ else
+ status = PV_FALSE;
+ break;
+ case 1408:
+ if (pVol->height == 1152) /* source_format = 2 */
+ {
+ pVol->nGOBinVop = 18;
+ pVol->nMBinGOB = 352;
+ }
+ else
+ status = PV_FALSE;
+ break;
+
+ default:
+ status = PV_FALSE;
+ break;
+ }
+ }
+ }
+
+ /***************************************************/
+ /* allocate and initialize rate control parameters */
+ /***************************************************/
+
+ /* BEGIN INITIALIZATION OF ANNEX L RATE CONTROL */
+ if (video->encParams->RC_Type != CONSTANT_Q)
+ {
+ for (idx = 0; idx < nLayers; idx++) /* 12/25/00 */
+ {
+ video->rc[idx] =
+ (rateControl *)M4VENC_MALLOC(sizeof(rateControl));
+
+ if (video->rc[idx] == NULL) goto CLEAN_UP;
+
+ M4VENC_MEMSET(video->rc[idx], 0, sizeof(rateControl));
+ }
+ if (PV_SUCCESS != RC_Initialize(video))
+ {
+ goto CLEAN_UP;
+ }
+ /* initialization for 2-pass rate control */
+ }
+ /* END INITIALIZATION OF ANNEX L RATE CONTROL */
+
+ /********** assign platform dependent functions ***********************/
+ /* 1/23/01 */
+ /* This must be done at run-time not a compile time */
+ video->functionPointer = (FuncPtr*) M4VENC_MALLOC(sizeof(FuncPtr));
+ if (video->functionPointer == NULL) goto CLEAN_UP;
+
+ video->functionPointer->ComputeMBSum = &ComputeMBSum_C;
+ video->functionPointer->SAD_MB_HalfPel[0] = NULL;
+ video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HalfPel_Cxh;
+ video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HalfPel_Cyh;
+ video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HalfPel_Cxhyh;
+
+#ifndef NO_INTER4V
+ video->functionPointer->SAD_Blk_HalfPel = &SAD_Blk_HalfPel_C;
+ video->functionPointer->SAD_Block = &SAD_Block_C;
+#endif
+ video->functionPointer->SAD_Macroblock = &SAD_Macroblock_C;
+ video->functionPointer->ChooseMode = &ChooseMode_C;
+ video->functionPointer->GetHalfPelMBRegion = &GetHalfPelMBRegion_C;
+// video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING; /* 4/21/01 */
+
+
+ encoderControl->videoEncoderInit = 1; /* init done! */
+
+ return PV_TRUE;
+
+CLEAN_UP:
+ PVCleanUpVideoEncoder(encoderControl);
+
+ return PV_FALSE;
+}
+
+
+/* ======================================================================== */
+/* Function : PVCleanUpVideoEncoder() */
+/* Date : 08/22/2000 */
+/* Purpose : Deallocates allocated memory from InitVideoEncoder() */
+/* In/out : */
+/* Return : PV_TRUE if succeeded, PV_FALSE if failed. */
+/* Modified : 5/21/01, free only yChan in Vop */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVCleanUpVideoEncoder(VideoEncControls *encoderControl)
+{
+ Int idx, i;
+ VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+ int nTotalMB;
+ int max_width, offset;
+
+#ifdef PRINT_RC_INFO
+ /* Debug-only rate-control accounting dump; compiled out by default. */
+ if (facct != NULL)
+ {
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "TOTAL NUM BITS GENERATED %d\n", tiTotalNumBitsGenerated);
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "TOTAL NUMBER OF FRAMES CODED %d\n",
+ video->encParams->rc[0]->totalFrameNumber);
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "Average BitRate %d\n",
+ (tiTotalNumBitsGenerated / (90 / 30)));
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "TOTAL NUMBER OF STUFF BITS %d\n", (iStuffBits + 10740));
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "TOTAL NUMBER OF BITS TO NETWORK %d\n", (35800*90 / 30));;
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "SUM OF STUFF BITS AND GENERATED BITS %d\n",
+ (tiTotalNumBitsGenerated + iStuffBits + 10740));
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fprintf(facct, "UNACCOUNTED DIFFERENCE %d\n",
+ ((35800*90 / 30) - (tiTotalNumBitsGenerated + iStuffBits + 10740)));
+ fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+ fclose(facct);
+ }
+#endif
+
+#ifdef PRINT_EC
+ fclose(fec);
+#endif
+
+ /* If InitVideoEncoder failed early, video may be NULL; in that case only
+ the init flag is cleared below. */
+ if (video != NULL)
+ {
+
+ if (video->QPMB) M4VENC_FREE(video->QPMB);
+ if (video->headerInfo.Mode)M4VENC_FREE(video->headerInfo.Mode);
+ if (video->headerInfo.CBP)M4VENC_FREE(video->headerInfo.CBP);
+
+
+ /* Free the per-macroblock motion-vector arrays.
+ NOTE(review): video->vol[0] is dereferenced here without a NULL
+ check; if cleanup runs because vol allocation failed during init
+ (mot is allocated before vol there), this dereferences NULL —
+ confirm against InitVideoEncoder's CLEAN_UP ordering.
+ NOTE(review): the max loop bound uses currLayer, not nLayers —
+ verify this matches the count used at allocation time. */
+ if (video->mot)
+ {
+ nTotalMB = video->vol[0]->nTotalMB;
+ for (idx = 1; idx < video->currLayer; idx++)
+ if (video->vol[idx]->nTotalMB > nTotalMB)
+ nTotalMB = video->vol[idx]->nTotalMB;
+ for (idx = 0; idx < nTotalMB; idx++)
+ {
+ if (video->mot[idx])
+ M4VENC_FREE(video->mot[idx]);
+ }
+ M4VENC_FREE(video->mot);
+ }
+
+ if (video->intraArray) M4VENC_FREE(video->intraArray);
+
+ if (video->sliceNo)M4VENC_FREE(video->sliceNo);
+ if (video->acPredFlag)M4VENC_FREE(video->acPredFlag);
+// if(video->predDCAC)M4VENC_FREE(video->predDCAC);
+ if (video->predDC) M4VENC_FREE(video->predDC);
+ /* predDCAC_row aliases predDCAC_col + 1 (set at allocation), so only
+ predDCAC_col owns memory; clear the alias instead of freeing it. */
+ video->predDCAC_row = NULL;
+ if (video->predDCAC_col) M4VENC_FREE(video->predDCAC_col);
+ if (video->outputMB)M4VENC_FREE(video->outputMB);
+
+ if (video->bitstream1)BitstreamCloseEnc(video->bitstream1);
+ if (video->bitstream2)BitstreamCloseEnc(video->bitstream2);
+ if (video->bitstream3)BitstreamCloseEnc(video->bitstream3);
+
+ if (video->overrunBuffer) M4VENC_FREE(video->overrunBuffer);
+
+ /* NOTE(review): max_width and offset are recomputed here but no longer
+ used below — since the 5/21/01 change each Vop frees its allChan base
+ pointer, so the origin offset is irrelevant. Candidate dead code. */
+ max_width = video->encParams->LayerWidth[0];
+ max_width = (((max_width + 15) >> 4) << 4); /* 09/19/05 */
+ if (video->encParams->H263_Enabled)
+ {
+ offset = 0;
+ }
+ else
+ {
+ offset = ((max_width + 32) << 4) + 16;
+ }
+
+ /* Free each Vop; allChan is the unshifted base of the Y/U/V plane
+ allocation (yChan/uChan/vChan may carry the padding offset). */
+ if (video->currVop)
+ {
+ if (video->currVop->allChan)
+ {
+ M4VENC_FREE(video->currVop->allChan);
+ }
+ M4VENC_FREE(video->currVop);
+ }
+
+ if (video->nextBaseVop)
+ {
+ if (video->nextBaseVop->allChan)
+ {
+ M4VENC_FREE(video->nextBaseVop->allChan);
+ }
+ M4VENC_FREE(video->nextBaseVop);
+ }
+
+ if (video->prevBaseVop)
+ {
+ if (video->prevBaseVop->allChan)
+ {
+ M4VENC_FREE(video->prevBaseVop->allChan);
+ }
+ M4VENC_FREE(video->prevBaseVop);
+ }
+ if (video->prevEnhanceVop)
+ {
+ if (video->prevEnhanceVop->allChan)
+ {
+ M4VENC_FREE(video->prevEnhanceVop->allChan);
+ }
+ M4VENC_FREE(video->prevEnhanceVop);
+ }
+
+ /* 04/09/01, for Vops in the use multipass processing */
+ /* Free the 30x32 RD-sample tables behind each layer's MultiPass. */
+ for (idx = 0; idx < video->encParams->nLayers; idx++)
+ {
+ if (video->pMP[idx])
+ {
+ if (video->pMP[idx]->pRDSamples)
+ {
+ for (i = 0; i < 30; i++)
+ {
+ if (video->pMP[idx]->pRDSamples[i])
+ M4VENC_FREE(video->pMP[idx]->pRDSamples[i]);
+ }
+ M4VENC_FREE(video->pMP[idx]->pRDSamples);
+ }
+
+ M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
+ M4VENC_FREE(video->pMP[idx]);
+ }
+ }
+ /* // End /////////////////////////////////////// */
+
+ if (video->vol)
+ {
+ for (idx = 0; idx < video->encParams->nLayers; idx++)
+ {
+ if (video->vol[idx])
+ {
+ if (video->vol[idx]->stream)
+ M4VENC_FREE(video->vol[idx]->stream);
+ M4VENC_FREE(video->vol[idx]);
+ }
+ }
+ M4VENC_FREE(video->vol);
+ }
+
+ /***************************************************/
+ /* stop rate control parameters */
+ /***************************************************/
+
+ /* ANNEX L RATE CONTROL */
+ if (video->encParams->RC_Type != CONSTANT_Q)
+ {
+ RC_Cleanup(video->rc, video->encParams->nLayers);
+
+ for (idx = 0; idx < video->encParams->nLayers; idx++)
+ {
+ if (video->rc[idx])
+ M4VENC_FREE(video->rc[idx]);
+ }
+ }
+
+ if (video->functionPointer) M4VENC_FREE(video->functionPointer);
+
+ /* If application has called PVCleanUpVideoEncoder then we deallocate */
+ /* If PVInitVideoEncoder calls it, then we DO NOT deallocate */
+ if (video->encParams)
+ {
+ M4VENC_FREE(video->encParams);
+ }
+
+ M4VENC_FREE(video);
+ encoderControl->videoEncoderData = NULL; /* video */
+ }
+
+ encoderControl->videoEncoderInit = 0;
+
+ return PV_TRUE;
+}
+
+/* ======================================================================== */
+/* Function : PVGetVolHeader() */
+/* Date : 7/17/2001, */
+/* Purpose : */
+/* In/out : */
+/* Return : PV_TRUE if succeeded, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetVolHeader(VideoEncControls *encCtrl, UChar *volHeader, Int *size, Int layer)
+{
+ VideoEncData *encData;
+ /* Local prototype for the internal VOS/VOL header encoder. */
+ PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+
+ /* Guard against an uninitialized or partially initialized encoder. */
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+
+ encData->currLayer = layer; /* Set Layer */
+ /*pv_status = */
+ /* Return status is intentionally ignored here; failure surfaces below
+ via the byteCount comparison. */
+ EncodeVOS_Start(encCtrl); /* Encode VOL Header */
+
+ encData->encParams->GetVolHeader[layer] = 1; /* Set usage flag: Needed to support old method*/
+
+ /* Copy bitstream to buffer and set the size */
+ /* *size is in/out: on input the capacity of volHeader, on output the
+ number of bytes written. Capacity must be STRICTLY greater than the
+ header byteCount (uses '>', not '>='). */
+
+ if (*size > encData->bitstream1->byteCount)
+ {
+ *size = encData->bitstream1->byteCount;
+ M4VENC_MEMCPY(volHeader, encData->bitstream1->bitstreamBuffer, *size);
+ }
+ else
+ return PV_FALSE;
+ /* Note: on the failure path above, bitstream1 is NOT reset, so the
+ encoded header remains buffered. */
+
+ /* Reset bitstream1 buffer parameters */
+ BitstreamEncReset(encData->bitstream1);
+
+ return PV_TRUE;
+}
+
+/* ======================================================================== */
+/* Function : PVGetOverrunBuffer() */
+/* Purpose : Get the overrun buffer */
+/* In/out : */
+/* Return : Pointer to overrun buffer. */
+/* Modified : */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF UChar* PVGetOverrunBuffer(VideoEncControls *encCtrl)
+{
+ /* Report the overrun buffer only when the current layer's bitstream was
+ actually redirected into it; otherwise the caller gets NULL. */
+ VideoEncData *encData = (VideoEncData *)encCtrl->videoEncoderData;
+ Vol *vol = encData->vol[encData->currLayer];
+
+ if (vol->stream->bitstreamBuffer == encData->overrunBuffer)
+ {
+ return encData->overrunBuffer;
+ }
+
+ return NULL; /* overrun buffer not in use for this frame */
+}
+
+
+
+
+/* ======================================================================== */
+/* Function : EncodeVideoFrame() */
+/* Date : 08/22/2000 */
+/* Purpose : Encode video frame and return bitstream */
+/* In/out : */
+/* Return : PV_TRUE if succeeded, PV_FALSE if failed. */
+/* Modified : */
+/* 02.14.2001 */
+/* Finishing new timestamp 32-bit input */
+/* Applications need to take care of wrap-around */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeVideoFrame(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, VideoEncFrameIO *vid_out,
+ ULong *nextModTime, UChar *bstream, Int *size, Int *nLayer)
+{
+ Bool status = PV_TRUE;
+ PV_STATUS pv_status;
+ VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+ VideoEncParams *encParams = video->encParams;
+ Vol *currVol;
+ Vop *tempForwRefVop = NULL;
+ Int tempRefSelCode = 0;
+ PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
+ Int width_16, height_16;
+ Int width, height;
+ Vop *temp;
+ Int encodeVop = 0;
+ void PaddingEdge(Vop *padVop);
+ Int currLayer = -1;
+ //Int nLayers = encParams->nLayers;
+
+ ULong modTime = vid_in->timestamp;
+
+#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
+ Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
+ static Int rand_idx = 0;
+#endif
+
+ /*******************************************************/
+ /* Determine Next Vop to encode, if any, and nLayer */
+ /*******************************************************/
+ //i = nLayers-1;
+
+ if (video->volInitialize[0]) /* first vol to code */
+ {
+ video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
+ }
+
+ encodeVop = DetermineCodingLayer(video, nLayer, modTime);
+ currLayer = *nLayer;
+ if ((currLayer < 0) || (currLayer > encParams->nLayers - 1))
+ return PV_FALSE;
+
+ /******************************************/
+ /* If post-skipping still effective --- return */
+ /******************************************/
+
+ if (!encodeVop) /* skip enh layer, no base layer coded --- return */
+ {
+#ifdef _PRINT_STAT
+ printf("No frame coded. Continue to next frame.");
+#endif
+ /* expected next code time, convert back to millisec */
+ *nextModTime = video->nextModTime;
+
+#ifdef ALLOW_VOP_NOT_CODED
+ if (video->vol[0]->shortVideoHeader) /* Short Video Header = 1 */
+ {
+ *size = 0;
+ *nLayer = -1;
+ }
+ else
+ {
+ *nLayer = 0;
+ EncodeVopNotCoded(video, bstream, size, modTime);
+ *size = video->vol[0]->stream->byteCount;
+ }
+#else
+ *size = 0;
+ *nLayer = -1;
+#endif
+ return status;
+ }
+
+
+//ENCODE_VOP_AGAIN: /* 12/30/00 */
+
+ /**************************************************************/
+ /* Initialize Vol stream structure with application bitstream */
+ /**************************************************************/
+
+ currVol = video->vol[currLayer];
+ currVol->stream->bitstreamBuffer = bstream;
+ currVol->stream->bufferSize = *size;
+ BitstreamEncReset(currVol->stream);
+ BitstreamSetOverrunBuffer(currVol->stream, video->overrunBuffer, video->oBSize, video);
+
+ /***********************************************************/
+ /* Encode VOS and VOL Headers on first call for each layer */
+ /***********************************************************/
+
+ if (video->volInitialize[currLayer])
+ {
+ video->currVop->timeInc = 0;
+ video->prevBaseVop->timeInc = 0;
+ if (!video->encParams->GetVolHeader[currLayer])
+ pv_status = EncodeVOS_Start(encCtrl);
+ }
+
+ /***************************************************/
+ /* Copy Input Video Frame to Internal Video Buffer */
+ /***************************************************/
+ /* Determine Width and Height of Vop Layer */
+
+ width = encParams->LayerWidth[currLayer]; /* Get input width */
+ height = encParams->LayerHeight[currLayer]; /* Get input height */
+ /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
+
+ width_16 = ((width + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
+ height_16 = ((height + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
+
+ video->input = vid_in; /* point to the frame input */
+
+ /*// End ////////////////////////////// */
+
+
+ /**************************************/
+ /* Determine VOP Type */
+ /* 6/2/2001, separate function */
+ /**************************************/
+ DetermineVopType(video, currLayer);
+
+ /****************************/
+ /* Initialize VOP */
+ /****************************/
+ video->currVop->volID = currVol->volID;
+ video->currVop->width = width_16;
+ video->currVop->height = height_16;
+ if (video->encParams->H263_Enabled) /* 11/28/05 */
+ {
+ video->currVop->pitch = width_16;
+ }
+ else
+ {
+ video->currVop->pitch = width_16 + 32;
+ }
+ video->currVop->timeInc = currVol->timeIncrement;
+ video->currVop->vopCoded = 1;
+ video->currVop->roundingType = 0;
+ video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
+
+ if (currLayer == 0
+#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
+ || random_val[rand_idx] || video->volInitialize[currLayer]
+#endif
+ )
+ {
+ tempForwRefVop = video->forwardRefVop; /* keep initial state */
+ if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
+
+ video->forwardRefVop = video->prevBaseVop;
+ video->forwardRefVop->refSelectCode = 1;
+ }
+#ifdef RANDOM_REFSELCODE
+ else
+ {
+ tempForwRefVop = video->forwardRefVop; /* keep initial state */
+ if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
+
+ video->forwardRefVop = video->prevEnhanceVop;
+ video->forwardRefVop->refSelectCode = 0;
+ }
+ rand_idx++;
+ rand_idx %= 30;
+#endif
+
+ video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
+ video->currVop->gobNumber = 0;
+ video->currVop->gobFrameID = video->currVop->predictionType;
+ video->currVop->temporalRef = (modTime * 30 / 1001) % 256;
+
+ video->currVop->temporalInterval = 0;
+
+ if (video->currVop->predictionType == I_VOP)
+ video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
+ else
+ video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
+
+
+ /****************/
+ /* Encode Vop */
+ /****************/
+ video->slice_coding = 0;
+
+ pv_status = EncodeVop(video);
+#ifdef _PRINT_STAT
+ if (video->currVop->predictionType == I_VOP)
+ printf(" I-VOP ");
+ else
+ printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
+#endif
+
+ /************************************/
+ /* Update Skip Next Frame */
+ /************************************/
+ *nLayer = UpdateSkipNextFrame(video, nextModTime, size, pv_status);
+ if (*nLayer == -1) /* skip current frame */
+ {
+ /* make sure that pointers are restored to the previous state */
+ if (currLayer == 0)
+ {
+ video->forwardRefVop = tempForwRefVop; /* For P-Vop base only */
+ video->forwardRefVop->refSelectCode = tempRefSelCode;
+ }
+
+ return status;
+ }
+
+ /* If I-VOP was encoded, reset IntraPeriod */
+ if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
+ video->nextEncIVop = encParams->IntraPeriod;
+
+ /* Set HintTrack Information */
+ if (currLayer != -1)
+ {
+ if (currVol->prevModuloTimeBase)
+ video->hintTrackInfo.MTB = 1;
+ else
+ video->hintTrackInfo.MTB = 0;
+ video->hintTrackInfo.LayerID = (UChar)currVol->volID;
+ video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
+ video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
+ }
+
+ /************************************************/
+ /* Determine nLayer and timeInc for next encode */
+ /* 12/27/00 always go by the highest layer*/
+ /************************************************/
+
+ /**********************************************************/
+ /* Copy Reconstructed Buffer to Output Video Frame Buffer */
+ /**********************************************************/
+ vid_out->yChan = video->currVop->yChan;
+ vid_out->uChan = video->currVop->uChan;
+ vid_out->vChan = video->currVop->vChan;
+ if (video->encParams->H263_Enabled)
+ {
+ vid_out->height = video->currVop->height; /* padded height */
+ vid_out->pitch = video->currVop->width; /* padded width */
+ }
+ else
+ {
+ vid_out->height = video->currVop->height + 32; /* padded height */
+ vid_out->pitch = video->currVop->width + 32; /* padded width */
+ }
+ //video_out->timestamp = video->modTime;
+ vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
+
+ /*// End /////////////////////// */
+
+ /***********************************/
+ /* Update Ouput bstream byte count */
+ /***********************************/
+
+ *size = currVol->stream->byteCount;
+
+ /****************************************/
+ /* Swap Vop Pointers for Base Layer */
+ /****************************************/
+ if (currLayer == 0)
+ {
+ temp = video->prevBaseVop;
+ video->prevBaseVop = video->currVop;
+ video->prevBaseVop->padded = 0; /* not padded */
+ video->currVop = temp;
+ video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
+ video->forwardRefVop->refSelectCode = 1;
+ }
+ else
+ {
+ temp = video->prevEnhanceVop;
+ video->prevEnhanceVop = video->currVop;
+ video->prevEnhanceVop->padded = 0; /* not padded */
+ video->currVop = temp;
+ video->forwardRefVop = video->prevEnhanceVop;
+ video->forwardRefVop->refSelectCode = 0;
+ }
+
+ /****************************************/
+ /* Modify the intialize flag at the end.*/
+ /****************************************/
+ if (video->volInitialize[currLayer])
+ video->volInitialize[currLayer] = 0;
+
+ return status;
+}
+
+#ifndef NO_SLICE_ENCODE
+/* ======================================================================== */
+/* Function : PVEncodeFrameSet() */
+/* Date : 04/18/2000 */
+/* Purpose : Enter a video frame and perform front-end time check plus ME */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeFrameSet(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, ULong *nextModTime, Int *nLayer)
+{
+ Bool status = PV_TRUE;
+ VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+ VideoEncParams *encParams = video->encParams;
+ Vol *currVol;
+ PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
+ Int width_16, height_16;
+ Int width, height;
+ Int encodeVop = 0;
+ void PaddingEdge(Vop *padVop);
+ Int currLayer = -1;
+ //Int nLayers = encParams->nLayers;
+
+ ULong modTime = vid_in->timestamp;
+
+#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
+ Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
+ static Int rand_idx = 0;
+#endif
+ /*******************************************************/
+ /* Determine Next Vop to encode, if any, and nLayer */
+ /*******************************************************/
+
+ video->modTime = modTime;
+
+ //i = nLayers-1;
+
+ if (video->volInitialize[0]) /* first vol to code */
+ {
+ video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
+ }
+
+
+ encodeVop = DetermineCodingLayer(video, nLayer, modTime);
+
+ currLayer = *nLayer;
+
+ /******************************************/
+ /* If post-skipping still effective --- return */
+ /******************************************/
+
+ if (!encodeVop) /* skip enh layer, no base layer coded --- return */
+ {
+#ifdef _PRINT_STAT
+ printf("No frame coded. Continue to next frame.");
+#endif
+ *nLayer = -1;
+
+ /* expected next code time, convert back to millisec */
+ *nextModTime = video->nextModTime;;
+ return status;
+ }
+
+ /**************************************************************/
+ /* Initialize Vol stream structure with application bitstream */
+ /**************************************************************/
+
+ currVol = video->vol[currLayer];
+ currVol->stream->bufferSize = 0;
+ BitstreamEncReset(currVol->stream);
+
+ /***********************************************************/
+ /* Encode VOS and VOL Headers on first call for each layer */
+ /***********************************************************/
+
+ if (video->volInitialize[currLayer])
+ {
+ video->currVop->timeInc = 0;
+ video->prevBaseVop->timeInc = 0;
+ }
+
+ /***************************************************/
+ /* Copy Input Video Frame to Internal Video Buffer */
+ /***************************************************/
+ /* Determine Width and Height of Vop Layer */
+
+ width = encParams->LayerWidth[currLayer]; /* Get input width */
+ height = encParams->LayerHeight[currLayer]; /* Get input height */
+ /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
+
+ width_16 = ((width + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
+ height_16 = ((height + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
+
+ video->input = vid_in; /* point to the frame input */
+
+ /*// End ////////////////////////////// */
+
+
+ /**************************************/
+ /* Determine VOP Type */
+ /* 6/2/2001, separate function */
+ /**************************************/
+ DetermineVopType(video, currLayer);
+
+ /****************************/
+ /* Initialize VOP */
+ /****************************/
+ video->currVop->volID = currVol->volID;
+ video->currVop->width = width_16;
+ video->currVop->height = height_16;
+ if (video->encParams->H263_Enabled) /* 11/28/05 */
+ {
+ video->currVop->pitch = width_16;
+ }
+ else
+ {
+ video->currVop->pitch = width_16 + 32;
+ }
+ video->currVop->timeInc = currVol->timeIncrement;
+ video->currVop->vopCoded = 1;
+ video->currVop->roundingType = 0;
+ video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
+
+ if (currLayer == 0
+#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
+ || random_val[rand_idx] || video->volInitialize[currLayer]
+#endif
+ )
+ {
+ video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
+ if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
+
+ video->forwardRefVop = video->prevBaseVop;
+ video->forwardRefVop->refSelectCode = 1;
+ }
+#ifdef RANDOM_REFSELCODE
+ else
+ {
+ video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
+ if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
+
+ video->forwardRefVop = video->prevEnhanceVop;
+ video->forwardRefVop->refSelectCode = 0;
+ }
+ rand_idx++;
+ rand_idx %= 30;
+#endif
+
+ video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
+ video->currVop->gobNumber = 0;
+ video->currVop->gobFrameID = video->currVop->predictionType;
+ video->currVop->temporalRef = ((modTime) * 30 / 1001) % 256;
+
+ video->currVop->temporalInterval = 0;
+
+ if (video->currVop->predictionType == I_VOP)
+ video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
+ else
+ video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
+
+ /****************/
+ /* Encode Vop */
+ /****************/
+ video->slice_coding = 1;
+
+ /*pv_status =*/
+ EncodeVop(video);
+
+#ifdef _PRINT_STAT
+ if (video->currVop->predictionType == I_VOP)
+ printf(" I-VOP ");
+ else
+ printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
+#endif
+
+ /* Set HintTrack Information */
+ if (currVol->prevModuloTimeBase)
+ video->hintTrackInfo.MTB = 1;
+ else
+ video->hintTrackInfo.MTB = 0;
+
+ video->hintTrackInfo.LayerID = (UChar)currVol->volID;
+ video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
+ video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
+
+ return status;
+}
+#endif /* NO_SLICE_ENCODE */
+
+#ifndef NO_SLICE_ENCODE
+/* ======================================================================== */
+/* Function : PVEncodePacket() */
+/* Date : 04/18/2002 */
+/* Purpose : Encode one packet and return bitstream */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeSlice(VideoEncControls *encCtrl, UChar *bstream, Int *size,
+ Int *endofFrame, VideoEncFrameIO *vid_out, ULong *nextModTime)
+{
+ PV_STATUS pv_status;
+ VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+ VideoEncParams *encParams = video->encParams;
+ Vol *currVol;
+ PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
+ Vop *temp;
+ void PaddingEdge(Vop *padVop);
+ Int currLayer = video->currLayer;
+ Int pre_skip;
+ Int pre_size;
+ /**************************************************************/
+ /* Initialize Vol stream structure with application bitstream */
+ /**************************************************************/
+
+ currVol = video->vol[currLayer];
+ currVol->stream->bitstreamBuffer = bstream;
+ pre_size = currVol->stream->byteCount;
+ currVol->stream->bufferSize = pre_size + (*size);
+
+ /***********************************************************/
+ /* Encode VOS and VOL Headers on first call for each layer */
+ /***********************************************************/
+
+ if (video->volInitialize[currLayer])
+ {
+ if (!video->encParams->GetVolHeader[currLayer])
+ pv_status = EncodeVOS_Start(encCtrl);
+ }
+
+ /****************/
+ /* Encode Slice */
+ /****************/
+ pv_status = EncodeSlice(video);
+
+ *endofFrame = 0;
+
+ if (video->mbnum >= currVol->nTotalMB && !video->end_of_buf)
+ {
+ *endofFrame = 1;
+
+ /************************************/
+ /* Update Skip Next Frame */
+ /************************************/
+ pre_skip = UpdateSkipNextFrame(video, nextModTime, size, pv_status); /* modified such that no pre-skipped */
+
+ if (pre_skip == -1) /* error */
+ {
+ *endofFrame = -1;
+ /* make sure that pointers are restored to the previous state */
+ if (currLayer == 0)
+ {
+ video->forwardRefVop = video->tempForwRefVop; /* For P-Vop base only */
+ video->forwardRefVop->refSelectCode = video->tempRefSelCode;
+ }
+
+ return pv_status;
+ }
+
+ /* If I-VOP was encoded, reset IntraPeriod */
+ if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
+ video->nextEncIVop = encParams->IntraPeriod;
+
+ /**********************************************************/
+ /* Copy Reconstructed Buffer to Output Video Frame Buffer */
+ /**********************************************************/
+ vid_out->yChan = video->currVop->yChan;
+ vid_out->uChan = video->currVop->uChan;
+ vid_out->vChan = video->currVop->vChan;
+ if (video->encParams->H263_Enabled)
+ {
+ vid_out->height = video->currVop->height; /* padded height */
+ vid_out->pitch = video->currVop->width; /* padded width */
+ }
+ else
+ {
+ vid_out->height = video->currVop->height + 32; /* padded height */
+ vid_out->pitch = video->currVop->width + 32; /* padded width */
+ }
+ //vid_out->timestamp = video->modTime;
+ vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
+
+ /*// End /////////////////////// */
+
+ /****************************************/
+ /* Swap Vop Pointers for Base Layer */
+ /****************************************/
+
+ if (currLayer == 0)
+ {
+ temp = video->prevBaseVop;
+ video->prevBaseVop = video->currVop;
+ video->prevBaseVop->padded = 0; /* not padded */
+ video->currVop = temp;
+ video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
+ video->forwardRefVop->refSelectCode = 1;
+ }
+ else
+ {
+ temp = video->prevEnhanceVop;
+ video->prevEnhanceVop = video->currVop;
+ video->prevEnhanceVop->padded = 0; /* not padded */
+ video->currVop = temp;
+ video->forwardRefVop = video->prevEnhanceVop;
+ video->forwardRefVop->refSelectCode = 0;
+ }
+ }
+
+ /***********************************/
+ /* Update Ouput bstream byte count */
+ /***********************************/
+
+ *size = currVol->stream->byteCount - pre_size;
+
+ /****************************************/
+ /* Modify the intialize flag at the end.*/
+ /****************************************/
+ if (video->volInitialize[currLayer])
+ video->volInitialize[currLayer] = 0;
+
+ return pv_status;
+}
+#endif /* NO_SLICE_ENCODE */
+
+
+/* ======================================================================== */
+/* Function : PVGetH263ProfileLevelID() */
+/* Date : 02/05/2003 */
+/* Purpose : Get H.263 Profile ID and level ID for profile 0 */
+/* In/out : Profile ID=0, levelID is what we want */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* Note : h263Level[8], rBR_bound[8], max_h263_framerate[2] */
+/* max_h263_width[2], max_h263_height[2] are global */
+/* */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVGetH263ProfileLevelID(VideoEncControls *encCtrl, Int *profileID, Int *levelID)
+{
+ VideoEncData *encData;
+ Int width, height;
+ float bitrate_r, framerate;
+
+
+ /* For this version, we only support H.263 profile 0 */
+ *profileID = 0;
+
+ *levelID = 0;
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ if (!encData->encParams->H263_Enabled) return PV_FALSE;
+
+
+ /* get image width, height, bitrate and framerate */
+ width = encData->encParams->LayerWidth[0];
+ height = encData->encParams->LayerHeight[0];
+ bitrate_r = (float)(encData->encParams->LayerBitRate[0]) / (float)64000.0;
+ framerate = encData->encParams->LayerFrameRate[0];
+ if (!width || !height || !(bitrate_r > 0 && framerate > 0)) return PV_FALSE;
+
+ /* This is the most frequent case : level 10 */
+ if (bitrate_r <= rBR_bound[1] && framerate <= max_h263_framerate[0] &&
+ (width <= max_h263_width[0] && height <= max_h263_height[0]))
+ {
+ *levelID = h263Level[1];
+ return PV_TRUE;
+ }
+ else if (bitrate_r > rBR_bound[4] ||
+ (width > max_h263_width[1] || height > max_h263_height[1]) ||
+ framerate > max_h263_framerate[1]) /* check the highest level 70 */
+ {
+ *levelID = h263Level[7];
+ return PV_TRUE;
+ }
+ else /* search level 20, 30, 40 */
+ {
+
+ /* pick out level 20 */
+ if (bitrate_r <= rBR_bound[2] &&
+ ((width <= max_h263_width[0] && height <= max_h263_height[0] && framerate <= max_h263_framerate[1]) ||
+ (width <= max_h263_width[1] && height <= max_h263_height[1] && framerate <= max_h263_framerate[0])))
+ {
+ *levelID = h263Level[2];
+ return PV_TRUE;
+ }
+ else /* width, height and framerate are ok, now choose level 30 or 40 */
+ {
+ *levelID = (bitrate_r <= rBR_bound[3] ? h263Level[3] : h263Level[4]);
+ return PV_TRUE;
+ }
+ }
+}
+
+/* ======================================================================== */
+/* Function : PVGetMPEG4ProfileLevelID() */
+/* Date : 26/06/2008 */
+/* Purpose : Get MPEG4 Level after initialized */
+/* In/out : profile_level according to interface */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVGetMPEG4ProfileLevelID(VideoEncControls *encCtrl, Int *profile_level, Int nLayer)
+{
+ VideoEncData* video;
+ Int i;
+
+ video = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (nLayer == 0)
+ {
+ for (i = 0; i < 8; i++)
+ {
+ if (video->encParams->ProfileLevel[0] == profile_level_code[i])
+ {
+ break;
+ }
+ }
+ *profile_level = i;
+ }
+ else
+ {
+ for (i = 0; i < 8; i++)
+ {
+ if (video->encParams->ProfileLevel[0] == scalable_profile_level_code[i])
+ {
+ break;
+ }
+ }
+ *profile_level = i + SIMPLE_SCALABLE_PROFILE_LEVEL0;
+ }
+
+ return true;
+}
+
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVUpdateEncFrameRate */
+/* Date : 04/08/2002 */
+/* Purpose : Update target frame rates of the encoded base and enhance */
+/* layer(if any) while encoding operation is ongoing */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateEncFrameRate(VideoEncControls *encCtrl, float *frameRate)
+{
+ VideoEncData *encData;
+ Int i;// nTotalMB, mbPerSec;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ /* Update the framerates for all the layers */
+ for (i = 0; i < encData->encParams->nLayers; i++)
+ {
+
+ /* New check: encoding framerate should be consistent with the given profile and level */
+ //nTotalMB = (((encData->encParams->LayerWidth[i]+15)/16)*16)*(((encData->encParams->LayerHeight[i]+15)/16)*16)/(16*16);
+ //mbPerSec = (Int)(nTotalMB * frameRate[i]);
+ //if(mbPerSec > encData->encParams->LayerMaxMbsPerSec[i]) return PV_FALSE;
+ if (frameRate[i] > encData->encParams->LayerMaxFrameRate[i]) return PV_FALSE; /* set by users or profile */
+
+ encData->encParams->LayerFrameRate[i] = frameRate[i];
+ }
+
+ return RC_UpdateBXRCParams((void*) encData);
+
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVUpdateBitRate */
+/* Date : 04/08/2002 */
+/* Purpose : Update target bit rates of the encoded base and enhance */
+/* layer(if any) while encoding operation is ongoing */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateBitRate(VideoEncControls *encCtrl, Int *bitRate)
+{
+ VideoEncData *encData;
+ Int i;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ /* Update the bitrates for all the layers */
+ for (i = 0; i < encData->encParams->nLayers; i++)
+ {
+ if (bitRate[i] > encData->encParams->LayerMaxBitRate[i]) /* set by users or profile */
+ {
+ return PV_FALSE;
+ }
+ encData->encParams->LayerBitRate[i] = bitRate[i];
+ }
+
+ return RC_UpdateBXRCParams((void*) encData);
+
+}
+#endif
+#ifndef LIMITED_API
+/* ============================================================================ */
+/* Function : PVUpdateVBVDelay() */
+/* Date : 4/23/2004 */
+/* Purpose : Update VBV buffer size(in delay) */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ============================================================================ */
+
+Bool PVUpdateVBVDelay(VideoEncControls *encCtrl, float delay)
+{
+
+ VideoEncData *encData;
+ Int total_bitrate, max_buffer_size;
+ int index;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ /* Check whether the input delay is valid based on the given profile */
+ total_bitrate = (encData->encParams->nLayers == 1 ? encData->encParams->LayerBitRate[0] :
+ encData->encParams->LayerBitRate[1]);
+ index = encData->encParams->profile_table_index;
+ max_buffer_size = (encData->encParams->nLayers == 1 ? profile_level_max_VBV_size[index] :
+ scalable_profile_level_max_VBV_size[index]);
+
+ if (total_bitrate*delay > (float)max_buffer_size)
+ return PV_FALSE;
+
+ encData->encParams->VBV_delay = delay;
+ return PV_TRUE;
+
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVUpdateIFrameInterval() */
+/* Date : 04/10/2002 */
+/* Purpose : updates the INTRA frame refresh interval while encoding */
+/* is ongoing */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateIFrameInterval(VideoEncControls *encCtrl, Int aIFramePeriod)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ encData->encParams->IntraPeriod = aIFramePeriod;
+ return PV_TRUE;
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVSetNumIntraMBRefresh() */
+/* Date : 08/05/2003 */
+/* Purpose : */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVUpdateNumIntraMBRefresh(VideoEncControls *encCtrl, Int numMB)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+
+ encData->encParams->Refresh = numMB;
+
+ return PV_TRUE;
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVIFrameRequest() */
+/* Date : 04/10/2002 */
+/* Purpose : encodes the next base frame as an I-Vop */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVIFrameRequest(VideoEncControls *encCtrl)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ encData->nextEncIVop = 1;
+ return PV_TRUE;
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVGetEncMemoryUsage() */
+/* Date : 10/17/2000 */
+/* Purpose : */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Int PVGetEncMemoryUsage(VideoEncControls *encCtrl)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+ return encData->encParams->MemoryUsage;
+}
+#endif
+
+/* ======================================================================== */
+/* Function : PVGetHintTrack() */
+/* Date : 1/17/2001, */
+/* Purpose : */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetHintTrack(VideoEncControls *encCtrl, MP4HintTrack *info)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+ info->MTB = encData->hintTrackInfo.MTB;
+ info->LayerID = encData->hintTrackInfo.LayerID;
+ info->CodeType = encData->hintTrackInfo.CodeType;
+ info->RefSelCode = encData->hintTrackInfo.RefSelCode;
+
+ return PV_TRUE;
+}
+
+/* ======================================================================== */
+/* Function : PVGetMaxVideoFrameSize() */
+/* Date : 7/17/2001, */
+/* Purpose : Function merely returns the maximum buffer size */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetMaxVideoFrameSize(VideoEncControls *encCtrl, Int *maxVideoFrameSize)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+
+
+ *maxVideoFrameSize = encData->encParams->BufferSize[0];
+
+ if (encData->encParams->nLayers == 2)
+ if (*maxVideoFrameSize < encData->encParams->BufferSize[1])
+ *maxVideoFrameSize = encData->encParams->BufferSize[1];
+ *maxVideoFrameSize >>= 3; /* Convert to Bytes */
+
+ if (*maxVideoFrameSize <= 4000)
+ *maxVideoFrameSize = 4000;
+
+ return PV_TRUE;
+}
+#ifndef LIMITED_API
+/* ======================================================================== */
+/* Function : PVGetVBVSize() */
+/* Date : 4/15/2002 */
+/* Purpose : Function merely returns the maximum buffer size */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetVBVSize(VideoEncControls *encCtrl, Int *VBVSize)
+{
+ VideoEncData *encData;
+
+ encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+ if (encData == NULL)
+ return PV_FALSE;
+ if (encData->encParams == NULL)
+ return PV_FALSE;
+
+ *VBVSize = encData->encParams->BufferSize[0];
+ if (encData->encParams->nLayers == 2)
+ *VBVSize += encData->encParams->BufferSize[1];
+
+ return PV_TRUE;
+
+}
+#endif
+/* ======================================================================== */
+/* Function : EncodeVOS_Start() */
+/* Date : 08/22/2000 */
+/* Purpose : Encodes the VOS,VO, and VOL or Short Headers */
+/* In/out : */
+/* Return : PV_TRUE if successed, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+/* Writes the VisualObjectSequence / VisualObject / VideoObjectLayer headers
+   for the current layer into video->bitstream1.  In short-video-header
+   (H.263) mode no MPEG-4 headers exist, so the function returns at once.
+   NOTE(review): 'status' is overwritten by every bitstream write; only the
+   status of the last write is propagated to the caller. */
+PV_STATUS EncodeVOS_Start(VideoEncControls *encoderControl)
+{
+
+    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+    Vol *currVol = video->vol[video->currLayer];
+    PV_STATUS status = PV_SUCCESS;
+    //int profile_level=0x01;
+    BitstreamEncVideo *stream = video->bitstream1;
+    int i, j;
+
+    /********************************/
+    /* Check for short_video_header */
+    /********************************/
+    if (currVol->shortVideoHeader == 1)
+        return status;          /* H.263 short header: no VOS/VO/VOL headers */
+    else
+    {
+        /* M4V path: emit the full VOS/VO/VOL header sequence */
+
+        /**************************/
+        /* VisualObjectSequence ()*/
+        /**************************/
+        status = BitstreamPutGT16Bits(stream, 32, SESSION_START_CODE);
+        /* Determine profile_level */
+        status = BitstreamPutBits(stream, 8, video->encParams->ProfileLevel[video->currLayer]);
+
+        /******************/
+        /* VisualObject() */
+        /******************/
+
+        status = BitstreamPutGT16Bits(stream, 32, VISUAL_OBJECT_START_CODE);
+        status = BitstreamPut1Bits(stream, 0x00); /* visual object identifier */
+        status = BitstreamPutBits(stream, 4, 0x01); /* visual object Type == "video ID" */
+        status = BitstreamPut1Bits(stream, 0x00); /* no video signal type */
+
+        /*temp = */
+        BitstreamMpeg4ByteAlignStuffing(stream);
+
+
+        status = BitstreamPutGT16Bits(stream, 27, VO_START_CODE);/* byte align: should be 2 bits */
+        status = BitstreamPutBits(stream, 5, 0x00);/* Video ID = 0 */
+
+
+
+        /**********************/
+        /* VideoObjectLayer() */
+        /**********************/
+        if (currVol->shortVideoHeader == 0)
+        { /* M4V else Short Video Header */
+            status = BitstreamPutGT16Bits(stream, VOL_START_CODE_LENGTH, VOL_START_CODE);
+            status = BitstreamPutBits(stream, 4, currVol->volID);/* video_object_layer_id */
+            status = BitstreamPut1Bits(stream, 0x00);/* Random Access = 0 */
+
+            /* layer 0 is the base (Simple), layer 1 the enhancement (Simple Scalable) */
+            if (video->currLayer == 0)
+                status = BitstreamPutBits(stream, 8, 0x01);/* Video Object Type Indication = 1 ... Simple Object Type */
+            else
+                status = BitstreamPutBits(stream, 8, 0x02);/* Video Object Type Indication = 2 ... Simple Scalable Object Type */
+
+            status = BitstreamPut1Bits(stream, 0x00);/* is_object_layer_identifier = 0 */
+
+
+            status = BitstreamPutBits(stream, 4, 0x01); /* aspect_ratio_info = 1 ... 1:1(Square) */
+            status = BitstreamPut1Bits(stream, 0x00);/* vol_control_parameters = 0 */
+            status = BitstreamPutBits(stream, 2, 0x00);/* video_object_layer_shape = 00 ... rectangular */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 16, currVol->timeIncrementResolution);/* vop_time_increment_resolution */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPut1Bits(stream, currVol->fixedVopRate);/* fixed_vop_rate = 0 */
+
+            /* For Rectangular VO layer shape */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 13, currVol->width);/* video_object_layer_width */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 13, currVol->height);/* video_object_layer_height */
+            status = BitstreamPut1Bits(stream, 0x01);/*marker bit */
+
+            status = BitstreamPut1Bits(stream, 0x00);/*interlaced = 0 */
+            status = BitstreamPut1Bits(stream, 0x01);/* obmc_disable = 1 */
+            status = BitstreamPut1Bits(stream, 0x00);/* sprite_enable = 0 */
+            status = BitstreamPut1Bits(stream, 0x00);/* not_8_bit = 0 */
+            status = BitstreamPut1Bits(stream, currVol->quantType);/* quant_type */
+
+            if (currVol->quantType)
+            {
+                /* MPEG-style quantization: optionally transmit custom matrices.
+                   The transmitted matrix is trimmed: trailing zig-zag entries
+                   that all repeat the same value are replaced by a single 0
+                   terminator byte. */
+                status = BitstreamPut1Bits(stream, currVol->loadIntraQuantMat); /* Intra quant matrix */
+                if (currVol->loadIntraQuantMat)
+                {
+                    /* find last zig-zag position where the value changes */
+                    for (j = 63; j >= 1; j--)
+                        if (currVol->iqmat[*(zigzag_i+j)] != currVol->iqmat[*(zigzag_i+j-1)])
+                            break;
+                    if ((j == 1) && (currVol->iqmat[*(zigzag_i+j)] == currVol->iqmat[*(zigzag_i+j-1)]))
+                        j = 0;
+                    for (i = 0; i < j + 1; i++)
+                        BitstreamPutBits(stream, 8, currVol->iqmat[*(zigzag_i+i)]);
+                    if (j < 63)
+                        BitstreamPutBits(stream, 8, 0);  /* terminator for the trimmed tail */
+                }
+                else
+                {
+                    /* no custom matrix transmitted: fall back to the MPEG default */
+                    for (j = 0; j < 64; j++)
+                        currVol->iqmat[j] = mpeg_iqmat_def[j];
+
+                }
+                status = BitstreamPut1Bits(stream, currVol->loadNonIntraQuantMat); /* Non-Intra quant matrix */
+                if (currVol->loadNonIntraQuantMat)
+                {
+                    for (j = 63; j >= 1; j--)
+                        if (currVol->niqmat[*(zigzag_i+j)] != currVol->niqmat[*(zigzag_i+j-1)])
+                            break;
+                    if ((j == 1) && (currVol->niqmat[*(zigzag_i+j)] == currVol->niqmat[*(zigzag_i+j-1)]))
+                        j = 0;
+                    for (i = 0; i < j + 1; i++)
+                        BitstreamPutBits(stream, 8, currVol->niqmat[*(zigzag_i+i)]);
+                    if (j < 63)
+                        BitstreamPutBits(stream, 8, 0);
+                }
+                else
+                {
+                    for (j = 0; j < 64; j++)
+                        currVol->niqmat[j] = mpeg_nqmat_def[j];
+                }
+            }
+
+            status = BitstreamPut1Bits(stream, 0x01); /* complexity_estimation_disable = 1 */
+            status = BitstreamPut1Bits(stream, currVol->ResyncMarkerDisable);/* Resync_marker_disable */
+            status = BitstreamPut1Bits(stream, currVol->dataPartitioning);/* Data partitioned */
+
+            if (currVol->dataPartitioning)
+                status = BitstreamPut1Bits(stream, currVol->useReverseVLC); /* Reversible_vlc */
+
+
+            if (currVol->scalability) /* Scalability*/
+            {
+
+                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 1 */
+                status = BitstreamPut1Bits(stream, currVol->scalType);/* hierarchy _type ... Spatial= 0 and Temporal = 1 */
+                status = BitstreamPutBits(stream, 4, currVol->refVolID);/* ref_layer_id */
+                status = BitstreamPut1Bits(stream, currVol->refSampDir);/* ref_layer_sampling_direc*/
+                status = BitstreamPutBits(stream, 5, currVol->horSamp_n);/*hor_sampling_factor_n*/
+                status = BitstreamPutBits(stream, 5, currVol->horSamp_m);/*hor_sampling_factor_m*/
+                status = BitstreamPutBits(stream, 5, currVol->verSamp_n);/*vert_sampling_factor_n*/
+                status = BitstreamPutBits(stream, 5, currVol->verSamp_m);/*vert_sampling_factor_m*/
+                status = BitstreamPut1Bits(stream, currVol->enhancementType);/* enhancement_type*/
+            }
+            else /* No Scalability */
+                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 0 */
+
+            /*temp = */
+            BitstreamMpeg4ByteAlignStuffing(stream); /* Byte align Headers for VOP */
+        }
+    }
+
+    return status;
+}
+
+/* ======================================================================== */
+/* Function : VOS_End() */
+/* Date : 08/22/2000 */
+/* Purpose : Visual Object Sequence End */
+/* In/out : */
+/* Return : PV_TRUE if succeeded, PV_FALSE if failed. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+/* Writes the 32-bit VisualObjectSequence end code to the current layer's
+   stream and returns the status of the write.
+   Fix: the original called BitstreamPutBits(stream, SESSION_END_CODE, 32),
+   i.e. with (value, length) swapped relative to the (stream, length, value)
+   convention used by every other call site in this file (see the
+   SESSION_START_CODE write in EncodeVOS_Start).  A 32-bit code must also go
+   through BitstreamPutGT16Bits, again matching EncodeVOS_Start. */
+PV_STATUS VOS_End(VideoEncControls *encoderControl)
+{
+    PV_STATUS status = PV_SUCCESS;
+    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+    Vol *currVol = video->vol[video->currLayer];
+    BitstreamEncVideo *stream = currVol->stream;
+
+
+    status = BitstreamPutGT16Bits(stream, 32, SESSION_END_CODE);
+
+    return status;
+}
+
+/* ======================================================================== */
+/* Function : DetermineCodingLayer */
+/* Date : 06/02/2001 */
+/* Purpose : Find layer to code based on current mod time, assuming that
+ it's time to encode enhanced layer. */
+/* In/out : */
+/* Return : Number of layer to code. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+/* Decides, from the current modification time 'modTime', whether a frame
+   should be encoded and at which layer.  Updates per-layer frame counters,
+   timeIncrement/moduloTimeBase and the next-encode deadline.
+   Returns 1 (and sets *nLayer) if a VOP should be coded, 0 otherwise.
+   Fix: 'delta' was used under REDUCE_FRAME_VARIANCE but never declared,
+   breaking the build whenever that macro is defined; declare it in the
+   same #ifdef so default builds are unchanged. */
+Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime)
+{
+    Vol **vol = video->vol;
+    VideoEncParams *encParams = video->encParams;
+    Int numLayers = encParams->nLayers;
+    UInt modTimeRef = video->modTimeRef;
+    float *LayerFrameRate = encParams->LayerFrameRate;
+    UInt frameNum[4], frameTick;
+    ULong frameModTime, nextFrmModTime;
+#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
+    float frameInterval;
+    Int delta;                  /* fix: previously undeclared (build break
+                                   when REDUCE_FRAME_VARIANCE is defined) */
+#endif
+    float srcFrameInterval;
+    Int frameInc;
+    Int i, extra_skip;
+    Int encodeVop = 0;
+
+    i = numLayers - 1;          /* start from the highest (fastest) layer */
+
+    if (modTime - video->nextModTime > ((ULong)(-1)) >> 1)  /* next time wrapped around */
+        return 0; /* not time to code it yet */
+
+    video->relLayerCodeTime[i] -= 1000;
+    video->nextEncIVop--;  /* number of Vops in highest layer resolution. */
+    video->numVopsInGOP++;
+
+    /* from this point frameModTime and nextFrmModTime are internal */
+
+    frameNum[i] = (UInt)((modTime - modTimeRef) * LayerFrameRate[i] + 500) / 1000;
+    if (video->volInitialize[i])
+    {
+        video->prevFrameNum[i] = frameNum[i] - 1;
+    }
+    else if (frameNum[i] <= video->prevFrameNum[i])
+    {
+        return 0; /* do not encode this frame */
+    }
+
+    /**** this part computes expected next frame *******/
+    frameModTime = (ULong)(((frameNum[i] * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
+    nextFrmModTime = (ULong)((((frameNum[i] + 1) * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
+
+    srcFrameInterval = 1000 / video->FrameRate;
+
+    /* deadline lies halfway between the current and the next frame time */
+    video->nextModTime = nextFrmModTime - (ULong)(srcFrameInterval / 2.) - 1; /* between current and next frame */
+
+#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
+    frameInterval = 1000 / LayerFrameRate[i]; /* next rec. time */
+    delta = (Int)(frameInterval / 4); /* empirical number */
+    if (video->nextModTime - modTime < (ULong)delta) /* need to move nextModTime further. */
+    {
+        video->nextModTime += ((delta - video->nextModTime + modTime)); /* empirical formula */
+    }
+#endif
+    /****************************************************/
+
+    /* map frame no.to tick from modTimeRef */
+    /*frameTick = (frameNum[i]*vol[i]->timeIncrementResolution) ;
+    frameTick = (UInt)((frameTick + (encParams->LayerFrameRate[i]/2))/encParams->LayerFrameRate[i]);*/
+    /* 11/16/01, change frameTick to be the closest tick from the actual modTime */
+    /* 12/12/02, add (double) to prevent large number wrap-around */
+    frameTick = (Int)(((double)(modTime - modTimeRef) * vol[i]->timeIncrementResolution + 500) / 1000);
+
+    /* find timeIncrement to be put in the bitstream */
+    /* refTick is second boundary reference. */
+    vol[i]->timeIncrement = frameTick - video->refTick[i];
+
+
+    /* normalize timeIncrement into [0, resolution) and count whole seconds */
+    vol[i]->moduloTimeBase = 0;
+    while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
+    {
+        vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
+        vol[i]->moduloTimeBase++;
+        /* do not update refTick and modTimeRef yet, do it after encoding!! */
+    }
+
+    if (video->relLayerCodeTime[i] <= 0) /* no skipping */
+    {
+        encodeVop = 1;
+        video->currLayer = *nLayer = i;
+        video->relLayerCodeTime[i] += 1000;
+
+        /* takes care of more dropped frame than expected */
+        extra_skip = -1;
+        frameInc = (frameNum[i] - video->prevFrameNum[i]);
+        extra_skip += frameInc;
+
+        if (extra_skip > 0)
+        {   /* update rc->Nr, rc->B, (rc->Rr)*/
+            video->nextEncIVop -= extra_skip;
+            video->numVopsInGOP += extra_skip;
+            if (encParams->RC_Type != CONSTANT_Q)
+            {
+                RC_UpdateBuffer(video, i, extra_skip);
+            }
+        }
+
+    }
+    /* update frame no. */
+    video->prevFrameNum[i] = frameNum[i];
+
+    /* go through all lower layer */
+    for (i = (numLayers - 2); i >= 0; i--)
+    {
+
+        video->relLayerCodeTime[i] -= 1000;
+
+        /* find timeIncrement to be put in the bitstream */
+        vol[i]->timeIncrement = frameTick - video->refTick[i];
+
+        if (video->relLayerCodeTime[i] <= 0) /* time to encode base */
+        {
+            /* 12/27/00 */
+            encodeVop = 1;
+            video->currLayer = *nLayer = i;
+            video->relLayerCodeTime[i] +=
+                (Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]);
+
+            vol[i]->moduloTimeBase = 0;
+            while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
+            {
+                vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
+                vol[i]->moduloTimeBase++;
+                /* do not update refTick and modTimeRef yet, do it after encoding!! */
+            }
+
+            /* takes care of more dropped frame than expected */
+            frameNum[i] = (UInt)((frameModTime - modTimeRef) * encParams->LayerFrameRate[i] + 500) / 1000;
+            if (video->volInitialize[i])
+                video->prevFrameNum[i] = frameNum[i] - 1;
+
+            extra_skip = -1;
+            frameInc = (frameNum[i] - video->prevFrameNum[i]);
+            extra_skip += frameInc;
+
+            if (extra_skip > 0)
+            {   /* update rc->Nr, rc->B, (rc->Rr)*/
+                if (encParams->RC_Type != CONSTANT_Q)
+                {
+                    RC_UpdateBuffer(video, i, extra_skip);
+                }
+            }
+            /* update frame no. */
+            video->prevFrameNum[i] = frameNum[i];
+        }
+    }
+
+#ifdef _PRINT_STAT
+    if (encodeVop)
+        printf(" TI: %d ", vol[*nLayer]->timeIncrement);
+#endif
+
+    return encodeVop;
+}
+
+/* ======================================================================== */
+/* Function : DetermineVopType */
+/* Date : 06/02/2001 */
+/* Purpose : The name says it all. */
+/* In/out : */
+/* Return : void . */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+/* Selects I-VOP vs P-VOP for the current frame based on the configured
+   intra period, the layer being coded, and rate-control frame skipping.
+   Also resets the GOP counters whenever an I-VOP is scheduled. */
+void DetermineVopType(VideoEncData *video, Int currLayer)
+{
+    VideoEncParams *encParams = video->encParams;
+
+    if (encParams->IntraPeriod == 0)    /* intra-only on the base layer */
+    {
+        if (video->currLayer > 0)
+        {
+            video->currVop->predictionType = P_VOP;
+        }
+        else
+        {
+            video->currVop->predictionType = I_VOP;
+            if (video->numVopsInGOP >= 132)
+                video->numVopsInGOP = 0;
+        }
+        return;
+    }
+
+    /* Shared by the IPPP... (-1) and periodic-I (>0) modes: keep the
+       previous frame type only if the last frame was pre-skipped
+       (skip_next_frame == -1), otherwise default to P-VOP. */
+    if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
+        video->currVop->predictionType = P_VOP;
+
+    if (encParams->IntraPeriod == -1)   /* IPPPPP... */
+    {
+        if (video->currLayer == 0)
+        {
+            if (/*video->numVopsInGOP>=132 || */video->volInitialize[currLayer])
+            {
+                /* very first frame of the layer: force an I-VOP */
+                video->currVop->predictionType = I_VOP;
+                video->numVopsInGOP = 0; /* force INTRA update every 132 base frames*/
+                video->nextEncIVop = 1;
+            }
+            else if (video->nextEncIVop == 0 || video->currVop->predictionType == I_VOP)
+            {
+                video->numVopsInGOP = 0;
+                video->nextEncIVop = 1;
+            }
+        }
+        return;
+    }
+
+    /* IntraPeriod > 0 : IPPPPPIPPPPPI... */
+    if (currLayer == 0)
+    {
+        if (video->nextEncIVop <= 0 || video->currVop->predictionType == I_VOP)
+        {
+            /* period expired (or an I-VOP was carried over): restart it */
+            video->nextEncIVop = encParams->IntraPeriod;
+            video->currVop->predictionType = I_VOP;
+            video->numVopsInGOP = 0;
+        }
+    }
+}
+
+/* ======================================================================== */
+/* Function : UpdateSkipNextFrame */
+/* Date : 06/02/2001 */
+/* Purpose : From rate control frame skipping decision, update timing
+ related parameters. */
+/* In/out : */
+/* Return : Current coded layer. */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+/* After a frame is (tentatively) coded, consults rate control to decide
+   whether the current frame must be dropped or upcoming frames skipped,
+   then commits the per-layer refTick bookkeeping.
+   Returns the coded layer number, or -1 if the current frame was dropped
+   (in which case *size is zeroed and *modTime is set to the next deadline). */
+Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status)
+{
+    Int currLayer = video->currLayer;
+    Int nLayer = currLayer;
+    VideoEncParams *encParams = video->encParams;
+    Int numLayers = encParams->nLayers;
+    Vol *currVol = video->vol[currLayer];
+    Vol **vol = video->vol;
+    Int num_skip, extra_skip;
+    Int i;
+    UInt newRefTick, deltaModTime;
+    UInt temp;
+
+    if (encParams->RC_Type != CONSTANT_Q)
+    {
+        if (video->volInitialize[0] && currLayer == 0) /* always encode the first frame */
+        {
+            RC_ResetSkipNextFrame(video, currLayer);
+            //return currLayer;  09/15/05
+        }
+        else
+        {
+            /* negative skip count (or buffer overflow) means: drop the frame
+               that was just encoded */
+            if (RC_GetSkipNextFrame(video, currLayer) < 0 || status == PV_END_OF_BUF) /* Skip Current Frame */
+            {
+
+#ifdef _PRINT_STAT
+                printf("Skip current frame");
+#endif
+                /* roll back the moduloTimeBase advance made for this frame */
+                currVol->moduloTimeBase = currVol->prevModuloTimeBase;
+
+                /*********************/
+                /* prepare to return */
+                /*********************/
+                *size = 0; /* Set Bitstream buffer to zero */
+
+                /* Determine nLayer and modTime for next encode */
+
+                *modTime = video->nextModTime;
+                nLayer = -1;
+
+                return nLayer; /* return immediately without updating RefTick & modTimeRef */
+                /* If I-VOP was attempted, then ensure next base is I-VOP */
+                /*if((encParams->IntraPeriod>0) && (video->currVop->predictionType == I_VOP))
+                video->nextEncIVop = 0;  commented out by  06/05/01 */
+
+            }
+            else if ((num_skip = RC_GetSkipNextFrame(video, currLayer)) > 0)
+            {
+
+#ifdef _PRINT_STAT
+                printf("Skip next %d frames", num_skip);
+#endif
+                /* to keep the Nr of enh layer the same */
+                /* adjust relLayerCodeTime only, do not adjust layerCodeTime[numLayers-1] */
+                /* extra_skip = 1 if any lower layer is due within the next tick,
+                   so the whole layer stack skips in lock-step */
+                extra_skip = 0;
+                for (i = 0; i < currLayer; i++)
+                {
+                    if (video->relLayerCodeTime[i] <= 1000)
+                    {
+                        extra_skip = 1;
+                        break;
+                    }
+                }
+
+                for (i = currLayer; i < numLayers; i++)
+                {
+                    video->relLayerCodeTime[i] += (num_skip + extra_skip) *
+                                                  ((Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]));
+                }
+            }
+        }/* first frame */
+    }
+    /***** current frame is encoded, now update refTick ******/
+
+    video->refTick[currLayer] += vol[currLayer]->prevModuloTimeBase * vol[currLayer]->timeIncrementResolution;
+
+    /* Reset layerCodeTime every I-VOP to prevent overflow */
+    if (currLayer == 0)
+    {
+        /* 12/12/02, fix for weird targer frame rate of 9.99 fps or 3.33 fps */
+        if (((encParams->IntraPeriod != 0) /*&& (video->currVop->predictionType==I_VOP)*/) ||
+                ((encParams->IntraPeriod == 0) && (video->numVopsInGOP == 0)))
+        {
+            /* candidate rebase point: the smallest refTick across layers */
+            newRefTick = video->refTick[0];
+
+            for (i = 1; i < numLayers; i++)
+            {
+                if (video->refTick[i] < newRefTick)
+                    newRefTick = video->refTick[i];
+            }
+
+            /* check to make sure that the update is integer multiple of frame number */
+            /* how many msec elapsed from last modTimeRef */
+            deltaModTime = (newRefTick / vol[0]->timeIncrementResolution) * 1000;
+
+            /* abort the rebase (newRefTick = 0) unless deltaModTime maps to a
+               whole number of frames in every layer */
+            for (i = numLayers - 1; i >= 0; i--)
+            {
+                temp = (UInt)(deltaModTime * encParams->LayerFrameRate[i]); /* 12/12/02 */
+                if (temp % 1000)
+                    newRefTick = 0;
+
+            }
+            if (newRefTick > 0)
+            {
+                video->modTimeRef += deltaModTime;
+                for (i = numLayers - 1; i >= 0; i--)
+                {
+                    video->prevFrameNum[i] -= (UInt)(deltaModTime * encParams->LayerFrameRate[i]) / 1000;
+                    video->refTick[i] -= newRefTick;
+                }
+            }
+        }
+    }
+
+    *modTime = video->nextModTime;
+
+    return nLayer;
+}
+
+
+#ifndef ORIGINAL_VERSION
+
+/* ======================================================================== */
+/* Function : SetProfile_BufferSize */
+/* Date : 04/08/2002 */
+/* Purpose : Set profile and video buffer size, copied from Jim's code */
+/* in PVInitVideoEncoder(.), since we have different places */
+/* to reset profile and video buffer size */
+/* In/out : */
+/* Return : */
+/* Modified : */
+/* */
+/* ======================================================================== */
+
+/* Validates the requested bitrates/framerates/packet sizes against the
+   MPEG-4 profile/level tables, computes the VBV buffer sizes, and (when
+   bInitialized is true) determines the profile_and_level codes written
+   into the bitstream.  Returns PV_TRUE on success, PV_FALSE when the
+   configuration exceeds the requested profile/level.
+   NOTE(review): bInitialized==0 appears to mean "already initialized,
+   only recompute buffer sizes" (see the early-return below) — the name
+   reads inverted; confirm against callers before renaming. */
+Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized)
+{
+    Int i, j, start, end;
+//  Int BaseMBsPerSec = 0, EnhMBsPerSec = 0;
+    Int nTotalMB = 0;
+    Int idx, temp_w, temp_h, max = 0, max_width, max_height;
+
+    Int nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
+
+    Int total_bitrate = 0, base_bitrate;
+    Int total_packet_size = 0, base_packet_size;
+    Int total_MBsPerSec = 0, base_MBsPerSec;
+    Int total_VBV_size = 0, base_VBV_size, enhance_VBV_size = 0;
+    float total_framerate, base_framerate;
+    float upper_bound_ratio;
+    Int bFound = 0;
+    Int k = 0, width16, height16, index;
+    Int lowest_level;
+
+#define MIN_BUFF    16000 /* 16k minimum buffer size */
+#define BUFF_CONST  2.0    /* 2000ms */
+#define UPPER_BOUND_RATIO 8.54 /* upper_bound = 1.4*(1.1+bound/10)*bitrate/framerate */
+
+#define QCIF_WIDTH  176
+#define QCIF_HEIGHT 144
+
+    index = video->encParams->profile_table_index;
+
+    /* Calculate "nTotalMB" */
+    /* Find the maximum width*height for memory allocation of the VOPs */
+    /* NOTE(review): max_width/max_height stay uninitialized if every layer
+       has zero area; assumed impossible for valid params — confirm. */
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        temp_w = video->encParams->LayerWidth[idx];
+        temp_h = video->encParams->LayerHeight[idx];
+
+        if ((temp_w*temp_h) > max)
+        {
+            max = temp_w * temp_h;
+            max_width = temp_w;
+            max_height = temp_h;
+            nTotalMB = ((max_width + 15) >> 4) * ((max_height + 15) >> 4);
+        }
+    }
+    upper_bound_ratio = (video->encParams->RC_Type == CBR_LOWDELAY ? (float)5.0 : (float)UPPER_BOUND_RATIO);
+
+
+    /* Get the basic information: bitrate, packet_size, MBs/s and VBV_size */
+    base_bitrate = video->encParams->LayerBitRate[0];
+    if (video->encParams->LayerMaxBitRate[0] != 0) /* video->encParams->LayerMaxBitRate[0] == 0 means it has not been set */
+    {
+        base_bitrate = PV_MAX(base_bitrate, video->encParams->LayerMaxBitRate[0]);
+    }
+    else /* if the max is not set, set it to the specified profile/level */
+    {
+        video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[index];
+    }
+
+    base_framerate = video->encParams->LayerFrameRate[0];
+    if (video->encParams->LayerMaxFrameRate[0] != 0)
+    {
+        base_framerate = PV_MAX(base_framerate, video->encParams->LayerMaxFrameRate[0]);
+    }
+    else /* if the max is not set, set it to the specified profile/level */
+    {
+        video->encParams->LayerMaxFrameRate[0] = (float)profile_level_max_mbsPerSec[index] / nTotalMB;
+    }
+
+    base_packet_size = video->encParams->ResyncPacketsize;
+    base_MBsPerSec = (Int)(base_framerate * nTotalMB);
+    /* VBV: the larger of the latency-based size and the per-frame upper bound,
+       clamped below by MIN_BUFF */
+    base_VBV_size = PV_MAX((Int)(base_bitrate * delay),
+                           (Int)(upper_bound_ratio * base_bitrate / base_framerate));
+    base_VBV_size = PV_MAX(base_VBV_size, MIN_BUFF);
+
+    /* if the buffer is larger than maximum buffer size, we'll clip it */
+    if (base_VBV_size > profile_level_max_VBV_size[5])
+        base_VBV_size = profile_level_max_VBV_size[5];
+
+
+    /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
+    /* NOTE(review): returns FALSE here but PV_FALSE elsewhere — assumed
+       equivalent values; confirm before normalizing. */
+    if (nLayers == 1 && base_VBV_size > profile_level_max_VBV_size[index])
+        return FALSE;
+
+
+    if (nLayers == 2)
+    {
+        total_bitrate = video->encParams->LayerBitRate[1];
+        if (video->encParams->LayerMaxBitRate[1] != 0)
+        {
+            total_bitrate = PV_MIN(total_bitrate, video->encParams->LayerMaxBitRate[1]);
+        }
+        else /* if the max is not set, set it to the specified profile/level */
+        {
+            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[index];
+        }
+
+        total_framerate = video->encParams->LayerFrameRate[1];
+        if (video->encParams->LayerMaxFrameRate[1] != 0)
+        {
+            total_framerate = PV_MIN(total_framerate, video->encParams->LayerMaxFrameRate[1]);
+        }
+        else /* if the max is not set, set it to the specified profile/level */
+        {
+            video->encParams->LayerMaxFrameRate[1] = (float)scalable_profile_level_max_mbsPerSec[index] / nTotalMB;
+        }
+
+        total_packet_size = video->encParams->ResyncPacketsize;
+        total_MBsPerSec = (Int)(total_framerate * nTotalMB);
+
+        /* enhancement-layer VBV is sized from the bitrate/framerate deltas */
+        enhance_VBV_size = PV_MAX((Int)((total_bitrate - base_bitrate) * delay),
+                                  (Int)(upper_bound_ratio * (total_bitrate - base_bitrate) / (total_framerate - base_framerate)));
+        enhance_VBV_size = PV_MAX(enhance_VBV_size, MIN_BUFF);
+
+        total_VBV_size = base_VBV_size + enhance_VBV_size;
+
+        /* if the buffer is larger than maximum buffer size, we'll clip it */
+        if (total_VBV_size > scalable_profile_level_max_VBV_size[6])
+        {
+            total_VBV_size = scalable_profile_level_max_VBV_size[6];
+            enhance_VBV_size = total_VBV_size - base_VBV_size;
+        }
+
+        /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
+        if (total_VBV_size > scalable_profile_level_max_VBV_size[index])
+            return FALSE;
+    }
+
+
+    if (!bInitialized) /* Has been initialized --> profile @ level has been figured out! */
+    {
+        /* only refresh the buffer sizes; skip the profile/level search */
+        video->encParams->BufferSize[0] = base_VBV_size;
+        if (nLayers > 1)
+            video->encParams->BufferSize[1] = enhance_VBV_size;
+
+        return PV_TRUE;
+    }
+
+
+    /* Profile @ level determination */
+    if (nLayers == 1)
+    {
+        /* BASE ONLY : Simple Profile(SP) Or Core Profile(CP) */
+        if (base_bitrate > profile_level_max_bitrate[index] ||
+                base_packet_size > profile_level_max_packet_size[index] ||
+                base_MBsPerSec > profile_level_max_mbsPerSec[index] ||
+                base_VBV_size > profile_level_max_VBV_size[index])
+
+            return PV_FALSE; /* Beyond the bound of Core Profile @ Level2 */
+
+        /* For H263/Short header, determine k*16384 */
+        width16 = ((video->encParams->LayerWidth[0] + 15) >> 4) << 4;
+        height16 = ((video->encParams->LayerHeight[0] + 15) >> 4) << 4;
+        if (video->encParams->H263_Enabled)
+        {
+            /* k scales the H.263 BPPmaxKb limit with picture size */
+            k = 4;
+            if (width16  == 2*QCIF_WIDTH && height16 == 2*QCIF_HEIGHT) /* CIF */
+                k = 16;
+
+            else if (width16  == 4*QCIF_WIDTH && height16 == 4*QCIF_HEIGHT) /* 4CIF */
+                k = 32;
+
+            else if (width16  == 8*QCIF_WIDTH && height16 == 8*QCIF_HEIGHT) /* 16CIF */
+                k = 64;
+
+            video->encParams->maxFrameSize  = k * 16384;
+
+            /* Make sure the buffer size is limited to the top profile and level: the Core profile and level 2 */
+            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[5]*1001.0 / 30000.0))
+                base_VBV_size = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[5] * 1001.0 / 30000.0);
+
+            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[index]*1001.0 / 30000.0))
+                return PV_FALSE;
+        }
+
+        /* Search the appropriate profile@level index */
+        /* SPL0 forbids AC/DC prediction thresholds and large search range */
+        if (!video->encParams->H263_Enabled &&
+                (video->encParams->IntraDCVlcThr != 0 || video->encParams->SearchRange > 16))
+        {
+            lowest_level = 1; /* cannot allow SPL0 */
+        }
+        else
+        {
+            lowest_level = 0; /* SPL0 */
+        }
+
+        for (i = lowest_level; i <= index; i++)
+        {
+            if (i != 4 && /* skip Core Profile@Level1 because the parameters in it are smaller than those in Simple Profile@Level3 */
+                    base_bitrate <= profile_level_max_bitrate[i] &&
+                    base_packet_size <= profile_level_max_packet_size[i] &&
+                    base_MBsPerSec <= profile_level_max_mbsPerSec[i] &&
+                    base_VBV_size <= (video->encParams->H263_Enabled ? (Int)(k*16384 + 4*(float)profile_level_max_bitrate[i]*1001.0 / 30000.0) :
+                                      profile_level_max_VBV_size[i]))
+                break;
+        }
+        if (i > index) return PV_FALSE; /* Nothing found!! */
+
+        /* Found out the actual profile @ level : index "i" */
+        if (i == 0)
+        {
+            /* For Simple Profile @ Level 0, we need to do one more check: image size <= QCIF */
+            if (width16 > QCIF_WIDTH || height16 > QCIF_HEIGHT)
+                i = 1; /* image size > QCIF, then set SP level1 */
+        }
+
+        video->encParams->ProfileLevel[0] = profile_level_code[i];
+        video->encParams->BufferSize[0]   = base_VBV_size;
+
+        if (video->encParams->LayerMaxBitRate[0] == 0)
+            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[i];
+
+        if (video->encParams->LayerMaxFrameRate[0] == 0)
+            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[i] / nTotalMB);
+
+        /* For H263/Short header, one special constraint for VBV buffer size */
+        if (video->encParams->H263_Enabled)
+            video->encParams->BufferSize[0] = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[i] * 1001.0 / 30000.0);
+
+    }
+    else
+    {
+        /* SCALABALE MODE: Simple Scalable Profile(SSP) Or Core Scalable Profile(CSP) */
+
+        if (total_bitrate > scalable_profile_level_max_bitrate[index] ||
+                total_packet_size > scalable_profile_level_max_packet_size[index] ||
+                total_MBsPerSec > scalable_profile_level_max_mbsPerSec[index] ||
+                total_VBV_size > scalable_profile_level_max_VBV_size[index])
+
+            return PV_FALSE; /* Beyond given profile and level */
+
+        /* One-time check: Simple Scalable Profile or Core Scalable Profile */
+        /* fits within SSP (table rows 0..3) -> search from 0; else start at
+           the CSP rows (4..) */
+        if (total_bitrate <= scalable_profile_level_max_bitrate[2] &&
+                total_packet_size <= scalable_profile_level_max_packet_size[2] &&
+                total_MBsPerSec <= scalable_profile_level_max_mbsPerSec[2] &&
+                total_VBV_size <= scalable_profile_level_max_VBV_size[2])
+
+        {
+            start = 0;
+            end = index;
+        }
+
+        else
+        {
+            start = 4;
+            end = index;
+        }
+
+
+        /* Search the scalable profile */
+        for (i = start; i <= end; i++)
+        {
+            if (total_bitrate <= scalable_profile_level_max_bitrate[i] &&
+                    total_packet_size <= scalable_profile_level_max_packet_size[i] &&
+                    total_MBsPerSec <= scalable_profile_level_max_mbsPerSec[i] &&
+                    total_VBV_size <= scalable_profile_level_max_VBV_size[i])
+
+                break;
+        }
+        if (i > end) return PV_FALSE;
+
+        /* Search the base profile */
+        if (i == 0)
+        {
+            j = 0;
+            bFound = 1;
+        }
+        else bFound = 0;
+
+        for (j = start; !bFound && j <= i; j++)
+        {
+            if (base_bitrate <= profile_level_max_bitrate[j] &&
+                    base_packet_size <= profile_level_max_packet_size[j] &&
+                    base_MBsPerSec <= profile_level_max_mbsPerSec[j] &&
+                    base_VBV_size <= profile_level_max_VBV_size[j])
+
+            {
+                bFound = 1;
+                break;
+            }
+        }
+
+        if (!bFound) // && start == 4)
+            return PV_FALSE; /* mis-match in the profiles between base layer and enhancement layer */
+
+        /* j for base layer, i for enhancement layer */
+        video->encParams->ProfileLevel[0] = profile_level_code[j];
+        video->encParams->ProfileLevel[1] = scalable_profile_level_code[i];
+        video->encParams->BufferSize[0]   = base_VBV_size;
+        video->encParams->BufferSize[1]   = enhance_VBV_size;
+
+        if (video->encParams->LayerMaxBitRate[0] == 0)
+            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[j];
+
+        if (video->encParams->LayerMaxBitRate[1] == 0)
+            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[i];
+
+        if (video->encParams->LayerMaxFrameRate[0] == 0)
+            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[j] / nTotalMB);
+
+        if (video->encParams->LayerMaxFrameRate[1] == 0)
+            video->encParams->LayerMaxFrameRate[1] = PV_MIN(30, (float)scalable_profile_level_max_mbsPerSec[i] / nTotalMB);
+
+
+    } /* end of: if(nLayers == 1) */
+
+
+    if (!video->encParams->H263_Enabled && (video->encParams->ProfileLevel[0] == 0x08)) /* SPL0 restriction*/
+    {
+        /* PV only allow frame-based rate control, no QP change from one MB to another
+        if(video->encParams->ACDCPrediction == TRUE && MB-based rate control)
+        return PV_FALSE */
+    }
+
+    return PV_TRUE;
+}
+
+#endif /* #ifndef ORIGINAL_VERSION */
+
+
+
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_lib.h b/media/codecs/m4v_h263/enc/src/mp4enc_lib.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_lib.h
rename to media/codecs/m4v_h263/enc/src/mp4enc_lib.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4lib_int.h b/media/codecs/m4v_h263/enc/src/mp4lib_int.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4lib_int.h
rename to media/codecs/m4v_h263/enc/src/mp4lib_int.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp b/media/codecs/m4v_h263/enc/src/rate_control.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp
rename to media/codecs/m4v_h263/enc/src/rate_control.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.h b/media/codecs/m4v_h263/enc/src/rate_control.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/rate_control.h
rename to media/codecs/m4v_h263/enc/src/rate_control.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad.cpp b/media/codecs/m4v_h263/enc/src/sad.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad.cpp
rename to media/codecs/m4v_h263/enc/src/sad.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel.cpp b/media/codecs/m4v_h263/enc/src/sad_halfpel.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel.cpp
rename to media/codecs/m4v_h263/enc/src/sad_halfpel.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel_inline.h b/media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
rename to media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_inline.h b/media/codecs/m4v_h263/enc/src/sad_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_inline.h
rename to media/codecs/m4v_h263/enc/src/sad_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_mb_offset.h b/media/codecs/m4v_h263/enc/src/sad_mb_offset.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_mb_offset.h
rename to media/codecs/m4v_h263/enc/src/sad_mb_offset.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_enc_tab.h b/media/codecs/m4v_h263/enc/src/vlc_enc_tab.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_enc_tab.h
rename to media/codecs/m4v_h263/enc/src/vlc_enc_tab.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp b/media/codecs/m4v_h263/enc/src/vlc_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp
rename to media/codecs/m4v_h263/enc/src/vlc_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.h b/media/codecs/m4v_h263/enc/src/vlc_encode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.h
rename to media/codecs/m4v_h263/enc/src/vlc_encode.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode_inline.h b/media/codecs/m4v_h263/enc/src/vlc_encode_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode_inline.h
rename to media/codecs/m4v_h263/enc/src/vlc_encode_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vop.cpp b/media/codecs/m4v_h263/enc/src/vop.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vop.cpp
rename to media/codecs/m4v_h263/enc/src/vop.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp b/media/codecs/m4v_h263/enc/test/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
rename to media/codecs/m4v_h263/enc/test/Android.bp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/AndroidTest.xml b/media/codecs/m4v_h263/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/AndroidTest.xml
rename to media/codecs/m4v_h263/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp b/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
rename to media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h b/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
rename to media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/README.md b/media/codecs/m4v_h263/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/README.md
rename to media/codecs/m4v_h263/enc/test/README.md
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp b/media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
rename to media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
diff --git a/media/codecs/m4v_h263/fuzzer/Android.bp b/media/codecs/m4v_h263/fuzzer/Android.bp
new file mode 100644
index 0000000..778dafb
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/Android.bp
@@ -0,0 +1,111 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_defaults {
+ name: "mpeg4_h263_dec_fuzz_defaults",
+
+ host_supported: true,
+
+ srcs: [
+ "mpeg4_h263_dec_fuzzer.cpp",
+ ],
+
+ static_libs: [
+ "libstagefright_m4vh263dec",
+ "liblog",
+ ],
+
+ cflags: [
+ "-DOSCL_IMPORT_REF=",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_fuzz {
+ name: "mpeg4_dec_fuzzer",
+
+ defaults: [
+ "mpeg4_h263_dec_fuzz_defaults",
+ ],
+
+ cflags: [
+ "-DMPEG4",
+ ],
+}
+
+cc_fuzz {
+ name: "h263_dec_fuzzer",
+
+ defaults: [
+ "mpeg4_h263_dec_fuzz_defaults",
+ ],
+}
+
+cc_defaults {
+ name: "mpeg4_h263_enc_fuzz_defaults",
+
+ host_supported: true,
+
+ srcs: ["mpeg4_h263_enc_fuzzer.cpp"],
+
+ shared_libs: [
+ "libutils",
+ "liblog",
+ ],
+
+ static_libs: [
+ "libstagefright_m4vh263enc",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_fuzz {
+ name: "mpeg4_enc_fuzzer",
+
+ defaults: [
+ "mpeg4_h263_enc_fuzz_defaults",
+ ],
+
+ cflags: ["-DMPEG4"],
+}
+
+cc_fuzz {
+ name: "h263_enc_fuzzer",
+
+ defaults: [
+ "mpeg4_h263_enc_fuzz_defaults",
+ ],
+}
diff --git a/media/codecs/m4v_h263/fuzzer/README.md b/media/codecs/m4v_h263/fuzzer/README.md
new file mode 100644
index 0000000..ad4ff97
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/README.md
@@ -0,0 +1,158 @@
+# Fuzzer for libstagefright_m4vh263dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+Dict files (dictionary files) are created for MPEG4 and H263 to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that decoder does not reject any input file in the first check
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+ * If the decode operation was successful, the input is advanced by the number of bytes consumed
+ in the decode call.
+ * If the decode operation was un-successful, the input is advanced by 1 byte so that the fuzzer
+ can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge, malformed, etc)
+and doesn't `exit()` on any input, thereby increasing the chance of identifying vulnerabilities.
+
+##### Other considerations
+ * Two fuzzer binaries - mpeg4_dec_fuzzer and h263_dec_fuzzer are generated based on the presence
+ of a flag - 'MPEG4'
+ * The number of decode calls are kept to a maximum of 100 so that the fuzzer does not timeout.
+
+## Build
+
+This describes steps to build mpeg4_dec_fuzzer and h263_dec_fuzzer binary.
+
+### Android
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) mpeg4_dec_fuzzer
+ $ mm -j$(nproc) h263_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MPEG4 or H263 files to that folder
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+ $ adb shell /data/fuzz/arm64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+
+# Fuzzer for libstagefright_m4vh263enc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+MPEG4/H263 supports the following parameters:
+1. Frame Width (parameter name: `encWidth`)
+2. Frame Height (parameter name: `encHeight`)
+3. Rate control mode (parameter name: `rcType`)
+4. Number of bytes per packet (parameter name: `packetSize`)
+5. Qp for I-Vop(parameter name: `iQuant`)
+6. Qp for P-Vop (parameter name: `pQuant`)
+7. Enable RVLC mode (parameter name: `rvlcEnable`)
+8. Quantization mode (parameter name: `quantType`)
+9. Disable frame skipping (parameter name: `noFrameSkipped`)
+10. Enable scene change detection (parameter name: `sceneDetect`)
+11. Number of intra MBs in P-frame(parameter name: `numIntraMB`)
+12. Search range of ME (parameter name: `searchRange`)
+13. Enable 8x8 ME and MC (parameter name: `mv8x8Enable`)
+14. Enable AC prediction (parameter name: `useACPred`)
+15. Threshold for intra DC VLC (parameter name: `intraDCVlcTh`)
+16. Encoding Mode (parameter name: `encMode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `rcType` | 0. `CONSTANT_Q` 1. `CBR_1` 2. `VBR_1` 3. `CBR_2` 4. `VBR_2` 5. `CBR_LOWDELAY` | All the bits of 6th byte of data modulus 6 |
+| `packetSize` | In the range `0 to 255` | All the bits of 7th byte of data |
+| `iQuant` | In the range `1 to 31` | All the bits of 8th byte of data |
+| `pQuant` | In the range `1 to 31` | All the bits of 9th byte of data |
+| `rvlcEnable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 10th byte of data |
+| `quantType` | 0. `0` 1. `1` | bit 0 of 11th byte of data |
+| `noFrameSkipped` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 12th byte of data |
+| `sceneDetect` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 13th byte of data |
+| `numIntraMB` | In the range `0 to 7` | bit 0, 1 and 2 of 14th byte of data |
+| `searchRange` | In the range `0 to 31` | bit 0, 1, 2, 3 and 4 of 15th byte of data |
+| `mv8x8Enable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 16th byte of data |
+| `useACPred` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 17th byte of data |
+| `intraDCVlcTh` | In the range `0 to 7` | bit 0, 1 and 2 of 18th byte of data |
+
+Following parameters are only for mpeg4_enc_fuzzer
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | In the range `0 to 10239` | All the bits of 1st and 2nd byte of data |
+| `encHeight` | In the range `0 to 10239` | All the bits of 3rd and 4th byte of data |
+| `encMode` | 0. `H263_MODE` 1. `H263_MODE_WITH_ERR_RES` 2. `DATA_PARTITIONING_MODE` 3. `COMBINE_MODE_NO_ERR_RES` 4. `COMBINE_MODE_WITH_ERR_RES` | All the bits of 19th byte of data modulus 5 |
+
+Following parameters are only for h263_enc_fuzzer
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | 0. `128` 1. `176` 2. `352` 3. `704` 4. `1408` | All the bits of 1st byte of data modulus 5|
+| `encHeight` | 0. `96` 1. `144` 2. `288` 3. `576` 4. `1152` | All the bits of 3rd byte of data modulus 5|
+| `encMode` | 0. `SHORT_HEADER` 1. `SHORT_HEADER_WITH_ERR_RES` | All the bits of 19th byte of data modulus 2 |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation was successful, the input is advanced by the frame size.
+If the encode operation was un-successful, the input is still advanced by frame size so
+that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes steps to build mpeg4_enc_fuzzer and h263_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) mpeg4_enc_fuzzer
+ $ mm -j$(nproc) h263_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some yuv files to that folder
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+ $ adb shell /data/fuzz/arm64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+ $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict b/media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
rename to media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict b/media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
rename to media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
rename to media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
new file mode 100644
index 0000000..423325d
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <algorithm>
+#include "mp4enc_api.h"
+
+constexpr int8_t kIDRFrameRefreshIntervalInSec = 1;
+constexpr MP4RateControlType krcType[] = {CONSTANT_Q, CBR_1, VBR_1, CBR_2, VBR_2, CBR_LOWDELAY};
+#ifdef MPEG4
+constexpr MP4EncodingMode kEncodingMode[] = {SHORT_HEADER, SHORT_HEADER_WITH_ERR_RES,
+ DATA_PARTITIONING_MODE, COMBINE_MODE_NO_ERR_RES,
+ COMBINE_MODE_WITH_ERR_RES};
+constexpr size_t kMaxWidth = 10240;
+constexpr size_t kMaxHeight = 10240;
+#else
+constexpr MP4EncodingMode kEncodingMode[] = {H263_MODE, H263_MODE_WITH_ERR_RES};
+constexpr int kWidth[] = {128, 176, 352, 704, 1408};
+constexpr int kHeight[] = {96, 144, 288, 576, 1152};
+constexpr size_t kWidthNum = std::size(kWidth);
+constexpr size_t kHeightNum = std::size(kHeight);
+#endif
+
+constexpr size_t krcTypeNum = std::size(krcType);
+constexpr size_t kEncodingModeNum = std::size(kEncodingMode);
+constexpr size_t kMaxQP = 51;
+
+enum {
+ IDX_WD_BYTE_1,
+ IDX_WD_BYTE_2,
+ IDX_HT_BYTE_1,
+ IDX_HT_BYTE_2,
+ IDX_FRAME_RATE,
+ IDX_RC_TYPE,
+ IDX_PACKET_SIZE,
+ IDX_I_FRAME_QP,
+ IDX_P_FRAME_QP,
+ IDX_ENABLE_RVLC,
+ IDX_QUANT_TYPE,
+ IDX_NO_FRAME_SKIPPED_FLAG,
+ IDX_ENABLE_SCENE_DETECT,
+ IDX_NUM_INTRA_MB,
+ IDX_SEARCH_RANGE,
+ IDX_ENABLE_MV_8x8,
+ IDX_USE_AC_PRED,
+ IDX_INTRA_DC_VLC_THRESHOLD,
+ IDX_ENC_MODE,
+ IDX_LAST
+};
+
+class Codec {
+ public:
+ Codec() = default;
+ ~Codec() { deInitEncoder(); }
+ bool initEncoder(const uint8_t *data);
+ void encodeFrames(const uint8_t *data, size_t size);
+ void deInitEncoder();
+
+ private:
+ int32_t mFrameWidth = 352;
+ int32_t mFrameHeight = 288;
+ float mFrameRate = 25.0f;
+ VideoEncOptions *mEncodeHandle = nullptr;
+ VideoEncControls *mEncodeControl = nullptr;
+};
+
+bool Codec::initEncoder(const uint8_t *data) {
+ mEncodeHandle = new VideoEncOptions;
+ if (!mEncodeHandle) {
+ return false;
+ }
+ memset(mEncodeHandle, 0, sizeof(VideoEncOptions));
+ mEncodeControl = new VideoEncControls;
+ if (!mEncodeControl) {
+ return false;
+ }
+ memset(mEncodeControl, 0, sizeof(VideoEncControls));
+ PVGetDefaultEncOption(mEncodeHandle, 0);
+
+#ifdef MPEG4
+ mFrameWidth = ((data[IDX_WD_BYTE_1] << 8) | data[IDX_WD_BYTE_2]) % kMaxWidth;
+ mFrameHeight = ((data[IDX_HT_BYTE_1] << 8) | data[IDX_HT_BYTE_2]) % kMaxHeight;
+#else
+ mFrameWidth = kWidth[data[IDX_WD_BYTE_1] % kWidthNum];
+ mFrameHeight = kHeight[data[IDX_HT_BYTE_1] % kHeightNum];
+#endif
+ mFrameRate = data[IDX_FRAME_RATE];
+ mEncodeHandle->rcType = krcType[data[IDX_RC_TYPE] % krcTypeNum];
+ mEncodeHandle->profile_level = CORE_PROFILE_LEVEL2;
+ mEncodeHandle->packetSize = data[IDX_PACKET_SIZE];
+ mEncodeHandle->iQuant[0] = (data[IDX_I_FRAME_QP] % kMaxQP) + 1;
+ mEncodeHandle->pQuant[0] = (data[IDX_P_FRAME_QP] % kMaxQP) + 1;
+ mEncodeHandle->rvlcEnable = (data[IDX_ENABLE_RVLC] & 0x01) ? PV_OFF : PV_ON;
+ mEncodeHandle->quantType[0] = (data[IDX_QUANT_TYPE] & 0x01) ? 0 : 1;
+ mEncodeHandle->noFrameSkipped = (data[IDX_NO_FRAME_SKIPPED_FLAG] & 0x01) ? PV_OFF : PV_ON;
+ mEncodeHandle->sceneDetect = (data[IDX_ENABLE_SCENE_DETECT] & 0x01) ? PV_OFF : PV_ON;
+ mEncodeHandle->numIntraMB = data[IDX_NUM_INTRA_MB] & 0x07;
+ mEncodeHandle->searchRange = data[IDX_SEARCH_RANGE] & 0x1F;
+ mEncodeHandle->mv8x8Enable = (data[IDX_ENABLE_MV_8x8] & 0x01) ? PV_OFF : PV_ON;
+ mEncodeHandle->useACPred = (data[IDX_USE_AC_PRED] & 0x01) ? PV_OFF : PV_ON;
+ mEncodeHandle->intraDCVlcTh = data[IDX_INTRA_DC_VLC_THRESHOLD] & 0x07;
+ mEncodeHandle->encMode = kEncodingMode[data[IDX_ENC_MODE] % kEncodingModeNum];
+ mEncodeHandle->encWidth[0] = mFrameWidth;
+ mEncodeHandle->encHeight[0] = mFrameHeight;
+ mEncodeHandle->encFrameRate[0] = mFrameRate;
+ mEncodeHandle->tickPerSrc = mEncodeHandle->timeIncRes / mFrameRate;
+ mEncodeHandle->intraPeriod = (kIDRFrameRefreshIntervalInSec * mFrameRate);
+ if (!PVInitVideoEncoder(mEncodeControl, mEncodeHandle)) {
+ return false;
+ }
+ return true;
+}
+
+void Codec::deInitEncoder() {
+ if (mEncodeControl) {
+ PVCleanUpVideoEncoder(mEncodeControl);
+ delete mEncodeControl;
+ mEncodeControl = nullptr;
+ }
+ if (mEncodeHandle) {
+ delete mEncodeHandle;
+ mEncodeHandle = nullptr;
+ }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+ size_t inputBufferSize = (mFrameWidth * mFrameHeight * 3) / 2;
+ size_t outputBufferSize = inputBufferSize * 2;
+ uint8_t *outputBuffer = new uint8_t[outputBufferSize];
+ uint8_t *inputBuffer = new uint8_t[inputBufferSize];
+
+ // Get VOL header.
+ int32_t sizeOutputBuffer = outputBufferSize;
+ PVGetVolHeader(mEncodeControl, outputBuffer, &sizeOutputBuffer, 0);
+
+ size_t numFrame = 0;
+ while (size > 0) {
+ size_t bytesConsumed = std::min(size, inputBufferSize);
+ memcpy(inputBuffer, data, bytesConsumed);
+ if (bytesConsumed < inputBufferSize) {
+ memset(inputBuffer + bytesConsumed, data[0], inputBufferSize - bytesConsumed);
+ }
+ VideoEncFrameIO videoIn{}, videoOut{};
+ videoIn.height = mFrameHeight;
+ videoIn.pitch = mFrameWidth;
+ videoIn.timestamp = (numFrame * 1000) / mFrameRate;
+ videoIn.yChan = inputBuffer;
+ videoIn.uChan = videoIn.yChan + videoIn.height * videoIn.pitch;
+ videoIn.vChan = videoIn.uChan + ((videoIn.height * videoIn.pitch) >> 2);
+ uint32_t modTimeMs = 0;
+ int32_t dataLength = outputBufferSize;
+ int32_t nLayer = 0;
+ PVEncodeVideoFrame(mEncodeControl, &videoIn, &videoOut, &modTimeMs, outputBuffer,
+ &dataLength, &nLayer);
+ MP4HintTrack hintTrack;
+ PVGetHintTrack(mEncodeControl, &hintTrack);
+ PVGetOverrunBuffer(mEncodeControl);
+ ++numFrame;
+ data += bytesConsumed;
+ size -= bytesConsumed;
+ }
+ delete[] inputBuffer;
+ delete[] outputBuffer;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ if (size < IDX_LAST) {
+ return 0;
+ }
+ Codec *codec = new Codec();
+ if (!codec) {
+ return 0;
+ }
+ if (codec->initEncoder(data)) {
+ data += IDX_LAST;
+ size -= IDX_LAST;
+ codec->encodeFrames(data, size);
+ }
+ delete codec;
+ return 0;
+}
diff --git a/media/libstagefright/codecs/m4v_h263/patent_disclaimer.txt b/media/codecs/m4v_h263/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/patent_disclaimer.txt
rename to media/codecs/m4v_h263/patent_disclaimer.txt
diff --git a/media/codecs/mp3dec/Android.bp b/media/codecs/mp3dec/Android.bp
new file mode 100644
index 0000000..f84da21
--- /dev/null
+++ b/media/codecs/mp3dec/Android.bp
@@ -0,0 +1,128 @@
+cc_library_headers {
+ name: "libstagefright_mp3dec_headers",
+ vendor_available: true,
+ min_sdk_version: "29",
+ host_supported:true,
+ export_include_dirs: [
+ "include",
+ "src",
+ ],
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media.swcodec",
+ ],
+}
+
+cc_library_static {
+ name: "libstagefright_mp3dec",
+ vendor_available: true,
+ min_sdk_version: "29",
+
+ host_supported:true,
+ srcs: [
+ "src/pvmp3_normalize.cpp",
+ "src/pvmp3_alias_reduction.cpp",
+ "src/pvmp3_crc.cpp",
+ "src/pvmp3_decode_header.cpp",
+ "src/pvmp3_decode_huff_cw.cpp",
+ "src/pvmp3_getbits.cpp",
+ "src/pvmp3_dequantize_sample.cpp",
+ "src/pvmp3_framedecoder.cpp",
+ "src/pvmp3_get_main_data_size.cpp",
+ "src/pvmp3_get_side_info.cpp",
+ "src/pvmp3_get_scale_factors.cpp",
+ "src/pvmp3_mpeg2_get_scale_data.cpp",
+ "src/pvmp3_mpeg2_get_scale_factors.cpp",
+ "src/pvmp3_mpeg2_stereo_proc.cpp",
+ "src/pvmp3_huffman_decoding.cpp",
+ "src/pvmp3_huffman_parsing.cpp",
+ "src/pvmp3_tables.cpp",
+ "src/pvmp3_imdct_synth.cpp",
+ "src/pvmp3_mdct_6.cpp",
+ "src/pvmp3_dct_6.cpp",
+ "src/pvmp3_poly_phase_synthesis.cpp",
+ "src/pvmp3_equalizer.cpp",
+ "src/pvmp3_seek_synch.cpp",
+ "src/pvmp3_stereo_proc.cpp",
+ "src/pvmp3_reorder.cpp",
+
+ "src/pvmp3_polyphase_filter_window.cpp",
+ "src/pvmp3_mdct_18.cpp",
+ "src/pvmp3_dct_9.cpp",
+ "src/pvmp3_dct_16.cpp",
+ ],
+
+ arch: {
+ arm: {
+ exclude_srcs: [
+ "src/pvmp3_polyphase_filter_window.cpp",
+ "src/pvmp3_mdct_18.cpp",
+ "src/pvmp3_dct_9.cpp",
+ "src/pvmp3_dct_16.cpp",
+ ],
+ srcs: [
+ "src/asm/pvmp3_polyphase_filter_window_gcc.s",
+ "src/asm/pvmp3_mdct_18_gcc.s",
+ "src/asm/pvmp3_dct_9_gcc.s",
+ "src/asm/pvmp3_dct_16_gcc.s",
+ ],
+
+ instruction_set: "arm",
+ },
+ },
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ include_dirs: ["frameworks/av/media/libstagefright/include"],
+
+ header_libs: ["libstagefright_mp3dec_headers"],
+ export_header_lib_headers: ["libstagefright_mp3dec_headers"],
+
+ cflags: [
+ "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+ "-Werror",
+ ],
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+//###############################################################################
+cc_test {
+ name: "libstagefright_mp3dec_test",
+ gtest: false,
+
+ srcs: [
+ "test/mp3dec_test.cpp",
+ "test/mp3reader.cpp",
+ ],
+
+ cflags: ["-Wall", "-Werror"],
+
+ local_include_dirs: [
+ "src",
+ "include",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ static_libs: [
+ "libstagefright_mp3dec",
+ "libsndfile",
+ ],
+
+ shared_libs: ["libaudioutils"],
+}
diff --git a/media/libstagefright/codecs/mp3dec/MODULE_LICENSE_APACHE2 b/media/codecs/mp3dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/MODULE_LICENSE_APACHE2
rename to media/codecs/mp3dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/mp3dec/NOTICE b/media/codecs/mp3dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/NOTICE
rename to media/codecs/mp3dec/NOTICE
diff --git a/media/codecs/mp3dec/TEST_MAPPING b/media/codecs/mp3dec/TEST_MAPPING
new file mode 100644
index 0000000..4ef4317
--- /dev/null
+++ b/media/codecs/mp3dec/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/codecs/mp3dec
+{
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "Mp3DecoderTest"}
+ ]
+}
diff --git a/media/codecs/mp3dec/fuzzer/Android.bp b/media/codecs/mp3dec/fuzzer/Android.bp
new file mode 100644
index 0000000..79fa1e9
--- /dev/null
+++ b/media/codecs/mp3dec/fuzzer/Android.bp
@@ -0,0 +1,39 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "mp3_dec_fuzzer",
+ host_supported: true,
+
+ static_libs: [
+ "libstagefright_mp3dec",
+ ],
+
+ srcs: [
+ "mp3_dec_fuzzer.cpp",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/README.md b/media/codecs/mp3dec/fuzzer/README.md
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/fuzzer/README.md
rename to media/codecs/mp3dec/fuzzer/README.md
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp b/media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
rename to media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
diff --git a/media/libstagefright/codecs/mp3dec/include/mp3_decoder_selection.h b/media/codecs/mp3dec/include/mp3_decoder_selection.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/mp3_decoder_selection.h
rename to media/codecs/mp3dec/include/mp3_decoder_selection.h
diff --git a/media/libstagefright/codecs/mp3dec/include/pvmp3_audio_type_defs.h b/media/codecs/mp3dec/include/pvmp3_audio_type_defs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/pvmp3_audio_type_defs.h
rename to media/codecs/mp3dec/include/pvmp3_audio_type_defs.h
diff --git a/media/libstagefright/codecs/mp3dec/include/pvmp3decoder_api.h b/media/codecs/mp3dec/include/pvmp3decoder_api.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/pvmp3decoder_api.h
rename to media/codecs/mp3dec/include/pvmp3decoder_api.h
diff --git a/media/libstagefright/codecs/mp3dec/patent_disclaimer.txt b/media/codecs/mp3dec/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/patent_disclaimer.txt
rename to media/codecs/mp3dec/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/mp3_mem_funcs.h b/media/codecs/mp3dec/src/mp3_mem_funcs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/mp3_mem_funcs.h
rename to media/codecs/mp3dec/src/mp3_mem_funcs.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3_huffman.h b/media/codecs/mp3dec/src/pv_mp3_huffman.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3_huffman.h
rename to media/codecs/mp3dec/src/pv_mp3_huffman.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.cpp b/media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
rename to media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.h b/media/codecs/mp3dec/src/pvmp3_alias_reduction.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.h
rename to media/codecs/mp3dec/src/pvmp3_alias_reduction.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_crc.cpp b/media/codecs/mp3dec/src/pvmp3_crc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_crc.cpp
rename to media/codecs/mp3dec/src/pvmp3_crc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_crc.h b/media/codecs/mp3dec/src/pvmp3_crc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_crc.h
rename to media/codecs/mp3dec/src/pvmp3_crc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.cpp b/media/codecs/mp3dec/src/pvmp3_dct_16.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.cpp
rename to media/codecs/mp3dec/src/pvmp3_dct_16.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.h b/media/codecs/mp3dec/src/pvmp3_dct_16.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.h
rename to media/codecs/mp3dec/src/pvmp3_dct_16.h
diff --git a/media/codecs/mp3dec/src/pvmp3_dct_6.cpp b/media/codecs/mp3dec/src/pvmp3_dct_6.cpp
new file mode 100644
index 0000000..c306873
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_dct_6.cpp
@@ -0,0 +1,153 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+ PacketVideo Corp.
+ MP3 Decoder Library
+
+   Filename: pvmp3_dct_6.cpp
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+Input
+ Int32 vec[] vector of 6 32-bit integers
+Returns
+ Int32 vec[] dct computation in-place
+
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ Returns the dct of length 6 of the input vector
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+#include "pvmp3_audio_type_defs.h"
+#include "pv_mp3dec_fxd_op.h"
+#include "pvmp3_mdct_6.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+#define Qfmt30(a) (Int32)((a)*((Int32)1<<30) + ((a)>=0?0.5F:-0.5F))
+
+#define cos_pi_6 Qfmt30( 0.86602540378444f)
+#define cos_2_pi_6 Qfmt30( 0.5f)
+#define cos_7_pi_12 Qfmt30( -0.25881904510252f)
+#define cos_3_pi_12 Qfmt30( 0.70710678118655f)
+#define cos_11_pi_12 Qfmt30( -0.96592582628907f)
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+__attribute__((no_sanitize("integer")))
+void pvmp3_dct_6(int32 vec[])
+{
+
+ Int32 tmp0;
+ Int32 tmp1;
+ Int32 tmp2;
+ Int32 tmp3;
+ Int32 tmp4;
+ Int32 tmp5;
+
+
+ /* split input vector */
+
+ tmp0 = vec[5] + vec[0];
+ tmp5 = vec[5] - vec[0];
+ tmp1 = vec[4] + vec[1];
+ tmp4 = vec[4] - vec[1];
+ tmp2 = vec[3] + vec[2];
+ tmp3 = vec[3] - vec[2];
+
+ vec[0] = tmp0 + tmp2 ;
+ vec[2] = fxp_mul32_Q30(tmp0 - tmp2, cos_pi_6);
+ vec[4] = (vec[0] >> 1) - tmp1;
+ vec[0] += tmp1;
+
+ tmp0 = fxp_mul32_Q30(tmp3, cos_7_pi_12);
+ tmp0 = fxp_mac32_Q30(tmp4, -cos_3_pi_12, tmp0);
+ vec[1] = fxp_mac32_Q30(tmp5, cos_11_pi_12, tmp0);
+
+ vec[3] = fxp_mul32_Q30((tmp3 + tmp4 - tmp5), cos_3_pi_12);
+ tmp0 = fxp_mul32_Q30(tmp3, cos_11_pi_12);
+ tmp0 = fxp_mac32_Q30(tmp4, cos_3_pi_12, tmp0);
+ vec[5] = fxp_mac32_Q30(tmp5, cos_7_pi_12, tmp0);
+
+}
+
+
+
+
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_9.cpp b/media/codecs/mp3dec/src/pvmp3_dct_9.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_9.cpp
rename to media/codecs/mp3dec/src/pvmp3_dct_9.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dec_defs.h b/media/codecs/mp3dec/src/pvmp3_dec_defs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dec_defs.h
rename to media/codecs/mp3dec/src/pvmp3_dec_defs.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.cpp b/media/codecs/mp3dec/src/pvmp3_decode_header.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.cpp
rename to media/codecs/mp3dec/src/pvmp3_decode_header.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.h b/media/codecs/mp3dec/src/pvmp3_decode_header.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.h
rename to media/codecs/mp3dec/src/pvmp3_decode_header.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp b/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
rename to media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.h b/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
rename to media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp b/media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
rename to media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.h b/media/codecs/mp3dec/src/pvmp3_dequantize_sample.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.h
rename to media/codecs/mp3dec/src/pvmp3_dequantize_sample.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.cpp b/media/codecs/mp3dec/src/pvmp3_equalizer.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.cpp
rename to media/codecs/mp3dec/src/pvmp3_equalizer.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.h b/media/codecs/mp3dec/src/pvmp3_equalizer.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.h
rename to media/codecs/mp3dec/src/pvmp3_equalizer.h
diff --git a/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp
new file mode 100644
index 0000000..15d2feb
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -0,0 +1,835 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+ PacketVideo Corp.
+ MP3 Decoder Library
+
+ Filename: pvmp3_framedecoder.cpp
+
+ Functions:
+ pvmp3_framedecoder
+ pvmp3_InitDecoder
+ pvmp3_resetDecoder
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+Input
+ pExt = pointer to the external interface structure. See the file
+ pvmp3decoder_api.h for a description of each field.
+ Data type of pointer to a tPVMP3DecoderExternal
+ structure.
+
+ pMem = void pointer to hide the internal implementation of the library
+ It is cast back to a tmp3dec_file structure. This structure
+ contains information that needs to persist between calls to
+ this function, or is too big to be placed on the stack, even
+ though the data is only needed during execution of this function
+ Data type void pointer, internally pointer to a tmp3dec_file
+ structure.
+
+
+ Outputs:
+ status = ERROR condition. see structure ERROR_CODE
+
+ Pointers and Buffers Modified:
+ pMem contents are modified.
+ pExt: (more detail in the file pvmp3decoder_api.h)
+ inputBufferUsedLength - number of array elements used up by the stream.
+ samplingRate - sampling rate in samples per sec
+ bitRate - bit rate in bits per second, varies frame to frame.
+
+
+
+------------------------------------------------------------------------------
+ FUNCTIONS DESCRIPTION
+
+ pvmp3_framedecoder
+ frame decoder library driver
+ pvmp3_InitDecoder
+ Decoder Initialization
+ pvmp3_resetDecoder
+ Reset Decoder
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
+ ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+
+#include "pvmp3_framedecoder.h"
+#include "pvmp3_dec_defs.h"
+#include "pvmp3_poly_phase_synthesis.h"
+#include "pvmp3_tables.h"
+#include "pvmp3_imdct_synth.h"
+#include "pvmp3_alias_reduction.h"
+#include "pvmp3_reorder.h"
+#include "pvmp3_dequantize_sample.h"
+#include "pvmp3_stereo_proc.h"
+#include "pvmp3_mpeg2_stereo_proc.h"
+#include "pvmp3_get_side_info.h"
+#include "pvmp3_get_scale_factors.h"
+#include "pvmp3_mpeg2_get_scale_factors.h"
+#include "pvmp3_decode_header.h"
+#include "pvmp3_get_main_data_size.h"
+#include "s_tmp3dec_file.h"
+#include "pvmp3_getbits.h"
+#include "mp3_mem_funcs.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
+ void *pMem)
+{
+
+ ERROR_CODE errorCode = NO_DECODING_ERROR;
+
+ int32 crc_error_count = 0;
+ uint32 sent_crc = 0;
+ uint32 computed_crc = 0;
+
+ tmp3dec_chan *pChVars[CHAN];
+ tmp3dec_file *pVars = (tmp3dec_file *)pMem;
+
+ mp3Header info_data;
+ mp3Header *info = &info_data;
+
+ pVars->inputStream.pBuffer = pExt->pInputBuffer;
+
+
+ pVars->inputStream.usedBits = pExt->inputBufferUsedLength << 3;
+ pVars->inputStream.inputBufferCurrentLength = pExt->inputBufferCurrentLength;
+
+
+ errorCode = pvmp3_decode_header(&pVars->inputStream,
+ info,
+ &computed_crc);
+
+ if (errorCode != NO_DECODING_ERROR)
+ {
+ pExt->outputFrameSize = 0;
+ return errorCode;
+ }
+
+ pVars->num_channels = (info->mode == MPG_MD_MONO) ? 1 : 2;
+ pExt->num_channels = pVars->num_channels;
+
+ int32 outputFrameSize = (info->version_x == MPEG_1) ?
+ 2 * SUBBANDS_NUMBER * FILTERBANK_BANDS :
+ SUBBANDS_NUMBER * FILTERBANK_BANDS;
+
+ outputFrameSize = (info->mode == MPG_MD_MONO) ? outputFrameSize : outputFrameSize << 1;
+
+
+ /*
+ * Check if output buffer has enough room to hold output PCM
+ */
+ if (pExt->outputFrameSize >= outputFrameSize)
+ {
+ pExt->outputFrameSize = outputFrameSize;
+ }
+ else
+ {
+ pExt->outputFrameSize = 0;
+ return OUTPUT_BUFFER_TOO_SMALL;
+ }
+
+
+ pChVars[ LEFT] = &pVars->perChan[ LEFT];
+ pChVars[RIGHT] = &pVars->perChan[RIGHT];
+
+
+
+
+ if (info->error_protection)
+ {
+ if (!bitsAvailable(&pVars->inputStream, 16))
+ {
+ return SIDE_INFO_ERROR;
+ }
+
+ /*
+ * Get crc content
+ */
+ sent_crc = getUpTo17bits(&pVars->inputStream, 16);
+ }
+
+
+ if (info->layer_description == 3)
+ {
+ int32 gr;
+ int32 ch;
+ uint32 main_data_end;
+ int32 bytes_to_discard;
+ int16 *ptrOutBuffer = pExt->pOutputBuffer;
+
+ /*
+ * Side Information must be extracted from the bitstream and store for use
+ * during the decoded of the associated frame
+ */
+
+ errorCode = pvmp3_get_side_info(&pVars->inputStream,
+ &pVars->sideInfo,
+ info,
+ &computed_crc);
+
+ if (errorCode != NO_DECODING_ERROR)
+ {
+ pExt->outputFrameSize = 0;
+ return errorCode;
+ }
+
+ /*
+ * If CRC was sent, check that matches the one got while parsing data
+ * disable crc if this is the desired mode
+ */
+ if (info->error_protection)
+ {
+ if ((computed_crc != sent_crc) && pExt->crcEnabled)
+ {
+ crc_error_count++;
+ }
+ }
+
+ /*
+ * main data (scalefactors, Huffman coded, etc,) are not necessarily located
+ * adjacent to the side-info. Beginning of main data is located using
+ * field "main_data_begin" of the current frame. The length does not include
+ * header and side info.
+ * "main_data_begin" points to the first bit of main data of a frame. It is a negative
+ * offset in bytes from the first byte of the sync word
+ * main_data_begin = 0 <===> main data start rigth after side info.
+ */
+
+ int32 temp = pvmp3_get_main_data_size(info, pVars);
+
+
+ /*
+ * Check if available data holds a full frame, if not flag an error
+ */
+
+ if ((uint32)pVars->predicted_frame_size > pVars->inputStream.inputBufferCurrentLength)
+ {
+ pExt->outputFrameSize = 0;
+ return NO_ENOUGH_MAIN_DATA_ERROR;
+ }
+
+ /*
+ * Fill in internal circular buffer
+ */
+ fillMainDataBuf(pVars, temp);
+
+
+ main_data_end = pVars->mainDataStream.usedBits >> 3; /* in bytes */
+ if ((main_data_end << 3) < pVars->mainDataStream.usedBits)
+ {
+ main_data_end++;
+ pVars->mainDataStream.usedBits = main_data_end << 3;
+ }
+
+
+ // force signed computation; buffer sizes and offsets are all going to be
+ // well within the constraints of 32-bit signed math.
+ bytes_to_discard = pVars->frame_start
+ - ((int32)pVars->sideInfo.main_data_begin)
+ - ((int32)main_data_end);
+
+
+ if (main_data_end > BUFSIZE) /* check overflow on the buffer */
+ {
+ pVars->frame_start -= BUFSIZE;
+
+ pVars->mainDataStream.usedBits -= (BUFSIZE << 3);
+ }
+
+ pVars->frame_start += temp;
+
+
+ if (bytes_to_discard < 0 || crc_error_count)
+ {
+ /*
+ * Not enough data to decode, then we should avoid reading this
+ * data ( getting/ignoring sido info and scale data)
+ * Main data could be located in the previous frame, so an unaccounted
+ * frame can cause incorrect processing
+ * Just run the polyphase filter to "clean" the history buffer
+ */
+ errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
+
+ /*
+ * Clear the input to these filters
+ */
+
+ pv_memset((void*)pChVars[RIGHT]->work_buf_int32,
+ 0,
+ SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[RIGHT]->work_buf_int32[0]));
+
+ pv_memset((void*)pChVars[LEFT]->work_buf_int32,
+ 0,
+ SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[LEFT]->work_buf_int32[0]));
+
+ /* clear circular buffers, to avoid any glitch */
+ pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
+ 0,
+ 480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
+ pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
+ 0,
+ 480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
+
+ pChVars[ LEFT]->used_freq_lines = 575;
+ pChVars[RIGHT]->used_freq_lines = 575;
+
+ }
+ else
+ {
+ pVars->mainDataStream.usedBits += (bytes_to_discard << 3);
+ }
+
+ /*
+ * if (fr_ps->header->version_x == MPEG_1), use 2 granules, otherwise just 1
+ */
+ for (gr = 0; gr < (1 + !(info->version_x)); gr++)
+ {
+ if (errorCode != NO_ENOUGH_MAIN_DATA_ERROR)
+ {
+ for (ch = 0; ch < pVars->num_channels; ch++)
+ {
+ int32 part2_start = pVars->mainDataStream.usedBits;
+
+ if (info->version_x == MPEG_1)
+ {
+
+ pvmp3_get_scale_factors(&pVars->scaleFactors[ch],
+ &pVars->sideInfo,
+ gr,
+ ch,
+ &pVars->mainDataStream);
+ }
+ else
+ {
+ int32 * tmp = pVars->Scratch_mem;
+ pvmp3_mpeg2_get_scale_factors(&pVars->scaleFactors[ch],
+ &pVars->sideInfo,
+ gr,
+ ch,
+ info,
+ (uint32 *)tmp,
+ &pVars->mainDataStream);
+ }
+
+ pChVars[ch]->used_freq_lines = pvmp3_huffman_parsing(pChVars[ch]->work_buf_int32,
+ &pVars->sideInfo.ch[ch].gran[gr],
+ pVars,
+ part2_start,
+ info);
+
+
+ pvmp3_dequantize_sample(pChVars[ch]->work_buf_int32,
+ &pVars->scaleFactors[ch],
+ &pVars->sideInfo.ch[ch].gran[gr],
+ pChVars[ch]->used_freq_lines,
+ info);
+
+
+
+
+ } /* for (ch=0; ch<stereo; ch++) */
+
+ if (pVars->num_channels == 2)
+ {
+
+ int32 used_freq_lines = (pChVars[ LEFT]->used_freq_lines >
+ pChVars[RIGHT]->used_freq_lines) ?
+ pChVars[ LEFT]->used_freq_lines :
+ pChVars[RIGHT]->used_freq_lines;
+
+ pChVars[ LEFT]->used_freq_lines = used_freq_lines;
+ pChVars[RIGHT]->used_freq_lines = used_freq_lines;
+
+ if (info->version_x == MPEG_1)
+ {
+ pvmp3_stereo_proc(pChVars[ LEFT]->work_buf_int32,
+ pChVars[RIGHT]->work_buf_int32,
+ &pVars->scaleFactors[RIGHT],
+ &pVars->sideInfo.ch[LEFT].gran[gr],
+ used_freq_lines,
+ info);
+ }
+ else
+ {
+ int32 * tmp = pVars->Scratch_mem;
+ pvmp3_mpeg2_stereo_proc(pChVars[ LEFT]->work_buf_int32,
+ pChVars[RIGHT]->work_buf_int32,
+ &pVars->scaleFactors[RIGHT],
+ &pVars->sideInfo.ch[ LEFT].gran[gr],
+ &pVars->sideInfo.ch[RIGHT].gran[gr],
+ (uint32 *)tmp,
+ used_freq_lines,
+ info);
+ }
+ }
+
+ } /* if ( errorCode != NO_ENOUGH_MAIN_DATA_ERROR) */
+
+ for (ch = 0; ch < pVars->num_channels; ch++)
+ {
+
+ pvmp3_reorder(pChVars[ch]->work_buf_int32,
+ &pVars->sideInfo.ch[ch].gran[gr],
+ &pChVars[ ch]->used_freq_lines,
+ info,
+ pVars->Scratch_mem);
+
+ pvmp3_alias_reduction(pChVars[ch]->work_buf_int32,
+ &pVars->sideInfo.ch[ch].gran[gr],
+ &pChVars[ ch]->used_freq_lines,
+ info);
+
+
+ /*
+ * IMDCT
+ */
+ /* set mxposition
+ * In case of mixed blocks, # of bands with long
+ * blocks (2 or 4) else 0
+ */
+ uint16 mixedBlocksLongBlocks = 0; /* 0 = long or short, 2=mixed, 4=mixed 2.5@8000 */
+ if (pVars->sideInfo.ch[ch].gran[gr].mixed_block_flag &&
+ pVars->sideInfo.ch[ch].gran[gr].window_switching_flag)
+ {
+ if ((info->version_x == MPEG_2_5) && (info->sampling_frequency == 2))
+ {
+ mixedBlocksLongBlocks = 4; /* mpeg2.5 @ 8 KHz */
+ }
+ else
+ {
+ mixedBlocksLongBlocks = 2;
+ }
+ }
+
+ pvmp3_imdct_synth(pChVars[ch]->work_buf_int32,
+ pChVars[ch]->overlap,
+ pVars->sideInfo.ch[ch].gran[gr].block_type,
+ mixedBlocksLongBlocks,
+ pChVars[ ch]->used_freq_lines,
+ pVars->Scratch_mem);
+
+
+ /*
+ * Polyphase synthesis
+ */
+
+ pvmp3_poly_phase_synthesis(pChVars[ch],
+ pVars->num_channels,
+ pExt->equalizerType,
+ &ptrOutBuffer[ch]);
+
+
+ }/* end ch loop */
+
+ ptrOutBuffer += pVars->num_channels * SUBBANDS_NUMBER * FILTERBANK_BANDS;
+ } /* for (gr=0;gr<Max_gr;gr++) */
+
+ /* skip ancillary data */
+ if (info->bitrate_index > 0)
+ { /* if not free-format */
+
+ int32 ancillary_data_lenght = pVars->predicted_frame_size << 3;
+
+ ancillary_data_lenght -= pVars->inputStream.usedBits;
+
+ /* skip ancillary data */
+ if (ancillary_data_lenght > 0)
+ {
+ pVars->inputStream.usedBits += ancillary_data_lenght;
+ }
+
+ }
+
+ /*
+ * This overrides a possible NO_ENOUGH_MAIN_DATA_ERROR
+ */
+ errorCode = NO_DECODING_ERROR;
+
+ }
+ else
+ {
+ /*
+ * The info on the header leads to an unsupported layer, more data
+ * will not fix this, so this is a bad frame,
+ */
+
+ pExt->outputFrameSize = 0;
+ return UNSUPPORTED_LAYER;
+ }
+
+ pExt->inputBufferUsedLength = pVars->inputStream.usedBits >> 3;
+ pExt->totalNumberOfBitsUsed += pVars->inputStream.usedBits;
+ pExt->version = info->version_x;
+ pExt->samplingRate = mp3_s_freq[info->version_x][info->sampling_frequency];
+ pExt->bitRate = mp3_bitrate[pExt->version][info->bitrate_index];
+
+
+ /*
+ * Always verify buffer overrun condition
+ */
+
+ if (pExt->inputBufferUsedLength > pExt->inputBufferCurrentLength)
+ {
+ pExt->outputFrameSize = 0;
+ errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
+ }
+
+ return errorCode;
+
+}
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+__inline void fillDataBuf(tmp3Bits *pMainData,
+ uint32 val) /* val to write into the buffer */
+{
+ pMainData->pBuffer[module(pMainData->offset++, BUFSIZE)] = (uint8)val;
+}
+
+
+void fillMainDataBuf(void *pMem, int32 temp)
+{
+ tmp3dec_file *pVars = (tmp3dec_file *)pMem;
+
+
+ int32 offset = (pVars->inputStream.usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+
+ /*
+ * Check if input circular buffer boundaries need to be enforced
+ */
+ if ((offset + temp) < BUFSIZE)
+ {
+ uint8 * ptr = pVars->inputStream.pBuffer + offset;
+
+ offset = pVars->mainDataStream.offset;
+
+ /*
+ * Check if main data circular buffer boundaries need to be enforced
+ */
+ if ((offset + temp) < BUFSIZE)
+ {
+ pv_memcpy((pVars->mainDataStream.pBuffer + offset), ptr, temp*sizeof(uint8));
+ pVars->mainDataStream.offset += temp;
+ }
+ else
+ {
+ int32 tmp1 = *(ptr++);
+ for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--) /* read main data. */
+ {
+ int32 tmp2 = *(ptr++);
+ fillDataBuf(&pVars->mainDataStream, tmp1);
+ fillDataBuf(&pVars->mainDataStream, tmp2);
+ tmp1 = *(ptr++);
+ }
+
+ if (temp&1)
+ {
+ fillDataBuf(&pVars->mainDataStream, tmp1);
+ }
+
+ /* adjust circular buffer counter */
+ pVars->mainDataStream.offset = module(pVars->mainDataStream.offset, BUFSIZE);
+ }
+ }
+ else
+ {
+ for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--) /* read main data. */
+ {
+ fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++ , BUFSIZE)));
+ fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++ , BUFSIZE)));
+ }
+ if (temp&1)
+ {
+ fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset , BUFSIZE)));
+ }
+ }
+
+
+ pVars->inputStream.usedBits += (temp) << INBUF_ARRAY_INDEX_SHIFT;
+}
+
+
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+uint32 pvmp3_decoderMemRequirements(void)
+{
+ uint32 size;
+
+ size = (uint32) sizeof(tmp3dec_file);
+ return (size);
+}
+
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+#include "pvmp3_decode_huff_cw.h"
+
+void pvmp3_InitDecoder(tPVMP3DecoderExternal *pExt,
+ void *pMem)
+{
+
+ tmp3dec_file *pVars;
+ huffcodetab *pHuff;
+
+ pVars = (tmp3dec_file *)pMem;
+ memset(pVars, 0, sizeof(*pVars));
+
+ pExt->totalNumberOfBitsUsed = 0;
+ pExt->inputBufferCurrentLength = 0;
+ pExt->inputBufferUsedLength = 0;
+
+ pVars->inputStream.pBuffer = pExt->pInputBuffer;
+
+ /*
+ * Initialize huffman decoding table
+ */
+
+ pHuff = pVars->ht;
+ pHuff[0].linbits = 0;
+ pHuff[0].pdec_huff_tab = pvmp3_decode_huff_cw_tab0;
+ pHuff[1].linbits = 0;
+ pHuff[1].pdec_huff_tab = pvmp3_decode_huff_cw_tab1;
+ pHuff[2].linbits = 0;
+ pHuff[2].pdec_huff_tab = pvmp3_decode_huff_cw_tab2;
+ pHuff[3].linbits = 0;
+ pHuff[3].pdec_huff_tab = pvmp3_decode_huff_cw_tab3;
+ pHuff[4].linbits = 0;
+ pHuff[4].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 4 is not used */
+ pHuff[5].linbits = 4;
+ pHuff[5].pdec_huff_tab = pvmp3_decode_huff_cw_tab5;
+ pHuff[6].linbits = 0;
+ pHuff[6].pdec_huff_tab = pvmp3_decode_huff_cw_tab6;
+ pHuff[7].linbits = 0;
+ pHuff[7].pdec_huff_tab = pvmp3_decode_huff_cw_tab7;
+ pHuff[8].linbits = 0;
+ pHuff[8].pdec_huff_tab = pvmp3_decode_huff_cw_tab8;
+ pHuff[9].linbits = 0;
+ pHuff[9].pdec_huff_tab = pvmp3_decode_huff_cw_tab9;
+ pHuff[10].linbits = 0;
+ pHuff[10].pdec_huff_tab = pvmp3_decode_huff_cw_tab10;
+ pHuff[11].linbits = 0;
+ pHuff[11].pdec_huff_tab = pvmp3_decode_huff_cw_tab11;
+ pHuff[12].linbits = 0;
+ pHuff[12].pdec_huff_tab = pvmp3_decode_huff_cw_tab12;
+ pHuff[13].linbits = 0;
+ pHuff[13].pdec_huff_tab = pvmp3_decode_huff_cw_tab13;
+ pHuff[14].linbits = 0;
+ pHuff[14].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 14 is not used */
+ pHuff[15].linbits = 0;
+ pHuff[15].pdec_huff_tab = pvmp3_decode_huff_cw_tab15;
+ pHuff[16].linbits = 1;
+ pHuff[16].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[17].linbits = 2;
+ pHuff[17].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[18].linbits = 3;
+ pHuff[18].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[19].linbits = 4;
+ pHuff[19].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[20].linbits = 6;
+ pHuff[20].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[21].linbits = 8;
+ pHuff[21].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[22].linbits = 10;
+ pHuff[22].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[23].linbits = 13;
+ pHuff[23].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+ pHuff[24].linbits = 4;
+ pHuff[24].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[25].linbits = 5;
+ pHuff[25].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[26].linbits = 6;
+ pHuff[26].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[27].linbits = 7;
+ pHuff[27].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[28].linbits = 8;
+ pHuff[28].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[29].linbits = 9;
+ pHuff[29].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[30].linbits = 11;
+ pHuff[30].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[31].linbits = 13;
+ pHuff[31].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+ pHuff[32].linbits = 0;
+ pHuff[32].pdec_huff_tab = pvmp3_decode_huff_cw_tab32;
+ pHuff[33].linbits = 0;
+ pHuff[33].pdec_huff_tab = pvmp3_decode_huff_cw_tab33;
+
+ /*
+ * Initialize polysynthesis circular buffer mechanism
+ */
+ /* clear buffers */
+
+ pvmp3_resetDecoder(pMem);
+
+}
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+
+void pvmp3_resetDecoder(void *pMem)
+{
+
+ tmp3dec_file *pVars;
+ tmp3dec_chan *pChVars[CHAN];
+
+ pVars = (tmp3dec_file *)pMem;
+ pChVars[ LEFT] = &pVars->perChan[ LEFT];
+ pChVars[RIGHT] = &pVars->perChan[RIGHT];
+
+ pVars->frame_start = 0;
+
+ pVars->mainDataStream.offset = 0;
+
+ pVars->mainDataStream.pBuffer = pVars->mainDataBuffer;
+ pVars->mainDataStream.usedBits = 0;
+
+
+ pVars->inputStream.usedBits = 0; // in bits
+
+
+ pChVars[ LEFT]->used_freq_lines = 575;
+ pChVars[RIGHT]->used_freq_lines = 575;
+
+
+ /*
+ * Initialize polysynthesis circular buffer mechanism
+ */
+
+ pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
+ 0,
+ 480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
+ pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
+ 0,
+ 480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
+
+
+ pv_memset((void*)pChVars[ LEFT]->overlap,
+ 0,
+ SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ LEFT]->overlap[0]));
+
+
+ pv_memset((void*)pChVars[ RIGHT]->overlap,
+ 0,
+ SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ RIGHT]->overlap[0]));
+
+
+
+
+
+ /*
+ * Clear all the structures
+ */
+
+
+ pv_memset((void*)&pVars->scaleFactors[RIGHT],
+ 0,
+ sizeof(mp3ScaleFactors));
+
+ pv_memset((void*)&pVars->scaleFactors[LEFT],
+ 0,
+ sizeof(mp3ScaleFactors));
+
+ pv_memset((void*)&pVars->sideInfo,
+ 0,
+ sizeof(mp3SideInfo));
+
+ pv_memset((void*)&pVars->sideInfo,
+ 0,
+ sizeof(mp3SideInfo));
+
+}
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.h b/media/codecs/mp3dec/src/pvmp3_framedecoder.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.h
rename to media/codecs/mp3dec/src/pvmp3_framedecoder.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp b/media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
rename to media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.h b/media/codecs/mp3dec/src/pvmp3_get_main_data_size.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.h
rename to media/codecs/mp3dec/src/pvmp3_get_main_data_size.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp b/media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
rename to media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.h b/media/codecs/mp3dec/src/pvmp3_get_scale_factors.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.h
rename to media/codecs/mp3dec/src/pvmp3_get_scale_factors.h
diff --git a/media/codecs/mp3dec/src/pvmp3_get_side_info.cpp b/media/codecs/mp3dec/src/pvmp3_get_side_info.cpp
new file mode 100644
index 0000000..1a3fca5
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_get_side_info.cpp
@@ -0,0 +1,346 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+ PacketVideo Corp.
+ MP3 Decoder Library
+
+ Filename: pvmp3_get_side_info.cpp
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+Input
+ mp3SideInfo *si,
+ mp3Header *info, mp3 header information
+ uint32 *crc initialized crc value (if enabled)
+
+
+ Returns
+
+ mp3SideInfo *si, side information
+
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ acquires side information
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
+ ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+#include "pvmp3_get_side_info.h"
+#include "pvmp3_crc.h"
+#include "pvmp3_getbits.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * Parse the Layer III side information for one frame into *si.
+ * Returns NO_DECODING_ERROR on success, SIDE_INFO_ERROR when the input
+ * runs out of bits or an illegal field combination is found.  All reads
+ * go through getbits_crc() so the CRC accumulator stays consistent when
+ * error_protection is enabled.
+ */
+ERROR_CODE pvmp3_get_side_info(tmp3Bits *inputStream,
+                               mp3SideInfo *si,
+                               mp3Header *info,
+                               uint32 *crc)
+{
+    int32 ch, gr;
+    uint32 tmp;
+
+    /* Channel count: MPG_MD_MONO streams carry side info for one channel only. */
+    int stereo = (info->mode == MPG_MD_MONO) ? 1 : 2;
+
+    /* MPEG-1 and MPEG-2/2.5 (LSF) use different side-info layouts; see [1]. */
+    if (info->version_x == MPEG_1)
+    {
+        if (stereo == 1)
+        {
+            /* Mono header: main_data_begin (9 bits) + private_bits (5 bits). */
+            if (!bitsAvailable(inputStream, 14))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
+            tmp = getbits_crc(inputStream, 14, crc, info->error_protection);
+            si->main_data_begin = (tmp << 18) >> 23; /* 9 */
+            si->private_bits = (tmp << 27) >> 27; /* 5 */
+        }
+        else
+        {
+            /* Stereo header: main_data_begin (9 bits) + private_bits (3 bits). */
+            if (!bitsAvailable(inputStream, 12))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
+            tmp = getbits_crc(inputStream, 12, crc, info->error_protection);
+            si->main_data_begin = (tmp << 20) >> 23; /* 9 */
+            si->private_bits = (tmp << 29) >> 29; /* 3 */
+
+        }
+
+        /* Scale-factor selection information: four one-bit flags per channel. */
+        for (ch = 0; ch < stereo; ch++)
+        {
+            if (!bitsAvailable(inputStream, 4))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
+            tmp = getbits_crc(inputStream, 4, crc, info->error_protection);
+            si->ch[ch].scfsi[0] = (tmp << 28) >> 31; /* 1 */
+            si->ch[ch].scfsi[1] = (tmp << 29) >> 31; /* 1 */
+            si->ch[ch].scfsi[2] = (tmp << 30) >> 31; /* 1 */
+            si->ch[ch].scfsi[3] = tmp & 1; /* 1 */
+        }
+
+        /* MPEG-1 frames carry two granules per channel. */
+        for (gr = 0; gr < 2 ; gr++)
+        {
+            for (ch = 0; ch < stereo; ch++)
+            {
+                /* 12 bits part2_3_length followed by 22 bits of packed fields. */
+                if (!bitsAvailable(inputStream, 34))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
+                si->ch[ch].gran[gr].part2_3_length = getbits_crc(inputStream, 12, crc, info->error_protection);
+                tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
+
+                si->ch[ch].gran[gr].big_values = (tmp << 10) >> 23; /* 9 */
+                si->ch[ch].gran[gr].global_gain = (int32)((tmp << 19) >> 24) - 210; /* 8 */
+                si->ch[ch].gran[gr].scalefac_compress = (tmp << 27) >> 28; /* 4 */
+                si->ch[ch].gran[gr].window_switching_flag = tmp & 1; /* 1 */
+
+                if (si->ch[ch].gran[gr].window_switching_flag)
+                {
+                    if (!bitsAvailable(inputStream, 22))
+                    {
+                        return SIDE_INFO_ERROR;
+                    }
+
+                    tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
+
+                    si->ch[ch].gran[gr].block_type = (tmp << 10) >> 30; /* 2 */;
+                    si->ch[ch].gran[gr].mixed_block_flag = (tmp << 12) >> 31; /* 1 */;
+
+                    si->ch[ch].gran[gr].table_select[0] = (tmp << 13) >> 27; /* 5 */;
+                    si->ch[ch].gran[gr].table_select[1] = (tmp << 18) >> 27; /* 5 */;
+
+                    si->ch[ch].gran[gr].subblock_gain[0] = (tmp << 23) >> 29; /* 3 */;
+                    si->ch[ch].gran[gr].subblock_gain[1] = (tmp << 26) >> 29; /* 3 */;
+                    si->ch[ch].gran[gr].subblock_gain[2] = (tmp << 29) >> 29; /* 3 */;
+
+                    /* Set region_count parameters since they are implicit in this case. */
+
+                    /* block_type 0 (normal window) must not appear together with
+                     * window switching; treat it as a corrupt stream. */
+                    if (si->ch[ch].gran[gr].block_type == 0)
+                    {
+                        return(SIDE_INFO_ERROR);
+                    }
+                    else if ((si->ch[ch].gran[gr].block_type == 2)
+                             && (si->ch[ch].gran[gr].mixed_block_flag == 0))
+                    {
+                        si->ch[ch].gran[gr].region0_count = 8; /* MI 9; */
+                        si->ch[ch].gran[gr].region1_count = 12;
+                    }
+                    else
+                    {
+                        si->ch[ch].gran[gr].region0_count = 7; /* MI 8; */
+                        si->ch[ch].gran[gr].region1_count = 13;
+                    }
+                }
+                else
+                {
+                    /* No window switching: three table_select fields plus explicit
+                     * region counts; block_type is implicitly 0 (normal). */
+                    if (!bitsAvailable(inputStream, 22))
+                    {
+                        return SIDE_INFO_ERROR;
+                    }
+
+                    tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
+
+                    si->ch[ch].gran[gr].table_select[0] = (tmp << 10) >> 27; /* 5 */;
+                    si->ch[ch].gran[gr].table_select[1] = (tmp << 15) >> 27; /* 5 */;
+                    si->ch[ch].gran[gr].table_select[2] = (tmp << 20) >> 27; /* 5 */;
+
+                    si->ch[ch].gran[gr].region0_count = (tmp << 25) >> 28; /* 4 */;
+                    si->ch[ch].gran[gr].region1_count = (tmp << 29) >> 29; /* 3 */;
+
+                    si->ch[ch].gran[gr].block_type = 0;
+                }
+
+                /* Trailing per-granule flags. */
+                if (!bitsAvailable(inputStream, 3))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
+                tmp = getbits_crc(inputStream, 3, crc, info->error_protection);
+                si->ch[ch].gran[gr].preflag = (tmp << 29) >> 31; /* 1 */
+                si->ch[ch].gran[gr].scalefac_scale = (tmp << 30) >> 31; /* 1 */
+                si->ch[ch].gran[gr].count1table_select = tmp & 1; /* 1 */
+            }
+        }
+    }
+    else /* Layer 3 LSF */
+    {
+        /* LSF header: main_data_begin is 8 bits, private_bits is 1 bit per channel. */
+        if (!bitsAvailable(inputStream, 8 + stereo))
+        {
+            return SIDE_INFO_ERROR;
+        }
+
+        si->main_data_begin = getbits_crc(inputStream, 8, crc, info->error_protection);
+        si->private_bits = getbits_crc(inputStream, stereo, crc, info->error_protection);
+
+        /* LSF frames carry a single granule (gran[0]) per channel. */
+        for (ch = 0; ch < stereo; ch++)
+        {
+            if (!bitsAvailable(inputStream, 39))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
+            tmp = getbits_crc(inputStream, 21, crc, info->error_protection);
+            si->ch[ch].gran[0].part2_3_length = (tmp << 11) >> 20; /* 12 */
+            si->ch[ch].gran[0].big_values = (tmp << 23) >> 23; /* 9 */
+
+            tmp = getbits_crc(inputStream, 18, crc, info->error_protection);
+            si->ch[ch].gran[0].global_gain = ((tmp << 14) >> 24) - 210; /* 8 */
+            si->ch[ch].gran[0].scalefac_compress = (tmp << 22) >> 23; /* 9 */
+            si->ch[ch].gran[0].window_switching_flag = tmp & 1; /* 1 */
+
+            if (si->ch[ch].gran[0].window_switching_flag)
+            {
+
+                if (!bitsAvailable(inputStream, 22))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
+                tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
+
+                si->ch[ch].gran[0].block_type = (tmp << 10) >> 30; /* 2 */;
+                si->ch[ch].gran[0].mixed_block_flag = (tmp << 12) >> 31; /* 1 */;
+
+                si->ch[ch].gran[0].table_select[0] = (tmp << 13) >> 27; /* 5 */;
+                si->ch[ch].gran[0].table_select[1] = (tmp << 18) >> 27; /* 5 */;
+
+                si->ch[ch].gran[0].subblock_gain[0] = (tmp << 23) >> 29; /* 3 */;
+                si->ch[ch].gran[0].subblock_gain[1] = (tmp << 26) >> 29; /* 3 */;
+                si->ch[ch].gran[0].subblock_gain[2] = (tmp << 29) >> 29; /* 3 */;
+
+                /* Set region_count parameters since they are implicit in this case. */
+
+                /* block_type 0 is invalid when window switching is signalled. */
+                if (si->ch[ch].gran[0].block_type == 0)
+                {
+                    return(SIDE_INFO_ERROR);
+                }
+                else if ((si->ch[ch].gran[0].block_type == 2)
+                         && (si->ch[ch].gran[0].mixed_block_flag == 0))
+                {
+                    si->ch[ch].gran[0].region0_count = 8; /* MI 9; */
+                    si->ch[ch].gran[0].region1_count = 12;
+                }
+                else
+                {
+                    si->ch[ch].gran[0].region0_count = 7; /* MI 8; */
+                    si->ch[ch].gran[0].region1_count = 13;
+                }
+            }
+            else
+            {
+                /* No window switching: explicit region counts, normal block. */
+                if (!bitsAvailable(inputStream, 22))
+                {
+                    return SIDE_INFO_ERROR;
+                }
+
+                tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
+
+                si->ch[ch].gran[0].table_select[0] = (tmp << 10) >> 27; /* 5 */;
+                si->ch[ch].gran[0].table_select[1] = (tmp << 15) >> 27; /* 5 */;
+                si->ch[ch].gran[0].table_select[2] = (tmp << 20) >> 27; /* 5 */;
+
+                si->ch[ch].gran[0].region0_count = (tmp << 25) >> 28; /* 4 */;
+                si->ch[ch].gran[0].region1_count = (tmp << 29) >> 29; /* 3 */;
+
+                si->ch[ch].gran[0].block_type = 0;
+            }
+
+            /* LSF has no preflag here: only scalefac_scale and count1table_select. */
+            if (!bitsAvailable(inputStream, 2))
+            {
+                return SIDE_INFO_ERROR;
+            }
+
+            tmp = getbits_crc(inputStream, 2, crc, info->error_protection);
+            si->ch[ch].gran[0].scalefac_scale = tmp >> 1; /* 1 */
+            si->ch[ch].gran[0].count1table_select = tmp & 1; /* 1 */
+
+        }
+    }
+    return (NO_DECODING_ERROR);
+}
+
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.h b/media/codecs/mp3dec/src/pvmp3_get_side_info.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.h
rename to media/codecs/mp3dec/src/pvmp3_get_side_info.h
diff --git a/media/codecs/mp3dec/src/pvmp3_getbits.cpp b/media/codecs/mp3dec/src/pvmp3_getbits.cpp
new file mode 100644
index 0000000..4d252ef
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_getbits.cpp
@@ -0,0 +1,295 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+ PacketVideo Corp.
+ MP3 Decoder Library
+
+ Filename: pvmp3_getbits.cpp
+
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+
+ tmp3Bits *inputStream, structure holding the input stream parameters
+ int32 neededBits number of bits to read from the bit stream
+
+ Outputs:
+
+ word parsed from the bitstream, with size of neededBits bits
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+ [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
+ ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
+
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "pvmp3_getbits.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * Read and consume `neededBits` bits (up to 25) from the circular input
+ * buffer, returning them right-aligned in a uint32.  Bits are consumed
+ * MSB-first within each byte.
+ */
+uint32 getNbits(tmp3Bits *ptBitStream,
+                int32 neededBits) /* number of bits to read from the bitstream (up to 25) */
+{
+    uint32 byteOffset;
+    uint32 bitOffset;
+    uint32 span;
+    uint32 word;
+
+    /* A zero-length read consumes nothing and yields 0. */
+    if (!neededBits)
+    {
+        return 0;
+    }
+
+    /* Byte index and bit position of the first unread bit. */
+    byteOffset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+    bitOffset = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    /* Number of buffer bytes the requested field straddles (1..4). */
+    span = (bitOffset + neededBits + 7) >> 3;
+
+    /* Assemble the covered bytes into a big-endian 32-bit word; byte
+     * positions past the field stay zero.  Indices wrap modulo BUFSIZE. */
+    word = ((uint32)(*(ptBitStream->pBuffer + module(byteOffset, BUFSIZE)))) << 24;
+    if (span > 1)
+    {
+        word |= ((uint32)(*(ptBitStream->pBuffer + module(byteOffset + 1, BUFSIZE)))) << 16;
+    }
+    if (span > 2)
+    {
+        word |= ((uint32)(*(ptBitStream->pBuffer + module(byteOffset + 2, BUFSIZE)))) << 8;
+    }
+    if (span > 3)
+    {
+        word |= (uint32)(*(ptBitStream->pBuffer + module(byteOffset + 3, BUFSIZE)));
+    }
+
+    /* Discard the bits above the field, then align the field to bit 0. */
+    word <<= bitOffset;
+    word >>= (32 - neededBits);
+
+    ptBitStream->usedBits += neededBits;
+
+    return word;
+}
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * Read and consume `neededBits` bits (2..9) from the circular input buffer,
+ * returning them right-aligned in a uint16.
+ */
+uint16 getUpTo9bits(tmp3Bits *ptBitStream,
+                    int32 neededBits) /* number of bits to read from the bit stream 2 to 9 */
+{
+    /* Byte index and bit position of the first unread bit. */
+    uint32 byteOffset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+    uint32 bitOffset = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    /* Number of buffer bytes the field straddles (at most 2 for 9 bits). */
+    uint32 span = (bitOffset + neededBits + 7) >> 3;
+    uint16 word = 0;
+
+    /* Assemble the covered bytes big-endian; unread positions stay zero. */
+    if (span > 0)
+    {
+        word = (uint16)(((uint16)(*(ptBitStream->pBuffer + module(byteOffset, BUFSIZE)))) << 8);
+        if (span > 1)
+        {
+            word |= (uint16)(*(ptBitStream->pBuffer + module(byteOffset + 1, BUFSIZE)));
+        }
+    }
+
+    ptBitStream->usedBits += neededBits;
+
+    /* Drop the bits above the field, then bring the field down to bit 0. */
+    word = (uint16)(word << bitOffset);
+
+    return (uint16)(word >> (16 - neededBits));
+}
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * Read and consume `neededBits` bits (2..17) from the circular input buffer,
+ * returning them right-aligned in a uint32.
+ */
+uint32 getUpTo17bits(tmp3Bits *ptBitStream,
+                     int32 neededBits) /* number of bits to read from the bit stream, 2 to 17 */
+{
+    /* Byte index and bit position of the first unread bit. */
+    uint32 byteOffset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+    uint32 bitOffset = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    /* Number of buffer bytes the field straddles (at most 3 for 17 bits). */
+    uint32 span = (bitOffset + neededBits + 7) >> 3;
+    uint32 word = 0;
+
+    /* Assemble the covered bytes big-endian; unread positions stay zero. */
+    if (span > 0)
+    {
+        word = ((uint32)(*(ptBitStream->pBuffer + module(byteOffset, BUFSIZE)))) << 16;
+    }
+    if (span > 1)
+    {
+        word |= ((uint32)(*(ptBitStream->pBuffer + module(byteOffset + 1, BUFSIZE)))) << 8;
+    }
+    if (span > 2)
+    {
+        word |= (uint32)(*(ptBitStream->pBuffer + module(byteOffset + 2, BUFSIZE)));
+    }
+
+    ptBitStream->usedBits += neededBits;
+
+    /* Align within a 24-bit window, then bring the field down to bit 0. */
+    word = 0xFFFFFF & (word << bitOffset);
+
+    return word >> (24 - neededBits);
+}
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * Read and consume a single bit from the circular input buffer.
+ * Bits are delivered MSB-first within each byte; returns 0 or 1.
+ */
+uint8 get1bit(tmp3Bits *ptBitStream)
+{
+    /* Byte index and bit position of the next unread bit. */
+    uint32 byteOffset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+    uint32 bitOffset = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
+
+    uint8 byteVal = *(ptBitStream->pBuffer + module(byteOffset, BUFSIZE));
+
+    ptBitStream->usedBits++;
+
+    /* Pick out the requested bit (bit 7 - bitOffset of the byte). */
+    return (uint8)((byteVal >> (7 - bitOffset)) & 1);
+}
+
+
+
+
diff --git a/media/codecs/mp3dec/src/pvmp3_getbits.h b/media/codecs/mp3dec/src/pvmp3_getbits.h
new file mode 100644
index 0000000..b04fe6d
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_getbits.h
@@ -0,0 +1,117 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+ PacketVideo Corp.
+ MP3 Decoder Library
+
+ Filename: pvmp3_getbits.h
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+ Description:
+
+------------------------------------------------------------------------------
+ INCLUDE DESCRIPTION
+
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; CONTINUE ONLY IF NOT ALREADY DEFINED
+----------------------------------------------------------------------------*/
+#ifndef PVMP3_GETBITS_H
+#define PVMP3_GETBITS_H
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "pvmp3_dec_defs.h"
+#include "s_mp3bits.h"
+#include "pvmp3_audio_type_defs.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here.
+----------------------------------------------------------------------------*/
+#define INBUF_ARRAY_INDEX_SHIFT (3)
+#define INBUF_BIT_WIDTH (1<<(INBUF_ARRAY_INDEX_SHIFT))
+#define INBUF_BIT_MODULO_MASK ((INBUF_BIT_WIDTH)-1)
+
+
+/*----------------------------------------------------------------------------
+; EXTERNAL VARIABLES REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; SIMPLE TYPEDEF'S
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; ENUMERATED TYPEDEF'S
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; STRUCTURES TYPEDEF'S
+----------------------------------------------------------------------------*/
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+ uint32 getNbits(tmp3Bits *pMainData,
+ int32 neededBits);
+
+ uint16 getUpTo9bits(tmp3Bits *pMainData,
+ int32 neededBits);
+
+ uint32 getUpTo17bits(tmp3Bits *pMainData,
+ int32 neededBits);
+
+ uint8 get1bit(tmp3Bits *pMainData);
+
+#ifdef __cplusplus
+}
+#endif
+
+/*----------------------------------------------------------------------------
+; GLOBAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/* True when the input buffer still holds at least `neededBits` unread bits:
+ * the total bits present (bytes * 8) must cover what has already been
+ * consumed plus the new request. */
+static inline bool bitsAvailable(tmp3Bits *inputStream, uint32 neededBits)
+{
+    return (inputStream->inputBufferCurrentLength << 3) >=
+           (inputStream->usedBits + neededBits);
+}
+
+/*----------------------------------------------------------------------------
+; END
+----------------------------------------------------------------------------*/
+
+#endif
+
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp b/media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
rename to media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp b/media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
rename to media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.cpp b/media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
rename to media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.h b/media/codecs/mp3dec/src/pvmp3_imdct_synth.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.h
rename to media/codecs/mp3dec/src/pvmp3_imdct_synth.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.cpp b/media/codecs/mp3dec/src/pvmp3_mdct_18.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.cpp
rename to media/codecs/mp3dec/src/pvmp3_mdct_18.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.h b/media/codecs/mp3dec/src/pvmp3_mdct_18.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.h
rename to media/codecs/mp3dec/src/pvmp3_mdct_18.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mdct_6.cpp b/media/codecs/mp3dec/src/pvmp3_mdct_6.cpp
new file mode 100644
index 0000000..1ba080d
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_mdct_6.cpp
@@ -0,0 +1,165 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+ PacketVideo Corp.
+ MP3 Decoder Library
+ Filename: pvmp3_mdct_6.cpp
+
+ Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+Input
+ int32 vec[], input vector of length 6
+ int32 *history input for overlap and add, vector updated with
+ next overlap and add values
+Returns
+ none mdct computation in-place
+
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ Returns the mdct of length 6 of the input vector, as well as the overlap
+ vector for next iteration ( on history[])
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+#include "pv_mp3dec_fxd_op.h"
+#include "pvmp3_mdct_6.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+#define QFORMAT 29
+#define Qfmt29(a) (int32)((a)*((int32)1<<QFORMAT) + ((a)>=0?0.5F:-0.5F))
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+/*
+ * (1./(2*cos((pi/(2*N))*(2*i+1)))), N = 12, i = [0:N/2-1]
+ */
+
+const int32 cosTerms_1_ov_cos_phi_N6[6] =
+{
+
+ Qfmt29(0.50431448029008f), Qfmt29(0.54119610014620f),
+ Qfmt29(0.63023620700513f), Qfmt29(0.82133981585229f),
+ Qfmt29(1.30656296487638f), Qfmt29(3.83064878777019f)
+};
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/*
+ * In-place 6-point MDCT with overlap state for the next granule.
+ * vec[]    : in/out — 6 input samples on entry; MDCT outputs on return
+ * history[]: out    — 6 overlap-add values for the following call
+ * Fixed-point wraparound is intentional, hence no_sanitize("integer").
+ */
+__attribute__((no_sanitize("integer")))
+void pvmp3_mdct_6(int32 vec[], int32 *history)
+{
+    int32 i;
+    int32 tmp;
+    int32 tmp1;
+    int32 tmp2;
+
+    int32 *pt_vec = vec;
+    int32 *pt_vec_o = vec;
+    const int32 *pt_cos = cosTerms_1_ov_cos_phi_N6;
+
+    /* Pre-twiddle: scale each of the 6 inputs by 1/(2*cos(pi*(2i+1)/24))
+     * in Q29 (see cosTerms_1_ov_cos_phi_N6 above); three samples per pass. */
+    for (i = 2; i != 0; i--)
+    {
+        tmp = *(pt_vec++);
+        tmp1 = *(pt_vec++);
+        tmp2 = *(pt_vec++);
+        *(pt_vec_o++) = fxp_mul32_Q29(tmp, *(pt_cos++));
+        *(pt_vec_o++) = fxp_mul32_Q29(tmp1, *(pt_cos++));
+        *(pt_vec_o++) = fxp_mul32_Q29(tmp2, *(pt_cos++));
+    }
+
+
+    pvmp3_dct_6(vec); // Even terms
+
+
+    /* Post-processing: fold the DCT outputs into the MDCT result and the
+     * negated overlap values in history[].  Statement order matters — vec[]
+     * is rewritten in place while still being read.
+     * NOTE(review): the exact mirror/sign pattern follows the DCT-based MDCT
+     * factorization used by this decoder; confirm against pvmp3_dct_6. */
+    tmp = -(vec[0] + vec[1]);
+    history[3] = tmp;
+    history[2] = tmp;
+    tmp = -(vec[1] + vec[2]);
+    vec[0] = vec[3] + vec[4];
+    vec[1] = vec[4] + vec[5];
+    history[4] = tmp;
+    history[1] = tmp;
+    tmp = -(vec[2] + vec[3]);
+    vec[4] = -vec[1];
+    history[5] = tmp;
+    history[0] = tmp;
+
+    vec[2] = vec[5];
+    vec[3] = -vec[5];
+    vec[5] = -vec[0];
+
+}
+
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.h b/media/codecs/mp3dec/src/pvmp3_mdct_6.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.h
rename to media/codecs/mp3dec/src/pvmp3_mdct_6.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.cpp b/media/codecs/mp3dec/src/pvmp3_normalize.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.cpp
rename to media/codecs/mp3dec/src/pvmp3_normalize.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.h b/media/codecs/mp3dec/src/pvmp3_normalize.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.h
rename to media/codecs/mp3dec/src/pvmp3_normalize.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp b/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
rename to media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h b/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
rename to media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp b/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
rename to media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h b/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
rename to media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.cpp b/media/codecs/mp3dec/src/pvmp3_reorder.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.cpp
rename to media/codecs/mp3dec/src/pvmp3_reorder.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.h b/media/codecs/mp3dec/src/pvmp3_reorder.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.h
rename to media/codecs/mp3dec/src/pvmp3_reorder.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.cpp b/media/codecs/mp3dec/src/pvmp3_seek_synch.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.cpp
rename to media/codecs/mp3dec/src/pvmp3_seek_synch.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.h b/media/codecs/mp3dec/src/pvmp3_seek_synch.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.h
rename to media/codecs/mp3dec/src/pvmp3_seek_synch.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
rename to media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.h b/media/codecs/mp3dec/src/pvmp3_stereo_proc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.h
rename to media/codecs/mp3dec/src/pvmp3_stereo_proc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_tables.cpp b/media/codecs/mp3dec/src/pvmp3_tables.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_tables.cpp
rename to media/codecs/mp3dec/src/pvmp3_tables.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_tables.h b/media/codecs/mp3dec/src/pvmp3_tables.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_tables.h
rename to media/codecs/mp3dec/src/pvmp3_tables.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_huffcodetab.h b/media/codecs/mp3dec/src/s_huffcodetab.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_huffcodetab.h
rename to media/codecs/mp3dec/src/s_huffcodetab.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_mp3bits.h b/media/codecs/mp3dec/src/s_mp3bits.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_mp3bits.h
rename to media/codecs/mp3dec/src/s_mp3bits.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_tmp3dec_chan.h b/media/codecs/mp3dec/src/s_tmp3dec_chan.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_tmp3dec_chan.h
rename to media/codecs/mp3dec/src/s_tmp3dec_chan.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_tmp3dec_file.h b/media/codecs/mp3dec/src/s_tmp3dec_file.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_tmp3dec_file.h
rename to media/codecs/mp3dec/src/s_tmp3dec_file.h
diff --git a/media/libstagefright/codecs/mp3dec/test/Android.bp b/media/codecs/mp3dec/test/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/Android.bp
rename to media/codecs/mp3dec/test/Android.bp
diff --git a/media/codecs/mp3dec/test/AndroidTest.xml b/media/codecs/mp3dec/test/AndroidTest.xml
new file mode 100644
index 0000000..29952eb
--- /dev/null
+++ b/media/codecs/mp3dec/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Test module config for Mp3 Decoder unit test">
+ <option name="test-suite-tag" value="Mp3DecoderTest" />
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
+ <option name="push-file"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.2.zip?unzip=true"
+ value="/data/local/tmp/Mp3DecoderTestRes/" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="Mp3DecoderTest" />
+ <option name="native-test-flag" value="-P /data/local/tmp/Mp3DecoderTestRes/" />
+ </test>
+</configuration>
diff --git a/media/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/codecs/mp3dec/test/Mp3DecoderTest.cpp
new file mode 100644
index 0000000..91326a8
--- /dev/null
+++ b/media/codecs/mp3dec/test/Mp3DecoderTest.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Mp3DecoderTest"
+
+#include <utils/Log.h>
+
+#include <audio_utils/sndfile.h>
+#include <stdio.h>
+
+#include "mp3reader.h"
+#include "pvmp3decoder_api.h"
+
+#include "Mp3DecoderTestEnvironment.h"
+
+#define OUTPUT_FILE "/data/local/tmp/mp3Decode.out"
+
+constexpr int32_t kInputBufferSize = 1024 * 10;
+constexpr int32_t kOutputBufferSize = 4608 * 2;
+constexpr int32_t kMaxCount = 10;
+constexpr int32_t kNumFrameReset = 150;
+
+static Mp3DecoderTestEnvironment *gEnv = nullptr;
+
+class Mp3DecoderTest : public ::testing::TestWithParam<string> {
+ public:
+ Mp3DecoderTest() : mConfig(nullptr) {}
+
+ ~Mp3DecoderTest() {
+ if (mConfig) {
+ delete mConfig;
+ mConfig = nullptr;
+ }
+ }
+
+ virtual void SetUp() override {
+ mConfig = new tPVMP3DecoderExternal{};
+ ASSERT_NE(mConfig, nullptr) << "Failed to initialize config. No Memory available";
+ mConfig->equalizerType = flat;
+ mConfig->crcEnabled = false;
+ }
+
+ tPVMP3DecoderExternal *mConfig;
+ Mp3Reader mMp3Reader;
+
+ ERROR_CODE DecodeFrames(void *decoderbuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
+ int32_t frameCount = INT32_MAX);
+ SNDFILE *openOutputFile(SF_INFO *sfInfo);
+};
+
+ERROR_CODE Mp3DecoderTest::DecodeFrames(void *decoderBuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
+ int32_t frameCount) {
+ uint8_t inputBuf[kInputBufferSize];
+ int16_t outputBuf[kOutputBufferSize];
+ uint32_t bytesRead;
+ ERROR_CODE decoderErr;
+ while (frameCount > 0) {
+ bool success = mMp3Reader.getFrame(inputBuf, &bytesRead);
+ if (!success) {
+ break;
+ }
+ mConfig->inputBufferCurrentLength = bytesRead;
+ mConfig->inputBufferMaxLength = 0;
+ mConfig->inputBufferUsedLength = 0;
+ mConfig->pInputBuffer = inputBuf;
+ mConfig->pOutputBuffer = outputBuf;
+ mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
+ decoderErr = pvmp3_framedecoder(mConfig, decoderBuf);
+ if (decoderErr != NO_DECODING_ERROR) break;
+ sf_writef_short(outFileHandle, outputBuf, mConfig->outputFrameSize / sfInfo.channels);
+ frameCount--;
+ }
+ return decoderErr;
+}
+
+SNDFILE *Mp3DecoderTest::openOutputFile(SF_INFO *sfInfo) {
+ memset(sfInfo, 0, sizeof(SF_INFO));
+ sfInfo->channels = mMp3Reader.getNumChannels();
+ sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+ sfInfo->samplerate = mMp3Reader.getSampleRate();
+ SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
+ return outFileHandle;
+}
+
+TEST_F(Mp3DecoderTest, MultiCreateMp3DecoderTest) {
+ size_t memRequirements = pvmp3_decoderMemRequirements();
+ ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+ void *decoderBuf = malloc(memRequirements);
+ ASSERT_NE(decoderBuf, nullptr)
+ << "Failed to allocate decoder memory of size " << memRequirements;
+ for (int count = 0; count < kMaxCount; count++) {
+ pvmp3_InitDecoder(mConfig, decoderBuf);
+ ALOGV("Decoder created successfully");
+ }
+ if (decoderBuf) {
+ free(decoderBuf);
+ decoderBuf = nullptr;
+ }
+}
+
+TEST_P(Mp3DecoderTest, DecodeTest) {
+ size_t memRequirements = pvmp3_decoderMemRequirements();
+ ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+ void *decoderBuf = malloc(memRequirements);
+ ASSERT_NE(decoderBuf, nullptr)
+ << "Failed to allocate decoder memory of size " << memRequirements;
+
+ pvmp3_InitDecoder(mConfig, decoderBuf);
+ ALOGV("Decoder created successfully");
+ string inputFile = gEnv->getRes() + GetParam();
+ bool status = mMp3Reader.init(inputFile.c_str());
+ ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
+
+ // Open the output file.
+ SF_INFO sfInfo;
+ SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+ ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+ ERROR_CODE decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+ ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+ ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+ ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+ mMp3Reader.close();
+ sf_close(outFileHandle);
+ if (decoderBuf) {
+ free(decoderBuf);
+ decoderBuf = nullptr;
+ }
+}
+
+TEST_P(Mp3DecoderTest, ResetDecoderTest) {
+ size_t memRequirements = pvmp3_decoderMemRequirements();
+ ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+ void *decoderBuf = malloc(memRequirements);
+ ASSERT_NE(decoderBuf, nullptr)
+ << "Failed to allocate decoder memory of size " << memRequirements;
+
+ pvmp3_InitDecoder(mConfig, decoderBuf);
+ ALOGV("Decoder created successfully.");
+ string inputFile = gEnv->getRes() + GetParam();
+ bool status = mMp3Reader.init(inputFile.c_str());
+ ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
+
+ // Open the output file.
+ SF_INFO sfInfo;
+ SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+ ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+ ERROR_CODE decoderErr;
+ decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo, kNumFrameReset);
+ ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+ ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+ ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+ pvmp3_resetDecoder(decoderBuf);
+ // Decode the same file.
+ decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+ ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+ ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+ ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+ mMp3Reader.close();
+ sf_close(outFileHandle);
+ if (decoderBuf) {
+ free(decoderBuf);
+ decoderBuf = nullptr;
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(Mp3DecoderTestAll, Mp3DecoderTest,
+ ::testing::Values(("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3"),
+ ("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3"),
+ ("bug_136053885.mp3"),
+ ("bbb_2ch_44kHz_lame_crc.mp3"),
+ ("bbb_mp3_stereo_192kbps_48000hz.mp3")));
+
+int main(int argc, char **argv) {
+ gEnv = new Mp3DecoderTestEnvironment();
+ ::testing::AddGlobalTestEnvironment(gEnv);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = gEnv->initFromOptions(argc, argv);
+ if (status == 0) {
+ status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ }
+ return status;
+}
diff --git a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h b/media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
rename to media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
diff --git a/media/libstagefright/codecs/mp3dec/test/README.md b/media/codecs/mp3dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/README.md
rename to media/codecs/mp3dec/test/README.md
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3dec_test.cpp b/media/codecs/mp3dec/test/mp3dec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3dec_test.cpp
rename to media/codecs/mp3dec/test/mp3dec_test.cpp
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3reader.cpp b/media/codecs/mp3dec/test/mp3reader.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3reader.cpp
rename to media/codecs/mp3dec/test/mp3reader.cpp
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3reader.h b/media/codecs/mp3dec/test/mp3reader.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3reader.h
rename to media/codecs/mp3dec/test/mp3reader.h
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
index 7c4e62f..f9abfe3 100644
--- a/media/extractors/Android.bp
+++ b/media/extractors/Android.bp
@@ -21,7 +21,6 @@
shared_libs: [
"liblog",
- "libmediandk#29",
],
// extractors are supposed to work on Q(29)
@@ -39,6 +38,21 @@
version_script: "exports.lds",
+ target: {
+ android: {
+ shared_libs: [
+ "libmediandk#29",
+ ],
+ },
+ host: {
+ static_libs: [
+ "libutils",
+ "libmediandk_format",
+ "libmedia_ndkformatpriv",
+ ],
+ },
+ },
+
sanitize: {
cfi: true,
misc_undefined: [
diff --git a/media/extractors/TEST_MAPPING b/media/extractors/TEST_MAPPING
index 038b99a..4984b8f 100644
--- a/media/extractors/TEST_MAPPING
+++ b/media/extractors/TEST_MAPPING
@@ -1,7 +1,5 @@
{
"presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "ExtractorUnitTest" },
// TODO(b/153661591) enable test once the bug is fixed
// This tests the extractor path
@@ -16,5 +14,14 @@
// }
// ]
// }
+ ],
+
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "ExtractorUnitTest" }
]
+
+
}
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 60d3ae1..c036bb5 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -10,4 +10,11 @@
"libutils",
],
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 49c9567..440065f 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -8,4 +8,10 @@
"libstagefright_foundation",
],
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ }
}
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 826c1a0..2593000 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -21,4 +21,12 @@
"libutils",
],
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
}
diff --git a/media/extractors/flac/FLACExtractor.cpp b/media/extractors/flac/FLACExtractor.cpp
index 0617e88..ec7cb24 100644
--- a/media/extractors/flac/FLACExtractor.cpp
+++ b/media/extractors/flac/FLACExtractor.cpp
@@ -561,6 +561,8 @@
AMediaFormat_setString(mFileMetadata,
AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_FLAC);
}
+ mMaxBufferSize = getMaxBlockSize() * getChannels() * getOutputSampleSize();
+ AMediaFormat_setInt32(mTrackMetadata, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, mMaxBufferSize);
return OK;
}
@@ -568,8 +570,6 @@
{
CHECK(mGroup == NULL);
mGroup = group;
- mMaxBufferSize = getMaxBlockSize() * getChannels() * getOutputSampleSize();
- AMediaFormat_setInt32(mTrackMetadata, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, mMaxBufferSize);
mGroup->add_buffer(mMaxBufferSize);
}
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index 59e9cd2..e900e57 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -17,13 +17,8 @@
*****************************************************************************
* Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
*/
-
-cc_library {
- name: "libextractorfuzzerbase",
-
- srcs: [
- "ExtractorFuzzerBase.cpp",
- ],
+cc_defaults {
+ name: "extractor-fuzzerbase-defaults",
local_include_dirs: [
"include",
@@ -36,24 +31,84 @@
static_libs: [
"liblog",
"libstagefright_foundation",
- "libmedia",
+ "libmediandk_format",
+ "libmedia_ndkformatpriv",
],
shared_libs: [
"libutils",
"libbinder",
- "libmediandk",
+ "libbase",
+ "libcutils",
],
- /* GETEXTRACTORDEF is not defined as extractor library is not linked in the
- * base class. It will be included when the extractor fuzzer binary is
- * generated.
- */
- allow_undefined_symbols: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+cc_defaults {
+ name: "extractor-fuzzer-defaults",
+ defaults: ["extractor-fuzzerbase-defaults"],
+
+ static_libs: [
+ "libextractorfuzzerbase",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
+
+cc_defaults {
+ name: "mpeg2-extractor-fuzzer-defaults",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
+
+ include_dirs: [
+ "frameworks/av/media/extractors/mpeg2",
+ "frameworks/av/media/libstagefright",
+ ],
+
+ static_libs: [
+ "libstagefright_foundation_without_imemory",
+ "libstagefright_mpeg2support",
+ "libstagefright_mpeg2extractor",
+ "libstagefright_esds",
+ "libmpeg2extractor",
+ "libmedia_helper",
+ ],
+
+ shared_libs: [
+ "android.hardware.cas@1.0",
+ "android.hardware.cas.native@1.0",
+ "android.hidl.token@1.0-utils",
+ "android.hidl.allocator@1.0",
+ "libcrypto",
+ "libhidlmemory",
+ "libhidlbase",
+ ],
+}
+
+cc_library_static {
+ name: "libextractorfuzzerbase",
+ defaults: ["extractor-fuzzerbase-defaults"],
+ host_supported: true,
+
+ srcs: [
+ "ExtractorFuzzerBase.cpp",
+ ],
}
cc_fuzz {
name: "mp4_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"mp4_extractor_fuzzer.cpp",
@@ -63,27 +118,23 @@
"frameworks/av/media/extractors/mp4",
],
+ header_libs: [
+ "libaudioclient_headers",
+ ],
+
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libstagefright_id3",
"libstagefright_esds",
"libmp4extractor",
],
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
-
dictionary: "mp4_extractor_fuzzer.dict",
}
cc_fuzz {
name: "wav_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"wav_extractor_fuzzer.cpp",
@@ -94,25 +145,19 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libfifo",
"libwavextractor",
],
shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
"libbinder_ndk",
- "libbase",
],
}
cc_fuzz {
name: "amr_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"amr_extractor_fuzzer.cpp",
@@ -123,24 +168,16 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libamrextractor",
],
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
-
dictionary: "amr_extractor_fuzzer.dict",
}
cc_fuzz {
name: "mkv_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"mkv_extractor_fuzzer.cpp",
@@ -151,10 +188,6 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libwebm",
"libstagefright_flacdec",
"libstagefright_metadatautils",
@@ -162,17 +195,13 @@
"libFLAC",
],
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
-
dictionary: "mkv_extractor_fuzzer.dict",
}
cc_fuzz {
name: "ogg_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"ogg_extractor_fuzzer.cpp",
@@ -183,108 +212,44 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libstagefright_metadatautils",
"libvorbisidec",
"liboggextractor",
],
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
-
dictionary: "ogg_extractor_fuzzer.dict",
}
cc_fuzz {
name: "mpeg2ps_extractor_fuzzer",
+ defaults: ["mpeg2-extractor-fuzzer-defaults"],
srcs: [
"mpeg2_extractor_fuzzer.cpp",
],
- include_dirs: [
- "frameworks/av/media/extractors/mpeg2",
- "frameworks/av/media/libstagefright",
- ],
-
- static_libs: [
- "liblog",
- "libstagefright_foundation_without_imemory",
- "libmedia",
- "libextractorfuzzerbase",
- "libstagefright_mpeg2support",
- "libstagefright_mpeg2extractor",
- "libstagefright_esds",
- "libmpeg2extractor",
- ],
-
cflags: [
"-DMPEG2PS",
],
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- "android.hardware.cas@1.0",
- "android.hardware.cas.native@1.0",
- "android.hidl.token@1.0-utils",
- "android.hidl.allocator@1.0",
- "libcrypto",
- "libhidlmemory",
- "libhidlbase",
- ],
-
dictionary: "mpeg2ps_extractor_fuzzer.dict",
}
cc_fuzz {
name: "mpeg2ts_extractor_fuzzer",
+ defaults: ["mpeg2-extractor-fuzzer-defaults"],
srcs: [
"mpeg2_extractor_fuzzer.cpp",
],
- include_dirs: [
- "frameworks/av/media/extractors/mpeg2",
- "frameworks/av/media/libstagefright",
- ],
-
- static_libs: [
- "liblog",
- "libstagefright_foundation_without_imemory",
- "libmedia",
- "libextractorfuzzerbase",
- "libstagefright_mpeg2support",
- "libstagefright_mpeg2extractor",
- "libstagefright_esds",
- "libmpeg2extractor",
- ],
-
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- "android.hardware.cas@1.0",
- "android.hardware.cas.native@1.0",
- "android.hidl.token@1.0-utils",
- "android.hidl.allocator@1.0",
- "libcrypto",
- "libhidlmemory",
- "libhidlbase",
- ],
-
dictionary: "mpeg2ts_extractor_fuzzer.dict",
}
cc_fuzz {
name: "mp3_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"mp3_extractor_fuzzer.cpp",
@@ -295,24 +260,16 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libfifo",
"libmp3extractor",
"libstagefright_id3",
],
-
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
}
cc_fuzz {
name: "aac_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"aac_extractor_fuzzer.cpp",
@@ -323,23 +280,15 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libaacextractor",
"libstagefright_metadatautils",
],
-
- shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
- ],
}
cc_fuzz {
name: "flac_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+ host_supported: true,
srcs: [
"flac_extractor_fuzzer.cpp",
@@ -350,22 +299,38 @@
],
static_libs: [
- "liblog",
- "libstagefright_foundation",
- "libmedia",
- "libextractorfuzzerbase",
"libstagefright_metadatautils",
"libFLAC",
"libflacextractor",
],
shared_libs: [
- "libutils",
- "libmediandk",
- "libbinder",
"libbinder_ndk",
- "libbase",
],
dictionary: "flac_extractor_fuzzer.dict",
}
+
+cc_fuzz {
+ name: "midi_extractor_fuzzer",
+ defaults: ["extractor-fuzzer-defaults"],
+
+ srcs: [
+ "midi_extractor_fuzzer.cpp",
+ ],
+
+ include_dirs: [
+ "frameworks/av/media/extractors/midi",
+ ],
+
+ static_libs: [
+ "libsonivox",
+ "libmedia_midiiowrapper",
+ "libmidiextractor",
+ "libwatchdog",
+ ],
+
+ dictionary: "midi_extractor_fuzzer.dict",
+
+ host_supported: true,
+}
diff --git a/media/extractors/fuzzers/ExtractorFuzzerBase.cpp b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
index cbd6395..1be8466 100644
--- a/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
+++ b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
@@ -34,7 +34,7 @@
return true;
}
-bool ExtractorFuzzerBase::getExtractorDef() {
+void ExtractorFuzzerBase::getExtractorDef() {
float confidence;
void* meta = nullptr;
FreeMetaFunc freeMeta = nullptr;
@@ -49,16 +49,15 @@
if (meta != nullptr && freeMeta != nullptr) {
freeMeta(meta);
}
-
- return true;
}
-bool ExtractorFuzzerBase::extractTracks() {
+void ExtractorFuzzerBase::extractTracks() {
MediaBufferGroup* bufferGroup = new MediaBufferGroup();
if (!bufferGroup) {
- return false;
+ return;
}
- for (size_t trackIndex = 0; trackIndex < mExtractor->countTracks(); ++trackIndex) {
+ size_t trackCount = mExtractor->countTracks();
+ for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
if (!track) {
continue;
@@ -67,7 +66,6 @@
delete track;
}
delete bufferGroup;
- return true;
}
void ExtractorFuzzerBase::extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup) {
@@ -94,25 +92,110 @@
free(cTrack);
}
-bool ExtractorFuzzerBase::getTracksMetadata() {
+void ExtractorFuzzerBase::getTracksMetadata() {
AMediaFormat* format = AMediaFormat_new();
uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
- for (size_t trackIndex = 0; trackIndex < mExtractor->countTracks(); ++trackIndex) {
+ size_t trackCount = mExtractor->countTracks();
+ for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
mExtractor->getTrackMetaData(format, trackIndex, flags);
}
AMediaFormat_delete(format);
- return true;
}
-bool ExtractorFuzzerBase::getMetadata() {
+void ExtractorFuzzerBase::getMetadata() {
AMediaFormat* format = AMediaFormat_new();
mExtractor->getMetaData(format);
AMediaFormat_delete(format);
- return true;
}
void ExtractorFuzzerBase::setDataSourceFlags(uint32_t flags) {
mBufferSource->setFlags(flags);
}
+
+void ExtractorFuzzerBase::seekAndExtractTracks() {
+ MediaBufferGroup* bufferGroup = new MediaBufferGroup();
+ if (!bufferGroup) {
+ return;
+ }
+ size_t trackCount = mExtractor->countTracks();
+ for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+ MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
+ if (!track) {
+ continue;
+ }
+
+ AMediaFormat* trackMetaData = AMediaFormat_new();
+ int64_t trackDuration = 0;
+ uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
+ mExtractor->getTrackMetaData(trackMetaData, trackIndex, flags);
+ AMediaFormat_getInt64(trackMetaData, AMEDIAFORMAT_KEY_DURATION, &trackDuration);
+
+ seekAndExtractTrack(track, bufferGroup, trackDuration);
+ AMediaFormat_delete(trackMetaData);
+ delete track;
+ }
+ delete bufferGroup;
+}
+
+void ExtractorFuzzerBase::seekAndExtractTrack(MediaTrackHelper* track,
+ MediaBufferGroup* bufferGroup,
+ int64_t trackDuration) {
+ CMediaTrack* cTrack = wrap(track);
+ if (!cTrack) {
+ return;
+ }
+
+ media_status_t status = cTrack->start(track, bufferGroup->wrap());
+ if (status != AMEDIA_OK) {
+ free(cTrack);
+ return;
+ }
+
+ int32_t seekCount = 0;
+ std::vector<int64_t> seekToTimeStamp;
+ while (seekCount <= kFuzzerMaxSeekPointsCount) {
+ /* This ensures kFuzzerMaxSeekPointsCount seek points are within the clipDuration and 1 seek
+ * point is outside of the clipDuration.
+ */
+ int64_t timeStamp = (seekCount * trackDuration) / (kFuzzerMaxSeekPointsCount - 1);
+ seekToTimeStamp.push_back(timeStamp);
+ seekCount++;
+ }
+
+ std::vector<uint32_t> seekOptions;
+ seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST);
+ seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST_SYNC);
+ seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC);
+ seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_NEXT_SYNC);
+ seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_FRAME_INDEX);
+
+ for (uint32_t seekOption : seekOptions) {
+ for (int64_t seekPts : seekToTimeStamp) {
+ MediaTrackHelper::ReadOptions* options =
+ new MediaTrackHelper::ReadOptions(seekOption, seekPts);
+ MediaBufferHelper* buffer = nullptr;
+ track->read(&buffer, options);
+ if (buffer) {
+ buffer->release();
+ }
+ delete options;
+ }
+ }
+
+ cTrack->stop(track);
+ free(cTrack);
+}
+
+void ExtractorFuzzerBase::processData(const uint8_t* data, size_t size) {
+ if (setDataSource(data, size)) {
+ if (createExtractor()) {
+ getExtractorDef();
+ getMetadata();
+ extractTracks();
+ getTracksMetadata();
+ seekAndExtractTracks();
+ }
+ }
+}
diff --git a/media/extractors/fuzzers/README.md b/media/extractors/fuzzers/README.md
index 4223b5e..fb1d52f 100644
--- a/media/extractors/fuzzers/README.md
+++ b/media/extractors/fuzzers/README.md
@@ -11,6 +11,7 @@
+ [libmp3extractor](#mp3ExtractorFuzzer)
+ [libaacextractor](#aacExtractorFuzzer)
+ [libflacextractor](#flacExtractor)
++ [libmidiextractor](#midiExtractorFuzzer)
# <a name="ExtractorFuzzerBase"></a> Fuzzer for libextractorfuzzerbase
All the extractors have a common API - creating a data source, extraction
@@ -321,6 +322,41 @@
$ adb shell /data/fuzz/arm64/flac_extractor_fuzzer/flac_extractor_fuzzer CORPUS_DIR
```
+# <a name="midiExtractorFuzzer"></a> Fuzzer for libmidiextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MIDI extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` function to create the MIDI extractor class.
+
+##### Maximize code coverage
+Dict file (dictionary file) is created for MIDI to ensure that the required MIDI
+headers are present in every input file that goes to the fuzzer.
+This ensures that larger code gets covered as a range of MIDI headers will be
+present in the input data.
+
+
+## Build
+
+This describes steps to build midi_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) midi_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MIDI files to that folder.
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/midi_extractor_fuzzer/midi_extractor_fuzzer CORPUS_DIR
+```
+
## References:
* http://llvm.org/docs/LibFuzzer.html
* https://github.com/google/oss-fuzz
diff --git a/media/extractors/fuzzers/aac_extractor_fuzzer.cpp b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
index 93665f0..98a6cc9 100644
--- a/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
AacExtractor* extractor = new AacExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.cpp b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
index b2f9261..6c9e1a5 100644
--- a/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
AmrExtractor* extractor = new AmrExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.cpp b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
index 61e41cf..8734d45 100644
--- a/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
FlacExtractor* extractor = new FlacExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/include/ExtractorFuzzerBase.h b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
index abf362b..6a2a1c1 100644
--- a/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
+++ b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
@@ -24,11 +24,14 @@
#include <media/DataSource.h>
#include <media/MediaExtractorPluginHelper.h>
#include <media/stagefright/MediaBufferGroup.h>
+#include <vector>
extern "C" {
android::ExtractorDef GETEXTRACTORDEF();
}
+constexpr int32_t kFuzzerMaxSeekPointsCount = 5;
+
namespace android {
class ExtractorFuzzerBase {
@@ -55,16 +58,20 @@
*/
bool setDataSource(const uint8_t* data, size_t size);
- bool getExtractorDef();
+ void getExtractorDef();
- bool extractTracks();
+ void extractTracks();
- bool getMetadata();
+ void getMetadata();
- bool getTracksMetadata();
+ void getTracksMetadata();
void setDataSourceFlags(uint32_t flags);
+ void seekAndExtractTracks();
+
+ void processData(const uint8_t* data, size_t size);
+
protected:
class BufferSource : public DataSource {
public:
@@ -123,6 +130,8 @@
MediaExtractorPluginHelper* mExtractor = nullptr;
virtual void extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup);
+ virtual void seekAndExtractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup,
+ int64_t trackDuration);
};
} // namespace android
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.cpp b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
new file mode 100644
index 0000000..e02a12b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MidiExtractor.h"
+
+using namespace android;
+
+class MIDIExtractor : public ExtractorFuzzerBase {
+ public:
+ MIDIExtractor() = default;
+ ~MIDIExtractor() = default;
+
+ bool createExtractor();
+};
+
+bool MIDIExtractor::createExtractor() {
+ mExtractor = new MidiExtractor(mDataSource->wrap());
+ if (!mExtractor) {
+ return false;
+ }
+ mExtractor->name();
+ return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ if ((!data) || (size == 0)) {
+ return 0;
+ }
+ MIDIExtractor* extractor = new MIDIExtractor();
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
+ }
+ return 0;
+}
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.dict b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
new file mode 100644
index 0000000..5b6bb8b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# MIDI Chunks
+kw1="MThd"
+kw2="MTrk"
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
index 14274b7..eceb93f 100644
--- a/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
MKVExtractor* extractor = new MKVExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
index 71c154b..9a47c18 100644
--- a/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
Mp3Extractor* extractor = new Mp3Extractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
index d2cc133..3903519 100644
--- a/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
@@ -48,17 +48,9 @@
return 0;
}
MP4Extractor* extractor = new MP4Extractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
index c34ffa0..240ef66 100644
--- a/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
@@ -54,17 +54,9 @@
return 0;
}
MPEG2Extractor* extractor = new MPEG2Extractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->extractTracks();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
index 033c50b..bd2fcc5 100644
--- a/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
OGGExtractor* extractor = new OGGExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/fuzzers/wav_extractor_fuzzer.cpp b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
index 1397122..cb11ebd 100644
--- a/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
+++ b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
@@ -46,17 +46,9 @@
return 0;
}
wavExtractor* extractor = new wavExtractor();
- if (!extractor) {
- return 0;
+ if (extractor) {
+ extractor->processData(data, size);
+ delete extractor;
}
- if (extractor->setDataSource(data, size)) {
- if (extractor->createExtractor()) {
- extractor->getExtractorDef();
- extractor->getMetadata();
- extractor->extractTracks();
- extractor->getTracksMetadata();
- }
- }
- delete extractor;
return 0;
}
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index b8255fc..1c69bb8 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -5,7 +5,7 @@
srcs: ["MidiExtractor.cpp"],
header_libs: [
- "libmedia_headers",
+ "libmedia_datasource_headers",
],
static_libs: [
@@ -18,4 +18,12 @@
shared_libs: [
"libbase",
],
+
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 7ad8cc1..330d4fe 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -21,4 +21,12 @@
"libutils",
],
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
}
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index fd6a8c6..4fd3a56 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -840,7 +840,7 @@
}
if (err != OK) {
- mPendingFrames.clear();
+ clearPendingFrames();
mBlockIter.advance();
mbuf->release();
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 102ac81..7d70548 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -13,4 +13,11 @@
"libstagefright_foundation",
],
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index e48e1b7..afa055f 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -16,4 +16,12 @@
"libstagefright_id3",
"libutils",
],
+
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index 0773387..444664c 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -76,16 +76,19 @@
size_t size;
sp<ABuffer> hvcc;
sp<ABuffer> icc;
+ sp<ABuffer> av1c;
Vector<uint32_t> thumbnails;
Vector<uint32_t> dimgRefs;
- Vector<uint32_t> cdscRefs;
+ Vector<uint32_t> exifRefs;
+ Vector<uint32_t> xmpRefs;
size_t nextTileIndex;
};
-struct ExifItem {
+struct ExternalMetaItem {
off64_t offset;
size_t size;
+ bool isExif;
};
/////////////////////////////////////////////////////////////////////
@@ -481,7 +484,7 @@
void apply(
KeyedVector<uint32_t, ImageItem> &itemIdToItemMap,
- KeyedVector<uint32_t, ExifItem> &itemIdToExifMap) const;
+ KeyedVector<uint32_t, ExternalMetaItem> &itemIdToMetaMap) const;
private:
uint32_t mItemId;
@@ -493,7 +496,7 @@
void ItemReference::apply(
KeyedVector<uint32_t, ImageItem> &itemIdToItemMap,
- KeyedVector<uint32_t, ExifItem> &itemIdToExifMap) const {
+ KeyedVector<uint32_t, ExternalMetaItem> &itemIdToMetaMap) const {
ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
switch(type()) {
@@ -546,24 +549,24 @@
continue;
}
ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
- ImageItem &masterImage = itemIdToItemMap.editValueAt(itemIndex);
- if (!masterImage.thumbnails.empty()) {
+ ImageItem &imageItem = itemIdToItemMap.editValueAt(itemIndex);
+ if (!imageItem.thumbnails.empty()) {
ALOGW("already has thumbnails!");
}
- masterImage.thumbnails.push_back(mItemId);
+ imageItem.thumbnails.push_back(mItemId);
}
break;
}
case FOURCC("cdsc"): {
- ssize_t itemIndex = itemIdToExifMap.indexOfKey(mItemId);
+ ssize_t metaIndex = itemIdToMetaMap.indexOfKey(mItemId);
- // ignore non-exif block items
- if (itemIndex < 0) {
+ // ignore non-meta items
+ if (metaIndex < 0) {
return;
}
for (size_t i = 0; i < mRefs.size(); i++) {
- itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
+ ssize_t itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
// ignore non-image items
if (itemIndex < 0) {
@@ -571,7 +574,11 @@
}
ALOGV("Image item id %d uses metadata item id %d", mRefs[i], mItemId);
ImageItem &image = itemIdToItemMap.editValueAt(itemIndex);
- image.cdscRefs.push_back(mItemId);
+ if (itemIdToMetaMap[metaIndex].isExif) {
+ image.exifRefs.push_back(mItemId);
+ } else {
+ image.xmpRefs.push_back(mItemId);
+ }
}
break;
}
@@ -764,6 +771,39 @@
return OK;
}
+struct Av1cBox : public Box, public ItemProperty {
+ Av1cBox(DataSourceHelper *source) :
+ Box(source, FOURCC("av1C")) {}
+
+ status_t parse(off64_t offset, size_t size) override;
+
+ void attachTo(ImageItem &image) const override {
+ image.av1c = mAv1c;
+ }
+
+private:
+ sp<ABuffer> mAv1c;
+};
+
+status_t Av1cBox::parse(off64_t offset, size_t size) {
+ ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+ mAv1c = new ABuffer(size);
+
+ if (mAv1c->data() == NULL) {
+ ALOGE("b/28471206");
+ return NO_MEMORY;
+ }
+
+ if (source()->readAt(offset, mAv1c->data(), size) < (ssize_t)size) {
+ return ERROR_IO;
+ }
+
+ ALOGV("property av1C");
+
+ return OK;
+}
+
struct IrotBox : public Box, public ItemProperty {
IrotBox(DataSourceHelper *source) :
Box(source, FOURCC("irot")), mAngle(0) {}
@@ -929,7 +969,7 @@
status_t IpcoBox::parse(off64_t offset, size_t size) {
ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
- // push dummy as the index is 1-based
+ // push a placeholder as the index is 1-based
mItemProperties->push_back(new ItemProperty());
return parseChunks(offset, size);
}
@@ -957,6 +997,11 @@
itemProperty = new ColrBox(source());
break;
}
+ case FOURCC("av1C"):
+ {
+ itemProperty = new Av1cBox(source());
+ break;
+ }
default:
{
// push dummy to maintain correct item property index
@@ -1026,7 +1071,21 @@
struct ItemInfo {
uint32_t itemId;
uint32_t itemType;
+ String8 contentType;
bool hidden;
+
+ bool isXmp() const {
+ return itemType == FOURCC("mime") && contentType == String8("application/rdf+xml");
+ }
+ bool isExif() const {
+ return itemType == FOURCC("Exif");
+ }
+ bool isGrid() const {
+ return itemType == FOURCC("grid");
+ }
+ bool isSample() const {
+ return itemType == FOURCC("av01") || itemType == FOURCC("hvc1");
+ }
};
struct InfeBox : public FullBox {
@@ -1116,6 +1175,7 @@
if (!parseNullTerminatedString(&offset, &size, &content_type)) {
return ERROR_MALFORMED;
}
+ itemInfo->contentType = content_type;
// content_encoding is optional; can be omitted if would be empty
if (size > 0) {
@@ -1136,18 +1196,18 @@
struct IinfBox : public FullBox {
IinfBox(DataSourceHelper *source, Vector<ItemInfo> *itemInfos) :
- FullBox(source, FOURCC("iinf")), mItemInfos(itemInfos) {}
+ FullBox(source, FOURCC("iinf")), mItemInfos(itemInfos), mNeedIref(false) {}
status_t parse(off64_t offset, size_t size);
- bool hasFourCC(uint32_t type) { return mFourCCSeen.count(type) > 0; }
+ bool needIrefBox() { return mNeedIref; }
protected:
status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
private:
Vector<ItemInfo> *mItemInfos;
- std::unordered_set<uint32_t> mFourCCSeen;
+ bool mNeedIref;
};
status_t IinfBox::parse(off64_t offset, size_t size) {
@@ -1194,7 +1254,7 @@
status_t err = infeBox.parse(offset, size, &itemInfo);
if (err == OK) {
mItemInfos->push_back(itemInfo);
- mFourCCSeen.insert(itemInfo.itemType);
+ mNeedIref |= (itemInfo.isExif() || itemInfo.isXmp() || itemInfo.isGrid());
}
// InfeBox parse returns ERROR_UNSUPPORTED if the box if an unsupported
// version. Ignore this error as it's not fatal.
@@ -1203,8 +1263,9 @@
//////////////////////////////////////////////////////////////////
-ItemTable::ItemTable(DataSourceHelper *source)
+ItemTable::ItemTable(DataSourceHelper *source, bool isHeif)
: mDataSource(source),
+ mIsHeif(isHeif),
mPrimaryItemId(0),
mIdatOffset(0),
mIdatSize(0),
@@ -1283,7 +1344,7 @@
return err;
}
- if (iinfBox.hasFourCC(FOURCC("grid")) || iinfBox.hasFourCC(FOURCC("Exif"))) {
+ if (iinfBox.needIrefBox()) {
mRequiredBoxes.insert('iref');
}
@@ -1359,11 +1420,9 @@
// Only handle 3 types of items, all others are ignored:
// 'grid': derived image from tiles
- // 'hvc1': coded image (or tile)
- // 'Exif': EXIF metadata
- if (info.itemType != FOURCC("grid") &&
- info.itemType != FOURCC("hvc1") &&
- info.itemType != FOURCC("Exif")) {
+ // 'hvc1' or 'av01': coded image (or tile)
+ // 'Exif' or XMP: metadata
+ if (!info.isGrid() && !info.isSample() && !info.isExif() && !info.isXmp()) {
continue;
}
@@ -1386,15 +1445,18 @@
return ERROR_MALFORMED;
}
- if (info.itemType == FOURCC("Exif")) {
- // Only add if the Exif data is non-empty. The first 4 bytes contain
+ if (info.isExif() || info.isXmp()) {
+ // Only add if the meta is non-empty. For Exif, the first 4 bytes contain
// the offset to TIFF header, which the Exif parser doesn't use.
- if (size > 4) {
- ExifItem exifItem = {
+ ALOGV("adding meta to mItemIdToMetaMap: isExif %d, offset %lld, size %lld",
+ info.isExif(), (long long)offset, (long long)size);
+ if ((info.isExif() && size > 4) || (info.isXmp() && size > 0)) {
+ ExternalMetaItem metaItem = {
+ .isExif = info.isExif(),
.offset = offset,
.size = size,
};
- mItemIdToExifMap.add(info.itemId, exifItem);
+ mItemIdToMetaMap.add(info.itemId, metaItem);
}
continue;
}
@@ -1429,7 +1491,7 @@
}
for (size_t i = 0; i < mItemReferences.size(); i++) {
- mItemReferences[i]->apply(mItemIdToItemMap, mItemIdToExifMap);
+ mItemReferences[i]->apply(mItemIdToItemMap, mItemIdToMetaMap);
}
bool foundPrimary = false;
@@ -1509,7 +1571,9 @@
}
AMediaFormat *meta = AMediaFormat_new();
- AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+ AMediaFormat_setString(
+ meta, AMEDIAFORMAT_KEY_MIME,
+ mIsHeif ? MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC : MEDIA_MIMETYPE_IMAGE_AVIF);
if (image->itemId == mPrimaryItemId) {
AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_DEFAULT, 1);
@@ -1539,15 +1603,24 @@
ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
if (thumbItemIndex >= 0) {
const ImageItem &thumbnail = mItemIdToItemMap[thumbItemIndex];
-
- if (thumbnail.hvcc != NULL) {
+ if (thumbnail.hvcc != NULL || thumbnail.av1c != NULL) {
AMediaFormat_setInt32(meta,
AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH, thumbnail.width);
AMediaFormat_setInt32(meta,
AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT, thumbnail.height);
- AMediaFormat_setBuffer(meta,
- AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
- thumbnail.hvcc->data(), thumbnail.hvcc->size());
+ if (thumbnail.hvcc != NULL) {
+ AMediaFormat_setBuffer(meta,
+ AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+ thumbnail.hvcc->data(), thumbnail.hvcc->size());
+ } else {
+ // We use a hard-coded string here instead of
+ // AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C. The key is available only from SDK 31.
+ // The mp4 extractor is part of mainline and builds against SDK 29 as of
+ // writing. This hard-coded string can be replaced with the named constant once
+ // the mp4 extractor is built against SDK >= 31.
+ AMediaFormat_setBuffer(meta,
+ "thumbnail-csd-av1c", thumbnail.av1c->data(), thumbnail.av1c->size());
+ }
ALOGV("image[%u]: thumbnail: size %dx%d, item index %zd",
imageIndex, thumbnail.width, thumbnail.height, thumbItemIndex);
} else {
@@ -1574,12 +1647,21 @@
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, image->width * image->height * 3 / 2);
}
- if (image->hvcc == NULL) {
- ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
- return NULL;
+ if (mIsHeif) {
+ if (image->hvcc == NULL) {
+ ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
+ return NULL;
+ }
+ AMediaFormat_setBuffer(meta,
+ AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
+ } else {
+ if (image->av1c == NULL) {
+ ALOGE("%s: av1c is missing for image[%u]!", __FUNCTION__, imageIndex);
+ return NULL;
+ }
+ AMediaFormat_setBuffer(meta,
+ AMEDIAFORMAT_KEY_CSD_0, image->av1c->data(), image->av1c->size());
}
- AMediaFormat_setBuffer(meta,
- AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
if (image->icc != NULL) {
AMediaFormat_setBuffer(meta,
@@ -1614,17 +1696,17 @@
return BAD_VALUE;
}
- uint32_t masterItemIndex = mDisplayables[imageIndex];
+ uint32_t imageItemIndex = mDisplayables[imageIndex];
- const ImageItem &masterImage = mItemIdToItemMap[masterItemIndex];
- if (masterImage.thumbnails.empty()) {
- *itemIndex = masterItemIndex;
+ const ImageItem &imageItem = mItemIdToItemMap[imageItemIndex];
+ if (imageItem.thumbnails.empty()) {
+ *itemIndex = imageItemIndex;
return OK;
}
- ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(masterImage.thumbnails[0]);
+ ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(imageItem.thumbnails[0]);
if (thumbItemIndex < 0) {
- // Do not return the master image in this case, fail it so that the
+ // Do not return the image item in this case, fail it so that the
// thumbnail extraction code knows we really don't have it.
return INVALID_OPERATION;
}
@@ -1686,11 +1768,11 @@
}
const ImageItem &image = mItemIdToItemMap[itemIndex];
- if (image.cdscRefs.size() == 0) {
+ if (image.exifRefs.size() == 0) {
return NAME_NOT_FOUND;
}
- ssize_t exifIndex = mItemIdToExifMap.indexOfKey(image.cdscRefs[0]);
+ ssize_t exifIndex = mItemIdToMetaMap.indexOfKey(image.exifRefs[0]);
if (exifIndex < 0) {
return NAME_NOT_FOUND;
}
@@ -1698,7 +1780,7 @@
// skip the first 4-byte of the offset to TIFF header
uint32_t tiffOffset;
if (!mDataSource->readAt(
- mItemIdToExifMap[exifIndex].offset, &tiffOffset, 4)) {
+ mItemIdToMetaMap[exifIndex].offset, &tiffOffset, 4)) {
return ERROR_IO;
}
@@ -1711,16 +1793,43 @@
// exif data. The size of the item should be > 4 for a non-empty exif (this
// was already checked when the item was added). Also check that the tiff
// header offset is valid.
- if (mItemIdToExifMap[exifIndex].size <= 4 ||
- tiffOffset > mItemIdToExifMap[exifIndex].size - 4) {
+ if (mItemIdToMetaMap[exifIndex].size <= 4 ||
+ tiffOffset > mItemIdToMetaMap[exifIndex].size - 4) {
return ERROR_MALFORMED;
}
// Offset of 'Exif\0\0' relative to the beginning of 'Exif' item
// (first 4-byte is the tiff header offset)
uint32_t exifOffset = 4 + tiffOffset - 6;
- *offset = mItemIdToExifMap[exifIndex].offset + exifOffset;
- *size = mItemIdToExifMap[exifIndex].size - exifOffset;
+ *offset = mItemIdToMetaMap[exifIndex].offset + exifOffset;
+ *size = mItemIdToMetaMap[exifIndex].size - exifOffset;
+ return OK;
+}
+
+status_t ItemTable::getXmpOffsetAndSize(off64_t *offset, size_t *size) {
+ if (!mImageItemsValid) {
+ return INVALID_OPERATION;
+ }
+
+ ssize_t itemIndex = mItemIdToItemMap.indexOfKey(mPrimaryItemId);
+
+ // this should not happen, something's seriously wrong.
+ if (itemIndex < 0) {
+ return INVALID_OPERATION;
+ }
+
+ const ImageItem &image = mItemIdToItemMap[itemIndex];
+ if (image.xmpRefs.size() == 0) {
+ return NAME_NOT_FOUND;
+ }
+
+ ssize_t xmpIndex = mItemIdToMetaMap.indexOfKey(image.xmpRefs[0]);
+ if (xmpIndex < 0) {
+ return NAME_NOT_FOUND;
+ }
+
+ *offset = mItemIdToMetaMap[xmpIndex].offset;
+ *size = mItemIdToMetaMap[xmpIndex].size;
return OK;
}
diff --git a/media/extractors/mp4/ItemTable.h b/media/extractors/mp4/ItemTable.h
index be81b59..62826b6 100644
--- a/media/extractors/mp4/ItemTable.h
+++ b/media/extractors/mp4/ItemTable.h
@@ -34,7 +34,7 @@
struct AssociationEntry;
struct ImageItem;
-struct ExifItem;
+struct ExternalMetaItem;
struct ItemLoc;
struct ItemInfo;
struct ItemProperty;
@@ -42,12 +42,12 @@
/*
* ItemTable keeps track of all image items (including coded images, grids and
- * tiles) inside a HEIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
+ * tiles) inside a HEIF/AVIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
*/
class ItemTable : public RefBase {
public:
- explicit ItemTable(DataSourceHelper *source);
+ ItemTable(DataSourceHelper *source, bool isHeif);
status_t parse(uint32_t type, off64_t offset, size_t size);
@@ -59,12 +59,15 @@
status_t getImageOffsetAndSize(
uint32_t *itemIndex, off64_t *offset, size_t *size);
status_t getExifOffsetAndSize(off64_t *offset, size_t *size);
+ status_t getXmpOffsetAndSize(off64_t *offset, size_t *size);
protected:
~ItemTable();
private:
DataSourceHelper *mDataSource;
+ // If this is true, then this item table is for a HEIF image. Otherwise it is for an AVIF image.
+ bool mIsHeif;
KeyedVector<uint32_t, ItemLoc> mItemLocs;
Vector<ItemInfo> mItemInfos;
@@ -82,7 +85,7 @@
bool mImageItemsValid;
uint32_t mCurrentItemIndex;
KeyedVector<uint32_t, ImageItem> mItemIdToItemMap;
- KeyedVector<uint32_t, ExifItem> mItemIdToExifMap;
+ KeyedVector<uint32_t, ExternalMetaItem> mItemIdToMetaMap;
Vector<uint32_t> mDisplayables;
status_t parseIlocBox(off64_t offset, size_t size);
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 73d1267..221bf4f 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -20,6 +20,7 @@
#include <ctype.h>
#include <inttypes.h>
#include <algorithm>
+#include <map>
#include <memory>
#include <stdint.h>
#include <stdlib.h>
@@ -148,7 +149,9 @@
uint8_t *mSrcBuffer;
bool mIsHeif;
+ bool mIsAvif;
bool mIsAudio;
+ bool mIsUsac = false;
sp<ItemTable> mItemTable;
/* Shift start offset (move to earlier time) when media_time > 0,
@@ -200,10 +203,11 @@
uint32_t duration;
int32_t compositionOffset;
uint8_t iv[16];
- Vector<size_t> clearsizes;
- Vector<size_t> encryptedsizes;
+ Vector<uint32_t> clearsizes;
+ Vector<uint32_t> encryptedsizes;
};
Vector<Sample> mCurrentSamples;
+ std::map<off64_t, uint32_t> mDrmOffsets;
MPEG4Source(const MPEG4Source &);
MPEG4Source &operator=(const MPEG4Source &);
@@ -411,6 +415,7 @@
mIsHeif(false),
mHasMoovBox(false),
mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
+ mIsAvif(false),
mFirstTrack(NULL),
mLastTrack(NULL) {
ALOGV("mime=%s, mPreferHeif=%d", mime, mPreferHeif);
@@ -667,7 +672,7 @@
}
}
- if (mIsHeif && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
+ if ((mIsAvif || mIsHeif) && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
off64_t exifOffset;
size_t exifSize;
if (mItemTable->getExifOffsetAndSize(&exifOffset, &exifSize) == OK) {
@@ -676,6 +681,19 @@
AMediaFormat_setInt64(mFileMetaData,
AMEDIAFORMAT_KEY_EXIF_SIZE, (int64_t)exifSize);
}
+ off64_t xmpOffset;
+ size_t xmpSize;
+ if (mItemTable->getXmpOffsetAndSize(&xmpOffset, &xmpSize) == OK) {
+ // TODO(chz): b/175717339
+ // Use a hard-coded string here instead of named keys. The keys are available
+ // only on API 31+. The mp4 extractor is part of mainline and has min_sdk_version
+ // of 29. This hard-coded string can be replaced with the named constant once
+ // the mp4 extractor is built against API 31+.
+ AMediaFormat_setInt64(mFileMetaData,
+ "xmp-offset" /*AMEDIAFORMAT_KEY_XMP_OFFSET*/, (int64_t)xmpOffset);
+ AMediaFormat_setInt64(mFileMetaData,
+ "xmp-size" /*AMEDIAFORMAT_KEY_XMP_SIZE*/, (int64_t)xmpSize);
+ }
for (uint32_t imageIndex = 0;
imageIndex < mItemTable->countImages(); imageIndex++) {
AMediaFormat *meta = mItemTable->getImageMeta(imageIndex);
@@ -693,7 +711,7 @@
}
mInitCheck = OK;
- ALOGV("adding HEIF image track %u", imageIndex);
+ ALOGV("adding %s image track %u", mIsHeif ? "HEIF" : "AVIF", imageIndex);
Track *track = new Track;
if (mLastTrack != NULL) {
mLastTrack->next = track;
@@ -719,6 +737,10 @@
MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) != NULL) {
AMediaFormat_setString(mFileMetaData,
AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_CONTAINER_HEIF);
+ } else if (findTrackByMimePrefix(
+ MEDIA_MIMETYPE_IMAGE_AVIF) != NULL) {
+ AMediaFormat_setString(mFileMetaData,
+ AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_AVIF);
} else {
AMediaFormat_setString(mFileMetaData,
AMEDIAFORMAT_KEY_MIME, "application/octet-stream");
@@ -1148,7 +1170,7 @@
} else if (chunk_type == FOURCC("moov")) {
mInitCheck = OK;
- return UNKNOWN_ERROR; // Return a dummy error.
+ return UNKNOWN_ERROR; // Return a generic error.
}
break;
}
@@ -2573,9 +2595,9 @@
case FOURCC("iref"):
case FOURCC("ipro"):
{
- if (mIsHeif) {
+ if (mIsHeif || mIsAvif) {
if (mItemTable == NULL) {
- mItemTable = new ItemTable(mDataSource);
+ mItemTable = new ItemTable(mDataSource, mIsHeif);
}
status_t err = mItemTable->parse(
chunk_type, data_offset, chunk_data_size);
@@ -2877,6 +2899,21 @@
break;
}
+ case FOURCC("pasp"):
+ {
+ *offset += chunk_size;
+ // this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
+ // ignore otherwise
+ if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
+ status_t err = parsePaspBox(data_offset, chunk_data_size);
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ break;
+ }
+
case FOURCC("titl"):
case FOURCC("perf"):
case FOURCC("auth"):
@@ -3001,14 +3038,20 @@
mIsHeif = true;
brandSet.erase(FOURCC("mif1"));
brandSet.erase(FOURCC("heic"));
+ } else if (brandSet.count(FOURCC("avif")) > 0 ||
+ brandSet.count(FOURCC("avis")) > 0) {
+ ALOGV("identified AVIF image");
+ mIsAvif = true;
+ brandSet.erase(FOURCC("avif"));
+ brandSet.erase(FOURCC("avis"));
}
if (!brandSet.empty()) {
// This means that the file should have moov box.
// It could be any iso files (mp4, heifs, etc.)
mHasMoovBox = true;
- if (mIsHeif) {
- ALOGV("identified HEIF image with other tracks");
+ if (mIsHeif || mIsAvif) {
+ ALOGV("identified %s image with other tracks", mIsHeif ? "HEIF" : "AVIF");
}
}
}
@@ -3818,43 +3861,44 @@
switch ((int32_t)mPath[4]) {
case FOURCC("\251alb"):
{
- metadataKey = "album";
+ metadataKey = AMEDIAFORMAT_KEY_ALBUM;
break;
}
case FOURCC("\251ART"):
{
- metadataKey = "artist";
+ metadataKey = AMEDIAFORMAT_KEY_ARTIST;
break;
}
case FOURCC("aART"):
{
- metadataKey = "albumartist";
+ metadataKey = AMEDIAFORMAT_KEY_ALBUMARTIST;
break;
}
case FOURCC("\251day"):
{
- metadataKey = "year";
+ metadataKey = AMEDIAFORMAT_KEY_YEAR;
break;
}
case FOURCC("\251nam"):
{
- metadataKey = "title";
+ metadataKey = AMEDIAFORMAT_KEY_TITLE;
break;
}
case FOURCC("\251wrt"):
{
- metadataKey = "writer";
+ // various open source taggers agree that the "©wrt" tag is for composer, not writer
+ metadataKey = AMEDIAFORMAT_KEY_COMPOSER;
break;
}
case FOURCC("covr"):
{
- metadataKey = "albumart";
+ metadataKey = AMEDIAFORMAT_KEY_ALBUMART;
break;
}
case FOURCC("gnre"):
case FOURCC("\251gen"):
{
- metadataKey = "genre";
+ metadataKey = AMEDIAFORMAT_KEY_GENRE;
break;
}
case FOURCC("cpil"):
@@ -3959,7 +4003,7 @@
if (!strcmp(metadataKey, "albumart")) {
AMediaFormat_setBuffer(mFileMetaData, metadataKey,
buffer + 8, size - 8);
- } else if (!strcmp(metadataKey, "genre")) {
+ } else if (!strcmp(metadataKey, AMEDIAFORMAT_KEY_GENRE)) {
if (flags == 0) {
// uint8_t genre code, iTunes genre codes are
// the standard id3 codes, except they start
@@ -4048,6 +4092,26 @@
return OK;
}
+status_t MPEG4Extractor::parsePaspBox(off64_t offset, size_t size) {
+ if (size < 8 || size == SIZE_MAX || mLastTrack == NULL) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t data[2]; // hSpacing, vSpacing
+ if (mDataSource->readAt(offset, data, 8) < 8) {
+ return ERROR_IO;
+ }
+ uint32_t hSpacing = ntohl(data[0]);
+ uint32_t vSpacing = ntohl(data[1]);
+
+ if (hSpacing != 0 && vSpacing != 0) {
+ AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_WIDTH, hSpacing);
+ AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_HEIGHT, vSpacing);
+ }
+
+ return OK;
+}
+
status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
if (size < 4 || size == SIZE_MAX) {
return ERROR_MALFORMED;
@@ -4325,7 +4389,8 @@
if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
return NULL;
}
- } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+ || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
void *data;
size_t size;
if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
@@ -4334,9 +4399,12 @@
const uint8_t *ptr = (const uint8_t *)data;
- if (size < 5 || ptr[0] != 0x81) { // configurationVersion == 1
+ if (size < 4 || ptr[0] != 0x81) { // configurationVersion == 1
return NULL;
}
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+ itemTable = mItemTable;
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
void *data;
size_t size;
@@ -4474,7 +4542,7 @@
//AOT_SLS = 38, /**< SLS */
//AOT_ER_AAC_ELD = 39, /**< AAC Enhanced Low Delay */
- //AOT_USAC = 42, /**< USAC */
+ AOT_USAC = 42, /**< USAC */
//AOT_SAOC = 43, /**< SAOC */
//AOT_LD_MPEGS = 44, /**< Low Delay MPEG Surround */
@@ -4622,7 +4690,7 @@
ABitReader br(csd, csd_size);
uint32_t objectType = br.getBits(5);
- if (objectType == 31) { // AAC-ELD => additional 6 bits
+ if (objectType == AOT_ESCAPE) { // AAC-ELD => additional 6 bits
objectType = 32 + br.getBits(6);
}
@@ -4898,7 +4966,6 @@
mStarted(false),
mBuffer(NULL),
mSrcBuffer(NULL),
- mIsHeif(itemTable != NULL),
mItemTable(itemTable),
mElstShiftStartTicks(elstShiftStartTicks),
mElstInitialEmptyEditTicks(elstInitialEmptyEditTicks) {
@@ -4933,6 +5000,8 @@
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
+ mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
+ mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF) && mItemTable != NULL;
if (mIsAVC) {
void *data;
@@ -4998,6 +5067,12 @@
mIsPcm = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW);
mIsAudio = !strncasecmp(mime, "audio/", 6);
+ int32_t aacObjectType = -1;
+
+ if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_AAC_PROFILE, &aacObjectType)) {
+ mIsUsac = (aacObjectType == AOT_USAC);
+ }
+
if (mIsPcm) {
int32_t numChannels = 0;
int32_t bitsPerSample = 0;
@@ -5172,6 +5247,9 @@
if (chunk_type == FOURCC("moof")) {
mNextMoofOffset = *offset;
break;
+ } else if (chunk_type == FOURCC("mdat")) {
+ parseChunk(offset);
+ continue;
} else if (chunk_size == 0) {
break;
}
@@ -5233,6 +5311,22 @@
// parse DRM info if present
ALOGV("MPEG4Source::parseChunk mdat");
// if saiz/saoi was previously observed, do something with the sampleinfos
+ status_t err = OK;
+ auto kv = mDrmOffsets.lower_bound(*offset);
+ if (kv != mDrmOffsets.end()) {
+ auto drmoffset = kv->first;
+ auto flags = kv->second;
+ mDrmOffsets.erase(kv);
+ ALOGV("mdat chunk_size %" PRIu64 " drmoffset %" PRId64 " offset %" PRId64,
+ chunk_size, drmoffset, *offset);
+ if (chunk_size >= drmoffset - *offset) {
+ err = parseClearEncryptedSizes(drmoffset, false, flags,
+ chunk_size - (drmoffset - *offset));
+ }
+ }
+ if (err != OK) {
+ return err;
+ }
*offset += chunk_size;
break;
}
@@ -5414,8 +5508,10 @@
off64_t drmoffset = mCurrentSampleInfoOffsets[0]; // from moof
drmoffset += mCurrentMoofOffset;
+ mDrmOffsets[drmoffset] = flags;
+ ALOGV("saio drmoffset %" PRId64 " flags %u", drmoffset, flags);
- return parseClearEncryptedSizes(drmoffset, false, 0, mCurrentMoofSize);
+ return OK;
}
status_t MPEG4Source::parseClearEncryptedSizes(
@@ -5754,7 +5850,7 @@
return -EINVAL;
}
- // apply some sanity (vs strict legality) checks
+ // apply some quick (vs strict legality) checks
//
static constexpr uint32_t kMaxTrunSampleCount = 10000;
if (sampleCount > kMaxTrunSampleCount) {
@@ -5900,7 +5996,7 @@
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
ALOGV("seekTimeUs:%" PRId64, seekTimeUs);
- if (mIsHeif) {
+ if (mIsHeif || mIsAvif) {
CHECK(mSampleTable == NULL);
CHECK(mItemTable != NULL);
int32_t imageIndex;
@@ -5970,10 +6066,10 @@
}
uint32_t syncSampleIndex = sampleIndex;
- // assume every audio sample is a sync sample. This works around
+ // assume every non-USAC audio sample is a sync sample. This works around
// seek issues with files that were incorrectly written with an
// empty or single-sample stss block for the audio track
- if (err == OK && !mIsAudio) {
+ if (err == OK && (!mIsAudio || mIsUsac)) {
err = mSampleTable->findSyncSampleNear(
sampleIndex, &syncSampleIndex, findFlags);
}
@@ -6045,7 +6141,7 @@
newBuffer = true;
status_t err;
- if (!mIsHeif) {
+ if (!mIsHeif && !mIsAvif) {
err = mSampleTable->getMetaDataForSample(mCurrentSampleIndex, &offset, &size,
(uint64_t*)&cts, &isSyncSample, &stts);
if(err == OK) {
@@ -6473,9 +6569,9 @@
if (smpl->encryptedsizes.size()) {
// store clear/encrypted lengths in metadata
AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
- smpl->clearsizes.array(), smpl->clearsizes.size() * 4);
+ smpl->clearsizes.array(), smpl->clearsizes.size() * sizeof(uint32_t));
AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
- smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4);
+ smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * sizeof(uint32_t));
AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE, mDefaultIVSize);
AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_MODE, mCryptoMode);
AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_KEY, mCryptoKey, 16);
@@ -6684,7 +6780,8 @@
|| !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
|| !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
|| !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)
- || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)) {
+ || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)
+ || !memcmp(header, "ftypavif", 8) || !memcmp(header, "ftypavis", 8)) {
*confidence = 0.4;
return true;
@@ -6720,6 +6817,8 @@
FOURCC("heic"), // HEIF image
FOURCC("msf1"), // HEIF image sequence
FOURCC("hevc"), // HEIF image sequence
+ FOURCC("avif"), // AVIF image
+ FOURCC("avis"), // AVIF image sequence
};
for (size_t i = 0;
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 1e49d50..542a3e6 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -144,6 +144,7 @@
bool mIsHeif;
bool mHasMoovBox;
bool mPreferHeif;
+ bool mIsAvif;
Track *mFirstTrack, *mLastTrack;
@@ -160,6 +161,7 @@
status_t parseChunk(off64_t *offset, int depth);
status_t parseITunesMetaData(off64_t offset, size_t size);
status_t parseColorInfo(off64_t offset, size_t size);
+ status_t parsePaspBox(off64_t offset, size_t size);
status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
void parseID3v2MetaData(off64_t offset, uint64_t size);
status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index bc8632c..4c25314 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,6 +1,16 @@
cc_library {
name: "libmpeg2extractor",
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ android: {
+ shared_libs: ["libvndksupport#29"],
+ },
+ },
+
defaults: ["extractor-defaults"],
srcs: [
@@ -12,14 +22,13 @@
shared_libs: [
"libbase",
"libcgrouprc#29",
- "libvndksupport#29",
],
header_libs: [
"libaudioclient_headers",
"libbase_headers",
"libstagefright_headers",
- "libmedia_headers",
+ "libmedia_datasource_headers",
],
static_libs: [
@@ -37,7 +46,7 @@
"libstagefright_esds",
"libstagefright_foundation_without_imemory",
"libstagefright_mpeg2extractor",
- "libstagefright_mpeg2support",
+ "libstagefright_mpeg2support_nocrypto",
"libutils",
],
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index 7aed683..579065e 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -20,4 +20,11 @@
"libvorbisidec",
],
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index 828bcd6..62f0808 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -43,6 +43,9 @@
long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op);
}
+static constexpr int OGG_PAGE_FLAG_CONTINUED_PACKET = 1;
+static constexpr int OGG_PAGE_FLAG_END_OF_STREAM = 4;
+
namespace android {
struct OggSource : public MediaTrackHelper {
@@ -297,7 +300,8 @@
AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
*out = packet;
- ALOGV("returning buffer %p", packet);
+ ALOGV("returning buffer %p, size %zu, length %zu",
+ packet, packet->size(), packet->range_length());
return AMEDIA_OK;
}
@@ -358,10 +362,10 @@
if (!memcmp(signature, "OggS", 4)) {
if (*pageOffset > startOffset) {
- ALOGV("skipped %lld bytes of junk to reach next frame",
- (long long)(*pageOffset - startOffset));
+ ALOGV("skipped %lld bytes of junk at %lld to reach next frame",
+ (long long)(*pageOffset - startOffset), (long long)(startOffset));
}
-
+ ALOGV("found frame at %lld", (long long)(*pageOffset));
return OK;
}
@@ -629,7 +633,8 @@
// Calculate timestamps by accumulating durations starting from the first sample of a page;
// We assume that we only seek to page boundaries.
AMediaFormat *meta = (*out)->meta_data();
- if (AMediaFormat_getInt32(meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, &currentPageSamples)) {
+ if (AMediaFormat_getInt32(meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, &currentPageSamples) &&
+ (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM)) {
// first packet in page
if (mOffset == mFirstDataOffset) {
currentPageSamples -= mStartGranulePosition;
@@ -812,6 +817,7 @@
}
buffer = tmp;
+ ALOGV("reading %zu bytes @ %zu", packetSize, size_t(dataOffset));
ssize_t n = mSource->readAt(
dataOffset,
(uint8_t *)buffer->data() + buffer->range_length(),
@@ -830,8 +836,9 @@
if (gotFullPacket) {
// We've just read the entire packet.
+ ALOGV("got full packet, size %zu", fullSize);
- if (mFirstPacketInPage) {
+ if (mFirstPacketInPage && (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM)) {
AMediaFormat *meta = buffer->meta_data();
AMediaFormat_setInt32(
meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
@@ -864,6 +871,9 @@
}
// fall through, the buffer now contains the start of the packet.
+ ALOGV("have start of packet, getting rest");
+ } else {
+ ALOGV("moving to next page");
}
CHECK_EQ(mNextLaceIndex, mCurrentPage.mNumSegments);
@@ -899,9 +909,10 @@
mNextLaceIndex = 0;
if (buffer != NULL) {
- if ((mCurrentPage.mFlags & 1) == 0) {
+ if ((mCurrentPage.mFlags & OGG_PAGE_FLAG_CONTINUED_PACKET) == 0) {
// This page does not continue the packet, i.e. the packet
// is already complete.
+ ALOGV("packet was already complete?!");
if (timeUs >= 0) {
AMediaFormat *meta = buffer->meta_data();
@@ -909,8 +920,10 @@
}
AMediaFormat *meta = buffer->meta_data();
- AMediaFormat_setInt32(
- meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
+ if (mCurrentPage.mFlags & OGG_PAGE_FLAG_END_OF_STREAM) {
+ AMediaFormat_setInt32(
+ meta, AMEDIAFORMAT_KEY_VALID_SAMPLES, mCurrentPageSamples);
+ }
mFirstPacketInPage = false;
*out = buffer;
@@ -929,6 +942,7 @@
for (size_t i = 0; i < mNumHeaders; ++i) {
// ignore timestamp for configuration packets
if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != AMEDIA_OK) {
+ ALOGV("readNextPacket failed");
return err;
}
ALOGV("read packet of size %zu\n", packet->range_length());
@@ -1008,6 +1022,10 @@
size_t size = buffer->range_length();
+ if (size == 0) {
+ return 0;
+ }
+
ogg_buffer buf;
buf.data = (uint8_t *)data;
buf.size = size;
@@ -1304,8 +1322,8 @@
|| audioChannelCount <= 0 || audioChannelCount > FCC_8) {
ALOGE("Invalid haptic channel count found in metadata: %d", mHapticChannelCount);
} else {
- const audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(
- audioChannelCount) | hapticChannelMask;
+ const audio_channel_mask_t channelMask = static_cast<audio_channel_mask_t>(
+ audio_channel_out_mask_from_count(audioChannelCount) | hapticChannelMask);
AMediaFormat_setInt32(mMeta, AMEDIAFORMAT_KEY_CHANNEL_MASK, channelMask);
AMediaFormat_setInt32(
mMeta, AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT, mHapticChannelCount);
diff --git a/media/extractors/tests/AndroidTest.xml b/media/extractors/tests/AndroidTest.xml
index 6bb2c8a..fc8152c 100644
--- a/media/extractors/tests/AndroidTest.xml
+++ b/media/extractors/tests/AndroidTest.xml
@@ -19,7 +19,7 @@
<option name="cleanup" value="true" />
<option name="push" value="ExtractorUnitTest->/data/local/tmp/ExtractorUnitTest" />
<option name="push-file"
- key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip?unzip=true"
value="/data/local/tmp/ExtractorUnitTestRes/" />
</target_preparer>
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
index 3075571..d91fffa 100644
--- a/media/extractors/tests/ExtractorUnitTest.cpp
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -50,8 +50,30 @@
constexpr int32_t kRandomSeed = 700;
constexpr int32_t kUndefined = -1;
+enum inputID {
+ // audio streams
+ AAC_1,
+ AMR_NB_1,
+ AMR_WB_1,
+ FLAC_1,
+ GSM_1,
+ MIDI_1,
+ MP3_1,
+ OPUS_1,
+ VORBIS_1,
+ // video streams
+ HEVC_1,
+ HEVC_2,
+ MPEG2_PS_1,
+ MPEG2_TS_1,
+ MPEG4_1,
+ VP9_1,
+ UNKNOWN_ID,
+};
+
// LookUpTable of clips and metadata for component testing
static const struct InputData {
+ inputID inpId;
string mime;
string inputFile;
int32_t firstParam;
@@ -59,26 +81,35 @@
int32_t profile;
int32_t frameRate;
} kInputData[] = {
- {MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+ {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC,
kUndefined},
- {MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+ {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined,
kUndefined},
- {MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
- {MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288, MPEG2ProfileMain,
- 30},
- {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
- MPEG4ProfileSimple, 30},
+ {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined,
+ kUndefined},
+ {FLAC_1, MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined,
+ kUndefined},
+ {GSM_1, MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined,
+ kUndefined},
+ {MIDI_1, MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
+ {MP3_1, MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined,
+ kUndefined},
+ {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+ kUndefined},
+ {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+ kUndefined},
+
// Test (b/151677264) for MP4 extractor
- {MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240, HEVCProfileMain,
- 25},
- {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
- {MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136, MPEG2ProfileMain, 12},
+ {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240,
+ HEVCProfileMain, 25},
+ {HEVC_2, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "test3.heic", 820, 460, kUndefined, kUndefined},
+ {MPEG2_PS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136,
+ MPEG2ProfileMain, 12},
+ {MPEG2_TS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288,
+ MPEG2ProfileMain, 30},
+ {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
+ MPEG4ProfileSimple, 30},
+ {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
};
static ExtractorUnitTestEnvironment *gEnv = nullptr;
@@ -107,10 +138,23 @@
mDisableTest = false;
static const std::map<std::string, standardExtractors> mapExtractor = {
- {"aac", AAC}, {"amr", AMR}, {"mp3", MP3}, {"ogg", OGG},
- {"wav", WAV}, {"mkv", MKV}, {"flac", FLAC}, {"midi", MIDI},
- {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}, {"mp4", MPEG4},
- {"webm", MKV}, {"ts", MPEG2TS}, {"mpeg", MPEG2PS}};
+ {"aac", AAC},
+ {"amr", AMR},
+ {"flac", FLAC},
+ {"mid", MIDI},
+ {"midi", MIDI},
+ {"mkv", MKV},
+ {"mp3", MP3},
+ {"mp4", MPEG4},
+ {"mpeg2ps", MPEG2PS},
+ {"mpeg2ts", MPEG2TS},
+ {"mpeg4", MPEG4},
+ {"mpg", MPEG2PS},
+ {"ogg", OGG},
+ {"opus", OGG},
+ {"ts", MPEG2TS},
+ {"wav", WAV},
+ {"webm", MKV}};
// Find the component type
if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
mExtractorName = mapExtractor.at(writerFormat);
@@ -148,14 +192,23 @@
MediaExtractorPluginHelper *mExtractor;
};
-class ExtractorFunctionalityTest : public ExtractorUnitTest,
- public ::testing::TestWithParam<pair<string, string>> {
+class ExtractorFunctionalityTest
+ : public ExtractorUnitTest,
+ public ::testing::TestWithParam<tuple<string /* container */, string /* InputFile */,
+ int32_t /* numTracks */, bool /* seekSupported */>> {
public:
- virtual void SetUp() override { setupExtractor(GetParam().first); }
+ virtual void SetUp() override {
+ tuple<string, string, int32_t, bool> params = GetParam();
+ mContainer = get<0>(params);
+ mNumTracks = get<2>(params);
+ setupExtractor(mContainer);
+ }
+ string mContainer;
+ int32_t mNumTracks;
};
class ConfigParamTest : public ExtractorUnitTest,
- public ::testing::TestWithParam<pair<string, int32_t>> {
+ public ::testing::TestWithParam<pair<string, inputID>> {
public:
virtual void SetUp() override { setupExtractor(GetParam().first); }
@@ -169,7 +222,7 @@
int32_t frameRate;
};
- void getFileProperties(int32_t inputIdx, string &inputFile, configFormat &configParam);
+ void getFileProperties(inputID inputId, string &inputFile, configFormat &configParam);
};
int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
@@ -228,9 +281,16 @@
return 0;
}
-void ConfigParamTest::getFileProperties(int32_t inputIdx, string &inputFile,
+void ConfigParamTest::getFileProperties(inputID inputId, string &inputFile,
configFormat &configParam) {
- if (inputIdx >= sizeof(kInputData) / sizeof(kInputData[0])) {
+ int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+ int32_t inputIdx = 0;
+ for (; inputIdx < inputDataSize; inputIdx++) {
+ if (inputId == kInputData[inputIdx].inpId) {
+ break;
+ }
+ }
+ if (inputIdx == inputDataSize) {
return;
}
inputFile += kInputData[inputIdx].inputFile;
@@ -316,16 +376,17 @@
if (mDisableTest) return;
ALOGV("Checks if a valid extractor is created for a given input file");
- string inputFileName = gEnv->getRes() + GetParam().second;
+ string inputFileName = gEnv->getRes() + get<1>(GetParam());
- ASSERT_EQ(setDataSource(inputFileName), 0)
- << "SetDataSource failed for" << GetParam().first << "extractor";
+ int32_t status = setDataSource(inputFileName);
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
- ASSERT_EQ(createExtractor(), 0)
- << "Extractor creation failed for" << GetParam().first << "extractor";
+ status = createExtractor();
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
- // A valid extractor instace should return success for following calls
- ASSERT_GT(mExtractor->countTracks(), 0);
+ int32_t numTracks = mExtractor->countTracks();
+ ASSERT_EQ(numTracks, mNumTracks)
+ << "Extractor reported wrong number of track for the given clip";
AMediaFormat *format = AMediaFormat_new();
ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -337,17 +398,18 @@
TEST_P(ExtractorFunctionalityTest, ExtractorTest) {
if (mDisableTest) return;
- ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
- string inputFileName = gEnv->getRes() + GetParam().second;
+ ALOGV("Validates %s Extractor for a given input file", mContainer.c_str());
+ string inputFileName = gEnv->getRes() + get<1>(GetParam());
int32_t status = setDataSource(inputFileName);
- ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
status = createExtractor();
- ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
int32_t numTracks = mExtractor->countTracks();
- ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+ ASSERT_EQ(numTracks, mNumTracks)
+ << "Extractor reported wrong number of track for the given clip";
for (int32_t idx = 0; idx < numTracks; idx++) {
MediaTrackHelper *track = mExtractor->getTrack(idx);
@@ -388,16 +450,17 @@
if (mDisableTest) return;
ALOGV("Validates Extractor's meta data for a given input file");
- string inputFileName = gEnv->getRes() + GetParam().second;
+ string inputFileName = gEnv->getRes() + get<1>(GetParam());
int32_t status = setDataSource(inputFileName);
- ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
status = createExtractor();
- ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
int32_t numTracks = mExtractor->countTracks();
- ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+ ASSERT_EQ(numTracks, mNumTracks)
+ << "Extractor reported wrong number of track for the given clip";
AMediaFormat *extractorFormat = AMediaFormat_new();
ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -462,17 +525,18 @@
TEST_P(ExtractorFunctionalityTest, MultipleStartStopTest) {
if (mDisableTest) return;
- ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
- string inputFileName = gEnv->getRes() + GetParam().second;
+ ALOGV("Test %s extractor for multiple start and stop calls", mContainer.c_str());
+ string inputFileName = gEnv->getRes() + get<1>(GetParam());
int32_t status = setDataSource(inputFileName);
- ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
status = createExtractor();
- ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
int32_t numTracks = mExtractor->countTracks();
- ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+ ASSERT_EQ(numTracks, mNumTracks)
+ << "Extractor reported wrong number of track for the given clip";
// start/stop the tracks multiple times
for (int32_t count = 0; count < kMaxCount; count++) {
@@ -504,22 +568,25 @@
TEST_P(ExtractorFunctionalityTest, SeekTest) {
if (mDisableTest) return;
- ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
- string inputFileName = gEnv->getRes() + GetParam().second;
+ ALOGV("Validates %s Extractor behaviour for different seek modes", mContainer.c_str());
+ string inputFileName = gEnv->getRes() + get<1>(GetParam());
int32_t status = setDataSource(inputFileName);
- ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
status = createExtractor();
- ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
int32_t numTracks = mExtractor->countTracks();
- ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+ ASSERT_EQ(numTracks, mNumTracks)
+ << "Extractor reported wrong number of track for the given clip";
uint32_t seekFlag = mExtractor->flags();
- if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
- cout << "[ WARN ] Test Skipped. " << GetParam().first
- << " Extractor doesn't support seek\n";
+ bool seekSupported = get<3>(GetParam());
+ bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+ if (!seekable) {
+ ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+ cout << "[ WARN ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
return;
}
@@ -556,6 +623,19 @@
AMediaFormat_delete(trackMeta);
continue;
}
+
+ AMediaFormat *trackFormat = AMediaFormat_new();
+ ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+ status = track->getFormat(trackFormat);
+ ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+ const char *mime;
+ ASSERT_TRUE(AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime))
+ << "Failed to get mime";
+
+ // Image formats are not expected to be seekable
+ if (!strncmp(mime, "image/", 6)) continue;
+
// Request seekable points for remaining extractors which will be used to validate the seek
// accuracy for the extractors. Depending on SEEK Mode, we expect the extractors to return
// the expected sync frame. We don't prefer random seek test for these extractors because
@@ -563,17 +643,10 @@
// next/previous sync frames but not to samples between two sync frames.
getSeekablePoints(seekablePoints, track);
ASSERT_GT(seekablePoints.size(), 0)
- << "Failed to get seekable points for " << GetParam().first << " extractor";
-
- AMediaFormat *trackFormat = AMediaFormat_new();
- ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
- status = track->getFormat(trackFormat);
- ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+ << "Failed to get seekable points for " << mContainer << " extractor";
bool isOpus = false;
int64_t opusSeekPreRollUs = 0;
- const char *mime;
- AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
if (!strcmp(mime, "audio/opus")) {
isOpus = true;
void *seekPreRollBuf = nullptr;
@@ -664,23 +737,153 @@
seekablePoints.clear();
}
+// Verifies extractor behaviour when asked to seek outside the valid range (0, clipDuration):
+// a negative timestamp, the exact duration, and twice the duration. The extractor must not
+// crash; returned buffers (if any) are only logged, not validated against an expected frame.
+TEST_P(ExtractorFunctionalityTest, MonkeySeekTest) {
+    if (mDisableTest) return;
+    // TODO(b/155630778): Enable test for wav extractors
+    if (mExtractorName == WAV) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid seek points", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
+
+    // Cross-check the extractor's advertised CAN_SEEK flag against the seek-support
+    // expectation carried in the test parameters; skip (rather than fail) inputs whose
+    // extractor legitimately does not support seeking.
+    uint32_t seekFlag = mExtractor->flags();
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << "Extractor is expected to support seek ";
+        cout << "[ WARN ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
+        return;
+    }
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        AMediaFormat *trackMeta = AMediaFormat_new();
+        ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                trackMeta, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+        const char *mime;
+        ASSERT_TRUE(AMediaFormat_getString(trackMeta, AMEDIAFORMAT_KEY_MIME, &mime))
+                << "Failed to get mime";
+
+        int64_t clipDuration = 0;
+        AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+        // Image formats are not expected to have duration information
+        ASSERT_TRUE(clipDuration > 0 || !strncmp(mime, "image/", 6)) << "Invalid clip duration ";
+        AMediaFormat_delete(trackMeta);
+
+        // Seek targets deliberately at or beyond the edges of (0, clipDuration); only
+        // clipDuration / 2 is an in-range control point.
+        int64_t seekToTimeStampUs[] = {-clipDuration, clipDuration / 2, clipDuration,
+                                       clipDuration * 2};
+        // Exercise every seek mode from SEEK_PREVIOUS_SYNC through SEEK_CLOSEST.
+        for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+             mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+            for (int64_t seekTimeUs : seekToTimeStampUs) {
+                MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                        mode | CMediaTrackReadOptions::SEEK, seekTimeUs);
+                ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+                MediaBufferHelper *buffer = nullptr;
+                status = track->read(&buffer, options);
+                // Apart from end-of-stream, the read status is intentionally not asserted:
+                // what an extractor returns for an out-of-range seek is extractor specific.
+                if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                    delete options;
+                    continue;
+                }
+                if (buffer) {
+                    AMediaFormat *metaData = buffer->meta_data();
+                    int64_t timeStamp;
+                    AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                    ALOGV("Seeked to timestamp : %lld, requested : %lld", (long long)timeStamp,
+                          (long long)seekTimeUs);
+                    buffer->release();
+                }
+                delete options;
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+}
+
+// Tests extractors for invalid tracks: out-of-range track indices must yield a null
+// track and a getTrackMetaData() failure, and a null output format must be rejected.
+TEST_P(ExtractorFunctionalityTest, SanityTest) {
+    if (mDisableTest) return;
+    // TODO(b/155626946): Enable test for MPEG2 TS/PS extractors
+    if (mExtractorName == MPEG2TS || mExtractorName == MPEG2PS) return;
+
+    ALOGV("Validates %s Extractor behaviour for invalid tracks", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << mContainer << " extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << mContainer << " extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
+
+    // Probe one index on either side of the valid range [0, numTracks).
+    int32_t trackIdx[] = {-1, numTracks};
+    for (int32_t idx : trackIdx) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        // This failure message fires when getTrack unexpectedly SUCCEEDS for a bad index.
+        ASSERT_EQ(track, nullptr) << "getTrack should return null for invalid index " << idx;
+
+        AMediaFormat *extractorFormat = AMediaFormat_new();
+        ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                extractorFormat, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_NE(OK, status) << "getTrackMetaData should return error for invalid index " << idx;
+        AMediaFormat_delete(extractorFormat);
+    }
+
+    // Validate Extractor's getTrackMetaData for null format
+    AMediaFormat *mediaFormat = nullptr;
+    status = mExtractor->getTrackMetaData(mediaFormat, 0,
+                                          MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+    ASSERT_NE(OK, status) << "getTrackMetaData should return error for null Media format";
+}
+
// This test validates config params for a given input file.
// For this test we only take single track files since the focus of this test is
// to validate the file properties reported by Extractor and not multi-track behavior
TEST_P(ConfigParamTest, ConfigParamValidation) {
if (mDisableTest) return;
- ALOGV("Validates %s Extractor for input's file properties", GetParam().first.c_str());
+ string container = GetParam().first;
+ ALOGV("Validates %s Extractor for input's file properties", container.c_str());
string inputFileName = gEnv->getRes();
- int32_t inputFileIdx = GetParam().second;
+ inputID inputFileId = GetParam().second;
configFormat configParam;
- getFileProperties(inputFileIdx, inputFileName, configParam);
+ getFileProperties(inputFileId, inputFileName, configParam);
int32_t status = setDataSource(inputFileName);
- ASSERT_EQ(status, 0) << "SetDataSource failed for " << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "SetDataSource failed for " << container << "extractor";
status = createExtractor();
- ASSERT_EQ(status, 0) << "Extractor creation failed for " << GetParam().first << "extractor";
+ ASSERT_EQ(status, 0) << "Extractor creation failed for " << container << "extractor";
int32_t numTracks = mExtractor->countTracks();
ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
@@ -726,6 +929,9 @@
if (configParam.profile != kUndefined) {
if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_PROFILE, &profile)) {
ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
+ } else if (mExtractorName == AAC &&
+ AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_AAC_PROFILE, &profile)) {
+ ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
} else {
ASSERT_TRUE(false) << "profile not returned in extractor";
}
@@ -747,36 +953,55 @@
}
}
- virtual void SetUp() override {
- string input0 = GetParam().first;
- string input1 = GetParam().second;
-
- // Allocate memory to hold extracted data for both extractors
- struct stat buf;
- int32_t status = stat((gEnv->getRes() + input0).c_str(), &buf);
- ASSERT_EQ(status, 0) << "Unable to get file properties";
-
- // allocating the buffer size as 2x since some
- // extractors like flac, midi and wav decodes the file.
- mExtractorOutput[0] = (int8_t *)calloc(1, buf.st_size * 2);
- ASSERT_NE(mExtractorOutput[0], nullptr)
- << "Unable to allocate memory for writing extractor's output";
- mExtractorOuputSize[0] = buf.st_size * 2;
-
- status = stat((gEnv->getRes() + input1).c_str(), &buf);
- ASSERT_EQ(status, 0) << "Unable to get file properties";
-
- // allocate buffer for extractor output, 2x input file size.
- mExtractorOutput[1] = (int8_t *)calloc(1, buf.st_size * 2);
- ASSERT_NE(mExtractorOutput[1], nullptr)
- << "Unable to allocate memory for writing extractor's output";
- mExtractorOuputSize[1] = buf.st_size * 2;
- }
-
int8_t *mExtractorOutput[2]{};
size_t mExtractorOuputSize[2]{};
};
+// Computes a conservative byte size for the buffer that will hold everything a track
+// extracts from |inputFileName|, given the track's |extractorFormat|.
+// Returns 0 when a required format field is missing/invalid or the file is unreadable.
+size_t allocateOutputBuffers(const string &inputFileName, AMediaFormat *extractorFormat) {
+    size_t bufferSize = 0u;
+    // allocating the buffer size as sampleRate * channelCount * clipDuration since
+    // some extractors like flac, midi and wav decodes the file. These extractors
+    // advertise the mime type as raw.
+    const char *mime = nullptr;
+    // Bail out if the mime is absent instead of passing an uninitialized pointer
+    // to strcmp() below (the original code never checked this lookup).
+    if (!AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &mime) || !mime) {
+        ALOGE("Failed to get mime for input file : %s", inputFileName.c_str());
+        return 0;
+    }
+    if (!strcmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        int64_t clipDurationUs = -1;
+        int32_t channelCount = -1;
+        int32_t sampleRate = -1;
+        int32_t bitsPerSample = -1;
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                   &channelCount) || channelCount <= 0) {
+            ALOGE("Invalid channelCount for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate) ||
+            sampleRate <= 0) {
+            ALOGE("Invalid sampleRate for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt64(extractorFormat, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs) ||
+            clipDurationUs <= 0) {
+            ALOGE("Invalid clip duration for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_PCM_ENCODING,
+                                   &bitsPerSample) || bitsPerSample <= 0) {
+            ALOGE("Invalid bits per sample for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        // NOTE(review): assumes AMEDIAFORMAT_KEY_PCM_ENCODING yields a bits-per-sample
+        // count — confirm it is not an encoding enum on some releases. When it is a bit
+        // count, the product below is in bits, which only over-sizes the buffer. The
+        // size_t cast avoids int32 overflow on long clips.
+        bufferSize = (size_t)bitsPerSample * channelCount * sampleRate *
+                     (clipDurationUs / 1000000 + 1);
+    } else {
+        // Compressed tracks never out-size their source file, so the file size is a
+        // safe upper bound.
+        struct stat buf;
+        int32_t status = stat(inputFileName.c_str(), &buf);
+        if (status != 0) {
+            ALOGE("Unable to get file properties for: %s", inputFileName.c_str());
+            return 0;
+        }
+        bufferSize = buf.st_size;
+    }
+    return bufferSize;
+}
+
// Compare output of two extractors for identical content
TEST_P(ExtractorComparison, ExtractorComparisonTest) {
vector<string> inputFileNames = {GetParam().first, GetParam().second};
@@ -818,6 +1043,13 @@
CMediaTrack *cTrack = wrap(track);
ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << trackIdx;
+ mExtractorOuputSize[idx] = allocateOutputBuffers(inputFileName, extractorFormat[idx]);
+ ASSERT_GT(mExtractorOuputSize[idx], 0u) << " Invalid size for output buffers";
+
+ mExtractorOutput[idx] = (int8_t *)calloc(1, mExtractorOuputSize[idx]);
+ ASSERT_NE(mExtractorOutput[idx], nullptr)
+ << "Unable to allocate memory for writing extractor's output";
+
MediaBufferGroup *bufferGroup = new MediaBufferGroup();
status = cTrack->start(track, bufferGroup->wrap());
ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
@@ -894,48 +1126,128 @@
<< inputFileNames[1] << " extractors";
}
-INSTANTIATE_TEST_SUITE_P(ExtractorComparisonAll, ExtractorComparison,
- ::testing::Values(make_pair("swirl_144x136_vp9.mp4",
- "swirl_144x136_vp9.webm"),
- make_pair("video_480x360_mp4_vp9_333kbps_25fps.mp4",
- "video_480x360_webm_vp9_333kbps_25fps.webm"),
- make_pair("video_1280x720_av1_hdr_static_3mbps.mp4",
- "video_1280x720_av1_hdr_static_3mbps.webm"),
- make_pair("loudsoftaac.aac", "loudsoftaac.mkv")));
+INSTANTIATE_TEST_SUITE_P(
+ ExtractorComparisonAll, ExtractorComparison,
+ ::testing::Values(make_pair("swirl_144x136_vp9.mp4", "swirl_144x136_vp9.webm"),
+ make_pair("video_480x360_mp4_vp9_333kbps_25fps.mp4",
+ "video_480x360_webm_vp9_333kbps_25fps.webm"),
+ make_pair("video_1280x720_av1_hdr_static_3mbps.mp4",
+ "video_1280x720_av1_hdr_static_3mbps.webm"),
+ make_pair("swirl_132x130_mpeg4.3gp", "swirl_132x130_mpeg4.mkv"),
+ make_pair("swirl_144x136_avc.mkv", "swirl_144x136_avc.mp4"),
+ make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mkv"),
+ make_pair("crowd_508x240_25fps_hevc.mp4","crowd_508x240_25fps_hevc.mkv"),
+ make_pair("bbb_cif_768kbps_30fps_mpeg2.mp4",
+ "bbb_cif_768kbps_30fps_mpeg2.ts"),
+
+ make_pair("loudsoftaac.aac", "loudsoftaac.mkv"),
+ make_pair("sinesweepflacmkv.mkv", "sinesweepflacmp4.mp4"),
+ make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mkv"),
+ make_pair("sinesweepoggmp4.mp4", "sinesweepogg.ogg"),
+ make_pair("sinesweepvorbis.mp4", "sinesweepvorbis.ogg"),
+ make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.ogg"),
+ make_pair("testopus.mkv", "testopus.mp4"),
+ make_pair("testopus.mp4", "testopus.opus"),
+
+ make_pair("loudsoftaac.aac", "loudsoftaac.aac"),
+ make_pair("testamr.amr", "testamr.amr"),
+ make_pair("sinesweepflac.flac", "sinesweepflac.flac"),
+ make_pair("midi_a.mid", "midi_a.mid"),
+ make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.mkv"),
+ make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mp3"),
+ make_pair("sinesweepoggmp4.mp4", "sinesweepoggmp4.mp4"),
+ make_pair("testopus.opus", "testopus.opus"),
+ make_pair("john_cage.ogg", "john_cage.ogg"),
+ make_pair("monotestgsm.wav", "monotestgsm.wav"),
+
+ make_pair("swirl_144x136_mpeg2.mpg", "swirl_144x136_mpeg2.mpg"),
+ make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mp4"),
+ make_pair("swirl_144x136_vp9.webm", "swirl_144x136_vp9.webm"),
+ make_pair("swirl_144x136_vp8.webm", "swirl_144x136_vp8.webm")));
INSTANTIATE_TEST_SUITE_P(ConfigParamTestAll, ConfigParamTest,
- ::testing::Values(make_pair("aac", 0),
- make_pair("amr", 1),
- make_pair("amr", 2),
- make_pair("ogg", 3),
- make_pair("wav", 4),
- make_pair("flac", 5),
- make_pair("ogg", 6),
- make_pair("mp3", 7),
- make_pair("midi", 8),
- make_pair("mpeg2ts", 9),
- make_pair("mkv", 10),
- make_pair("mpeg4", 11),
- make_pair("mkv", 12),
- make_pair("mpeg2ps", 13)));
+ ::testing::Values(make_pair("aac", AAC_1),
+ make_pair("amr", AMR_NB_1),
+ make_pair("amr", AMR_WB_1),
+ make_pair("flac", FLAC_1),
+ make_pair("wav", GSM_1),
+ make_pair("midi", MIDI_1),
+ make_pair("mp3", MP3_1),
+ make_pair("ogg", OPUS_1),
+ make_pair("ogg", VORBIS_1),
-INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorFunctionalityTest,
- ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
- make_pair("amr", "testamr.amr"),
- make_pair("amr", "amrwb.wav"),
- make_pair("ogg", "john_cage.ogg"),
- make_pair("wav", "monotestgsm.wav"),
- make_pair("mpeg2ts", "segment000001.ts"),
- make_pair("flac", "sinesweepflac.flac"),
- make_pair("ogg", "testopus.opus"),
- make_pair("midi", "midi_a.mid"),
- make_pair("mkv", "sinesweepvorbis.mkv"),
- make_pair("mpeg4", "sinesweepoggmp4.mp4"),
- make_pair("mp3", "sinesweepmp3lame.mp3"),
- make_pair("mkv", "swirl_144x136_vp9.webm"),
- make_pair("mkv", "swirl_144x136_vp8.webm"),
- make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
- make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+ make_pair("mpeg4", HEVC_1),
+ make_pair("mpeg4", HEVC_2),
+ make_pair("mpeg2ps", MPEG2_PS_1),
+ make_pair("mpeg2ts", MPEG2_TS_1),
+ make_pair("mkv", MPEG4_1),
+ make_pair("mkv", VP9_1)));
+
+// Validates each extractor against its container format, input file, expected track count,
+// and whether seeking is expected to be supported.
+INSTANTIATE_TEST_SUITE_P(
+ ExtractorUnitTestAll, ExtractorFunctionalityTest,
+ ::testing::Values(
+ make_tuple("aac", "loudsoftaac.aac", 1, true),
+ make_tuple("amr", "testamr.amr", 1, true),
+ make_tuple("amr", "amrwb.wav", 1, true),
+ make_tuple("flac", "sinesweepflac.flac", 1, true),
+ make_tuple("midi", "midi_a.mid", 1, true),
+ make_tuple("mkv", "sinesweepvorbis.mkv", 1, true),
+ make_tuple("mkv", "sinesweepmp3lame.mkv", 1, true),
+ make_tuple("mkv", "loudsoftaac.mkv", 1, true),
+ make_tuple("mp3", "sinesweepmp3lame.mp3", 1, true),
+ make_tuple("mp3", "id3test10.mp3", 1, true),
+ make_tuple("mpeg2ts", "segment000001.ts", 2, false),
+ make_tuple("mpeg2ts", "testac3ts.ts", 1, false),
+ make_tuple("mpeg2ts", "testac4ts.ts", 1, false),
+ make_tuple("mpeg2ts", "testeac3ts.ts", 1, false),
+ make_tuple("mpeg4", "audio_aac_mono_70kbs_44100hz.mp4", 2, true),
+ make_tuple("mpeg4", "multi0_ac4.mp4", 1, true),
+ make_tuple("mpeg4", "noise_6ch_44khz_aot5_dr_sbr_sig2_mp4.m4a", 1, true),
+ make_tuple("mpeg4", "sinesweepalac.mov", 1, true),
+ make_tuple("mpeg4", "sinesweepflacmp4.mp4", 1, true),
+ make_tuple("mpeg4", "sinesweepm4a.m4a", 1, true),
+ make_tuple("mpeg4", "sinesweepoggmp4.mp4", 1, true),
+ make_tuple("mpeg4", "sinesweepopusmp4.mp4", 1, true),
+ make_tuple("mpeg4", "testac3mp4.mp4", 1, true),
+ make_tuple("mpeg4", "testeac3mp4.mp4", 1, true),
+ make_tuple("ogg", "john_cage.ogg", 1, true),
+ make_tuple("ogg", "testopus.opus", 1, true),
+ make_tuple("ogg", "sinesweepoggalbumart.ogg", 1, true),
+ make_tuple("wav", "loudsoftwav.wav", 1, true),
+ make_tuple("wav", "monotestgsm.wav", 1, true),
+ make_tuple("wav", "noise_5ch_44khz_aot2_wave.wav", 1, true),
+ make_tuple("wav", "sine1khzm40db_alaw.wav", 1, true),
+ make_tuple("wav", "sine1khzm40db_f32le.wav", 1, true),
+ make_tuple("wav", "sine1khzm40db_mulaw.wav", 1, true),
+
+ make_tuple("mkv", "swirl_144x136_avc.mkv", 1, true),
+ make_tuple("mkv", "withoutcues.mkv", 2, true),
+ make_tuple("mkv", "swirl_144x136_vp9.webm", 1, true),
+ make_tuple("mkv", "swirl_144x136_vp8.webm", 1, true),
+ make_tuple("mpeg2ps", "swirl_144x136_mpeg2.mpg", 1, false),
+ make_tuple("mpeg2ps", "programstream.mpeg", 2, false),
+ make_tuple("mpeg4", "color_176x144_bt601_525_lr_sdr_h264.mp4", 1, true),
+ make_tuple("mpeg4", "heifwriter_input.heic", 4, false),
+ make_tuple("mpeg4", "psshtest.mp4", 1, true),
+ make_tuple("mpeg4", "swirl_132x130_mpeg4.mp4", 1, true),
+ make_tuple("mpeg4", "testvideo.3gp", 4, true),
+ make_tuple("mpeg4", "testvideo_with_2_timedtext_tracks.3gp", 4, true),
+ make_tuple("mpeg4",
+ "video_176x144_3gp_h263_300kbps_25fps_aac_stereo_128kbps_11025hz_"
+ "metadata_gyro_compliant.3gp",
+ 3, true),
+ make_tuple(
+ "mpeg4",
+ "video_1920x1080_mp4_mpeg2_12000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+ 2, true),
+ make_tuple("mpeg4",
+ "video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 2,
+ true),
+ make_tuple(
+ "mpeg4",
+ "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4",
+ 2, true)));
int main(int argc, char **argv) {
gEnv = new ExtractorUnitTestEnvironment();
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
index 69538b6..cff09ca 100644
--- a/media/extractors/tests/README.md
+++ b/media/extractors/tests/README.md
@@ -22,7 +22,7 @@
adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
```
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip). Download, unzip and push these files into device for testing.
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip). Download the archive, unzip it, and push the files to the device for testing.
```
adb push extractor /data/local/tmp/
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 5d38a81..85d4cce 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -19,4 +19,11 @@
"libfifo",
"libstagefright_foundation",
],
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/extractors/wav/WAVExtractor.cpp b/media/extractors/wav/WAVExtractor.cpp
index d19447a..901b29d 100644
--- a/media/extractors/wav/WAVExtractor.cpp
+++ b/media/extractors/wav/WAVExtractor.cpp
@@ -95,9 +95,9 @@
AMediaFormat *mMeta;
uint16_t mWaveFormat;
const bool mOutputFloat;
- int32_t mSampleRate;
- int32_t mNumChannels;
- int32_t mBitsPerSample;
+ uint32_t mSampleRate;
+ uint32_t mNumChannels;
+ uint32_t mBitsPerSample;
off64_t mOffset;
size_t mSize;
bool mStarted;
@@ -379,9 +379,9 @@
mOffset(offset),
mSize(size),
mStarted(false) {
- CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &mSampleRate));
- CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &mNumChannels));
- CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, &mBitsPerSample));
+ CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_SAMPLE_RATE, (int32_t*) &mSampleRate));
+ CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, (int32_t*) &mNumChannels));
+ CHECK(AMediaFormat_getInt32(mMeta, AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, (int32_t*) &mBitsPerSample));
}
WAVSource::~WAVSource() {
@@ -472,7 +472,7 @@
}
const size_t maxBytesAvailable =
- (mCurrentPos - mOffset >= (off64_t)mSize)
+ (mCurrentPos < mOffset || mCurrentPos - mOffset >= (off64_t)mSize)
? 0 : mSize - (mCurrentPos - mOffset);
if (maxBytesToRead > maxBytesAvailable) {
diff --git a/media/janitors/OWNERS-codecs b/media/janitors/OWNERS-codecs
new file mode 100644
index 0000000..e201399
--- /dev/null
+++ b/media/janitors/OWNERS-codecs
@@ -0,0 +1,5 @@
+# gerrit owner/approvers for the actual software codec libraries
+# differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
+essick@google.com
+lajos@google.com
+marcone@google.com
diff --git a/media/janitors/README b/media/janitors/README
new file mode 100644
index 0000000..9db8e0e
--- /dev/null
+++ b/media/janitors/README
@@ -0,0 +1,4 @@
+A collection of OWNERS files that we reference from other projects,
+such as the software codecs in directories like external/libavc.
+This is to simplify our owner/approver management across the multiple
+projects related to media.
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 140052f..7796ed5 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -32,5 +32,6 @@
cc_library_headers {
name: "libaaudio_headers",
export_include_dirs: ["include"],
+ export_shared_lib_headers: ["aaudio-aidl-cpp"],
+ shared_libs: ["aaudio-aidl-cpp"],
}
-
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index 5b7d956..4de632f 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -4,9 +4,11 @@
srcs: ["src/loopback.cpp"],
cflags: ["-Wall", "-Werror"],
static_libs: ["libsndfile"],
+ include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
shared_libs: [
"libaaudio",
"libaudioutils",
+ "liblog"
],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
deleted file mode 100644
index 04435d1..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
+++ /dev/null
@@ -1,445 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_GLITCH_ANALYZER_H
-#define ANALYZER_GLITCH_ANALYZER_H
-
-#include <algorithm>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-
-#include "LatencyAnalyzer.h"
-#include "PseudoRandom.h"
-
-/**
- * Output a steady sine wave and analyze the return signal.
- *
- * Use a cosine transform to measure the predicted magnitude and relative phase of the
- * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
- */
-class GlitchAnalyzer : public LoopbackProcessor {
-public:
-
- int32_t getState() const {
- return mState;
- }
-
- double getPeakAmplitude() const {
- return mPeakFollower.getLevel();
- }
-
- double getTolerance() {
- return mTolerance;
- }
-
- void setTolerance(double tolerance) {
- mTolerance = tolerance;
- mScaledTolerance = mMagnitude * mTolerance;
- }
-
- void setMagnitude(double magnitude) {
- mMagnitude = magnitude;
- mScaledTolerance = mMagnitude * mTolerance;
- }
-
- int32_t getGlitchCount() const {
- return mGlitchCount;
- }
-
- int32_t getStateFrameCount(int state) const {
- return mStateFrameCounters[state];
- }
-
- double getSignalToNoiseDB() {
- static const double threshold = 1.0e-14;
- if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
- return 0.0;
- } else {
- double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
- double signalToNoiseDB = 10.0 * log(signalToNoise);
- if (signalToNoiseDB < MIN_SNR_DB) {
- ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
- MIN_SNR_DB);
- setResult(ERROR_VOLUME_TOO_LOW);
- }
- return signalToNoiseDB;
- }
- }
-
- std::string analyze() override {
- std::stringstream report;
- report << "GlitchAnalyzer ------------------\n";
- report << LOOPBACK_RESULT_TAG "peak.amplitude = " << std::setw(8)
- << getPeakAmplitude() << "\n";
- report << LOOPBACK_RESULT_TAG "sine.magnitude = " << std::setw(8)
- << mMagnitude << "\n";
- report << LOOPBACK_RESULT_TAG "rms.noise = " << std::setw(8)
- << mMeanSquareNoise << "\n";
- report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
- << getSignalToNoiseDB() << "\n";
- report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
- << mFramesAccumulated << "\n";
- report << LOOPBACK_RESULT_TAG "sine.period = " << std::setw(8)
- << mSinePeriod << "\n";
- report << LOOPBACK_RESULT_TAG "test.state = " << std::setw(8)
- << mState << "\n";
- report << LOOPBACK_RESULT_TAG "frame.count = " << std::setw(8)
- << mFrameCounter << "\n";
- // Did we ever get a lock?
- bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
- if (!gotLock) {
- report << "ERROR - failed to lock on reference sine tone.\n";
- setResult(ERROR_NO_LOCK);
- } else {
- // Only print if meaningful.
- report << LOOPBACK_RESULT_TAG "glitch.count = " << std::setw(8)
- << mGlitchCount << "\n";
- report << LOOPBACK_RESULT_TAG "max.glitch = " << std::setw(8)
- << mMaxGlitchDelta << "\n";
- if (mGlitchCount > 0) {
- report << "ERROR - number of glitches > 0\n";
- setResult(ERROR_GLITCHES);
- }
- }
- return report.str();
- }
-
- void printStatus() override {
- ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
- }
- /**
- * Calculate the magnitude of the component of the input signal
- * that matches the analysis frequency.
- * Also calculate the phase that we can use to create a
- * signal that matches that component.
- * The phase will be between -PI and +PI.
- */
- double calculateMagnitude(double *phasePtr = nullptr) {
- if (mFramesAccumulated == 0) {
- return 0.0;
- }
- double sinMean = mSinAccumulator / mFramesAccumulated;
- double cosMean = mCosAccumulator / mFramesAccumulated;
- double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
- if (phasePtr != nullptr) {
- double phase = M_PI_2 - atan2(sinMean, cosMean);
- *phasePtr = phase;
- }
- return magnitude;
- }
-
- /**
- * @param frameData contains microphone data with sine signal feedback
- * @param channelCount
- */
- result_code processInputFrame(float *frameData, int /* channelCount */) override {
- result_code result = RESULT_OK;
-
- float sample = frameData[0];
- float peak = mPeakFollower.process(sample);
-
- // Force a periodic glitch to test the detector!
- if (mForceGlitchDuration > 0) {
- if (mForceGlitchCounter == 0) {
- ALOGE("%s: force a glitch!!", __func__);
- mForceGlitchCounter = getSampleRate();
- } else if (mForceGlitchCounter <= mForceGlitchDuration) {
- // Force an abrupt offset.
- sample += (sample > 0.0) ? -0.5f : 0.5f;
- }
- --mForceGlitchCounter;
- }
-
- mStateFrameCounters[mState]++; // count how many frames we are in each state
-
- switch (mState) {
- case STATE_IDLE:
- mDownCounter--;
- if (mDownCounter <= 0) {
- mState = STATE_IMMUNE;
- mDownCounter = IMMUNE_FRAME_COUNT;
- mInputPhase = 0.0; // prevent spike at start
- mOutputPhase = 0.0;
- }
- break;
-
- case STATE_IMMUNE:
- mDownCounter--;
- if (mDownCounter <= 0) {
- mState = STATE_WAITING_FOR_SIGNAL;
- }
- break;
-
- case STATE_WAITING_FOR_SIGNAL:
- if (peak > mThreshold) {
- mState = STATE_WAITING_FOR_LOCK;
- //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
- resetAccumulator();
- }
- break;
-
- case STATE_WAITING_FOR_LOCK:
- mSinAccumulator += sample * sinf(mInputPhase);
- mCosAccumulator += sample * cosf(mInputPhase);
- mFramesAccumulated++;
- // Must be a multiple of the period or the calculation will not be accurate.
- if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
- double phaseOffset = 0.0;
- setMagnitude(calculateMagnitude(&phaseOffset));
-// ALOGD("%s() mag = %f, offset = %f, prev = %f",
-// __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
- if (mMagnitude > mThreshold) {
- if (abs(phaseOffset) < kMaxPhaseError) {
- mState = STATE_LOCKED;
-// ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
- }
- // Adjust mInputPhase to match measured phase
- mInputPhase += phaseOffset;
- }
- resetAccumulator();
- }
- incrementInputPhase();
- break;
-
- case STATE_LOCKED: {
- // Predict next sine value
- double predicted = sinf(mInputPhase) * mMagnitude;
- double diff = predicted - sample;
- double absDiff = fabs(diff);
- mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
- if (absDiff > mScaledTolerance) {
- result = ERROR_GLITCHES;
- onGlitchStart();
-// LOGI("diff glitch detected, absDiff = %g", absDiff);
- } else {
- mSumSquareSignal += predicted * predicted;
- mSumSquareNoise += diff * diff;
- // Track incoming signal and slowly adjust magnitude to account
- // for drift in the DRC or AGC.
- mSinAccumulator += sample * sinf(mInputPhase);
- mCosAccumulator += sample * cosf(mInputPhase);
- mFramesAccumulated++;
- // Must be a multiple of the period or the calculation will not be accurate.
- if (mFramesAccumulated == mSinePeriod) {
- const double coefficient = 0.1;
- double phaseOffset = 0.0;
- double magnitude = calculateMagnitude(&phaseOffset);
- // One pole averaging filter.
- setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
-
- mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
- mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
- resetAccumulator();
-
- if (abs(phaseOffset) > kMaxPhaseError) {
- result = ERROR_GLITCHES;
- onGlitchStart();
- ALOGD("phase glitch detected, phaseOffset = %g", phaseOffset);
- } else if (mMagnitude < mThreshold) {
- result = ERROR_GLITCHES;
- onGlitchStart();
- ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
- }
- }
- }
- incrementInputPhase();
- } break;
-
- case STATE_GLITCHING: {
- // Predict next sine value
- mGlitchLength++;
- double predicted = sinf(mInputPhase) * mMagnitude;
- double diff = predicted - sample;
- double absDiff = fabs(diff);
- mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
- if (absDiff < mScaledTolerance) { // close enough?
- // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
- // We don't want to just consider a zero crossing the end of a glitch.
- if (mNonGlitchCount++ > mSinePeriod) {
- onGlitchEnd();
- }
- } else {
- mNonGlitchCount = 0;
- if (mGlitchLength > (4 * mSinePeriod)) {
- relock();
- }
- }
- incrementInputPhase();
- } break;
-
- case NUM_STATES: // not a real state
- break;
- }
-
- mFrameCounter++;
-
- return result;
- }
-
- // advance and wrap phase
- void incrementInputPhase() {
- mInputPhase += mPhaseIncrement;
- if (mInputPhase > M_PI) {
- mInputPhase -= (2.0 * M_PI);
- }
- }
-
- // advance and wrap phase
- void incrementOutputPhase() {
- mOutputPhase += mPhaseIncrement;
- if (mOutputPhase > M_PI) {
- mOutputPhase -= (2.0 * M_PI);
- }
- }
-
- /**
- * @param frameData upon return, contains the reference sine wave
- * @param channelCount
- */
- result_code processOutputFrame(float *frameData, int channelCount) override {
- float output = 0.0f;
- // Output sine wave so we can measure it.
- if (mState != STATE_IDLE) {
- float sinOut = sinf(mOutputPhase);
- incrementOutputPhase();
- output = (sinOut * mOutputAmplitude)
- + (mWhiteNoise.nextRandomDouble() * kNoiseAmplitude);
- // ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut, mPhaseIncrement);
- }
- frameData[0] = output;
- for (int i = 1; i < channelCount; i++) {
- frameData[i] = 0.0f;
- }
- return RESULT_OK;
- }
-
- void onGlitchStart() {
- mGlitchCount++;
-// ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
- mState = STATE_GLITCHING;
- mGlitchLength = 1;
- mNonGlitchCount = 0;
- }
-
- void onGlitchEnd() {
-// ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
- mState = STATE_LOCKED;
- resetAccumulator();
- }
-
- // reset the sine wave detector
- void resetAccumulator() {
- mFramesAccumulated = 0;
- mSinAccumulator = 0.0;
- mCosAccumulator = 0.0;
- mSumSquareSignal = 0.0;
- mSumSquareNoise = 0.0;
- }
-
- void relock() {
-// ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
- mState = STATE_WAITING_FOR_LOCK;
- resetAccumulator();
- }
-
- void reset() override {
- LoopbackProcessor::reset();
- mState = STATE_IDLE;
- mDownCounter = IDLE_FRAME_COUNT;
- resetAccumulator();
- }
-
- void prepareToTest() override {
- LoopbackProcessor::prepareToTest();
- mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
- mOutputPhase = 0.0f;
- mInverseSinePeriod = 1.0 / mSinePeriod;
- mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
- mGlitchCount = 0;
- mMaxGlitchDelta = 0.0;
- for (int i = 0; i < NUM_STATES; i++) {
- mStateFrameCounters[i] = 0;
- }
- }
-
-private:
-
- // These must match the values in GlitchActivity.java
- enum sine_state_t {
- STATE_IDLE, // beginning
- STATE_IMMUNE, // ignoring input, waiting fo HW to settle
- STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
- STATE_WAITING_FOR_LOCK, // trying to lock onto the phase of the sine
- STATE_LOCKED, // locked on the sine wave, looking for glitches
- STATE_GLITCHING, // locked on the sine wave but glitching
- NUM_STATES
- };
-
- enum constants {
- // Arbitrary durations, assuming 48000 Hz
- IDLE_FRAME_COUNT = 48 * 100,
- IMMUNE_FRAME_COUNT = 48 * 100,
- PERIODS_NEEDED_FOR_LOCK = 8,
- MIN_SNR_DB = 65
- };
-
- static constexpr float kNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
- static constexpr int kTargetGlitchFrequency = 607;
- static constexpr double kMaxPhaseError = M_PI * 0.05;
-
- float mTolerance = 0.10; // scaled from 0.0 to 1.0
- double mThreshold = 0.005;
- int mSinePeriod = 1; // this will be set before use
- double mInverseSinePeriod = 1.0;
-
- int32_t mStateFrameCounters[NUM_STATES];
-
- double mPhaseIncrement = 0.0;
- double mInputPhase = 0.0;
- double mOutputPhase = 0.0;
- double mMagnitude = 0.0;
- int32_t mFramesAccumulated = 0;
- double mSinAccumulator = 0.0;
- double mCosAccumulator = 0.0;
- double mMaxGlitchDelta = 0.0;
- int32_t mGlitchCount = 0;
- int32_t mNonGlitchCount = 0;
- int32_t mGlitchLength = 0;
- // This is used for processing every frame so we cache it here.
- double mScaledTolerance = 0.0;
- int mDownCounter = IDLE_FRAME_COUNT;
- int32_t mFrameCounter = 0;
- double mOutputAmplitude = 0.75;
-
- int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
- int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero
-
- // measure background noise continuously as a deviation from the expected signal
- double mSumSquareSignal = 0.0;
- double mSumSquareNoise = 0.0;
- double mMeanSquareSignal = 0.0;
- double mMeanSquareNoise = 0.0;
-
- PeakDetector mPeakFollower;
-
- PseudoRandom mWhiteNoise;
-
- sine_state_t mState = STATE_IDLE;
-};
-
-
-#endif //ANALYZER_GLITCH_ANALYZER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
deleted file mode 100644
index e506791..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
+++ /dev/null
@@ -1,606 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Tools for measuring latency and for detecting glitches.
- * These classes are pure math and can be used with any audio system.
- */
-
-#ifndef ANALYZER_LATENCY_ANALYZER_H
-#define ANALYZER_LATENCY_ANALYZER_H
-
-#include <algorithm>
-#include <assert.h>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-#include <math.h>
-#include <memory>
-#include <sstream>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <vector>
-
-#include "PeakDetector.h"
-#include "PseudoRandom.h"
-#include "RandomPulseGenerator.h"
-
-// This is used when the code is in Oboe.
-#ifndef ALOGD
-#define ALOGD printf
-#define ALOGE printf
-#define ALOGW printf
-#endif
-
-#define LOOPBACK_RESULT_TAG "RESULT: "
-
-static constexpr int32_t kDefaultSampleRate = 48000;
-static constexpr int32_t kMillisPerSecond = 1000;
-static constexpr int32_t kMaxLatencyMillis = 700; // arbitrary and generous
-static constexpr double kMinimumConfidence = 0.2;
-
-struct LatencyReport {
- int32_t latencyInFrames = 0.0;
- double confidence = 0.0;
-
- void reset() {
- latencyInFrames = 0;
- confidence = 0.0;
- }
-};
-
-// Calculate a normalized cross correlation.
-static double calculateNormalizedCorrelation(const float *a,
- const float *b,
- int windowSize) {
- double correlation = 0.0;
- double sumProducts = 0.0;
- double sumSquares = 0.0;
-
- // Correlate a against b.
- for (int i = 0; i < windowSize; i++) {
- float s1 = a[i];
- float s2 = b[i];
- // Use a normalized cross-correlation.
- sumProducts += s1 * s2;
- sumSquares += ((s1 * s1) + (s2 * s2));
- }
-
- if (sumSquares >= 1.0e-9) {
- correlation = 2.0 * sumProducts / sumSquares;
- }
- return correlation;
-}
-
-static double calculateRootMeanSquare(float *data, int32_t numSamples) {
- double sum = 0.0;
- for (int32_t i = 0; i < numSamples; i++) {
- float sample = data[i];
- sum += sample * sample;
- }
- return sqrt(sum / numSamples);
-}
-
-/**
- * Monophonic recording with processing.
- */
-class AudioRecording
-{
-public:
-
- void allocate(int maxFrames) {
- mData = std::make_unique<float[]>(maxFrames);
- mMaxFrames = maxFrames;
- }
-
- // Write SHORT data from the first channel.
- int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
- // stop at end of buffer
- if ((mFrameCounter + numFrames) > mMaxFrames) {
- numFrames = mMaxFrames - mFrameCounter;
- }
- for (int i = 0; i < numFrames; i++) {
- mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
- }
- return numFrames;
- }
-
- // Write FLOAT data from the first channel.
- int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
- // stop at end of buffer
- if ((mFrameCounter + numFrames) > mMaxFrames) {
- numFrames = mMaxFrames - mFrameCounter;
- }
- for (int i = 0; i < numFrames; i++) {
- mData[mFrameCounter++] = inputData[i * inputChannelCount];
- }
- return numFrames;
- }
-
- // Write FLOAT data from the first channel.
- int32_t write(float sample) {
- // stop at end of buffer
- if (mFrameCounter < mMaxFrames) {
- mData[mFrameCounter++] = sample;
- return 1;
- }
- return 0;
- }
-
- void clear() {
- mFrameCounter = 0;
- }
- int32_t size() const {
- return mFrameCounter;
- }
-
- bool isFull() const {
- return mFrameCounter >= mMaxFrames;
- }
-
- float *getData() const {
- return mData.get();
- }
-
- void setSampleRate(int32_t sampleRate) {
- mSampleRate = sampleRate;
- }
-
- int32_t getSampleRate() const {
- return mSampleRate;
- }
-
- /**
- * Square the samples so they are all positive and so the peaks are emphasized.
- */
- void square() {
- float *x = mData.get();
- for (int i = 0; i < mFrameCounter; i++) {
- x[i] *= x[i];
- }
- }
-
- /**
- * Amplify a signal so that the peak matches the specified target.
- *
- * @param target final max value
- * @return gain applied to signal
- */
- float normalize(float target) {
- float maxValue = 1.0e-9f;
- for (int i = 0; i < mFrameCounter; i++) {
- maxValue = std::max(maxValue, abs(mData[i]));
- }
- float gain = target / maxValue;
- for (int i = 0; i < mFrameCounter; i++) {
- mData[i] *= gain;
- }
- return gain;
- }
-
-private:
- std::unique_ptr<float[]> mData;
- int32_t mFrameCounter = 0;
- int32_t mMaxFrames = 0;
- int32_t mSampleRate = kDefaultSampleRate; // common default
-};
-
-static int measureLatencyFromPulse(AudioRecording &recorded,
- AudioRecording &pulse,
- LatencyReport *report) {
-
- report->latencyInFrames = 0;
- report->confidence = 0.0;
-
- int numCorrelations = recorded.size() - pulse.size();
- if (numCorrelations < 10) {
- ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
- return -1;
- }
- std::unique_ptr<float[]> correlations= std::make_unique<float[]>(numCorrelations);
-
- // Correlate pulse against the recorded data.
- for (int i = 0; i < numCorrelations; i++) {
- float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
- &pulse.getData()[0],
- pulse.size());
- correlations[i] = correlation;
- }
-
- // Find highest peak in correlation array.
- float peakCorrelation = 0.0;
- int peakIndex = -1;
- for (int i = 0; i < numCorrelations; i++) {
- float value = abs(correlations[i]);
- if (value > peakCorrelation) {
- peakCorrelation = value;
- peakIndex = i;
- }
- }
- if (peakIndex < 0) {
- ALOGE("%s() no signal for correlation\n", __func__);
- return -2;
- }
-
- report->latencyInFrames = peakIndex;
- report->confidence = peakCorrelation;
-
- return 0;
-}
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
- virtual ~LoopbackProcessor() = default;
-
- enum result_code {
- RESULT_OK = 0,
- ERROR_NOISY = -99,
- ERROR_VOLUME_TOO_LOW,
- ERROR_VOLUME_TOO_HIGH,
- ERROR_CONFIDENCE,
- ERROR_INVALID_STATE,
- ERROR_GLITCHES,
- ERROR_NO_LOCK
- };
-
- virtual void prepareToTest() {
- reset();
- }
-
- virtual void reset() {
- mResult = 0;
- mResetCount++;
- }
-
- virtual result_code processInputFrame(float *frameData, int channelCount) = 0;
- virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;
-
- void process(float *inputData, int inputChannelCount, int numInputFrames,
- float *outputData, int outputChannelCount, int numOutputFrames) {
- int numBoth = std::min(numInputFrames, numOutputFrames);
- // Process one frame at a time.
- for (int i = 0; i < numBoth; i++) {
- processInputFrame(inputData, inputChannelCount);
- inputData += inputChannelCount;
- processOutputFrame(outputData, outputChannelCount);
- outputData += outputChannelCount;
- }
- // If there is more input than output.
- for (int i = numBoth; i < numInputFrames; i++) {
- processInputFrame(inputData, inputChannelCount);
- inputData += inputChannelCount;
- }
- // If there is more output than input.
- for (int i = numBoth; i < numOutputFrames; i++) {
- processOutputFrame(outputData, outputChannelCount);
- outputData += outputChannelCount;
- }
- }
-
- virtual std::string analyze() = 0;
-
- virtual void printStatus() {};
-
- int32_t getResult() {
- return mResult;
- }
-
- void setResult(int32_t result) {
- mResult = result;
- }
-
- virtual bool isDone() {
- return false;
- }
-
- virtual int save(const char *fileName) {
- (void) fileName;
- return -1;
- }
-
- virtual int load(const char *fileName) {
- (void) fileName;
- return -1;
- }
-
- virtual void setSampleRate(int32_t sampleRate) {
- mSampleRate = sampleRate;
- }
-
- int32_t getSampleRate() const {
- return mSampleRate;
- }
-
- int32_t getResetCount() const {
- return mResetCount;
- }
-
- /** Called when not enough input frames could be read after synchronization.
- */
- virtual void onInsufficientRead() {
- reset();
- }
-
-protected:
- int32_t mResetCount = 0;
-
-private:
- int32_t mSampleRate = kDefaultSampleRate;
- int32_t mResult = 0;
-};
-
-class LatencyAnalyzer : public LoopbackProcessor {
-public:
-
- LatencyAnalyzer() : LoopbackProcessor() {}
- virtual ~LatencyAnalyzer() = default;
-
- virtual int32_t getProgress() const = 0;
-
- virtual int getState() = 0;
-
- // @return latency in frames
- virtual int32_t getMeasuredLatency() = 0;
-
- virtual double getMeasuredConfidence() = 0;
-
- virtual double getBackgroundRMS() = 0;
-
- virtual double getSignalRMS() = 0;
-
-};
-
-// ====================================================================================
-/**
- * Measure latency given a loopback stream data.
- * Use an encoded bit train as the sound source because it
- * has an unambiguous correlation value.
- * Uses a state machine to cycle through various stages.
- *
- */
-class PulseLatencyAnalyzer : public LatencyAnalyzer {
-public:
-
- PulseLatencyAnalyzer() : LatencyAnalyzer() {
- int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
- int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
- / (kFramesPerEncodedBit * kMillisPerSecond);
- int32_t pulseLength = numPulseBits * kFramesPerEncodedBit;
- mFramesToRecord = pulseLength + maxLatencyFrames;
- mAudioRecording.allocate(mFramesToRecord);
- mAudioRecording.setSampleRate(getSampleRate());
- generateRandomPulse(pulseLength);
- }
-
- void generateRandomPulse(int32_t pulseLength) {
- mPulse.allocate(pulseLength);
- RandomPulseGenerator pulser(kFramesPerEncodedBit);
- for (int i = 0; i < pulseLength; i++) {
- mPulse.write(pulser.nextFloat());
- }
- }
-
- int getState() override {
- return mState;
- }
-
- void setSampleRate(int32_t sampleRate) override {
- LoopbackProcessor::setSampleRate(sampleRate);
- mAudioRecording.setSampleRate(sampleRate);
- }
-
- void reset() override {
- LoopbackProcessor::reset();
- mDownCounter = getSampleRate() / 2;
- mLoopCounter = 0;
-
- mPulseCursor = 0;
- mBackgroundSumSquare = 0.0f;
- mBackgroundSumCount = 0;
- mBackgroundRMS = 0.0f;
- mSignalRMS = 0.0f;
-
- mState = STATE_MEASURE_BACKGROUND;
- mAudioRecording.clear();
- mLatencyReport.reset();
- }
-
- bool hasEnoughData() {
- return mAudioRecording.isFull();
- }
-
- bool isDone() override {
- return mState == STATE_DONE;
- }
-
- int32_t getProgress() const override {
- return mAudioRecording.size();
- }
-
- std::string analyze() override {
- std::stringstream report;
- report << "PulseLatencyAnalyzer ---------------\n";
- report << LOOPBACK_RESULT_TAG "test.state = "
- << std::setw(8) << mState << "\n";
- report << LOOPBACK_RESULT_TAG "test.state.name = "
- << convertStateToText(mState) << "\n";
- report << LOOPBACK_RESULT_TAG "background.rms = "
- << std::setw(8) << mBackgroundRMS << "\n";
-
- int32_t newResult = RESULT_OK;
- if (mState != STATE_GOT_DATA) {
- report << "WARNING - Bad state. Check volume on device.\n";
- // setResult(ERROR_INVALID_STATE);
- } else {
- float gain = mAudioRecording.normalize(1.0f);
- measureLatencyFromPulse(mAudioRecording,
- mPulse,
- &mLatencyReport);
-
- if (mLatencyReport.confidence < kMinimumConfidence) {
- report << " ERROR - confidence too low!";
- newResult = ERROR_CONFIDENCE;
- } else {
- mSignalRMS = calculateRootMeanSquare(
- &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
- / gain;
- }
- double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
- / getSampleRate();
- report << LOOPBACK_RESULT_TAG "latency.frames = " << std::setw(8)
- << mLatencyReport.latencyInFrames << "\n";
- report << LOOPBACK_RESULT_TAG "latency.msec = " << std::setw(8)
- << latencyMillis << "\n";
- report << LOOPBACK_RESULT_TAG "latency.confidence = " << std::setw(8)
- << mLatencyReport.confidence << "\n";
- }
- mState = STATE_DONE;
- if (getResult() == RESULT_OK) {
- setResult(newResult);
- }
-
- return report.str();
- }
-
- int32_t getMeasuredLatency() override {
- return mLatencyReport.latencyInFrames;
- }
-
- double getMeasuredConfidence() override {
- return mLatencyReport.confidence;
- }
-
- double getBackgroundRMS() override {
- return mBackgroundRMS;
- }
-
- double getSignalRMS() override {
- return mSignalRMS;
- }
-
- void printStatus() override {
- ALOGD("st = %d", mState);
- }
-
- result_code processInputFrame(float *frameData, int channelCount) override {
- echo_state nextState = mState;
- mLoopCounter++;
-
- switch (mState) {
- case STATE_MEASURE_BACKGROUND:
- // Measure background RMS on channel 0
- mBackgroundSumSquare += frameData[0] * frameData[0];
- mBackgroundSumCount++;
- mDownCounter--;
- if (mDownCounter <= 0) {
- mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
- nextState = STATE_IN_PULSE;
- mPulseCursor = 0;
- }
- break;
-
- case STATE_IN_PULSE:
- // Record input until the mAudioRecording is full.
- mAudioRecording.write(frameData, channelCount, 1);
- if (hasEnoughData()) {
- nextState = STATE_GOT_DATA;
- }
- break;
-
- case STATE_GOT_DATA:
- case STATE_DONE:
- default:
- break;
- }
-
- mState = nextState;
- return RESULT_OK;
- }
-
- result_code processOutputFrame(float *frameData, int channelCount) override {
- switch (mState) {
- case STATE_IN_PULSE:
- if (mPulseCursor < mPulse.size()) {
- float pulseSample = mPulse.getData()[mPulseCursor++];
- for (int i = 0; i < channelCount; i++) {
- frameData[i] = pulseSample;
- }
- } else {
- for (int i = 0; i < channelCount; i++) {
- frameData[i] = 0;
- }
- }
- break;
-
- case STATE_MEASURE_BACKGROUND:
- case STATE_GOT_DATA:
- case STATE_DONE:
- default:
- for (int i = 0; i < channelCount; i++) {
- frameData[i] = 0.0f; // silence
- }
- break;
- }
-
- return RESULT_OK;
- }
-
-private:
-
- enum echo_state {
- STATE_MEASURE_BACKGROUND,
- STATE_IN_PULSE,
- STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
- STATE_DONE,
- };
-
- const char *convertStateToText(echo_state state) {
- switch (state) {
- case STATE_MEASURE_BACKGROUND:
- return "INIT";
- case STATE_IN_PULSE:
- return "PULSE";
- case STATE_GOT_DATA:
- return "GOT_DATA";
- case STATE_DONE:
- return "DONE";
- }
- return "UNKNOWN";
- }
-
- int32_t mDownCounter = 500;
- int32_t mLoopCounter = 0;
- echo_state mState = STATE_MEASURE_BACKGROUND;
-
- static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
- static constexpr int32_t kPulseLengthMillis = 500;
-
- AudioRecording mPulse;
- int32_t mPulseCursor = 0;
-
- double mBackgroundSumSquare = 0.0;
- int32_t mBackgroundSumCount = 0;
- double mBackgroundRMS = 0.0;
- double mSignalRMS = 0.0;
- int32_t mFramesToRecord = 0;
-
- AudioRecording mAudioRecording; // contains only the input after starting the pulse
- LatencyReport mLatencyReport;
-};
-
-#endif // ANALYZER_LATENCY_ANALYZER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
deleted file mode 100644
index 0a4bd5b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_MANCHESTER_ENCODER_H
-#define ANALYZER_MANCHESTER_ENCODER_H
-
-#include <cstdint>
-
-/**
- * Encode bytes using Manchester Coding scheme.
- *
- * Manchester Code is self clocking.
- * There is a transition in the middle of every bit.
- * Zero is high then low.
- * One is low then high.
- *
- * This avoids having long DC sections that would droop when
- * passed though analog circuits with AC coupling.
- *
- * IEEE 802.3 compatible.
- */
-
-class ManchesterEncoder {
-public:
- ManchesterEncoder(int samplesPerPulse)
- : mSamplesPerPulse(samplesPerPulse)
- , mSamplesPerPulseHalf(samplesPerPulse / 2)
- , mCursor(samplesPerPulse) {
- }
-
- virtual ~ManchesterEncoder() = default;
-
- /**
- * This will be called when the next byte is needed.
- * @return
- */
- virtual uint8_t onNextByte() = 0;
-
- /**
- * Generate the next floating point sample.
- * @return
- */
- virtual float nextFloat() {
- advanceSample();
- if (mCurrentBit) {
- return (mCursor < mSamplesPerPulseHalf) ? -1.0f : 1.0f; // one
- } else {
- return (mCursor < mSamplesPerPulseHalf) ? 1.0f : -1.0f; // zero
- }
- }
-
-protected:
- /**
- * This will be called when a new bit is ready to be encoded.
- * It can be used to prepare the encoded samples.
- * @param current
- */
- virtual void onNextBit(bool /* current */) {};
-
- void advanceSample() {
- // Are we ready for a new bit?
- if (++mCursor >= mSamplesPerPulse) {
- mCursor = 0;
- if (mBitsLeft == 0) {
- mCurrentByte = onNextByte();
- mBitsLeft = 8;
- }
- --mBitsLeft;
- mCurrentBit = (mCurrentByte >> mBitsLeft) & 1;
- onNextBit(mCurrentBit);
- }
- }
-
- bool getCurrentBit() {
- return mCurrentBit;
- }
-
- const int mSamplesPerPulse;
- const int mSamplesPerPulseHalf;
- int mCursor;
- int mBitsLeft = 0;
- uint8_t mCurrentByte = 0;
- bool mCurrentBit = false;
-};
-#endif //ANALYZER_MANCHESTER_ENCODER_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h b/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
deleted file mode 100644
index 4b3b4e7..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_PEAK_DETECTOR_H
-#define ANALYZER_PEAK_DETECTOR_H
-
-#include <math.h>
-
-/**
- * Measure a peak envelope by rising with the peaks,
- * and decaying exponentially after each peak.
- * The absolute value of the input signal is used.
- */
-class PeakDetector {
-public:
-
- void reset() {
- mLevel = 0.0;
- }
-
- double process(double input) {
- mLevel *= mDecay; // exponential decay
- input = fabs(input);
- // never fall below the input signal
- if (input > mLevel) {
- mLevel = input;
- }
- return mLevel;
- }
-
- double getLevel() const {
- return mLevel;
- }
-
- double getDecay() const {
- return mDecay;
- }
-
- /**
- * Multiply the level by this amount on every iteration.
- * This provides an exponential decay curve.
- * A value just under 1.0 is best, for example, 0.99;
- * @param decay scale level for each input
- */
- void setDecay(double decay) {
- mDecay = decay;
- }
-
-private:
- static constexpr double kDefaultDecay = 0.99f;
-
- double mLevel = 0.0;
- double mDecay = kDefaultDecay;
-};
-#endif //ANALYZER_PEAK_DETECTOR_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h b/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
deleted file mode 100644
index 1c4938c..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#ifndef ANALYZER_PSEUDORANDOM_H
-#define ANALYZER_PSEUDORANDOM_H
-
-#include <cctype>
-
-class PseudoRandom {
-public:
- PseudoRandom(int64_t seed = 99887766)
- : mSeed(seed)
- {}
-
- /**
- * Returns the next random double from -1.0 to 1.0
- *
- * @return value from -1.0 to 1.0
- */
- double nextRandomDouble() {
- return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
- }
-
- /** Calculate random 32 bit number using linear-congruential method
- * with known real-time performance.
- */
- int32_t nextRandomInteger() {
-#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
- int64_t prod;
- // Use values for 64-bit sequence from MMIX by Donald Knuth.
- __builtin_mul_overflow(mSeed, (int64_t)6364136223846793005, &prod);
- __builtin_add_overflow(prod, (int64_t)1442695040888963407, &mSeed);
-#else
- mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
-#endif
- return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
- }
-
-private:
- int64_t mSeed;
-};
-
-#endif //ANALYZER_PSEUDORANDOM_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h b/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
deleted file mode 100644
index 030050b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
-#define ANALYZER_RANDOM_PULSE_GENERATOR_H
-
-#include <stdlib.h>
-#include "RoundedManchesterEncoder.h"
-
-/**
- * Encode random ones and zeros using Manchester Code per IEEE 802.3.
- */
-class RandomPulseGenerator : public RoundedManchesterEncoder {
-public:
- RandomPulseGenerator(int samplesPerPulse)
- : RoundedManchesterEncoder(samplesPerPulse) {
- }
-
- virtual ~RandomPulseGenerator() = default;
-
- /**
- * This will be called when the next byte is needed.
- * @return random byte
- */
- uint8_t onNextByte() override {
- return static_cast<uint8_t>(rand());
- }
-};
-
-#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
deleted file mode 100644
index f2eba84..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-
-#include <math.h>
-#include <memory.h>
-#include <stdlib.h>
-#include "ManchesterEncoder.h"
-
-/**
- * Encode bytes using Manchester Code.
- * Round the edges using a half cosine to reduce ringing caused by a hard edge.
- */
-
-class RoundedManchesterEncoder : public ManchesterEncoder {
-public:
- RoundedManchesterEncoder(int samplesPerPulse)
- : ManchesterEncoder(samplesPerPulse) {
- int rampSize = samplesPerPulse / 4;
- mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
- mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);
-
- int sampleIndex = 0;
- for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
- float phase = (rampIndex + 1) * M_PI / rampSize;
- float sample = -cosf(phase);
- mZeroAfterZero[sampleIndex] = sample;
- mZeroAfterOne[sampleIndex] = 1.0f;
- sampleIndex++;
- }
- for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
- mZeroAfterZero[sampleIndex] = 1.0f;
- mZeroAfterOne[sampleIndex] = 1.0f;
- sampleIndex++;
- }
- for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
- float phase = (rampIndex + 1) * M_PI / rampSize;
- float sample = cosf(phase);
- mZeroAfterZero[sampleIndex] = sample;
- mZeroAfterOne[sampleIndex] = sample;
- sampleIndex++;
- }
- for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
- mZeroAfterZero[sampleIndex] = -1.0f;
- mZeroAfterOne[sampleIndex] = -1.0f;
- sampleIndex++;
- }
- }
-
- void onNextBit(bool current) override {
- // Do we need to use the rounded edge?
- mCurrentSamples = (current ^ mPreviousBit)
- ? mZeroAfterOne.get()
- : mZeroAfterZero.get();
- mPreviousBit = current;
- }
-
- float nextFloat() override {
- advanceSample();
- float output = mCurrentSamples[mCursor];
- if (getCurrentBit()) output = -output;
- return output;
- }
-
-private:
-
- bool mPreviousBit = false;
- float *mCurrentSamples = nullptr;
- std::unique_ptr<float[]> mZeroAfterZero;
- std::unique_ptr<float[]> mZeroAfterOne;
-};
-
-#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 0d2ec70..6fff568 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -36,8 +36,12 @@
#include "AAudioSimpleRecorder.h"
#include "AAudioExampleUtils.h"
+// Get logging macros from OboeTester
+#include "android_debug.h"
+// Get signal analyzers from OboeTester
#include "analyzer/GlitchAnalyzer.h"
#include "analyzer/LatencyAnalyzer.h"
+
#include "../../utils/AAudioExampleUtils.h"
// V0.4.00 = rectify and low-pass filter the echos, auto-correlate entire echo
@@ -45,8 +49,9 @@
// fix -n option to set output buffer for -tm
// plot first glitch
// V0.4.02 = allow -n0 for minimal buffer size
-// V0.5.00 = use latency analyzer from OboeTester, uses random noise for latency
-#define APP_VERSION "0.5.00"
+// V0.5.00 = use latency analyzer copied from OboeTester, uses random noise for latency
+// V0.5.01 = use latency analyzer directly from OboeTester in external/oboe
+#define APP_VERSION "0.5.01"
// Tag for machine readable results as property = value pairs
#define RESULT_TAG "RESULT: "
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 4bba436..e670642 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -421,7 +421,9 @@
printf(" -f{0|1|2} set format\n");
printf(" 0 = UNSPECIFIED\n");
printf(" 1 = PCM_I16\n");
- printf(" 2 = FLOAT\n");
+ printf(" 2 = PCM_FLOAT\n");
+ printf(" 3 = PCM_I24_PACKED\n");
+ printf(" 4 = PCM_I32\n");
printf(" -i{inputPreset} eg. 5 for AAUDIO_INPUT_PRESET_CAMCORDER\n");
printf(" -m{0|1|2|3} set MMAP policy\n");
printf(" 0 = _UNSPECIFIED, use aaudio.mmap_policy system property, default\n");
diff --git a/media/libaaudio/examples/utils/AAudioExampleUtils.h b/media/libaaudio/examples/utils/AAudioExampleUtils.h
index 46b8895..5819dfd 100644
--- a/media/libaaudio/examples/utils/AAudioExampleUtils.h
+++ b/media/libaaudio/examples/utils/AAudioExampleUtils.h
@@ -32,6 +32,7 @@
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
+// Use template functions to avoid warning of unused static functions.
template <class T = aaudio_sharing_mode_t>
const char *getSharingModeText(aaudio_sharing_mode_t mode) {
const char *text = "unknown";
@@ -48,6 +49,7 @@
return text;
}
+template <class T = aaudio_performance_mode_t>
const char *getPerformanceModeText(aaudio_performance_mode_t mode) {
const char *text = "unknown";
switch (mode) {
@@ -66,6 +68,7 @@
return text;
}
+template <class T = aaudio_direction_t>
const char *getDirectionText(aaudio_direction_t direction) {
const char *text = "unknown";
switch (direction) {
@@ -81,6 +84,29 @@
return text;
}
+template <class T = aaudio_direction_t>
+constexpr int32_t getBytesPerSample(aaudio_format_t format) {
+ switch (format) {
+ case AAUDIO_FORMAT_PCM_I16:
+ return 2;
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ return 4;
+ case AAUDIO_FORMAT_PCM_I24_PACKED:
+ return 3;
+ case AAUDIO_FORMAT_PCM_I32:
+ return 4;
+ default:
+ return -1;
+ }
+}
+
+// Return true if CPU is native Little Endian
+inline bool isNativeLittleEndian() {
+ // If the first byte of the data word in memory is 1 then Little Endian.
+ constexpr union { unsigned u; unsigned char c[sizeof(unsigned)]; } one = {1};
+ return one.c[0] != 0;
+}
+
template <class T = int64_t>
void convertNanosecondsToTimespec(int64_t nanoseconds, struct timespec *time) {
time->tv_sec = nanoseconds / NANOS_PER_SECOND;
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index fd1fc45..7daac20 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -359,22 +359,38 @@
int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
-
- int numActiveOscilators = (samplesPerFrame > MAX_CHANNELS) ? MAX_CHANNELS : samplesPerFrame;
+ int numActiveOscillators = std::min(samplesPerFrame, MAX_CHANNELS);
switch (AAudioStream_getFormat(stream)) {
case AAUDIO_FORMAT_PCM_I16: {
int16_t *audioBuffer = (int16_t *) audioData;
- for (int i = 0; i < numActiveOscilators; ++i) {
- sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
- numFrames);
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ sineData->sineOscillators[i].render(&audioBuffer[i],
+ samplesPerFrame, numFrames);
}
}
break;
case AAUDIO_FORMAT_PCM_FLOAT: {
float *audioBuffer = (float *) audioData;
- for (int i = 0; i < numActiveOscilators; ++i) {
- sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
- numFrames);
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ sineData->sineOscillators[i].render(&audioBuffer[i],
+ samplesPerFrame, numFrames);
+ }
+ }
+ break;
+ case AAUDIO_FORMAT_PCM_I24_PACKED: {
+ uint8_t *audioBuffer = (uint8_t *) audioData;
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ static const int bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+ sineData->sineOscillators[i].render24(&audioBuffer[i * bytesPerSample],
+ samplesPerFrame, numFrames);
+ }
+ }
+ break;
+ case AAUDIO_FORMAT_PCM_I32: {
+ int32_t *audioBuffer = (int32_t *) audioData;
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ sineData->sineOscillators[i].render(&audioBuffer[i],
+ samplesPerFrame, numFrames);
}
}
break;
diff --git a/media/libaaudio/examples/utils/SineGenerator.h b/media/libaaudio/examples/utils/SineGenerator.h
index 9e6d46d..66a08fd 100644
--- a/media/libaaudio/examples/utils/SineGenerator.h
+++ b/media/libaaudio/examples/utils/SineGenerator.h
@@ -41,20 +41,54 @@
}
}
+ float next() {
+ float value = sinf(mPhase) * mAmplitude;
+ advancePhase();
+ return value;
+ }
+
void render(int16_t *buffer, int32_t channelStride, int32_t numFrames) {
int sampleIndex = 0;
for (int i = 0; i < numFrames; i++) {
- buffer[sampleIndex] = (int16_t) (INT16_MAX * sin(mPhase) * mAmplitude);
+ buffer[sampleIndex] = (int16_t) (INT16_MAX * next());
sampleIndex += channelStride;
- advancePhase();
}
}
+
void render(float *buffer, int32_t channelStride, int32_t numFrames) {
int sampleIndex = 0;
for (int i = 0; i < numFrames; i++) {
- buffer[sampleIndex] = sin(mPhase) * mAmplitude;
+ buffer[sampleIndex] = next();
sampleIndex += channelStride;
- advancePhase();
+ }
+ }
+
+ void render(int32_t *buffer, int32_t channelStride, int32_t numFrames) {
+ int sampleIndex = 0;
+ for (int i = 0; i < numFrames; i++) {
+ buffer[sampleIndex] = (int32_t) (INT32_MAX * next());
+ sampleIndex += channelStride;
+ }
+ }
+
+ void render24(uint8_t *buffer, int32_t channelStride, int32_t numFrames) {
+ int sampleIndex = 0;
+ constexpr int32_t INT24_MAX = (1 << 23) - 1;
+ constexpr int bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+ const bool isLittleEndian = isNativeLittleEndian();
+ for (int i = 0; i < numFrames; i++) {
+ int32_t sample = (int32_t) (INT24_MAX * next());
+ uint32_t usample = (uint32_t) sample;
+ if (isLittleEndian) {
+ buffer[sampleIndex] = usample; // little end first
+ buffer[sampleIndex + 1] = usample >> 8;
+ buffer[sampleIndex + 2] = usample >> 16;
+ } else {
+ buffer[sampleIndex] = usample >> 16; // big end first
+ buffer[sampleIndex + 1] = usample >> 8;
+ buffer[sampleIndex + 2] = usample;
+ }
+ sampleIndex += channelStride * bytesPerSample;
}
}
@@ -100,4 +134,3 @@
};
#endif /* SINE_GENERATOR_H */
-
diff --git a/media/libaaudio/examples/utils/dummy.cpp b/media/libaaudio/examples/utils/dummy.cpp
deleted file mode 100644
index 8ef7e36..0000000
--- a/media/libaaudio/examples/utils/dummy.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-/**
- * Dummy file needed to get Android Studio to scan this folder.
- */
-
-int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/examples/utils/unused.cpp b/media/libaaudio/examples/utils/unused.cpp
new file mode 100644
index 0000000..9a5205e
--- /dev/null
+++ b/media/libaaudio/examples/utils/unused.cpp
@@ -0,0 +1,5 @@
+/**
+ * Unused file required to get Android Studio to scan this folder.
+ */
+
+int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 8e33a31..33d07f0 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -47,9 +47,11 @@
int32_t framesToPlay = 0;
int32_t framesLeft = 0;
int32_t xRunCount = 0;
- int numActiveOscilators = 0;
+ int numActiveOscillators = 0;
float *floatData = nullptr;
int16_t *shortData = nullptr;
+ int32_t *int32Data = nullptr;
+ uint8_t *byteData = nullptr;
int testFd = -1;
@@ -57,7 +59,7 @@
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("%s - Play a sine wave using AAudio V0.1.3\n", argv[0]);
+ printf("%s - Play a sine wave using AAudio V0.1.4\n", argv[0]);
if (argParser.parseArgs(argc, argv)) {
return EXIT_FAILURE;
@@ -91,13 +93,23 @@
printf("Buffer: framesPerWrite = %d\n",framesPerWrite);
// Allocate a buffer for the audio data.
- if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- floatData = new float[framesPerWrite * actualChannelCount];
- } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
- shortData = new int16_t[framesPerWrite * actualChannelCount];
- } else {
- printf("ERROR Unsupported data format!\n");
- goto finish;
+ switch (actualDataFormat) {
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ floatData = new float[framesPerWrite * actualChannelCount];
+ break;
+ case AAUDIO_FORMAT_PCM_I16:
+ shortData = new int16_t[framesPerWrite * actualChannelCount];
+ break;
+ case AAUDIO_FORMAT_PCM_I24_PACKED:
+ byteData = new uint8_t[framesPerWrite * actualChannelCount
+ * getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED)];
+ break;
+ case AAUDIO_FORMAT_PCM_I32:
+ int32Data = new int32_t[framesPerWrite * actualChannelCount];
+ break;
+ default:
+ printf("ERROR Unsupported data format!\n");
+ goto finish;
}
testFd = open("/data/aaudio_temp.raw", O_CREAT | O_RDWR, S_IRWXU);
@@ -117,29 +129,56 @@
// Play for a while.
framesToPlay = actualSampleRate * argParser.getDurationSeconds();
framesLeft = framesToPlay;
- numActiveOscilators = (actualChannelCount > MAX_CHANNELS) ? MAX_CHANNELS : actualChannelCount;
+ numActiveOscillators = (actualChannelCount > MAX_CHANNELS) ? MAX_CHANNELS : actualChannelCount;
while (framesLeft > 0) {
// Render as FLOAT or PCM
- if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- for (int i = 0; i < numActiveOscilators; ++i) {
- myData.sineOscillators[i].render(&floatData[i], actualChannelCount,
- framesPerWrite);
- }
- } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
- for (int i = 0; i < numActiveOscilators; ++i) {
- myData.sineOscillators[i].render(&shortData[i], actualChannelCount,
- framesPerWrite);
- }
+ switch (actualDataFormat) {
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ myData.sineOscillators[i].render(&floatData[i], actualChannelCount,
+ framesPerWrite);
+ }
+ break;
+ case AAUDIO_FORMAT_PCM_I16:
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ myData.sineOscillators[i].render(&shortData[i], actualChannelCount,
+ framesPerWrite);
+ }
+ break;
+ case AAUDIO_FORMAT_PCM_I32:
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ myData.sineOscillators[i].render(&int32Data[i], actualChannelCount,
+ framesPerWrite);
+ }
+ break;
+ case AAUDIO_FORMAT_PCM_I24_PACKED:
+ for (int i = 0; i < numActiveOscillators; ++i) {
+ static const int
+ bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+ myData.sineOscillators[i].render24(&byteData[i * bytesPerSample],
+ actualChannelCount,
+ framesPerWrite);
+ }
+ break;
}
// Write audio data to the stream.
int64_t timeoutNanos = 1000 * NANOS_PER_MILLISECOND;
int32_t minFrames = (framesToPlay < framesPerWrite) ? framesToPlay : framesPerWrite;
int32_t actual = 0;
- if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- actual = AAudioStream_write(aaudioStream, floatData, minFrames, timeoutNanos);
- } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
- actual = AAudioStream_write(aaudioStream, shortData, minFrames, timeoutNanos);
+ switch (actualDataFormat) {
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ actual = AAudioStream_write(aaudioStream, floatData, minFrames, timeoutNanos);
+ break;
+ case AAUDIO_FORMAT_PCM_I16:
+ actual = AAudioStream_write(aaudioStream, shortData, minFrames, timeoutNanos);
+ break;
+ case AAUDIO_FORMAT_PCM_I32:
+ actual = AAudioStream_write(aaudioStream, int32Data, minFrames, timeoutNanos);
+ break;
+ case AAUDIO_FORMAT_PCM_I24_PACKED:
+ actual = AAudioStream_write(aaudioStream, byteData, minFrames, timeoutNanos);
+ break;
}
if (actual < 0) {
fprintf(stderr, "ERROR - AAudioStream_write() returned %d\n", actual);
@@ -196,6 +235,8 @@
delete[] floatData;
delete[] shortData;
+ delete[] int32Data;
+ delete[] byteData;
printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
return (result != AAUDIO_OK) ? EXIT_FAILURE : EXIT_SUCCESS;
}
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index ca60233..cdc987b 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -31,7 +31,7 @@
#include "AAudioSimplePlayer.h"
#include "AAudioArgsParser.h"
-#define APP_VERSION "0.1.7"
+#define APP_VERSION "0.1.8"
constexpr int32_t kDefaultHangTimeMSec = 10;
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a47f189..ea4fe04 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -29,6 +29,8 @@
#ifndef AAUDIO_AAUDIO_H
#define AAUDIO_AAUDIO_H
+#include <stdbool.h>
+#include <stdint.h>
#include <time.h>
#ifdef __cplusplus
@@ -63,7 +65,7 @@
/**
* This format uses the int16_t data type.
- * The maximum range of the data is -32768 to 32767.
+ * The maximum range of the data is -32768 (0x8000) to 32767 (0x7FFF).
*/
AAUDIO_FORMAT_PCM_I16,
@@ -75,7 +77,31 @@
* See also 'floatData' at
* https://developer.android.com/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)
*/
- AAUDIO_FORMAT_PCM_FLOAT
+ AAUDIO_FORMAT_PCM_FLOAT,
+
+ /**
+ * This format uses 24-bit samples packed into 3 bytes.
+ * The bytes are in the native endian order.
+ * The maximum range of the data is -8388608 (0x800000)
+ * to 8388607 (0x7FFFFF).
+ *
+ * Note that the lower precision bits may be ignored by the device.
+ *
+ * Available since API level 31.
+ */
+ AAUDIO_FORMAT_PCM_I24_PACKED,
+
+ /**
+ * This format uses 32-bit samples stored in an int32_t data type.
+ * The maximum range of the data is -2147483648 (0x80000000)
+ * to 2147483647 (0x7FFFFFFF).
+ *
+ * Note that the lower precision bits may be ignored by the device.
+ *
+ * Available since API level 31.
+ */
+ AAUDIO_FORMAT_PCM_I32
+
};
typedef int32_t aaudio_format_t;
@@ -687,7 +713,7 @@
aaudio_performance_mode_t mode) __INTRODUCED_IN(26);
/**
- * Set the intended use case for the stream.
+ * Set the intended use case for the output stream.
*
* The AAudio system will use this information to optimize the
* behavior of the stream.
@@ -704,7 +730,7 @@
aaudio_usage_t usage) __INTRODUCED_IN(28);
/**
- * Set the type of audio data that the stream will carry.
+ * Set the type of audio data that the output stream will carry.
*
* The AAudio system will use this information to optimize the
* behavior of the stream.
@@ -894,8 +920,9 @@
* It will stop being called after AAudioStream_requestPause() or
* AAudioStream_requestStop() is called.
*
- * This callback function will be called on a real-time thread owned by AAudio. See
- * {@link #AAudioStream_dataCallback} for more information.
+ * This callback function will be called on a real-time thread owned by AAudio.
+ * The low latency streams may have callback threads with higher priority than normal streams.
+ * See {@link #AAudioStream_dataCallback} for more information.
*
* Note that the AAudio callbacks will never be called simultaneously from multiple threads.
*
@@ -1035,6 +1062,11 @@
* but still allow queries to the stream to occur from other threads. This often
* happens if you are monitoring stream progress from a UI thread.
*
+ * NOTE: This function is only fully implemented for MMAP streams,
+ * which are low latency streams supported by some devices.
+ * On other "Legacy" streams some audio resources will still be in use
+ * and some callbacks may still be in process after this call.
+ *
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @return {@link #AAUDIO_OK} or a negative error.
*/
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 717f31a..d02d1b6 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -21,6 +21,7 @@
],
cflags: [
+ "-Wthread-safety",
"-Wno-unused-parameter",
"-Wall",
"-Werror",
@@ -85,6 +86,7 @@
"libcutils",
"libutils",
"libbinder",
+ "aaudio-aidl-cpp",
],
cflags: [
@@ -114,11 +116,10 @@
"client/AudioStreamInternalPlay.cpp",
"client/IsochronousClockModel.cpp",
"binding/AudioEndpointParcelable.cpp",
+ "binding/AAudioBinderAdapter.cpp",
"binding/AAudioBinderClient.cpp",
"binding/AAudioStreamRequest.cpp",
"binding/AAudioStreamConfiguration.cpp",
- "binding/IAAudioClient.cpp",
- "binding/IAAudioService.cpp",
"binding/RingBufferParcelable.cpp",
"binding/SharedMemoryParcelable.cpp",
"binding/SharedRegionParcelable.cpp",
@@ -129,12 +130,44 @@
"flowgraph/SinkFloat.cpp",
"flowgraph/SinkI16.cpp",
"flowgraph/SinkI24.cpp",
+ "flowgraph/SinkI32.cpp",
"flowgraph/SourceFloat.cpp",
"flowgraph/SourceI16.cpp",
"flowgraph/SourceI24.cpp",
+ "flowgraph/SourceI32.cpp",
],
sanitize: {
integer_overflow: true,
misc_undefined: ["bounds"],
},
}
+
+aidl_interface {
+ name: "aaudio-aidl",
+ unstable: true,
+ local_include_dir: "binding/aidl",
+ srcs: [
+ "binding/aidl/aaudio/Endpoint.aidl",
+ "binding/aidl/aaudio/RingBuffer.aidl",
+ "binding/aidl/aaudio/SharedRegion.aidl",
+ "binding/aidl/aaudio/StreamParameters.aidl",
+ "binding/aidl/aaudio/StreamRequest.aidl",
+ "binding/aidl/aaudio/IAAudioClient.aidl",
+ "binding/aidl/aaudio/IAAudioService.aidl",
+ ],
+ imports: [
+ "audio_common-aidl",
+ "shared-file-region-aidl",
+ ],
+ backend:
+ {
+ cpp: {
+ enabled: true,
+ },
+ java: {
+ // TODO: need to have audio_common-aidl available in Java to enable
+ // this.
+ enabled: false,
+ },
+ },
+}
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
new file mode 100644
index 0000000..6e3a1c8
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binding/AAudioBinderAdapter.h>
+#include <media/AidlConversionUtil.h>
+#include <utility/AAudioUtilities.h>
+
+namespace aaudio {
+
+using android::aidl_utils::statusTFromBinderStatus;
+using android::binder::Status;
+
+AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate)
+ : mDelegate(delegate) {}
+
+void AAudioBinderAdapter::registerClient(const android::sp<IAAudioClient>& client) {
+ mDelegate->registerClient(client);
+}
+
+aaudio_handle_t AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
+ AAudioStreamConfiguration& config) {
+ aaudio_handle_t result;
+ StreamParameters params;
+ Status status = mDelegate->openStream(request.parcelable(),
+ ¶ms,
+ &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ config = params;
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::closeStream(aaudio_handle_t streamHandle) {
+ aaudio_result_t result;
+ Status status = mDelegate->closeStream(streamHandle, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::getStreamDescription(aaudio_handle_t streamHandle,
+ AudioEndpointParcelable& endpointOut) {
+ aaudio_result_t result;
+ Endpoint endpoint;
+ Status status = mDelegate->getStreamDescription(streamHandle,
+ &endpoint,
+ &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ endpointOut = std::move(endpoint);
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::startStream(aaudio_handle_t streamHandle) {
+ aaudio_result_t result;
+ Status status = mDelegate->startStream(streamHandle, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::pauseStream(aaudio_handle_t streamHandle) {
+ aaudio_result_t result;
+ Status status = mDelegate->pauseStream(streamHandle, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::stopStream(aaudio_handle_t streamHandle) {
+ aaudio_result_t result;
+ Status status = mDelegate->stopStream(streamHandle, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::flushStream(aaudio_handle_t streamHandle) {
+ aaudio_result_t result;
+ Status status = mDelegate->flushStream(streamHandle, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::registerAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId,
+ int64_t periodNanoseconds) {
+ aaudio_result_t result;
+ Status status = mDelegate->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId) {
+ aaudio_result_t result;
+ Status status = mDelegate->unregisterAudioThread(streamHandle, clientThreadId, &result);
+ if (!status.isOk()) {
+ result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+ }
+ return result;
+}
+
+} // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
new file mode 100644
index 0000000..5e9ab57
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aaudio/IAAudioService.h>
+#include <binding/AAudioServiceInterface.h>
+
+namespace aaudio {
+
+/**
+ * An adapter that takes in an underlying IAAudioService and exposes an
+ * AAudioServiceInterface.
+ *
+ * This class is abstract: the client is expected to inherit from this class and implement those
+ * methods from AAudioServiceInterface that don't have counterparts in IAAudioService.
+ */
+class AAudioBinderAdapter : public AAudioServiceInterface {
+public:
+ explicit AAudioBinderAdapter(IAAudioService* delegate);
+
+ void registerClient(const android::sp<IAAudioClient>& client) override;
+
+ aaudio_handle_t openStream(const AAudioStreamRequest& request,
+ AAudioStreamConfiguration& configuration) override;
+
+ aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ AudioEndpointParcelable& endpoint) override;
+
+ aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId,
+ int64_t periodNanoseconds) override;
+
+ aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId) override;
+
+private:
+ IAAudioService* const mDelegate;
+};
+
+} // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 7b0d31f..fa5a2da 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -19,35 +19,30 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
-#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <utils/Mutex.h>
#include <utils/RefBase.h>
#include <utils/Singleton.h>
-#include <media/AudioSystem.h>
-
#include <aaudio/AAudio.h>
#include "AudioEndpointParcelable.h"
-#include "binding/AAudioBinderClient.h"
-//#include "binding/AAudioStreamRequest.h"
-//#include "binding/AAudioStreamConfiguration.h"
-//#include "binding/IAAudioService.h"
-//#include "binding/AAudioServiceMessage.h"
-//#include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderClient.h"
+
+#define AAUDIO_SERVICE_NAME "media.aaudio"
using android::String16;
using android::IServiceManager;
using android::defaultServiceManager;
using android::interface_cast;
using android::IInterface;
-using android::IAAudioService;
using android::Mutex;
using android::ProcessState;
using android::sp;
+using android::status_t;
using android::wp;
+using android::binder::Status;
using namespace aaudio;
@@ -67,20 +62,18 @@
AAudioBinderClient::~AAudioBinderClient() {
ALOGV("%s - destroying %p", __func__, this);
Mutex::Autolock _l(mServiceLock);
- if (mAAudioService != 0) {
- IInterface::asBinder(mAAudioService)->unlinkToDeath(mAAudioClient);
- }
}
// TODO Share code with other service clients.
// Helper function to get access to the "AAudioService" service.
// This code was modeled after frameworks/av/media/libaudioclient/AudioSystem.cpp
-const sp<IAAudioService> AAudioBinderClient::getAAudioService() {
+std::shared_ptr<AAudioServiceInterface> AAudioBinderClient::getAAudioService() {
+ std::shared_ptr<AAudioServiceInterface> result;
sp<IAAudioService> aaudioService;
bool needToRegister = false;
{
Mutex::Autolock _l(mServiceLock);
- if (mAAudioService.get() == nullptr) {
+ if (mAdapter == nullptr) {
sp<IBinder> binder;
sp<IServiceManager> sm = defaultServiceManager();
// Try several times to get the service.
@@ -99,7 +92,8 @@
if (status != NO_ERROR) {
ALOGE("%s() - linkToDeath() returned %d", __func__, status);
}
- mAAudioService = interface_cast<IAAudioService>(binder);
+ aaudioService = interface_cast<IAAudioService>(binder);
+ mAdapter.reset(new Adapter(aaudioService, mAAudioClient));
needToRegister = true;
// Make sure callbacks can be received by mAAudioClient
ProcessState::self()->startThreadPool();
@@ -107,18 +101,18 @@
ALOGE("AAudioBinderClient could not connect to %s", AAUDIO_SERVICE_NAME);
}
}
- aaudioService = mAAudioService;
+ result = mAdapter;
}
// Do this outside the mutex lock.
if (needToRegister && aaudioService.get() != nullptr) { // new client?
aaudioService->registerClient(mAAudioClient);
}
- return aaudioService;
+ return result;
}
void AAudioBinderClient::dropAAudioService() {
Mutex::Autolock _l(mServiceLock);
- mAAudioService.clear(); // force a reconnect
+ mAdapter.reset();
}
/**
@@ -127,13 +121,13 @@
* @return handle to the stream or a negative error
*/
aaudio_handle_t AAudioBinderClient::openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) {
+ AAudioStreamConfiguration &configuration) {
aaudio_handle_t stream;
for (int i = 0; i < 2; i++) {
- const sp<IAAudioService> &service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- stream = service->openStream(request, configurationOutput);
+ stream = service->openStream(request, configuration);
if (stream == AAUDIO_ERROR_NO_SERVICE) {
ALOGE("openStream lost connection to AAudioService.");
@@ -146,8 +140,9 @@
}
aaudio_result_t AAudioBinderClient::closeStream(aaudio_handle_t streamHandle) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
return service->closeStream(streamHandle);
}
@@ -155,33 +150,38 @@
* used to communicate with the underlying HAL or Service.
*/
aaudio_result_t AAudioBinderClient::getStreamDescription(aaudio_handle_t streamHandle,
- AudioEndpointParcelable &parcelable) {
- const sp<IAAudioService> service = getAAudioService();
+ AudioEndpointParcelable& endpointOut) {
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->getStreamDescription(streamHandle, parcelable);
+
+ return service->getStreamDescription(streamHandle, endpointOut);
}
aaudio_result_t AAudioBinderClient::startStream(aaudio_handle_t streamHandle) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
return service->startStream(streamHandle);
}
aaudio_result_t AAudioBinderClient::pauseStream(aaudio_handle_t streamHandle) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
return service->pauseStream(streamHandle);
}
aaudio_result_t AAudioBinderClient::stopStream(aaudio_handle_t streamHandle) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
return service->stopStream(streamHandle);
}
aaudio_result_t AAudioBinderClient::flushStream(aaudio_handle_t streamHandle) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
return service->flushStream(streamHandle);
}
@@ -191,17 +191,16 @@
aaudio_result_t AAudioBinderClient::registerAudioThread(aaudio_handle_t streamHandle,
pid_t clientThreadId,
int64_t periodNanoseconds) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->registerAudioThread(streamHandle,
- clientThreadId,
- periodNanoseconds);
+
+ return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
}
aaudio_result_t AAudioBinderClient::unregisterAudioThread(aaudio_handle_t streamHandle,
pid_t clientThreadId) {
- const sp<IAAudioService> service = getAAudioService();
+ std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->unregisterAudioThread(streamHandle,
- clientThreadId);
+
+ return service->unregisterAudioThread(streamHandle, clientThreadId);
}
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index e8c91fc..6a7b639 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -21,12 +21,15 @@
#include <utils/Singleton.h>
#include <aaudio/AAudio.h>
-#include "AAudioServiceDefinitions.h"
+#include <binder/IInterface.h>
+
+#include "aaudio/BnAAudioClient.h"
+#include "aaudio/IAAudioService.h"
#include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderAdapter.h"
#include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioStreamConfiguration.h"
#include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioService.h"
+#include "core/AAudioStreamParameters.h"
/**
* Implements the AAudioServiceInterface by talking to the service through Binder.
@@ -44,11 +47,7 @@
virtual ~AAudioBinderClient();
- const android::sp<android::IAAudioService> getAAudioService();
-
- void dropAAudioService();
-
- void registerClient(const android::sp<android::IAAudioClient>& client __unused) override {}
+ void registerClient(const android::sp<IAAudioClient>& client __unused) override {}
/**
* @param request info needed to create the stream
@@ -64,7 +63,7 @@
* used to communicate with the underlying HAL or Service.
*/
aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
- AudioEndpointParcelable &parcelable) override;
+ AudioEndpointParcelable &endpointOut) override;
/**
* Start the flow of data.
@@ -115,8 +114,7 @@
ALOGW("onStreamChange called!");
}
- class AAudioClient : public android::IBinder::DeathRecipient , public android::BnAAudioClient
- {
+ class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
public:
AAudioClient(android::wp<AAudioBinderClient> aaudioBinderClient)
: mBinderClient(aaudioBinderClient) {
@@ -132,21 +130,66 @@
}
// implement BnAAudioClient
- void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {
+ android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+ static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
android::sp<AAudioBinderClient> client = mBinderClient.promote();
if (client.get() != nullptr) {
client->onStreamChange(handle, opcode, value);
}
+ return android::binder::Status::ok();
}
private:
android::wp<AAudioBinderClient> mBinderClient;
};
-private:
+    // This adapter is used to convert the binder interface (delegate) to the
+    // AAudioServiceInterface conventions (translating between data types and respective
+    // parcelables, translating error codes and calling conventions).
+    // The adapter also owns the underlying service object and is responsible for unlinking its
+    // death listener when destroyed.
+ class Adapter : public AAudioBinderAdapter {
+ public:
+ Adapter(const android::sp<IAAudioService>& delegate,
+ const android::sp<AAudioClient>& aaudioClient)
+ : AAudioBinderAdapter(delegate.get()),
+ mDelegate(delegate),
+ mAAudioClient(aaudioClient) {}
- android::Mutex mServiceLock;
- android::sp<android::IAAudioService> mAAudioService;
- android::sp<AAudioClient> mAAudioClient;
+ virtual ~Adapter() {
+ if (mDelegate != nullptr) {
+ android::IInterface::asBinder(mDelegate)->unlinkToDeath(mAAudioClient);
+ }
+ }
+
+        // This should never be called (call is rejected at the AAudioBinderClient level).
+ aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+ const android::AudioClient& client __unused,
+ const audio_attributes_t* attr __unused,
+ audio_port_handle_t* clientHandle __unused) override {
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+
+        // This should never be called (call is rejected at the AAudioBinderClient level).
+ aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+ audio_port_handle_t clientHandle __unused) override {
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+
+ private:
+ android::sp<IAAudioService> mDelegate;
+ android::sp<AAudioClient> mAAudioClient;
+ };
+
+private:
+ android::Mutex mServiceLock;
+ std::shared_ptr<AAudioServiceInterface> mAdapter;
+ android::sp<AAudioClient> mAAudioClient;
+
+ std::shared_ptr<AAudioServiceInterface> getAAudioService();
+
+ void dropAAudioService();
};
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index 9c28cc7..5d11512 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -20,11 +20,11 @@
#include <utils/StrongPointer.h>
#include <media/AudioClient.h>
+#include "aaudio/IAAudioClient.h"
#include "binding/AAudioServiceDefinitions.h"
#include "binding/AAudioStreamRequest.h"
#include "binding/AAudioStreamConfiguration.h"
#include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
/**
* This has the same methods as IAAudioService but without the Binder features.
@@ -40,7 +40,7 @@
AAudioServiceInterface() {};
virtual ~AAudioServiceInterface() = default;
- virtual void registerClient(const android::sp<android::IAAudioClient>& client) = 0;
+ virtual void registerClient(const android::sp<IAAudioClient>& client) = 0;
/**
* @param request info needed to create the stream
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index b785f88..2d501ef 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -23,101 +23,66 @@
#include <sys/mman.h>
#include <aaudio/AAudio.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-
#include "binding/AAudioStreamConfiguration.h"
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
using namespace aaudio;
-AAudioStreamConfiguration::AAudioStreamConfiguration() {}
-AAudioStreamConfiguration::~AAudioStreamConfiguration() {}
+using android::media::audio::common::AudioFormat;
-status_t AAudioStreamConfiguration::writeToParcel(Parcel* parcel) const {
- status_t status;
-
- status = parcel->writeInt32(getDeviceId());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(getSampleRate());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(getSamplesPerFrame());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getSharingMode());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getFormat());
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeInt32((int32_t) getDirection());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(getBufferCapacity());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getUsage());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getContentType());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getInputPreset());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32((int32_t) getAllowedCapturePolicy());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(getSessionId());
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(isPrivacySensitive() ? 1 : 0);
- if (status != NO_ERROR) goto error;
- return NO_ERROR;
-error:
- ALOGE("%s(): write failed = %d", __func__, status);
- return status;
+AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
+ setSamplesPerFrame(parcelable.samplesPerFrame);
+ setSampleRate(parcelable.sampleRate);
+ setDeviceId(parcelable.deviceId);
+ static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
+ setSharingMode(parcelable.sharingMode);
+ static_assert(sizeof(audio_format_t) == sizeof(parcelable.audioFormat));
+ setFormat(static_cast<audio_format_t>(parcelable.audioFormat));
+ static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
+ setDirection(parcelable.direction);
+ static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
+ setUsage(parcelable.usage);
+ static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
+ setContentType(parcelable.contentType);
+ static_assert(sizeof(aaudio_input_preset_t) == sizeof(parcelable.inputPreset));
+ setInputPreset(parcelable.inputPreset);
+ setBufferCapacity(parcelable.bufferCapacity);
+ static_assert(
+ sizeof(aaudio_allowed_capture_policy_t) == sizeof(parcelable.allowedCapturePolicy));
+ setAllowedCapturePolicy(parcelable.allowedCapturePolicy);
+ static_assert(sizeof(aaudio_session_id_t) == sizeof(parcelable.sessionId));
+ setSessionId(parcelable.sessionId);
+ setPrivacySensitive(parcelable.isPrivacySensitive);
}
-status_t AAudioStreamConfiguration::readFromParcel(const Parcel* parcel) {
- int32_t value;
- status_t status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setDeviceId(value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setSampleRate(value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setSamplesPerFrame(value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setSharingMode((aaudio_sharing_mode_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setFormat((audio_format_t) value);
+AAudioStreamConfiguration&
+AAudioStreamConfiguration::operator=(const StreamParameters& parcelable) {
+ this->~AAudioStreamConfiguration();
+ new (this) AAudioStreamConfiguration(parcelable);
+ return *this;
+}
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setDirection((aaudio_direction_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setBufferCapacity(value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setUsage((aaudio_usage_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setContentType((aaudio_content_type_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setInputPreset((aaudio_input_preset_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setAllowedCapturePolicy((aaudio_allowed_capture_policy_t) value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setSessionId(value);
- status = parcel->readInt32(&value);
- if (status != NO_ERROR) goto error;
- setPrivacySensitive(value == 1);
- return NO_ERROR;
-error:
- ALOGE("%s(): read failed = %d", __func__, status);
- return status;
+StreamParameters AAudioStreamConfiguration::parcelable() const {
+ StreamParameters result;
+ result.samplesPerFrame = getSamplesPerFrame();
+ result.sampleRate = getSampleRate();
+ result.deviceId = getDeviceId();
+ static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
+ result.sharingMode = getSharingMode();
+ static_assert(sizeof(audio_format_t) == sizeof(result.audioFormat));
+ result.audioFormat = static_cast<AudioFormat>(getFormat());
+ static_assert(sizeof(aaudio_direction_t) == sizeof(result.direction));
+ result.direction = getDirection();
+ static_assert(sizeof(audio_usage_t) == sizeof(result.usage));
+ result.usage = getUsage();
+ static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
+ result.contentType = getContentType();
+ static_assert(sizeof(aaudio_input_preset_t) == sizeof(result.inputPreset));
+ result.inputPreset = getInputPreset();
+ result.bufferCapacity = getBufferCapacity();
+ static_assert(sizeof(aaudio_allowed_capture_policy_t) == sizeof(result.allowedCapturePolicy));
+ result.allowedCapturePolicy = getAllowedCapturePolicy();
+ static_assert(sizeof(aaudio_session_id_t) == sizeof(result.sessionId));
+ result.sessionId = getSessionId();
+ result.isPrivacySensitive = isPrivacySensitive();
+ return result;
}
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.h b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
index b324896..f428eb0 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.h
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
@@ -20,24 +20,24 @@
#include <stdint.h>
#include <aaudio/AAudio.h>
+#include <aaudio/StreamParameters.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
#include "core/AAudioStreamParameters.h"
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
namespace aaudio {
-class AAudioStreamConfiguration : public AAudioStreamParameters, public Parcelable {
+// This is a holder for AAudioStreamParameters, which allows conversion to/from its parcelable
+// representation, StreamParameters.
+class AAudioStreamConfiguration : public AAudioStreamParameters {
public:
- AAudioStreamConfiguration();
- virtual ~AAudioStreamConfiguration();
+ AAudioStreamConfiguration() = default;
- virtual status_t writeToParcel(Parcel* parcel) const override;
+ explicit AAudioStreamConfiguration(const StreamParameters& parcelable);
- virtual status_t readFromParcel(const Parcel* parcel) override;
+ AAudioStreamConfiguration& operator=(const StreamParameters& parcelable);
+
+ StreamParameters parcelable() const;
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index c30c5b9..536395a 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -21,67 +21,32 @@
#include <stdint.h>
#include <sys/mman.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
#include <aaudio/AAudio.h>
#include "binding/AAudioStreamConfiguration.h"
#include "binding/AAudioStreamRequest.h"
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
using namespace aaudio;
-AAudioStreamRequest::AAudioStreamRequest()
- : mConfiguration()
- {}
-
-AAudioStreamRequest::~AAudioStreamRequest() {}
-
-status_t AAudioStreamRequest::writeToParcel(Parcel* parcel) const {
- status_t status = parcel->writeInt32((int32_t) mUserId);
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeBool(mSharingModeMatchRequired);
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeBool(mInService);
- if (status != NO_ERROR) goto error;
-
- status = mConfiguration.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
-
- return NO_ERROR;
-
-error:
- ALOGE("writeToParcel(): write failed = %d", status);
- return status;
+AAudioStreamRequest::AAudioStreamRequest(const StreamRequest& parcelable) :
+ mConfiguration(std::move(parcelable.params)),
+ mUserId(parcelable.userId),
+ mProcessId(parcelable.processId),
+ mSharingModeMatchRequired(parcelable.sharingModeMatchRequired),
+ mInService(parcelable.inService) {
+ static_assert(sizeof(mUserId) == sizeof(parcelable.userId));
+ static_assert(sizeof(mProcessId) == sizeof(parcelable.processId));
}
-status_t AAudioStreamRequest::readFromParcel(const Parcel* parcel) {
- int32_t temp;
- status_t status = parcel->readInt32(&temp);
- if (status != NO_ERROR) goto error;
- mUserId = (uid_t) temp;
-
- status = parcel->readBool(&mSharingModeMatchRequired);
- if (status != NO_ERROR) goto error;
-
- status = parcel->readBool(&mInService);
- if (status != NO_ERROR) goto error;
-
- status = mConfiguration.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
-
- return NO_ERROR;
-
-error:
- ALOGE("readFromParcel(): read failed = %d", status);
- return status;
+StreamRequest AAudioStreamRequest::parcelable() const {
+ StreamRequest result;
+ result.params = std::move(mConfiguration).parcelable();
+ result.userId = mUserId;
+ result.processId = mProcessId;
+ result.sharingModeMatchRequired = mSharingModeMatchRequired;
+ result.inService = mInService;
+ return result;
}
aaudio_result_t AAudioStreamRequest::validate() const {
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.h b/media/libaaudio/src/binding/AAudioStreamRequest.h
index 492f69d..31d3ea1 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.h
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.h
@@ -20,21 +20,18 @@
#include <stdint.h>
#include <aaudio/AAudio.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <aaudio/StreamRequest.h>
#include "binding/AAudioStreamConfiguration.h"
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
namespace aaudio {
-class AAudioStreamRequest : public Parcelable {
+class AAudioStreamRequest {
public:
- AAudioStreamRequest();
- virtual ~AAudioStreamRequest();
+ AAudioStreamRequest() = default;
+
+ // Construct based on a parcelable representation.
+ explicit AAudioStreamRequest(const StreamRequest& parcelable);
uid_t getUserId() const {
return mUserId;
@@ -76,15 +73,14 @@
mInService = inService;
}
- virtual status_t writeToParcel(Parcel* parcel) const override;
-
- virtual status_t readFromParcel(const Parcel* parcel) override;
-
aaudio_result_t validate() const;
void dump() const;
-protected:
+ // Extract a parcelable representation of this object.
+ StreamRequest parcelable() const;
+
+private:
AAudioStreamConfiguration mConfiguration;
uid_t mUserId = (uid_t) -1;
pid_t mProcessId = (pid_t) -1;
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index 61d7d27..aa4ac27 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -29,22 +29,43 @@
#include "binding/AudioEndpointParcelable.h"
using android::base::unique_fd;
+using android::media::SharedFileRegion;
using android::NO_ERROR;
using android::status_t;
-using android::Parcel;
-using android::Parcelable;
using namespace aaudio;
-/**
- * Container for information about the message queues plus
- * general stream information needed by AAudio clients.
- * It contains no addresses, just sizes, offsets and file descriptors for
- * shared memory that can be passed through Binder.
- */
-AudioEndpointParcelable::AudioEndpointParcelable() {}
+AudioEndpointParcelable::AudioEndpointParcelable(Endpoint&& parcelable)
+ : mUpMessageQueueParcelable(std::move(parcelable.upMessageQueueParcelable)),
+ mDownMessageQueueParcelable(std::move(parcelable.downMessageQueueParcelable)),
+ mUpDataQueueParcelable(std::move(parcelable.upDataQueueParcelable)),
+ mDownDataQueueParcelable(std::move(parcelable.downDataQueueParcelable)),
+ mNumSharedMemories(parcelable.sharedMemories.size()) {
+ for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
+ // Re-construct.
+ mSharedMemories[i].~SharedMemoryParcelable();
+ new(&mSharedMemories[i]) SharedMemoryParcelable(std::move(parcelable.sharedMemories[i]));
+ }
+}
-AudioEndpointParcelable::~AudioEndpointParcelable() {}
+AudioEndpointParcelable& AudioEndpointParcelable::operator=(Endpoint&& parcelable) {
+ this->~AudioEndpointParcelable();
+ new(this) AudioEndpointParcelable(std::move(parcelable));
+ return *this;
+}
+
+Endpoint AudioEndpointParcelable::parcelable()&& {
+ Endpoint result;
+ result.upMessageQueueParcelable = std::move(mUpMessageQueueParcelable).parcelable();
+ result.downMessageQueueParcelable = std::move(mDownMessageQueueParcelable).parcelable();
+ result.upDataQueueParcelable = std::move(mUpDataQueueParcelable).parcelable();
+ result.downDataQueueParcelable = std::move(mDownDataQueueParcelable).parcelable();
+ result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
+ for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
+ result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+ }
+ return result;
+}
/**
* Add the file descriptor to the table.
@@ -60,60 +81,6 @@
return index;
}
-/**
- * The read and write must be symmetric.
- */
-status_t AudioEndpointParcelable::writeToParcel(Parcel* parcel) const {
- status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeInt32(mNumSharedMemories);
- if (status != NO_ERROR) goto error;
-
- for (int i = 0; i < mNumSharedMemories; i++) {
- status = mSharedMemories[i].writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- }
- status = mUpMessageQueueParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDownMessageQueueParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mUpDataQueueParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDownDataQueueParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
-
- return NO_ERROR;
-
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
-status_t AudioEndpointParcelable::readFromParcel(const Parcel* parcel) {
- status_t status = parcel->readInt32(&mNumSharedMemories);
- if (status != NO_ERROR) goto error;
-
- for (int i = 0; i < mNumSharedMemories; i++) {
- mSharedMemories[i].readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- }
- status = mUpMessageQueueParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDownMessageQueueParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mUpDataQueueParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDownDataQueueParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
-
- return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
aaudio_result_t result = mUpMessageQueueParcelable.resolve(mSharedMemories,
&descriptor->upMessageQueueDescriptor);
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index e4f8b9e..5237a1a 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -20,16 +20,13 @@
#include <stdint.h>
//#include <sys/mman.h>
+#include <aaudio/Endpoint.h>
#include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
#include "binding/AAudioServiceDefinitions.h"
#include "binding/RingBufferParcelable.h"
using android::status_t;
-using android::Parcel;
-using android::Parcelable;
namespace aaudio {
@@ -39,10 +36,15 @@
* It contains no addresses, just sizes, offsets and file descriptors for
* shared memory that can be passed through Binder.
*/
-class AudioEndpointParcelable : public Parcelable {
+class AudioEndpointParcelable {
public:
- AudioEndpointParcelable();
- virtual ~AudioEndpointParcelable();
+ AudioEndpointParcelable() = default;
+
+ // Ctor/assignment from a parcelable representation.
+ // Since the parcelable object owns unique FDs (for shared memory blocks), move semantics are
+ // provided to avoid the need to dupe.
+ AudioEndpointParcelable(Endpoint&& parcelable);
+ AudioEndpointParcelable& operator=(Endpoint&& parcelable);
/**
* Add the file descriptor to the table.
@@ -50,16 +52,17 @@
*/
int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
- virtual status_t writeToParcel(Parcel* parcel) const override;
-
- virtual status_t readFromParcel(const Parcel* parcel) override;
-
aaudio_result_t resolve(EndpointDescriptor *descriptor);
aaudio_result_t close();
void dump();
+ // Extract a parcelable representation of this object.
+ // Since our shared memory objects own a unique FD, move semantics are provided to avoid the
+ // need to dupe.
+ Endpoint parcelable()&&;
+
public: // TODO add getters
// Set capacityInFrames to zero if Queue is unused.
RingBufferParcelable mUpMessageQueueParcelable; // server to client
diff --git a/media/libaaudio/src/binding/IAAudioClient.cpp b/media/libaaudio/src/binding/IAAudioClient.cpp
deleted file mode 100644
index c69c4e8..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.cpp
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioBinderClient.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/IAAudioClient.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Service to talk to an AAudio Client.
- *
- * The order of parameters in the Parcels must match with code in AAudioClient.cpp.
- */
-class BpAAudioClient : public BpInterface<IAAudioClient>
-{
-public:
- explicit BpAAudioClient(const sp<IBinder>& impl)
- : BpInterface<IAAudioClient>(impl)
- {
- }
-
- void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) override {
- Parcel data, reply;
- data.writeInterfaceToken(IAAudioClient::getInterfaceDescriptor());
- data.writeInt32(handle);
- data.writeInt32(opcode);
- data.writeInt32(value);
- remote()->transact(ON_STREAM_CHANGE, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
-};
-
-// Implement an interface to the service.
-IMPLEMENT_META_INTERFACE(AAudioClient, "IAAudioClient");
-
-// The order of parameters in the Parcels must match with code in BpAAudioClient
-
-status_t BnAAudioClient::onTransact(uint32_t code, const Parcel& data,
- Parcel* reply, uint32_t flags) {
- aaudio_handle_t streamHandle;
- int32_t opcode = 0;
- int32_t value = 0;
- ALOGV("BnAAudioClient::onTransact(%u) %u", code, flags);
-
- switch(code) {
- case ON_STREAM_CHANGE: {
- CHECK_INTERFACE(IAAudioClient, data, reply);
- data.readInt32(&streamHandle);
- data.readInt32(&opcode);
- data.readInt32(&value);
- onStreamChange(streamHandle, opcode, value);
- ALOGD("BnAAudioClient onStreamChange(%x, %d, %d)", streamHandle, opcode, value);
- return NO_ERROR;
- } break;
-
- default:
- // ALOGW("BnAAudioClient::onTransact not handled %u", code);
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-} /* namespace android */
diff --git a/media/libaaudio/src/binding/IAAudioClient.h b/media/libaaudio/src/binding/IAAudioClient.h
deleted file mode 100644
index f21fd93..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-#define ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-
-#include <stdint.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-
-namespace android {
-
-
-// Interface (our AIDL) - client methods called by service
-class IAAudioClient : public IInterface {
-public:
-
- DECLARE_META_INTERFACE(AAudioClient);
-
- virtual void onStreamChange(aaudio::aaudio_handle_t handle, int32_t opcode, int32_t value) = 0;
-
-};
-
-class BnAAudioClient : public BnInterface<IAAudioClient> {
-public:
- virtual status_t onTransact(uint32_t code, const Parcel& data,
- Parcel* reply, uint32_t flags = 0);
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/IAAudioService.cpp b/media/libaaudio/src/binding/IAAudioService.cpp
deleted file mode 100644
index e017b3a..0000000
--- a/media/libaaudio/src/binding/IAAudioService.cpp
+++ /dev/null
@@ -1,424 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-#include <binder/IPCThreadState.h>
-
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Client to talk to the AAudio Service.
- *
- * The order of parameters in the Parcels must match with code in AAudioService.cpp.
- */
-class BpAAudioService : public BpInterface<IAAudioService>
-{
-public:
- explicit BpAAudioService(const sp<IBinder>& impl)
- : BpInterface<IAAudioService>(impl)
- {
- }
-
- void registerClient(const sp<IAAudioClient>& client) override
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(client));
- remote()->transact(REGISTER_CLIENT, data, &reply);
- }
-
- aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
- aaudio::AAudioStreamConfiguration &configurationOutput) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- // request.dump();
- request.writeToParcel(&data);
- status_t err = remote()->transact(OPEN_STREAM, data, &reply);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client openStream transact failed %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_handle_t stream;
- err = reply.readInt32(&stream);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client transact(OPEN_STREAM) readInt %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- } else if (stream < 0) {
- return stream;
- }
- err = configurationOutput.readFromParcel(&reply);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client openStream readFromParcel failed %d", err);
- closeStream(stream);
- return AAudioConvert_androidToAAudioResult(err);
- }
- return stream;
- }
-
- virtual aaudio_result_t closeStream(aaudio_handle_t streamHandle) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(CLOSE_STREAM, data, &reply);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client closeStream transact failed %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
- aaudio::AudioEndpointParcelable &parcelable) {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(GET_STREAM_DESCRIPTION, data, &reply);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) returns %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t result;
- err = reply.readInt32(&result);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) readInt %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- } else if (result != AAUDIO_OK) {
- ALOGE("BpAAudioService::client GET_STREAM_DESCRIPTION passed result %d", result);
- return result;
- }
- err = parcelable.readFromParcel(&reply);
- if (err != NO_ERROR) {
- ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) read endpoint %d", err);
- return AAudioConvert_androidToAAudioResult(err);
- }
- return result;
- }
-
- // TODO should we wait for a reply?
- virtual aaudio_result_t startStream(aaudio_handle_t streamHandle) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(START_STREAM, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(PAUSE_STREAM, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t stopStream(aaudio_handle_t streamHandle) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(STOP_STREAM, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t flushStream(aaudio_handle_t streamHandle) override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- status_t err = remote()->transact(FLUSH_STREAM, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId,
- int64_t periodNanoseconds)
- override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- data.writeInt32((int32_t) clientThreadId);
- data.writeInt64(periodNanoseconds);
- status_t err = remote()->transact(REGISTER_AUDIO_THREAD, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
- virtual aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId)
- override {
- Parcel data, reply;
- // send command
- data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
- data.writeInt32(streamHandle);
- data.writeInt32((int32_t) clientThreadId);
- status_t err = remote()->transact(UNREGISTER_AUDIO_THREAD, data, &reply);
- if (err != NO_ERROR) {
- return AAudioConvert_androidToAAudioResult(err);
- }
- // parse reply
- aaudio_result_t res;
- reply.readInt32(&res);
- return res;
- }
-
-};
-
-// Implement an interface to the service.
-// This is here so that you don't have to link with libaaudio static library.
-IMPLEMENT_META_INTERFACE(AAudioService, "IAAudioService");
-
-// The order of parameters in the Parcels must match with code in BpAAudioService
-
-status_t BnAAudioService::onTransact(uint32_t code, const Parcel& data,
- Parcel* reply, uint32_t flags) {
- aaudio_handle_t streamHandle = 0;
- aaudio::AAudioStreamRequest request;
- aaudio::AAudioStreamConfiguration configuration;
- pid_t tid = 0;
- int64_t nanoseconds = 0;
- aaudio_result_t result = AAUDIO_OK;
- status_t status = NO_ERROR;
- ALOGV("BnAAudioService::onTransact(%i) %i", code, flags);
-
- switch(code) {
- case REGISTER_CLIENT: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- sp<IAAudioClient> client = interface_cast<IAAudioClient>(
- data.readStrongBinder());
- // readStrongBinder() can return null
- if (client.get() == nullptr) {
- ALOGE("BnAAudioService::%s(REGISTER_CLIENT) client is NULL!", __func__);
- android_errorWriteLog(0x534e4554, "116230453");
- return DEAD_OBJECT;
- } else {
- registerClient(client);
- return NO_ERROR;
- }
- } break;
-
- case OPEN_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- request.readFromParcel(&data);
- result = request.validate();
- if (result != AAUDIO_OK) {
- streamHandle = result;
- } else {
- //ALOGD("BnAAudioService::client openStream request dump --------------------");
- //request.dump();
- // Override the uid and pid from the client in case they are incorrect.
- request.setUserId(IPCThreadState::self()->getCallingUid());
- request.setProcessId(IPCThreadState::self()->getCallingPid());
- streamHandle = openStream(request, configuration);
- //ALOGD("BnAAudioService::onTransact OPEN_STREAM server handle = 0x%08X",
- // streamHandle);
- }
- reply->writeInt32(streamHandle);
- configuration.writeToParcel(reply);
- return NO_ERROR;
- } break;
-
- case CLOSE_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(CLOSE_STREAM) streamHandle failed!", __func__);
- return status;
- }
- result = closeStream(streamHandle);
- //ALOGD("BnAAudioService::onTransact CLOSE_STREAM 0x%08X, result = %d",
- // streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case GET_STREAM_DESCRIPTION: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(GET_STREAM_DESCRIPTION) streamHandle failed!", __func__);
- return status;
- }
- aaudio::AudioEndpointParcelable parcelable;
- result = getStreamDescription(streamHandle, parcelable);
- if (result != AAUDIO_OK) {
- return AAudioConvert_aaudioToAndroidStatus(result);
- }
- status = reply->writeInt32(result);
- if (status != NO_ERROR) {
- return status;
- }
- return parcelable.writeToParcel(reply);
- } break;
-
- case START_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(START_STREAM) streamHandle failed!", __func__);
- return status;
- }
- result = startStream(streamHandle);
- ALOGV("BnAAudioService::onTransact START_STREAM 0x%08X, result = %d",
- streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case PAUSE_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(PAUSE_STREAM) streamHandle failed!", __func__);
- return status;
- }
- result = pauseStream(streamHandle);
- ALOGV("BnAAudioService::onTransact PAUSE_STREAM 0x%08X, result = %d",
- streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case STOP_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(STOP_STREAM) streamHandle failed!", __func__);
- return status;
- }
- result = stopStream(streamHandle);
- ALOGV("BnAAudioService::onTransact STOP_STREAM 0x%08X, result = %d",
- streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case FLUSH_STREAM: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(FLUSH_STREAM) streamHandle failed!", __func__);
- return status;
- }
- result = flushStream(streamHandle);
- ALOGV("BnAAudioService::onTransact FLUSH_STREAM 0x%08X, result = %d",
- streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case REGISTER_AUDIO_THREAD: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
- return status;
- }
- status = data.readInt32(&tid);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) tid failed!", __func__);
- return status;
- }
- status = data.readInt64(&nanoseconds);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) nanoseconds failed!", __func__);
- return status;
- }
- result = registerAudioThread(streamHandle, tid, nanoseconds);
- ALOGV("BnAAudioService::%s(REGISTER_AUDIO_THREAD) 0x%08X, result = %d",
- __func__, streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- case UNREGISTER_AUDIO_THREAD: {
- CHECK_INTERFACE(IAAudioService, data, reply);
- status = data.readInt32(&streamHandle);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
- return status;
- }
- status = data.readInt32(&tid);
- if (status != NO_ERROR) {
- ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) tid failed!", __func__);
- return status;
- }
- result = unregisterAudioThread(streamHandle, tid);
- ALOGV("BnAAudioService::onTransact UNREGISTER_AUDIO_THREAD 0x%08X, result = %d",
- streamHandle, result);
- reply->writeInt32(result);
- return NO_ERROR;
- } break;
-
- default:
- // ALOGW("BnAAudioService::onTransact not handled %u", code);
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-} /* namespace android */
diff --git a/media/libaaudio/src/binding/IAAudioService.h b/media/libaaudio/src/binding/IAAudioService.h
deleted file mode 100644
index 6bdb826..0000000
--- a/media/libaaudio/src/binding/IAAudioService.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-#define ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <binder/TextOutput.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
-
-namespace android {
-
-#define AAUDIO_SERVICE_NAME "media.aaudio"
-
-// Interface (our AIDL) - service methods called by client
-class IAAudioService : public IInterface {
-public:
-
- DECLARE_META_INTERFACE(AAudioService);
-
- // Register an object to receive audio input/output change and track notifications.
- // For a given calling pid, AAudio service disregards any registrations after the first.
- // Thus the IAAudioClient must be a singleton per process.
- virtual void registerClient(const sp<IAAudioClient>& client) = 0;
-
- /**
- * @param request info needed to create the stream
- * @param configuration contains information about the created stream
- * @return handle to the stream or a negative error
- */
- virtual aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
- aaudio::AAudioStreamConfiguration &configurationOutput) = 0;
-
- virtual aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
- /* Get an immutable description of the in-memory queues
- * used to communicate with the underlying HAL or Service.
- */
- virtual aaudio_result_t getStreamDescription(aaudio::aaudio_handle_t streamHandle,
- aaudio::AudioEndpointParcelable &parcelable) = 0;
-
- /**
- * Start the flow of data.
- * This is asynchronous. When complete, the service will send a STARTED event.
- */
- virtual aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
- /**
- * Stop the flow of data such that start() can resume without loss of data.
- * This is asynchronous. When complete, the service will send a PAUSED event.
- */
- virtual aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
- /**
- * Stop the flow of data such that the data currently in the buffer is played.
- * This is asynchronous. When complete, the service will send a STOPPED event.
- */
- virtual aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
- /**
- * Discard any data held by the underlying HAL or Service.
- * This is asynchronous. When complete, the service will send a FLUSHED event.
- */
- virtual aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
- /**
- * Manage the specified thread as a low latency audio thread.
- */
- virtual aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
- pid_t clientThreadId,
- int64_t periodNanoseconds) = 0;
-
- virtual aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
- pid_t clientThreadId) = 0;
-};
-
-class BnAAudioService : public BnInterface<IAAudioService> {
-public:
- virtual status_t onTransact(uint32_t code, const Parcel& data,
- Parcel* reply, uint32_t flags = 0);
-
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 4996b3f..a4b3cec 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -29,8 +29,29 @@
using namespace aaudio;
-RingBufferParcelable::RingBufferParcelable() {}
-RingBufferParcelable::~RingBufferParcelable() {}
+RingBufferParcelable::RingBufferParcelable(const RingBuffer& parcelable)
+ : mReadCounterParcelable(std::move(parcelable.readCounterParcelable)),
+ mWriteCounterParcelable(std::move(parcelable.writeCounterParcelable)),
+ mDataParcelable(std::move(parcelable.dataParcelable)),
+ mBytesPerFrame(parcelable.bytesPerFrame),
+ mFramesPerBurst(parcelable.framesPerBurst),
+ mCapacityInFrames(parcelable.capacityInFrames),
+ mFlags(static_cast<RingbufferFlags>(parcelable.flags)) {
+ static_assert(sizeof(mFlags) == sizeof(parcelable.flags));
+}
+
+RingBuffer RingBufferParcelable::parcelable() const {
+ RingBuffer result;
+ result.readCounterParcelable = std::move(mReadCounterParcelable).parcelable();
+ result.writeCounterParcelable = std::move(mWriteCounterParcelable).parcelable();
+ result.dataParcelable = std::move(mDataParcelable).parcelable();
+ result.bytesPerFrame = mBytesPerFrame;
+ result.framesPerBurst = mFramesPerBurst;
+ result.capacityInFrames = mCapacityInFrames;
+ static_assert(sizeof(mFlags) == sizeof(result.flags));
+ result.flags = static_cast<int32_t>(mFlags);
+ return result;
+}
// TODO This assumes that all three use the same SharedMemoryParcelable
void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
@@ -76,58 +97,6 @@
mCapacityInFrames = capacityInFrames;
}
-/**
- * The read and write must be symmetric.
- */
-status_t RingBufferParcelable::writeToParcel(Parcel* parcel) const {
- status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeInt32(mCapacityInFrames);
- if (status != NO_ERROR) goto error;
- if (mCapacityInFrames > 0) {
- status = parcel->writeInt32(mBytesPerFrame);
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(mFramesPerBurst);
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(mFlags);
- if (status != NO_ERROR) goto error;
- status = mReadCounterParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mWriteCounterParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDataParcelable.writeToParcel(parcel);
- if (status != NO_ERROR) goto error;
- }
- return NO_ERROR;
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
-status_t RingBufferParcelable::readFromParcel(const Parcel* parcel) {
- status_t status = parcel->readInt32(&mCapacityInFrames);
- if (status != NO_ERROR) goto error;
- if (mCapacityInFrames > 0) {
- status = parcel->readInt32(&mBytesPerFrame);
- if (status != NO_ERROR) goto error;
- status = parcel->readInt32(&mFramesPerBurst);
- if (status != NO_ERROR) goto error;
- status = parcel->readInt32((int32_t *)&mFlags);
- if (status != NO_ERROR) goto error;
- status = mReadCounterParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mWriteCounterParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- status = mDataParcelable.readFromParcel(parcel);
- if (status != NO_ERROR) goto error;
- }
- return AAudioConvert_aaudioToAndroidStatus(validate());
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
aaudio_result_t RingBufferParcelable::resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor) {
aaudio_result_t result;
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 1dbcf07..2508cea 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -19,6 +19,7 @@
#include <stdint.h>
+#include <aaudio/RingBuffer.h>
#include <binder/Parcelable.h>
#include "binding/AAudioServiceDefinitions.h"
@@ -26,10 +27,12 @@
namespace aaudio {
-class RingBufferParcelable : public Parcelable {
+class RingBufferParcelable {
public:
- RingBufferParcelable();
- virtual ~RingBufferParcelable();
+ RingBufferParcelable() = default;
+
+ // Construct based on a parcelable representation.
+ explicit RingBufferParcelable(const RingBuffer& parcelable);
// TODO This assumes that all three use the same SharedMemoryParcelable
void setupMemory(int32_t sharedMemoryIndex,
@@ -57,21 +60,14 @@
bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
- /**
- * The read and write must be symmetric.
- */
- virtual status_t writeToParcel(Parcel* parcel) const override;
-
- virtual status_t readFromParcel(const Parcel* parcel) override;
-
aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
void dump();
+ // Extract a parcelable representation of this object.
+ RingBuffer parcelable() const;
+
private:
-
- aaudio_result_t validate() const;
-
SharedRegionParcelable mReadCounterParcelable;
SharedRegionParcelable mWriteCounterParcelable;
SharedRegionParcelable mDataParcelable;
@@ -79,6 +75,8 @@
int32_t mFramesPerBurst = 0; // for ISOCHRONOUS queues
int32_t mCapacityInFrames = 0; // zero if unused
RingbufferFlags mFlags = RingbufferFlags::NONE;
+
+ aaudio_result_t validate() const;
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index b6e8472..685b779 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
@@ -33,61 +34,36 @@
using android::base::unique_fd;
using android::NO_ERROR;
using android::status_t;
-using android::Parcel;
-using android::Parcelable;
+using android::media::SharedFileRegion;
using namespace aaudio;
-SharedMemoryParcelable::SharedMemoryParcelable() {}
-SharedMemoryParcelable::~SharedMemoryParcelable() {};
+SharedMemoryParcelable::SharedMemoryParcelable(SharedFileRegion&& parcelable) {
+ mFd = parcelable.fd.release();
+ mSizeInBytes = parcelable.size;
+ mOffsetInBytes = parcelable.offset;
+}
+
+SharedFileRegion SharedMemoryParcelable::parcelable() && {
+ SharedFileRegion result;
+ result.fd.reset(std::move(mFd));
+ result.size = mSizeInBytes;
+ result.offset = mOffsetInBytes;
+ return result;
+}
+
+SharedMemoryParcelable SharedMemoryParcelable::dup() const {
+ SharedMemoryParcelable result;
+ result.setup(mFd, static_cast<int32_t>(mSizeInBytes));
+ return result;
+}
void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
- mFd.reset(dup(fd.get())); // store a duplicate fd
+ mFd.reset(::dup(fd.get())); // store a duplicate fd
ALOGV("setup(fd = %d -> %d, size = %d) this = %p\n", fd.get(), mFd.get(), sizeInBytes, this);
mSizeInBytes = sizeInBytes;
}
-status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
- status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
- if (status != NO_ERROR) return status;
-
- status = parcel->writeInt32(mSizeInBytes);
- if (status != NO_ERROR) return status;
- if (mSizeInBytes > 0) {
- ALOGV("writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
- status = parcel->writeUniqueFileDescriptor(mFd);
- ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
- status);
- }
- return status;
-}
-
-status_t SharedMemoryParcelable::readFromParcel(const Parcel* parcel) {
- status_t status = parcel->readInt32(&mSizeInBytes);
- if (status != NO_ERROR) goto error;
-
- if (mSizeInBytes > 0) {
- // The Parcel owns the file descriptor and will close it later.
- unique_fd mmapFd;
- status = parcel->readUniqueFileDescriptor(&mmapFd);
- if (status != NO_ERROR) {
- ALOGE("readFromParcel() readUniqueFileDescriptor() failed : %d", status);
- goto error;
- }
-
- // Resolve the memory now while we still have the FD from the Parcel.
- // Closing the FD will not affect the shared memory once mmap() has been called.
- aaudio_result_t result = resolveSharedMemory(mmapFd);
- status = AAudioConvert_aaudioToAndroidStatus(result);
- if (status != NO_ERROR) goto error;
- }
-
- return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
- return status;
-}
-
aaudio_result_t SharedMemoryParcelable::close() {
if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
int err = munmap(mResolvedAddress, mSizeInBytes);
@@ -104,7 +80,7 @@
mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
MAP_SHARED, fd.get(), 0);
if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
- ALOGE("mmap() failed for fd = %d, nBytes = %d, errno = %s",
+ ALOGE("mmap() failed for fd = %d, nBytes = %" PRId64 ", errno = %s",
fd.get(), mSizeInBytes, strerror(errno));
return AAUDIO_ERROR_INTERNAL;
}
@@ -118,7 +94,7 @@
return AAUDIO_ERROR_OUT_OF_RANGE;
} else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
ALOGE("out of range, offsetInBytes = %d, "
- "sizeInBytes = %d, mSizeInBytes = %d",
+ "sizeInBytes = %d, mSizeInBytes = %" PRId64,
offsetInBytes, sizeInBytes, mSizeInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
@@ -148,7 +124,11 @@
aaudio_result_t SharedMemoryParcelable::validate() const {
if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE_BYTES) {
- ALOGE("invalid mSizeInBytes = %d", mSizeInBytes);
+ ALOGE("invalid mSizeInBytes = %" PRId64, mSizeInBytes);
+ return AAUDIO_ERROR_OUT_OF_RANGE;
+ }
+ if (mOffsetInBytes != 0) {
+ ALOGE("invalid mOffsetInBytes = %" PRId64, mOffsetInBytes);
return AAUDIO_ERROR_OUT_OF_RANGE;
}
return AAUDIO_OK;
@@ -156,5 +136,5 @@
void SharedMemoryParcelable::dump() {
ALOGD("mFd = %d", mFd.get());
- ALOGD("mSizeInBytes = %d", mSizeInBytes);
+ ALOGD("mSizeInBytes = %" PRId64, mSizeInBytes);
}
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4ec38c5..1f2c335 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -21,12 +21,11 @@
#include <sys/mman.h>
#include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <android/media/SharedFileRegion.h>
namespace aaudio {
-// Arbitrary limits for sanity checks. TODO remove after debugging.
+// Arbitrary limits for range checks.
#define MAX_SHARED_MEMORIES (32)
#define MAX_MMAP_OFFSET_BYTES (32 * 1024 * 8)
#define MAX_MMAP_SIZE_BYTES (32 * 1024 * 8)
@@ -36,10 +35,14 @@
* It may be divided into several regions.
* The memory can be shared using Binder or simply shared between threads.
*/
-class SharedMemoryParcelable : public android::Parcelable {
+class SharedMemoryParcelable {
public:
- SharedMemoryParcelable();
- virtual ~SharedMemoryParcelable();
+ SharedMemoryParcelable() = default;
+
+ // Ctor from a parcelable representation.
+ // Since the parcelable object owns a unique FD, move semantics are provided to avoid the need
+ // to dupe.
+ explicit SharedMemoryParcelable(android::media::SharedFileRegion&& parcelable);
/**
* Make a dup() of the fd and store it for later use.
@@ -49,10 +52,6 @@
*/
void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
- virtual android::status_t writeToParcel(android::Parcel* parcel) const override;
-
- virtual android::status_t readFromParcel(const android::Parcel* parcel) override;
-
// mmap() shared memory
aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
@@ -63,20 +62,23 @@
void dump();
-protected:
+ // Extract a parcelable representation of this object.
+ // Since we own a unique FD, move semantics are provided to avoid the need to dupe.
+ android::media::SharedFileRegion parcelable() &&;
-#define MMAP_UNRESOLVED_ADDRESS reinterpret_cast<uint8_t*>(MAP_FAILED)
-
- aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
-
- android::base::unique_fd mFd;
- int32_t mSizeInBytes = 0;
- uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+ // Copy this instance. Duplicates the underlying FD.
+ SharedMemoryParcelable dup() const;
private:
+#define MMAP_UNRESOLVED_ADDRESS reinterpret_cast<uint8_t*>(MAP_FAILED)
+ android::base::unique_fd mFd;
+ int64_t mSizeInBytes = 0;
+ int64_t mOffsetInBytes = 0;
+ uint8_t *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+
+ aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
aaudio_result_t validate() const;
-
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index c776116..56b99c0 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -36,8 +36,18 @@
using namespace aaudio;
-SharedRegionParcelable::SharedRegionParcelable() {}
-SharedRegionParcelable::~SharedRegionParcelable() {}
+SharedRegionParcelable::SharedRegionParcelable(const SharedRegion& parcelable)
+ : mSharedMemoryIndex(parcelable.sharedMemoryIndex),
+ mOffsetInBytes(parcelable.offsetInBytes),
+ mSizeInBytes(parcelable.sizeInBytes) {}
+
+SharedRegion SharedRegionParcelable::parcelable() const {
+ SharedRegion result;
+ result.sharedMemoryIndex = mSharedMemoryIndex;
+ result.offsetInBytes = mOffsetInBytes;
+ result.sizeInBytes = mSizeInBytes;
+ return result;
+}
void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
int32_t offsetInBytes,
@@ -47,41 +57,6 @@
mSizeInBytes = sizeInBytes;
}
-status_t SharedRegionParcelable::writeToParcel(Parcel* parcel) const {
- status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
- if (status != NO_ERROR) goto error;
-
- status = parcel->writeInt32(mSizeInBytes);
- if (status != NO_ERROR) goto error;
- if (mSizeInBytes > 0) {
- status = parcel->writeInt32(mSharedMemoryIndex);
- if (status != NO_ERROR) goto error;
- status = parcel->writeInt32(mOffsetInBytes);
- if (status != NO_ERROR) goto error;
- }
- return NO_ERROR;
-
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
-status_t SharedRegionParcelable::readFromParcel(const Parcel* parcel) {
- status_t status = parcel->readInt32(&mSizeInBytes);
- if (status != NO_ERROR) goto error;
- if (mSizeInBytes > 0) {
- status = parcel->readInt32(&mSharedMemoryIndex);
- if (status != NO_ERROR) goto error;
- status = parcel->readInt32(&mOffsetInBytes);
- if (status != NO_ERROR) goto error;
- }
- return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
- ALOGE("%s returning %d", __func__, status);
- return status;
-}
-
aaudio_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
void **regionAddressPtr) {
if (mSizeInBytes == 0) {
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.h b/media/libaaudio/src/binding/SharedRegionParcelable.h
index 0cd8c04..c15fc30 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.h
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.h
@@ -20,41 +20,39 @@
#include <stdint.h>
#include <sys/mman.h>
-#include <binder/Parcelable.h>
#include <aaudio/AAudio.h>
+#include <aaudio/SharedRegion.h>
#include "binding/SharedMemoryParcelable.h"
using android::status_t;
-using android::Parcel;
-using android::Parcelable;
namespace aaudio {
-class SharedRegionParcelable : public Parcelable {
+class SharedRegionParcelable {
public:
- SharedRegionParcelable();
- virtual ~SharedRegionParcelable();
+ SharedRegionParcelable() = default;
+
+ // Construct based on a parcelable representation.
+ explicit SharedRegionParcelable(const SharedRegion& parcelable);
void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
- virtual status_t writeToParcel(Parcel* parcel) const override;
-
- virtual status_t readFromParcel(const Parcel* parcel) override;
-
aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
void dump();
-protected:
+ // Extract a parcelable representation of this object.
+ SharedRegion parcelable() const;
+
+private:
int32_t mSharedMemoryIndex = -1;
int32_t mOffsetInBytes = 0;
int32_t mSizeInBytes = 0;
-private:
aaudio_result_t validate() const;
};
diff --git a/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
new file mode 100644
index 0000000..3600b6a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.RingBuffer;
+import android.media.SharedFileRegion;
+
+parcelable Endpoint {
+ // Set capacityInFrames to zero if Queue is unused.
+ RingBuffer upMessageQueueParcelable; // server to client
 RingBuffer downMessageQueueParcelable; // client to server
+ RingBuffer upDataQueueParcelable; // eg. record, could share same queue
+ RingBuffer downDataQueueParcelable; // eg. playback
+ SharedFileRegion[] sharedMemories;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
new file mode 100644
index 0000000..a010dbc
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+interface IAAudioClient {
+ oneway void onStreamChange(int handle, int opcode, int value);
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
new file mode 100644
index 0000000..44d2211
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.Endpoint;
+import aaudio.IAAudioClient;
+import aaudio.StreamParameters;
+import aaudio.StreamRequest;
+
+interface IAAudioService {
+ /**
+ * Register an object to receive audio input/output change and track notifications.
+ * For a given calling pid, AAudio service disregards any registrations after the first.
+ * Thus the IAAudioClient must be a singleton per process.
+ */
+ void registerClient(IAAudioClient client);
+
+ /**
+ * @param request info needed to create the stream
+ * @param paramsOut contains information about the created stream
+ * @return handle to the stream or a negative error
+ */
+ int openStream(in StreamRequest request,
+ out StreamParameters paramsOut);
+
+ int closeStream(int streamHandle);
+
+ /*
+ * Get an immutable description of the in-memory queues
+ * used to communicate with the underlying HAL or Service.
+ */
+ int getStreamDescription(int streamHandle, out Endpoint endpoint);
+
+ /**
+ * Start the flow of data.
+ * This is asynchronous. When complete, the service will send a STARTED event.
+ */
+ int startStream(int streamHandle);
+
+ /**
+ * Stop the flow of data such that start() can resume without loss of data.
+ * This is asynchronous. When complete, the service will send a PAUSED event.
+ */
+ int pauseStream(int streamHandle);
+
+ /**
+ * Stop the flow of data such that the data currently in the buffer is played.
+ * This is asynchronous. When complete, the service will send a STOPPED event.
+ */
+ int stopStream(int streamHandle);
+
+ /**
+ * Discard any data held by the underlying HAL or Service.
+ * This is asynchronous. When complete, the service will send a FLUSHED event.
+ */
+ int flushStream(int streamHandle);
+
+ /**
+ * Manage the specified thread as a low latency audio thread.
+ */
+ int registerAudioThread(int streamHandle,
+ int clientThreadId,
+ long periodNanoseconds);
+
+ int unregisterAudioThread(int streamHandle,
+ int clientThreadId);
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
new file mode 100644
index 0000000..a58b33a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.SharedRegion;
+
+parcelable RingBuffer {
+ SharedRegion readCounterParcelable;
+ SharedRegion writeCounterParcelable;
+ SharedRegion dataParcelable;
+ int bytesPerFrame; // index is in frames
+ int framesPerBurst; // for ISOCHRONOUS queues
+ int capacityInFrames; // zero if unused
+ int /* RingbufferFlags */ flags; // = RingbufferFlags::NONE;
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
new file mode 100644
index 0000000..26153e8
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+parcelable SharedRegion {
+ int sharedMemoryIndex;
+ int offsetInBytes;
+ int sizeInBytes;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
new file mode 100644
index 0000000..b7c4f70
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import android.media.audio.common.AudioFormat;
+
+parcelable StreamParameters {
+ int samplesPerFrame; // = AAUDIO_UNSPECIFIED;
+ int sampleRate; // = AAUDIO_UNSPECIFIED;
+ int deviceId; // = AAUDIO_UNSPECIFIED;
+ int /* aaudio_sharing_mode_t */ sharingMode; // = AAUDIO_SHARING_MODE_SHARED;
+ AudioFormat audioFormat; // = AUDIO_FORMAT_DEFAULT;
+ int /* aaudio_direction_t */ direction; // = AAUDIO_DIRECTION_OUTPUT;
+ int /* aaudio_usage_t */ usage; // = AAUDIO_UNSPECIFIED;
+ int /* aaudio_content_type_t */ contentType; // = AAUDIO_UNSPECIFIED;
+ int /* aaudio_input_preset_t */ inputPreset; // = AAUDIO_UNSPECIFIED;
+ int bufferCapacity; // = AAUDIO_UNSPECIFIED;
+ int /* aaudio_allowed_capture_policy_t */ allowedCapturePolicy; // = AAUDIO_UNSPECIFIED;
+ int /* aaudio_session_id_t */ sessionId; // = AAUDIO_SESSION_ID_NONE;
+ boolean isPrivacySensitive; // = false;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
new file mode 100644
index 0000000..9bf4077
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.StreamParameters;
+
+parcelable StreamRequest {
+ StreamParameters params;
+ int userId; // = (uid_t) -1;
+ int processId; // = (pid_t) -1;
+ boolean sharingModeMatchRequired; // = false;
 boolean inService; // = false; // Stream opened by AAudioService
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 8f2c488..61b50f3 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -26,9 +26,11 @@
#include <flowgraph/SinkFloat.h>
#include <flowgraph/SinkI16.h>
#include <flowgraph/SinkI24.h>
+#include <flowgraph/SinkI32.h>
#include <flowgraph/SourceFloat.h>
#include <flowgraph/SourceI16.h>
#include <flowgraph/SourceI24.h>
+#include <flowgraph/SourceI32.h>
using namespace flowgraph;
@@ -38,7 +40,8 @@
int32_t sinkChannelCount) {
AudioFloatOutputPort *lastOutput = nullptr;
- ALOGV("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d",
+ // TODO change back to ALOGD
+ ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d",
__func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount);
switch (sourceFormat) {
@@ -51,7 +54,10 @@
case AUDIO_FORMAT_PCM_24_BIT_PACKED:
mSource = std::make_unique<SourceI24>(sourceChannelCount);
break;
- default: // TODO add I32
+ case AUDIO_FORMAT_PCM_32_BIT:
+ mSource = std::make_unique<SourceI32>(sourceChannelCount);
+ break;
+ default:
ALOGE("%s() Unsupported source format = %d", __func__, sourceFormat);
return AAUDIO_ERROR_UNIMPLEMENTED;
}
@@ -90,7 +96,10 @@
case AUDIO_FORMAT_PCM_24_BIT_PACKED:
mSink = std::make_unique<SinkI24>(sinkChannelCount);
break;
- default: // TODO add I32
+ case AUDIO_FORMAT_PCM_32_BIT:
+ mSink = std::make_unique<SinkI32>(sinkChannelCount);
+ break;
+ default:
ALOGE("%s() Unsupported sink format = %d", __func__, sinkFormat);
return AAUDIO_ERROR_UNIMPLEMENTED;
}
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 06f66d3..0a19d17 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -137,7 +137,7 @@
return AAUDIO_ERROR_INTERNAL;
}
- mUpCommandQueue = std::make_unique<FifoBuffer>(
+ mUpCommandQueue = std::make_unique<FifoBufferIndirect>(
descriptor->bytesPerFrame,
descriptor->capacityInFrames,
descriptor->readCounterAddress,
@@ -166,7 +166,7 @@
? &mDataWriteCounter
: descriptor->writeCounterAddress;
- mDataQueue = std::make_unique<FifoBuffer>(
+ mDataQueue = std::make_unique<FifoBufferIndirect>(
descriptor->bytesPerFrame,
descriptor->capacityInFrames,
readCounterAddress,
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 484d917..4c8d60f 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -93,8 +93,8 @@
void dump() const;
private:
- std::unique_ptr<android::FifoBuffer> mUpCommandQueue;
- std::unique_ptr<android::FifoBuffer> mDataQueue;
+ std::unique_ptr<android::FifoBufferIndirect> mUpCommandQueue;
+ std::unique_ptr<android::FifoBufferIndirect> mDataQueue;
bool mFreeRunning;
android::fifo_counter_t mDataReadCounter; // only used if free-running
android::fifo_counter_t mDataWriteCounter; // only used if free-running
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 79fa5ed..2815c6a 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -34,7 +34,6 @@
#include "AudioEndpointParcelable.h"
#include "binding/AAudioStreamRequest.h"
#include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
#include "binding/AAudioServiceMessage.h"
#include "core/AudioGlobal.h"
#include "core/AudioStreamBuilder.h"
@@ -76,6 +75,7 @@
}
AudioStreamInternal::~AudioStreamInternal() {
+ ALOGD("%s() %p called", __func__, this);
}
aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
@@ -104,7 +104,7 @@
if (getFormat() == AUDIO_FORMAT_DEFAULT) {
setFormat(AUDIO_FORMAT_PCM_FLOAT);
}
- // Request FLOAT for the shared mixer.
+ // Request FLOAT for the shared mixer or the device.
request.getConfiguration().setFormat(AUDIO_FORMAT_PCM_FLOAT);
// Build the request to send to the server.
@@ -211,10 +211,10 @@
result = AAUDIO_ERROR_OUT_OF_RANGE;
goto error;
}
- mFramesPerBurst = framesPerBurst; // only save good value
+ setFramesPerBurst(framesPerBurst); // only save good value
mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
- if (mBufferCapacityInFrames < mFramesPerBurst
+ if (mBufferCapacityInFrames < getFramesPerBurst()
|| mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
result = AAUDIO_ERROR_OUT_OF_RANGE;
@@ -239,7 +239,7 @@
}
if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
- mCallbackFrames = mFramesPerBurst;
+ mCallbackFrames = getFramesPerBurst();
}
const int32_t callbackBufferSize = mCallbackFrames * getBytesPerFrame();
@@ -271,24 +271,24 @@
return result;
error:
- releaseCloseFinal();
+ safeReleaseClose();
return result;
}
// This must be called under mStreamLock.
aaudio_result_t AudioStreamInternal::release_l() {
aaudio_result_t result = AAUDIO_OK;
- ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
+ ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
aaudio_stream_state_t currentState = getState();
// Don't release a stream while it is running. Stop it first.
// If DISCONNECTED then we should still try to stop in case the
// error callback is still running.
if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
- requestStop();
+ requestStop_l();
}
- logBufferState();
+ logReleaseBufferState();
setState(AAUDIO_STREAM_STATE_CLOSING);
aaudio_handle_t serviceStreamHandle = mServiceStreamHandle;
@@ -331,7 +331,7 @@
* The processing code will then save the current offset
* between client and server and apply that to any position given to the app.
*/
-aaudio_result_t AudioStreamInternal::requestStart()
+aaudio_result_t AudioStreamInternal::requestStart_l()
{
int64_t startTime;
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
@@ -353,6 +353,8 @@
// Clear any stale timestamps from the previous run.
drainTimestampsFromService();
+ prepareBuffersForStart(); // tell subclasses to get ready
+
aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
@@ -372,7 +374,7 @@
* AAUDIO_NANOS_PER_SECOND
/ getSampleRate();
mCallbackEnabled.store(true);
- result = createThread(periodNanos, aaudio_callback_thread_proc, this);
+ result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
}
if (result != AAUDIO_OK) {
setState(originalState);
@@ -398,33 +400,36 @@
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::stopCallback()
+aaudio_result_t AudioStreamInternal::stopCallback_l()
{
if (isDataCallbackSet()
&& (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
mCallbackEnabled.store(false);
- aaudio_result_t result = joinThread(NULL); // may temporarily unlock mStreamLock
+ aaudio_result_t result = joinThread_l(NULL); // may temporarily unlock mStreamLock
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
result = AAUDIO_OK;
}
return result;
} else {
+ ALOGD("%s() skipped, isDataCallbackSet() = %d, isActive() = %d, getState() = %d", __func__,
+ isDataCallbackSet(), isActive(), getState());
return AAUDIO_OK;
}
}
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::requestStop() {
- aaudio_result_t result = stopCallback();
+aaudio_result_t AudioStreamInternal::requestStop_l() {
+ aaudio_result_t result = stopCallback_l();
if (result != AAUDIO_OK) {
+ ALOGW("%s() stop callback returned %d, returning early", __func__, result);
return result;
}
// The stream may have been unlocked temporarily to let a callback finish
// and the callback may have stopped the stream.
// Check to make sure the stream still needs to be stopped.
- // See also AudioStream::safeStop().
+ // See also AudioStream::safeStop_l().
if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+ ALOGD("%s() returning early, not active or disconnected", __func__);
return AAUDIO_OK;
}
@@ -755,9 +760,9 @@
aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
int32_t adjustedFrames = requestedFrames;
- const int32_t maximumSize = getBufferCapacity() - mFramesPerBurst;
+ const int32_t maximumSize = getBufferCapacity() - getFramesPerBurst();
// Minimum size should be a multiple number of bursts.
- const int32_t minimumSize = 1 * mFramesPerBurst;
+ const int32_t minimumSize = 1 * getFramesPerBurst();
// Clip to minimum size so that rounding up will work better.
adjustedFrames = std::max(minimumSize, adjustedFrames);
@@ -767,9 +772,9 @@
adjustedFrames = maximumSize;
} else {
// Round to the next highest burst size.
- int32_t numBursts = (adjustedFrames + mFramesPerBurst - 1) / mFramesPerBurst;
- adjustedFrames = numBursts * mFramesPerBurst;
- // Clip just in case maximumSize is not a multiple of mFramesPerBurst.
+ int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
+ adjustedFrames = numBursts * getFramesPerBurst();
+ // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
adjustedFrames = std::min(maximumSize, adjustedFrames);
}
@@ -783,6 +788,14 @@
adjustedFrames = std::min(actualFrames, adjustedFrames);
}
+ if (adjustedFrames != mBufferSizeInFrames) {
+ android::mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE)
+ .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, adjustedFrames)
+ .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getXRunCount())
+ .record();
+ }
+
mBufferSizeInFrames = adjustedFrames;
ALOGV("%s(%d) returns %d", __func__, requestedFrames, adjustedFrames);
return (aaudio_result_t) adjustedFrames;
@@ -796,15 +809,6 @@
return mBufferCapacityInFrames;
}
-int32_t AudioStreamInternal::getFramesPerBurst() const {
- return mFramesPerBurst;
-}
-
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
- return AudioStream::joinThread(returnArg, calculateReasonableTimeout(getFramesPerBurst()));
-}
-
bool AudioStreamInternal::isClockModelInControl() const {
return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 61591b3..fbe4c13 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -20,7 +20,6 @@
#include <stdint.h>
#include <aaudio/AAudio.h>
-#include "binding/IAAudioService.h"
#include "binding/AudioEndpointParcelable.h"
#include "binding/AAudioServiceInterface.h"
#include "client/IsochronousClockModel.h"
@@ -29,7 +28,6 @@
#include "utility/AudioClock.h"
using android::sp;
-using android::IAAudioService;
namespace aaudio {
@@ -46,10 +44,6 @@
AudioStreamInternal(AAudioServiceInterface &serviceInterface, bool inService);
virtual ~AudioStreamInternal();
- aaudio_result_t requestStart() override;
-
- aaudio_result_t requestStop() override;
-
aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds) override;
@@ -58,16 +52,12 @@
aaudio_result_t open(const AudioStreamBuilder &builder) override;
- aaudio_result_t release_l() override;
-
aaudio_result_t setBufferSize(int32_t requestedFrames) override;
int32_t getBufferSize() const override;
int32_t getBufferCapacity() const override;
- int32_t getFramesPerBurst() const override;
-
int32_t getXRunCount() const override {
return mXRunCount;
}
@@ -76,12 +66,9 @@
aaudio_result_t unregisterThread() override;
- aaudio_result_t joinThread(void** returnArg);
-
// Called internally from 'C'
virtual void *callbackLoop() = 0;
-
bool isMMap() override {
return true;
}
@@ -100,6 +87,10 @@
}
protected:
+ aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+ aaudio_result_t release_l() REQUIRES(mStreamLock) override;
aaudio_result_t processData(void *buffer,
int32_t numFrames,
@@ -121,9 +112,11 @@
aaudio_result_t processCommands();
- aaudio_result_t stopCallback();
+ aaudio_result_t stopCallback_l();
- virtual void advanceClientToMatchServerPosition() = 0;
+ virtual void prepareBuffersForStart() {}
+
+ virtual void advanceClientToMatchServerPosition(int32_t serverMargin = 0) = 0;
virtual void onFlushFromServer() {}
@@ -159,7 +152,6 @@
aaudio_handle_t mServiceStreamHandle; // opaque handle returned from service
- int32_t mFramesPerBurst = MIN_FRAMES_PER_BURST; // frames per HAL transfer
int32_t mXRunCount = 0; // how many underrun events?
// Offset from underlying frame position.
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 9fa2e40..5d311fc 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -14,8 +14,6 @@
* limitations under the License.
*/
-#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
- : "AudioStreamInternalCapture_Client")
//#define LOG_NDEBUG 0
#include <utils/Log.h>
@@ -29,6 +27,14 @@
#define ATRACE_TAG ATRACE_TAG_AUDIO
#include <utils/Trace.h>
+// We do this after the #includes because if a header uses ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both client and server processes.
+// This is needed to make sense of the logs more easily.
+#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
+ : "AudioStreamInternalCapture_Client")
+
using android::WrappingBuffer;
using namespace aaudio;
@@ -41,9 +47,9 @@
AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
-void AudioStreamInternalCapture::advanceClientToMatchServerPosition() {
+void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
int64_t readCounter = mAudioEndpoint->getDataReadCounter();
- int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
+ int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
// Bump offset so caller does not see the retrograde motion in getFramesRead().
int64_t offset = readCounter - writeCounter;
@@ -143,7 +149,7 @@
// Calculate frame position based off of the readCounter because
// the writeCounter might have just advanced in the background,
// causing us to sleep until a later burst.
- int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + mFramesPerBurst;
+ int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
}
break;
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 6436a53..251a7f2 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -23,7 +23,6 @@
#include "client/AudioStreamInternal.h"
using android::sp;
-using android::IAAudioService;
namespace aaudio {
@@ -46,7 +45,7 @@
}
protected:
- void advanceClientToMatchServerPosition() override;
+ void advanceClientToMatchServerPosition(int32_t serverOffset = 0) override;
/**
* Low level data processing that will not block. It will just read or write as much as it can.
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1303daf..b81e5e4 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -14,8 +14,6 @@
* limitations under the License.
*/
-#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
- : "AudioStreamInternalPlay_Client")
//#define LOG_NDEBUG 0
#include <utils/Log.h>
@@ -26,6 +24,15 @@
#include "client/AudioStreamInternalPlay.h"
#include "utility/AudioClock.h"
+// We do this after the #includes because if a header uses ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both client and server processes.
+// This is needed to make sense of the logs more easily.
+#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
+ : "AudioStreamInternalPlay_Client")
+
+using android::status_t;
using android::WrappingBuffer;
using namespace aaudio;
@@ -49,7 +56,7 @@
getDeviceChannelCount());
if (result != AAUDIO_OK) {
- releaseCloseFinal();
+ safeReleaseClose();
}
// Sample rate is constrained to common values by now and should not overflow.
int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
@@ -59,9 +66,9 @@
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternalPlay::requestPause()
+aaudio_result_t AudioStreamInternalPlay::requestPause_l()
{
- aaudio_result_t result = stopCallback();
+ aaudio_result_t result = stopCallback_l();
if (result != AAUDIO_OK) {
return result;
}
@@ -76,7 +83,7 @@
return mServiceInterface.pauseStream(mServiceStreamHandle);
}
-aaudio_result_t AudioStreamInternalPlay::requestFlush() {
+aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -86,8 +93,13 @@
return mServiceInterface.flushStream(mServiceStreamHandle);
}
-void AudioStreamInternalPlay::advanceClientToMatchServerPosition() {
- int64_t readCounter = mAudioEndpoint->getDataReadCounter();
+void AudioStreamInternalPlay::prepareBuffersForStart() {
+ // Prevent stale data from being played.
+ mAudioEndpoint->eraseDataMemory();
+}
+
+void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
+ int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
// Bump offset so caller does not see the retrograde motion in getFramesRead().
@@ -145,7 +157,9 @@
if (mNeedCatchUp.isRequested()) {
// Catch an MMAP pointer that is already advancing.
// This will avoid initial underruns caused by a slow cold start.
- advanceClientToMatchServerPosition();
+ // We add a one burst margin in case the DSP advances before we can write the data.
+ // This can help prevent the beginning of the stream from being skipped.
+ advanceClientToMatchServerPosition(getFramesPerBurst());
mNeedCatchUp.acknowledge();
}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 2e93157..03c957d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -25,7 +25,6 @@
#include "client/AudioStreamInternal.h"
using android::sp;
-using android::IAAudioService;
namespace aaudio {
@@ -36,9 +35,9 @@
aaudio_result_t open(const AudioStreamBuilder &builder) override;
- aaudio_result_t requestPause() override;
+ aaudio_result_t requestPause_l() override;
- aaudio_result_t requestFlush() override;
+ aaudio_result_t requestFlush_l() override;
bool isFlushSupported() const override {
// Only implement FLUSH for OUTPUT streams.
@@ -65,7 +64,9 @@
protected:
- void advanceClientToMatchServerPosition() override;
+ void prepareBuffersForStart() override;
+
+ void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
void onFlushFromServer() override;
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8965875..cfa7221 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -255,17 +255,16 @@
if (audioStream != nullptr) {
aaudio_stream_id_t id = audioStream->getId();
ALOGD("%s(s#%u) called ---------------", __func__, id);
- result = audioStream->safeRelease();
- // safeRelease will only fail if called illegally, for example, from a callback.
+ result = audioStream->safeReleaseClose();
+ // safeReleaseClose will only fail if called illegally, for example, from a callback.
// That would result in deleting an active stream, which would cause a crash.
if (result != AAUDIO_OK) {
ALOGW("%s(s#%u) failed. Close it from another thread.",
__func__, id);
} else {
audioStream->unregisterPlayerBase();
- // Mark CLOSED to keep destructors from asserting.
- audioStream->closeFinal();
- delete audioStream;
+ // Allow the stream to be deleted.
+ AudioStreamBuilder::stopUsingStream(audioStream);
}
ALOGD("%s(s#%u) returned %d ---------", __func__, id, result);
}
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 5f45261..2c81c91 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -54,7 +54,9 @@
switch (format) {
case AUDIO_FORMAT_DEFAULT:
case AUDIO_FORMAT_PCM_16_BIT:
+ case AUDIO_FORMAT_PCM_32_BIT:
case AUDIO_FORMAT_PCM_FLOAT:
+ case AUDIO_FORMAT_PCM_24_BIT_PACKED:
break; // valid
default:
ALOGD("audioFormat not valid, audio_format_t = 0x%08x", format);
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 7f5d8d5..0e5b8be 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -80,6 +80,8 @@
AAUDIO_CASE_ENUM(AAUDIO_FORMAT_INVALID);
AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I16);
AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_FLOAT);
+ AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I24_PACKED);
+ AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I32);
}
return "Unrecognized";
}
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 0644368..57c4c16 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -39,15 +39,23 @@
}
AudioStream::AudioStream()
- : mPlayerBase(new MyPlayerBase(this))
+ : mPlayerBase(new MyPlayerBase())
, mStreamId(AAudio_getNextStreamId())
{
- // mThread is a pthread_t of unknown size so we need memset.
- memset(&mThread, 0, sizeof(mThread));
setPeriodNanoseconds(0);
}
AudioStream::~AudioStream() {
+ // Please preserve these logs because there have been several bugs related to
+ // AudioStream deletion and late callbacks.
+ ALOGD("%s(s#%u) mPlayerBase strongCount = %d",
+ __func__, getId(), mPlayerBase->getStrongCount());
+
+ ALOGE_IF(pthread_equal(pthread_self(), mThread),
+ "%s() destructor running in callback", __func__);
+
+ ALOGE_IF(mHasThread, "%s() callback thread never join()ed", __func__);
+
// If the stream is deleted when OPEN or in use then audio resources will leak.
// This would indicate an internal error. So we want to find this ASAP.
LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
@@ -55,8 +63,6 @@
|| getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
"~AudioStream() - still in use, state = %s",
AudioGlobal_convertStreamStateToText(getState()));
-
- mPlayerBase->clearParentReference(); // remove reference to this AudioStream
}
aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -116,9 +122,10 @@
}
}
-void AudioStream::logBufferState() {
+void AudioStream::logReleaseBufferState() {
if (mMetricsId.size() > 0) {
android::mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_RELEASE)
.set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, (int32_t) getBufferSize())
.set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getXRunCount())
.record();
@@ -140,6 +147,7 @@
case AAUDIO_STREAM_STATE_PAUSED:
case AAUDIO_STREAM_STATE_STOPPING:
case AAUDIO_STREAM_STATE_STOPPED:
+ case AAUDIO_STREAM_STATE_FLUSHING:
case AAUDIO_STREAM_STATE_FLUSHED:
break; // Proceed with starting.
@@ -160,7 +168,7 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = requestStart();
+ aaudio_result_t result = requestStart_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->start();
@@ -210,7 +218,7 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = requestPause();
+ aaudio_result_t result = requestPause_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->pause();
@@ -235,12 +243,12 @@
return result;
}
- return requestFlush();
+ return requestFlush_l();
}
aaudio_result_t AudioStream::systemStopFromCallback() {
std::lock_guard<std::mutex> lock(mStreamLock);
- aaudio_result_t result = safeStop();
+ aaudio_result_t result = safeStop_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->stop();
@@ -254,7 +262,7 @@
ALOGE("stream cannot be stopped by calling from a callback!");
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = safeStop();
+ aaudio_result_t result = safeStop_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->stop();
@@ -262,8 +270,7 @@
return result;
}
-// This must be called under mStreamLock.
-aaudio_result_t AudioStream::safeStop() {
+aaudio_result_t AudioStream::safeStop_l() {
switch (getState()) {
// Proceed with stopping.
@@ -295,26 +302,47 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- return requestStop();
+ return requestStop_l();
}
aaudio_result_t AudioStream::safeRelease() {
- // This get temporarily unlocked in the release() when joining callback threads.
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
std::lock_guard<std::mutex> lock(mStreamLock);
if (collidesWithCallback()) {
ALOGE("%s cannot be called from a callback!", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
- if (getState() == AAUDIO_STREAM_STATE_CLOSING) {
+ if (getState() == AAUDIO_STREAM_STATE_CLOSING) { // already released?
return AAUDIO_OK;
}
return release_l();
}
+aaudio_result_t AudioStream::safeReleaseClose() {
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ if (collidesWithCallback()) {
+ ALOGE("%s cannot be called from a callback!", __func__);
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ releaseCloseFinal_l();
+ return AAUDIO_OK;
+}
+
+aaudio_result_t AudioStream::safeReleaseCloseFromCallback() {
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ releaseCloseFinal_l();
+ return AAUDIO_OK;
+}
+
void AudioStream::setState(aaudio_stream_state_t state) {
ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
+ if (state == mState) {
+ return; // no change
+ }
// Track transition to DISCONNECTED state.
- if (state == AAUDIO_STREAM_STATE_DISCONNECTED && mState != state) {
+ if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
android::mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
.set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
@@ -322,18 +350,18 @@
}
// CLOSED is a final state
if (mState == AAUDIO_STREAM_STATE_CLOSED) {
- ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
+ ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
// Once CLOSING, we can only move to CLOSED state.
} else if (mState == AAUDIO_STREAM_STATE_CLOSING
&& state != AAUDIO_STREAM_STATE_CLOSED) {
- ALOGE("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
+ ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
// Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
} else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
&& !(state == AAUDIO_STREAM_STATE_CLOSING
|| state == AAUDIO_STREAM_STATE_CLOSED)) {
- ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
+ ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
} else {
mState = state;
@@ -385,21 +413,28 @@
return procResult;
}
-// This is the entry point for the new thread created by createThread().
+
+// This is the entry point for the new thread created by createThread_l().
// It converts the 'C' function call to a C++ method call.
static void* AudioStream_internalThreadProc(void* threadArg) {
AudioStream *audioStream = (AudioStream *) threadArg;
- return audioStream->wrapUserThread();
+ // Prevent the stream from being deleted while being used.
+ // This is just for extra safety. It is probably not needed because
+ // this callback should be joined before the stream is closed.
+ android::sp<AudioStream> protectedStream(audioStream);
+ // Balance the incStrong() in createThread_l().
+ protectedStream->decStrong(nullptr);
+ return protectedStream->wrapUserThread();
}
// This is not exposed in the API.
// But it is still used internally to implement callbacks for MMAP mode.
-aaudio_result_t AudioStream::createThread(int64_t periodNanoseconds,
- aaudio_audio_thread_proc_t threadProc,
- void* threadArg)
+aaudio_result_t AudioStream::createThread_l(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void* threadArg)
{
if (mHasThread) {
- ALOGE("createThread() - mHasThread already true");
+ ALOGE("%s() - mHasThread already true", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
if (threadProc == nullptr) {
@@ -409,10 +444,14 @@
mThreadProc = threadProc;
mThreadArg = threadArg;
setPeriodNanoseconds(periodNanoseconds);
+ // Prevent this object from getting deleted before the thread has a chance to create
+ // its strong pointer. Assume the thread will call decStrong().
+ this->incStrong(nullptr);
int err = pthread_create(&mThread, nullptr, AudioStream_internalThreadProc, this);
if (err != 0) {
android::status_t status = -errno;
- ALOGE("createThread() - pthread_create() failed, %d", status);
+ ALOGE("%s() - pthread_create() failed, %d", __func__, status);
+ this->decStrong(nullptr); // Because the thread won't do it.
return AAudioConvert_androidToAAudioResult(status);
} else {
// TODO Use AAudioThread or maybe AndroidThread
@@ -432,36 +471,39 @@
}
}
+aaudio_result_t AudioStream::joinThread(void** returnArg) {
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ return joinThread_l(returnArg);
+}
+
// This must be called under mStreamLock.
-aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds __unused)
-{
+aaudio_result_t AudioStream::joinThread_l(void** returnArg) {
if (!mHasThread) {
- ALOGE("joinThread() - but has no thread");
+ ALOGD("joinThread() - but has no thread");
return AAUDIO_ERROR_INVALID_STATE;
}
aaudio_result_t result = AAUDIO_OK;
// If the callback is stopping the stream because the app passed back STOP
// then we don't need to join(). The thread is already about to exit.
- if (pthread_self() != mThread) {
+ if (!pthread_equal(pthread_self(), mThread)) {
// Called from an app thread. Not the callback.
// Unlock because the callback may be trying to stop the stream but is blocked.
mStreamLock.unlock();
-#if 0
- // TODO implement equivalent of pthread_timedjoin_np()
- struct timespec abstime;
- int err = pthread_timedjoin_np(mThread, returnArg, &abstime);
-#else
int err = pthread_join(mThread, returnArg);
-#endif
mStreamLock.lock();
if (err) {
ALOGE("%s() pthread_join() returns err = %d", __func__, err);
result = AAudioConvert_androidToAAudioResult(-err);
+ } else {
+ ALOGD("%s() pthread_join succeeded", __func__);
+ // This must be set false so that the callback thread can be created
+ // when the stream is restarted.
+ mHasThread = false;
}
+ } else {
+ ALOGD("%s() pthread_join() called on itself!", __func__);
}
- // This must be set false so that the callback thread can be created
- // when the stream is restarted.
- mHasThread = false;
return (result != AAUDIO_OK) ? result : mThreadRegistrationResult;
}
@@ -518,11 +560,18 @@
}
#if AAUDIO_USE_VOLUME_SHAPER
-android::media::VolumeShaper::Status AudioStream::applyVolumeShaper(
- const android::media::VolumeShaper::Configuration& configuration __unused,
- const android::media::VolumeShaper::Operation& operation __unused) {
- ALOGW("applyVolumeShaper() is not supported");
- return android::media::VolumeShaper::Status::ok();
+::android::binder::Status AudioStream::MyPlayerBase::applyVolumeShaper(
+ const ::android::media::VolumeShaper::Configuration& configuration,
+ const ::android::media::VolumeShaper::Operation& operation) {
+ android::sp<AudioStream> audioStream;
+ {
+ std::lock_guard<std::mutex> lock(mParentLock);
+ audioStream = mParent.promote();
+ }
+ if (audioStream) {
+ return audioStream->applyVolumeShaper(configuration, operation);
+ }
+ return android::NO_ERROR;
}
#endif
@@ -532,26 +581,36 @@
doSetVolume(); // apply this change
}
-AudioStream::MyPlayerBase::MyPlayerBase(AudioStream *parent) : mParent(parent) {
-}
-
-AudioStream::MyPlayerBase::~MyPlayerBase() {
-}
-
-void AudioStream::MyPlayerBase::registerWithAudioManager() {
+void AudioStream::MyPlayerBase::registerWithAudioManager(const android::sp<AudioStream>& parent) {
+ std::lock_guard<std::mutex> lock(mParentLock);
+ mParent = parent;
if (!mRegistered) {
- init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(mParent->getUsage()));
+ init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(parent->getUsage()));
mRegistered = true;
}
}
void AudioStream::MyPlayerBase::unregisterWithAudioManager() {
+ std::lock_guard<std::mutex> lock(mParentLock);
if (mRegistered) {
baseDestroy();
mRegistered = false;
}
}
+android::status_t AudioStream::MyPlayerBase::playerSetVolume() {
+ android::sp<AudioStream> audioStream;
+ {
+ std::lock_guard<std::mutex> lock(mParentLock);
+ audioStream = mParent.promote();
+ }
+ if (audioStream) {
+ // No pan and only left volume is taken into account from IPlayer interface
+ audioStream->setDuckAndMuteVolume(mVolumeMultiplierL /* mPanMultiplierL */);
+ }
+ return android::NO_ERROR;
+}
+
void AudioStream::MyPlayerBase::destroy() {
unregisterWithAudioManager();
}
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 613a092..510ead8 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -20,13 +20,17 @@
#include <atomic>
#include <mutex>
#include <stdint.h>
-#include <aaudio/AAudio.h>
+
+#include <android-base/thread_annotations.h>
#include <binder/IServiceManager.h>
#include <binder/Status.h>
#include <utils/StrongPointer.h>
-#include "media/VolumeShaper.h"
-#include "media/PlayerBase.h"
+#include <aaudio/AAudio.h>
+#include <media/AudioSystem.h>
+#include <media/PlayerBase.h>
+#include <media/VolumeShaper.h>
+
#include "utility/AAudioUtilities.h"
#include "utility/MonotonicCounter.h"
@@ -45,7 +49,8 @@
/**
* AAudio audio stream.
*/
-class AudioStream {
+// By extending AudioDeviceCallback, we also inherit from RefBase.
+class AudioStream : public android::AudioSystem::AudioDeviceCallback {
public:
AudioStream();
@@ -54,11 +59,6 @@
protected:
- /* Asynchronous requests.
- * Use waitForStateChange() to wait for completion.
- */
- virtual aaudio_result_t requestStart() = 0;
-
/**
* Check the state to see if Pause is currently legal.
*
@@ -77,18 +77,22 @@
return false;
}
- virtual aaudio_result_t requestPause()
- {
+ /* Asynchronous requests.
+ * Use waitForStateChange() to wait for completion.
+ */
+ virtual aaudio_result_t requestStart_l() REQUIRES(mStreamLock) = 0;
+
+ virtual aaudio_result_t requestPause_l() REQUIRES(mStreamLock) {
// Only implement this for OUTPUT streams.
return AAUDIO_ERROR_UNIMPLEMENTED;
}
- virtual aaudio_result_t requestFlush() {
+ virtual aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) {
// Only implement this for OUTPUT streams.
return AAUDIO_ERROR_UNIMPLEMENTED;
}
- virtual aaudio_result_t requestStop() = 0;
+ virtual aaudio_result_t requestStop_l() REQUIRES(mStreamLock) = 0;
public:
virtual aaudio_result_t getTimestamp(clockid_t clockId,
@@ -115,35 +119,42 @@
// log to MediaMetrics
virtual void logOpen();
- void logBufferState();
+ void logReleaseBufferState();
+ /* Note about naming for "release" and "close" related methods.
+ *
+ * These names are intended to match the public AAudio API.
+ * The original AAudio API had an AAudioStream_close() function that
+ * released the hardware and deleted the stream. That made it difficult
+ * because apps want to release the HW ASAP but are not in a rush to delete
+ * the stream object. So in R we added an AAudioStream_release() function
+ * that just released the hardware.
+ * The AAudioStream_close() method releases if needed and then closes.
+ */
+
+protected:
/**
* Free any hardware or system resources from the open() call.
* It is safe to call release_l() multiple times.
*/
- virtual aaudio_result_t release_l() {
+ virtual aaudio_result_t release_l() REQUIRES(mStreamLock) {
setState(AAUDIO_STREAM_STATE_CLOSING);
return AAUDIO_OK;
}
- aaudio_result_t closeFinal() {
+ /**
+ * Free any resources not already freed by release_l().
+ * Assume release_l() already called.
+ */
+ virtual void close_l() REQUIRES(mStreamLock) {
+ // Releasing the stream will set the state to CLOSING.
+ assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
+ // setState() prevents a transition from CLOSING to any state other than CLOSED.
// State is checked by destructor.
setState(AAUDIO_STREAM_STATE_CLOSED);
- return AAUDIO_OK;
}
- /**
- * Release then close the stream.
- * @return AAUDIO_OK or negative error.
- */
- aaudio_result_t releaseCloseFinal() {
- aaudio_result_t result = release_l(); // TODO review locking
- if (result == AAUDIO_OK) {
- result = closeFinal();
- }
- return result;
- }
-
+public:
// This is only used to identify a stream in the logs without
// revealing any pointers.
aaudio_stream_id_t getId() {
@@ -152,11 +163,11 @@
virtual aaudio_result_t setBufferSize(int32_t requestedFrames) = 0;
- virtual aaudio_result_t createThread(int64_t periodNanoseconds,
- aaudio_audio_thread_proc_t threadProc,
- void *threadArg);
+ virtual aaudio_result_t createThread_l(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void *threadArg);
- aaudio_result_t joinThread(void **returnArg, int64_t timeoutNanoseconds);
+ aaudio_result_t joinThread(void **returnArg);
virtual aaudio_result_t registerThread() {
return AAUDIO_OK;
@@ -183,11 +194,11 @@
}
virtual int32_t getBufferCapacity() const {
- return AAUDIO_ERROR_UNIMPLEMENTED;
+ return mBufferCapacity;
}
virtual int32_t getFramesPerBurst() const {
- return AAUDIO_ERROR_UNIMPLEMENTED;
+ return mFramesPerBurst;
}
virtual int32_t getXRunCount() const {
@@ -328,6 +339,10 @@
*/
bool collidesWithCallback() const;
+ // Implement AudioDeviceCallback
+ void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+ audio_port_handle_t deviceId) override {};
+
// ============== I/O ===========================
// A Stream will only implement read() or write() depending on its direction.
virtual aaudio_result_t write(const void *buffer __unused,
@@ -366,7 +381,7 @@
*/
void registerPlayerBase() {
if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
- mPlayerBase->registerWithAudioManager();
+ mPlayerBase->registerWithAudioManager(this);
}
}
@@ -395,21 +410,35 @@
*/
aaudio_result_t systemStopFromCallback();
+ /**
+ * Safely RELEASE a stream after taking mStreamLock and checking
+ * to make sure we are not being called from a callback.
+ * @return AAUDIO_OK or a negative error
+ */
aaudio_result_t safeRelease();
+ /**
+ * Safely RELEASE and CLOSE a stream after taking mStreamLock and checking
+ * to make sure we are not being called from a callback.
+ * @return AAUDIO_OK or a negative error
+ */
+ aaudio_result_t safeReleaseClose();
+
+ aaudio_result_t safeReleaseCloseFromCallback();
+
protected:
// PlayerBase allows the system to control the stream volume.
class MyPlayerBase : public android::PlayerBase {
public:
- explicit MyPlayerBase(AudioStream *parent);
+ MyPlayerBase() {};
- virtual ~MyPlayerBase();
+ virtual ~MyPlayerBase() = default;
/**
* Register for volume changes and remote control.
*/
- void registerWithAudioManager();
+ void registerWithAudioManager(const android::sp<AudioStream>& parent);
/**
* UnRegister.
@@ -421,8 +450,6 @@
*/
void destroy() override;
- void clearParentReference() { mParent = nullptr; }
-
// Just a stub. The ability to start audio through PlayerBase is being deprecated.
android::status_t playerStart() override {
return android::NO_ERROR;
@@ -438,18 +465,10 @@
return android::NO_ERROR;
}
- android::status_t playerSetVolume() override {
- // No pan and only left volume is taken into account from IPLayer interface
- mParent->setDuckAndMuteVolume(mVolumeMultiplierL /* * mPanMultiplierL */);
- return android::NO_ERROR;
- }
+ android::status_t playerSetVolume() override;
#if AAUDIO_USE_VOLUME_SHAPER
- ::android::binder::Status applyVolumeShaper(
- const ::android::media::VolumeShaper::Configuration& configuration,
- const ::android::media::VolumeShaper::Operation& operation) {
- return mParent->applyVolumeShaper(configuration, operation);
- }
+ ::android::binder::Status applyVolumeShaper();
#endif
aaudio_result_t getResult() {
@@ -457,9 +476,11 @@
}
private:
- AudioStream *mParent;
- aaudio_result_t mResult = AAUDIO_OK;
- bool mRegistered = false;
+ // Use a weak pointer so the AudioStream can be deleted.
+ std::mutex mParentLock;
+ android::wp<AudioStream> mParent GUARDED_BY(mParentLock);
+ aaudio_result_t mResult = AAUDIO_OK;
+ bool mRegistered = false;
};
/**
@@ -470,30 +491,32 @@
mSampleRate = sampleRate;
}
- /**
- * This should not be called after the open() call.
- */
+ // This should not be called after the open() call.
void setSamplesPerFrame(int32_t samplesPerFrame) {
mSamplesPerFrame = samplesPerFrame;
}
- /**
- * This should not be called after the open() call.
- */
+ // This should not be called after the open() call.
+ void setFramesPerBurst(int32_t framesPerBurst) {
+ mFramesPerBurst = framesPerBurst;
+ }
+
+ // This should not be called after the open() call.
+ void setBufferCapacity(int32_t bufferCapacity) {
+ mBufferCapacity = bufferCapacity;
+ }
+
+ // This should not be called after the open() call.
void setSharingMode(aaudio_sharing_mode_t sharingMode) {
mSharingMode = sharingMode;
}
- /**
- * This should not be called after the open() call.
- */
+ // This should not be called after the open() call.
void setFormat(audio_format_t format) {
mFormat = format;
}
- /**
- * This should not be called after the open() call.
- */
+ // This should not be called after the open() call.
void setDeviceFormat(audio_format_t format) {
mDeviceFormat = format;
}
@@ -508,10 +531,13 @@
mDeviceId = deviceId;
}
+ // This should not be called after the open() call.
void setSessionId(int32_t sessionId) {
mSessionId = sessionId;
}
+ aaudio_result_t joinThread_l(void **returnArg) REQUIRES(mStreamLock);
+
std::atomic<bool> mCallbackEnabled{false};
float mDuckAndMuteVolume = 1.0f;
@@ -578,11 +604,22 @@
std::string mMetricsId; // set once during open()
+ std::mutex mStreamLock;
+
private:
- aaudio_result_t safeStop();
+ aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
- std::mutex mStreamLock;
+ /**
+ * Release then close the stream.
+ */
+ void releaseCloseFinal_l() REQUIRES(mStreamLock) {
+ if (getState() != AAUDIO_STREAM_STATE_CLOSING) { // not already released?
+ // Ignore result and keep closing.
+ (void) release_l();
+ }
+ close_l();
+ }
const android::sp<MyPlayerBase> mPlayerBase;
@@ -595,6 +632,8 @@
audio_format_t mFormat = AUDIO_FORMAT_DEFAULT;
aaudio_stream_state_t mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+ int32_t mFramesPerBurst = 0;
+ int32_t mBufferCapacity = 0;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
@@ -620,8 +659,8 @@
std::atomic<pid_t> mErrorCallbackThread{CALLBACK_THREAD_NONE};
// background thread ----------------------------------
- bool mHasThread = false;
- pthread_t mThread; // initialized in constructor
+ bool mHasThread GUARDED_BY(mStreamLock) = false;
+ pthread_t mThread GUARDED_BY(mStreamLock) = {};
// These are set by the application thread and then read by the audio pthread.
std::atomic<int64_t> mPeriodNanoseconds; // for tuning SCHED_FIFO threads
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 60dad84..630b289 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -63,27 +63,26 @@
static aaudio_result_t builder_createStream(aaudio_direction_t direction,
aaudio_sharing_mode_t sharingMode,
bool tryMMap,
- AudioStream **audioStreamPtr) {
- *audioStreamPtr = nullptr;
+ android::sp<AudioStream> &stream) {
aaudio_result_t result = AAUDIO_OK;
switch (direction) {
case AAUDIO_DIRECTION_INPUT:
if (tryMMap) {
- *audioStreamPtr = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
+ stream = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
false);
} else {
- *audioStreamPtr = new AudioStreamRecord();
+ stream = new AudioStreamRecord();
}
break;
case AAUDIO_DIRECTION_OUTPUT:
if (tryMMap) {
- *audioStreamPtr = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
+ stream = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
false);
} else {
- *audioStreamPtr = new AudioStreamTrack();
+ stream = new AudioStreamTrack();
}
break;
@@ -98,7 +97,7 @@
// Fall back to Legacy path if MMAP not available.
// Exact behavior is controlled by MMapPolicy.
aaudio_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
- AudioStream *audioStream = nullptr;
+
if (streamPtr == nullptr) {
ALOGE("%s() streamPtr is null", __func__);
return AAUDIO_ERROR_NULL;
@@ -171,41 +170,48 @@
setPrivacySensitive(true);
}
- result = builder_createStream(getDirection(), sharingMode, allowMMap, &audioStream);
+ android::sp<AudioStream> audioStream;
+ result = builder_createStream(getDirection(), sharingMode, allowMMap, audioStream);
if (result == AAUDIO_OK) {
// Open the stream using the parameters from the builder.
result = audioStream->open(*this);
- if (result == AAUDIO_OK) {
- *streamPtr = audioStream;
- } else {
+ if (result != AAUDIO_OK) {
bool isMMap = audioStream->isMMap();
- delete audioStream;
- audioStream = nullptr;
-
if (isMMap && allowLegacy) {
ALOGV("%s() MMAP stream did not open so try Legacy path", __func__);
// If MMAP stream failed to open then TRY using a legacy stream.
result = builder_createStream(getDirection(), sharingMode,
- false, &audioStream);
+ false, audioStream);
if (result == AAUDIO_OK) {
result = audioStream->open(*this);
- if (result == AAUDIO_OK) {
- *streamPtr = audioStream;
- } else {
- delete audioStream;
- audioStream = nullptr;
- }
}
}
}
- if (audioStream != nullptr) {
+ if (result == AAUDIO_OK) {
audioStream->logOpen();
- }
+ *streamPtr = startUsingStream(audioStream);
+ } // else audioStream will go out of scope and be deleted
}
return result;
}
+AudioStream *AudioStreamBuilder::startUsingStream(android::sp<AudioStream> &audioStream) {
+ // Increment the smart pointer so it will not get deleted when
+ // we pass it to the C caller and it goes out of scope.
+ // The C code cannot hold a smart pointer so we increment the reference
+ // count to indicate that the C app owns a reference.
+ audioStream->incStrong(nullptr);
+ return audioStream.get();
+}
+
+void AudioStreamBuilder::stopUsingStream(AudioStream *stream) {
+ // Undo the effect of startUsingStream()
+ android::sp<AudioStream> spAudioStream(stream);
+ ALOGV("%s() strongCount = %d", __func__, spAudioStream->getStrongCount());
+ spAudioStream->decStrong(nullptr);
+}
+
aaudio_result_t AudioStreamBuilder::validate() const {
// Check for values that are ridiculously out of range to prevent math overflow exploits.
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index d5fb80d..9f93341 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -108,9 +108,16 @@
virtual aaudio_result_t validate() const override;
+
void logParameters() const;
+ // Mark the stream so it can be deleted.
+ static void stopUsingStream(AudioStream *stream);
+
private:
+ // Extract a raw pointer that we can pass to a 'C' app.
+ static AudioStream *startUsingStream(android::sp<AudioStream> &spAudioStream);
+
bool mSharingModeMatchRequired = false; // must match sharing mode requested
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index f5113f2..5c11882 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -31,40 +31,37 @@
#include "FifoBuffer.h"
using android::FifoBuffer;
+using android::FifoBufferAllocated;
+using android::FifoBufferIndirect;
using android::fifo_frames_t;
-FifoBuffer::FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
- : mBytesPerFrame(bytesPerFrame)
+FifoBuffer::FifoBuffer(int32_t bytesPerFrame)
+ : mBytesPerFrame(bytesPerFrame) {}
+
+FifoBufferAllocated::FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
+ : FifoBuffer(bytesPerFrame)
{
mFifo = std::make_unique<FifoController>(capacityInFrames, capacityInFrames);
// allocate buffer
int32_t bytesPerBuffer = bytesPerFrame * capacityInFrames;
- mStorage = new uint8_t[bytesPerBuffer];
- mStorageOwned = true;
+ mInternalStorage = std::make_unique<uint8_t[]>(bytesPerBuffer);
ALOGV("%s() capacityInFrames = %d, bytesPerFrame = %d",
__func__, capacityInFrames, bytesPerFrame);
}
-FifoBuffer::FifoBuffer( int32_t bytesPerFrame,
+FifoBufferIndirect::FifoBufferIndirect( int32_t bytesPerFrame,
fifo_frames_t capacityInFrames,
- fifo_counter_t * readIndexAddress,
- fifo_counter_t * writeIndexAddress,
+ fifo_counter_t *readIndexAddress,
+ fifo_counter_t *writeIndexAddress,
void * dataStorageAddress
)
- : mBytesPerFrame(bytesPerFrame)
- , mStorage(static_cast<uint8_t *>(dataStorageAddress))
+ : FifoBuffer(bytesPerFrame)
+ , mExternalStorage(static_cast<uint8_t *>(dataStorageAddress))
{
mFifo = std::make_unique<FifoControllerIndirect>(capacityInFrames,
capacityInFrames,
readIndexAddress,
writeIndexAddress);
- mStorageOwned = false;
-}
-
-FifoBuffer::~FifoBuffer() {
- if (mStorageOwned) {
- delete[] mStorage;
- }
}
int32_t FifoBuffer::convertFramesToBytes(fifo_frames_t frames) {
@@ -76,15 +73,16 @@
int32_t startIndex) {
wrappingBuffer->data[1] = nullptr;
wrappingBuffer->numFrames[1] = 0;
+ uint8_t *storage = getStorage();
if (framesAvailable > 0) {
fifo_frames_t capacity = mFifo->getCapacity();
- uint8_t *source = &mStorage[convertFramesToBytes(startIndex)];
+ uint8_t *source = &storage[convertFramesToBytes(startIndex)];
// Does the available data cross the end of the FIFO?
if ((startIndex + framesAvailable) > capacity) {
wrappingBuffer->data[0] = source;
fifo_frames_t firstFrames = capacity - startIndex;
wrappingBuffer->numFrames[0] = firstFrames;
- wrappingBuffer->data[1] = &mStorage[0];
+ wrappingBuffer->data[1] = &storage[0];
wrappingBuffer->numFrames[1] = framesAvailable - firstFrames;
} else {
wrappingBuffer->data[0] = source;
@@ -191,6 +189,6 @@
void FifoBuffer::eraseMemory() {
int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
if (numBytes > 0) {
- memset(mStorage, 0, (size_t) numBytes);
+ memset(getStorage(), 0, (size_t) numBytes);
}
}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 0d188c4..37548f0 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -38,15 +38,9 @@
class FifoBuffer {
public:
- FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+ FifoBuffer(int32_t bytesPerFrame);
- FifoBuffer(int32_t bytesPerFrame,
- fifo_frames_t capacityInFrames,
- fifo_counter_t *readCounterAddress,
- fifo_counter_t *writeCounterAddress,
- void *dataStorageAddress);
-
- ~FifoBuffer();
+ virtual ~FifoBuffer() = default;
int32_t convertFramesToBytes(fifo_frames_t frames);
@@ -121,19 +115,53 @@
*/
void eraseMemory();
-private:
+protected:
+
+ virtual uint8_t *getStorage() const = 0;
void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
int32_t framesAvailable, int32_t startIndex);
const int32_t mBytesPerFrame;
- // We do not use a std::unique_ptr for mStorage because it is often a pointer to
- // memory shared between processes and cannot be deleted trivially.
- uint8_t *mStorage = nullptr;
- bool mStorageOwned = false; // did this object allocate the storage?
std::unique_ptr<FifoControllerBase> mFifo{};
};
+// Define two subclasses to handle the two ways that storage is allocated.
+
+// Allocate storage internally.
+class FifoBufferAllocated : public FifoBuffer {
+public:
+ FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+
+private:
+
+ uint8_t *getStorage() const override {
+ return mInternalStorage.get();
+ };
+
+ std::unique_ptr<uint8_t[]> mInternalStorage;
+};
+
+// Allocate storage externally and pass it in.
+class FifoBufferIndirect : public FifoBuffer {
+public:
+ // We use raw pointers because the memory may be
+ // in the middle of an allocated block and cannot be deleted directly.
+ FifoBufferIndirect(int32_t bytesPerFrame,
+ fifo_frames_t capacityInFrames,
+ fifo_counter_t* readCounterAddress,
+ fifo_counter_t* writeCounterAddress,
+ void* dataStorageAddress);
+
+private:
+
+ uint8_t *getStorage() const override {
+ return mExternalStorage;
+ };
+
+ uint8_t *mExternalStorage = nullptr;
+};
+
} // android
#endif //FIFO_FIFO_BUFFER_H
diff --git a/media/libaaudio/src/fifo/FifoControllerIndirect.h b/media/libaaudio/src/fifo/FifoControllerIndirect.h
index 5832d9c..ec48e57 100644
--- a/media/libaaudio/src/fifo/FifoControllerIndirect.h
+++ b/media/libaaudio/src/fifo/FifoControllerIndirect.h
@@ -27,7 +27,7 @@
/**
* A FifoControllerBase with counters external to the class.
*
- * The actual copunters may be stored in separate regions of shared memory
+ * The actual counters may be stored in separate regions of shared memory
* with different access rights.
*/
class FifoControllerIndirect : public FifoControllerBase {
diff --git a/media/libaaudio/src/flowgraph/AudioProcessorBase.h b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
index eda46ae..972932f 100644
--- a/media/libaaudio/src/flowgraph/AudioProcessorBase.h
+++ b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
@@ -267,7 +267,7 @@
AudioFloatInputPort input;
/**
- * Dummy processor. The work happens in the read() method.
+ * Do nothing. The work happens in the read() method.
*
* @param framePosition index of first frame to be processed
* @param numFrames
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
new file mode 100644
index 0000000..b750410
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_UTILITIES_H
+#define FLOWGRAPH_UTILITIES_H
+
+#include <unistd.h>
+
+using namespace flowgraph;
+
+class FlowgraphUtilities {
+public:
+// This was copied from audio_utils/primitives.h
+/**
+ * Convert a single-precision floating point value to a Q0.31 integer value.
+ * Rounds to nearest, ties away from 0.
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to -2147483648 and 2147483647,
+ * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
+ * depending on hardware and future implementation of this function.
+ */
+static int32_t clamp32FromFloat(float f)
+{
+ static const float scale = (float)(1UL << 31);
+ static const float limpos = 1.;
+ static const float limneg = -1.;
+
+ if (f <= limneg) {
+ return -0x80000000; /* or 0x80000000 */
+ } else if (f >= limpos) {
+ return 0x7fffffff;
+ }
+ f *= scale;
+ /* integer conversion is through truncation (though int to float is not).
+ * ensure that we round to nearest, ties away from 0.
+ */
+ return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+};
+
+#endif // FLOWGRAPH_UTILITIES_H
diff --git a/media/libaaudio/src/flowgraph/SinkI24.cpp b/media/libaaudio/src/flowgraph/SinkI24.cpp
index 6592828..0cb077d 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI24.cpp
@@ -15,7 +15,7 @@
*/
#include <algorithm>
-#include <unistd.h>
+#include <stdint.h>
#ifdef __ANDROID__
#include <audio_utils/primitives.h>
@@ -26,7 +26,6 @@
using namespace flowgraph;
-
SinkI24::SinkI24(int32_t channelCount)
: AudioSink(channelCount) {}
diff --git a/media/libaaudio/src/flowgraph/SinkI32.cpp b/media/libaaudio/src/flowgraph/SinkI32.cpp
new file mode 100644
index 0000000..eab863d
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SinkI32.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef __ANDROID__
+#include <audio_utils/primitives.h>
+#endif
+
+#include "AudioProcessorBase.h"
+#include "FlowgraphUtilities.h"
+#include "SinkI32.h"
+
+using namespace flowgraph;
+
+SinkI32::SinkI32(int32_t channelCount)
+ : AudioSink(channelCount) {}
+
+int32_t SinkI32::read(void *data, int32_t numFrames) {
+ int32_t *intData = (int32_t *) data;
+ const int32_t channelCount = input.getSamplesPerFrame();
+
+ int32_t framesLeft = numFrames;
+ while (framesLeft > 0) {
+ // Run the graph and pull data through the input port.
+ int32_t framesRead = pull(framesLeft);
+ if (framesRead <= 0) {
+ break;
+ }
+ const float *signal = input.getBlock();
+ int32_t numSamples = framesRead * channelCount;
+#ifdef __ANDROID__
+ memcpy_to_i32_from_float(intData, signal, numSamples);
+ intData += numSamples;
+ signal += numSamples;
+#else
+ for (int i = 0; i < numSamples; i++) {
+ *intData++ = FlowgraphUtilities::clamp32FromFloat(*signal++);
+ }
+#endif
+ framesLeft -= framesRead;
+ }
+ return numFrames - framesLeft;
+}
diff --git a/media/libaaudio/src/flowgraph/SinkI32.h b/media/libaaudio/src/flowgraph/SinkI32.h
new file mode 100644
index 0000000..09d23b7
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SinkI32.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_SINK_I32_H
+#define FLOWGRAPH_SINK_I32_H
+
+#include <stdint.h>
+
+#include "AudioProcessorBase.h"
+
+namespace flowgraph {
+
+class SinkI32 : public AudioSink {
+public:
+ explicit SinkI32(int32_t channelCount);
+ ~SinkI32() override = default;
+
+ int32_t read(void *data, int32_t numFrames) override;
+};
+
+} /* namespace flowgraph */
+
+#endif //FLOWGRAPH_SINK_I32_H
diff --git a/media/libaaudio/src/flowgraph/SourceI24.cpp b/media/libaaudio/src/flowgraph/SourceI24.cpp
index f319880..097954e 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI24.cpp
@@ -15,7 +15,7 @@
*/
#include <algorithm>
-#include <unistd.h>
+#include <stdint.h>
#ifdef __ANDROID__
#include <audio_utils/primitives.h>
diff --git a/media/libaaudio/src/flowgraph/SourceI24.h b/media/libaaudio/src/flowgraph/SourceI24.h
index 39f14da..2ed6f18 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.h
+++ b/media/libaaudio/src/flowgraph/SourceI24.h
@@ -17,8 +17,7 @@
#ifndef FLOWGRAPH_SOURCE_I24_H
#define FLOWGRAPH_SOURCE_I24_H
-#include <unistd.h>
-#include <sys/types.h>
+#include <stdint.h>
#include "AudioProcessorBase.h"
diff --git a/media/libaaudio/src/flowgraph/SourceI32.cpp b/media/libaaudio/src/flowgraph/SourceI32.cpp
new file mode 100644
index 0000000..e8177ad
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SourceI32.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <unistd.h>
+
+#ifdef __ANDROID__
+#include <audio_utils/primitives.h>
+#endif
+
+#include "AudioProcessorBase.h"
+#include "SourceI32.h"
+
+using namespace flowgraph;
+
+SourceI32::SourceI32(int32_t channelCount)
+ : AudioSource(channelCount) {
+}
+
+int32_t SourceI32::onProcess(int64_t framePosition, int32_t numFrames) {
+ float *floatData = output.getBlock();
+ int32_t channelCount = output.getSamplesPerFrame();
+
+ int32_t framesLeft = mSizeInFrames - mFrameIndex;
+ int32_t framesToProcess = std::min(numFrames, framesLeft);
+ int32_t numSamples = framesToProcess * channelCount;
+
+ const int32_t *intBase = static_cast<const int32_t *>(mData);
+ const int32_t *intData = &intBase[mFrameIndex * channelCount];
+
+#ifdef __ANDROID__
+ memcpy_to_float_from_i32(floatData, intData, numSamples);
+#else
+ for (int i = 0; i < numSamples; i++) {
+ *floatData++ = *intData++ * kScale;
+ }
+#endif
+
+ mFrameIndex += framesToProcess;
+ return framesToProcess;
+}
diff --git a/media/libaaudio/src/flowgraph/SourceI32.h b/media/libaaudio/src/flowgraph/SourceI32.h
new file mode 100644
index 0000000..e50f9be
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SourceI32.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_SOURCE_I32_H
+#define FLOWGRAPH_SOURCE_I32_H
+
+#include <stdint.h>
+
+#include "AudioProcessorBase.h"
+
+namespace flowgraph {
+
+class SourceI32 : public AudioSource {
+public:
+ explicit SourceI32(int32_t channelCount);
+ ~SourceI32() override = default;
+
+ int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+
+private:
+ static constexpr float kScale = 1.0 / (1UL << 31);
+};
+
+} /* namespace flowgraph */
+
+#endif //FLOWGRAPH_SOURCE_I32_H
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index c062882..fdaa2ab 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -34,8 +34,7 @@
using namespace aaudio;
AudioStreamLegacy::AudioStreamLegacy()
- : AudioStream()
- , mDeviceCallback(new StreamDeviceCallback(this)) {
+ : AudioStream() {
}
AudioStreamLegacy::~AudioStreamLegacy() {
@@ -163,7 +162,11 @@
}
void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
- if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // There is no need to disconnect if already in these states.
+ if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+ && getState() != AAUDIO_STREAM_STATE_CLOSING
+ && getState() != AAUDIO_STREAM_STATE_CLOSED
+ ) {
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
if (errorCallbackEnabled) {
maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
@@ -205,24 +208,30 @@
return AAudioConvert_androidToAAudioResult(status);
}
-void AudioStreamLegacy::onAudioDeviceUpdate(audio_port_handle_t deviceId)
-{
+void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
+ audio_port_handle_t deviceId) {
// Device routing is a common source of errors and DISCONNECTS.
- // Please leave this log in place.
- ALOGD("%s() devId %d => %d", __func__, (int) getDeviceId(), (int)deviceId);
- if (getDeviceId() != AAUDIO_UNSPECIFIED && getDeviceId() != deviceId &&
- getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // Please leave this log in place. If there is a bug then this might
+ // get called after the stream has been deleted so log before we
+ // touch the stream object.
+ ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
+ if (getDeviceId() != AAUDIO_UNSPECIFIED
+ && getDeviceId() != deviceId
+ && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+ ) {
// Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
// If we have a data callback and the stream is active, then ask the data callback
// to DISCONNECT and call the error callback.
if (isDataCallbackActive()) {
- ALOGD("onAudioDeviceUpdate() request DISCONNECT in data callback due to device change");
+ ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
+ __func__, (int) getDeviceId(), (int) deviceId);
// If the stream is stopped before the data callback has a chance to handle the
- // request then the requestStop() and requestPause() methods will handle it after
+ // request then the requestStop_l() and requestPause() methods will handle it after
// the callback has stopped.
mRequestDisconnect.request();
} else {
- ALOGD("onAudioDeviceUpdate() DISCONNECT the stream now");
+ ALOGD("%s() DISCONNECT the stream now, device %d => %d",
+ __func__, (int) getDeviceId(), (int) deviceId);
forceDisconnect();
}
}
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 9c24b2b..88ef270 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -87,29 +87,13 @@
protected:
- class StreamDeviceCallback : public android::AudioSystem::AudioDeviceCallback
- {
- public:
-
- StreamDeviceCallback(AudioStreamLegacy *parent) : mParent(parent) {}
- virtual ~StreamDeviceCallback() {}
-
- virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo __unused,
- audio_port_handle_t deviceId) {
- if (mParent != nullptr) {
- mParent->onAudioDeviceUpdate(deviceId);
- }
- }
-
- AudioStreamLegacy *mParent;
- };
-
aaudio_result_t getBestTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds,
android::ExtendedTimestamp *extendedTimestamp);
- void onAudioDeviceUpdate(audio_port_handle_t deviceId);
+ void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+ audio_port_handle_t deviceId) override;
/*
* Check to see whether a callback thread has requested a disconnect.
@@ -128,6 +112,18 @@
return mFramesRead.increment(frames);
}
+ /**
+ * Get the framesPerBurst from the underlying API.
+ * @return framesPerBurst
+ */
+ virtual int32_t getFramesPerBurstFromDevice() const = 0;
+
+ /**
+ * Get the bufferCapacity from the underlying API.
+ * @return bufferCapacity in frames
+ */
+ virtual int32_t getBufferCapacityFromDevice() const = 0;
+
// This is used for exact matching by MediaMetrics. So do not change it.
// MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_AAUDIO
static constexpr char kCallerName[] = "aaudio";
@@ -140,7 +136,6 @@
int32_t mBlockAdapterBytesPerFrame = 0;
aaudio_wrapping_frames_t mPositionWhenStarting = 0;
int32_t mCallbackBufferSize = 0;
- const android::sp<StreamDeviceCallback> mDeviceCallback;
AtomicRequestor mRequestDisconnect;
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 853c0db..45b2258 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -118,6 +118,7 @@
setDeviceFormat(getFormat());
}
+ // To avoid glitching, let AudioFlinger pick the optimal burst size.
uint32_t notificationFrames = 0;
// Setup the callback if there is one.
@@ -128,7 +129,6 @@
streamTransferType = AudioRecord::transfer_type::TRANSFER_CALLBACK;
callback = getLegacyCallback();
callbackData = this;
- notificationFrames = builder.getFramesPerDataCallback();
}
mCallbackBufferSize = builder.getFramesPerDataCallback();
@@ -185,7 +185,7 @@
// Did we get a valid track?
status_t status = mAudioRecord->initCheck();
if (status != OK) {
- releaseCloseFinal();
+ safeReleaseClose();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -210,12 +210,9 @@
// Get the actual values from the AudioRecord.
setSamplesPerFrame(mAudioRecord->channelCount());
-
- int32_t actualSampleRate = mAudioRecord->getSampleRate();
- ALOGW_IF(actualSampleRate != getSampleRate(),
- "open() sampleRate changed from %d to %d",
- getSampleRate(), actualSampleRate);
- setSampleRate(actualSampleRate);
+ setSampleRate(mAudioRecord->getSampleRate());
+ setBufferCapacity(getBufferCapacityFromDevice());
+ setFramesPerBurst(getFramesPerBurstFromDevice());
// We may need to pass the data through a block size adapter to guarantee constant size.
if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -282,7 +279,7 @@
: (aaudio_session_id_t) mAudioRecord->getSessionId();
setSessionId(actualSessionId);
- mAudioRecord->addAudioDeviceCallback(mDeviceCallback);
+ mAudioRecord->addAudioDeviceCallback(this);
return AAUDIO_OK;
}
@@ -291,16 +288,24 @@
// TODO add close() or release() to AudioFlinger's AudioRecord API.
// Then call it from here
if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
- mAudioRecord->removeAudioDeviceCallback(mDeviceCallback);
- logBufferState();
- mAudioRecord.clear();
- mFixedBlockWriter.close();
+ mAudioRecord->removeAudioDeviceCallback(this);
+ logReleaseBufferState();
+ // Data callbacks may still be running!
return AudioStream::release_l();
} else {
return AAUDIO_OK; // already released
}
}
+void AudioStreamRecord::close_l() {
+ mAudioRecord.clear();
+ // Do not close mFixedBlockWriter because a data callback
+ // thread might still be running if someone else has a reference
+ // to mAudioRecord.
+ // It has a unique_ptr to its buffer so it will clean up by itself.
+ AudioStream::close_l();
+}
+
const void * AudioStreamRecord::maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
if (mFormatConversionBufferFloat.get() != nullptr) {
LOG_ALWAYS_FATAL_IF(numFrames > mFormatConversionBufferSizeInFrames,
@@ -336,7 +341,7 @@
return;
}
-aaudio_result_t AudioStreamRecord::requestStart()
+aaudio_result_t AudioStreamRecord::requestStart_l()
{
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
@@ -345,18 +350,22 @@
// Enable callback before starting AudioRecord to avoid shutting
// down because of a race condition.
mCallbackEnabled.store(true);
+ aaudio_stream_state_t originalState = getState();
+ // Set before starting the callback so that we are in the correct state
+ // before updateStateMachine() can be called by the callback.
+ setState(AAUDIO_STREAM_STATE_STARTING);
mFramesWritten.reset32(); // service writes frames
mTimestampPosition.reset32();
status_t err = mAudioRecord->start(); // resets position to zero
if (err != OK) {
+ mCallbackEnabled.store(false);
+ setState(originalState);
return AAudioConvert_androidToAAudioResult(err);
- } else {
- setState(AAUDIO_STREAM_STATE_STARTING);
}
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamRecord::requestStop() {
+aaudio_result_t AudioStreamRecord::requestStop_l() {
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
}
@@ -479,7 +488,7 @@
return getBufferCapacity(); // TODO implement in AudioRecord?
}
-int32_t AudioStreamRecord::getBufferCapacity() const
+int32_t AudioStreamRecord::getBufferCapacityFromDevice() const
{
return static_cast<int32_t>(mAudioRecord->frameCount());
}
@@ -489,8 +498,7 @@
return 0; // TODO implement when AudioRecord supports it
}
-int32_t AudioStreamRecord::getFramesPerBurst() const
-{
+int32_t AudioStreamRecord::getFramesPerBurstFromDevice() const {
return static_cast<int32_t>(mAudioRecord->getNotificationPeriodInFrames());
}
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index c5944c7..b2f8ba5 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -39,9 +39,7 @@
aaudio_result_t open(const AudioStreamBuilder & builder) override;
aaudio_result_t release_l() override;
-
- aaudio_result_t requestStart() override;
- aaudio_result_t requestStop() override;
+ void close_l() override;
virtual aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
@@ -55,14 +53,10 @@
int32_t getBufferSize() const override;
- int32_t getBufferCapacity() const override;
-
int32_t getXRunCount() const override;
int64_t getFramesWritten() override;
- int32_t getFramesPerBurst() const override;
-
aaudio_result_t updateStateMachine() override;
aaudio_direction_t getDirection() const override {
@@ -78,6 +72,14 @@
const void * maybeConvertDeviceData(const void *audioData, int32_t numFrames) override;
+protected:
+
+ aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+ int32_t getFramesPerBurstFromDevice() const override;
+ int32_t getBufferCapacityFromDevice() const override;
+
private:
android::sp<android::AudioRecord> mAudioRecord;
// adapts between variable sized blocks and fixed size blocks
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 1120f05..af8ff19 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -96,6 +96,7 @@
size_t frameCount = (size_t)builder.getBufferCapacity();
+ // To avoid glitching, let AudioFlinger pick the optimal burst size.
int32_t notificationFrames = 0;
const audio_format_t format = (getFormat() == AUDIO_FORMAT_DEFAULT)
@@ -118,8 +119,6 @@
// Take advantage of a special trick that allows us to create a buffer
// that is some multiple of the burst size.
notificationFrames = 0 - DEFAULT_BURSTS_PER_BUFFER_CAPACITY;
- } else {
- notificationFrames = builder.getFramesPerDataCallback();
}
}
mCallbackBufferSize = builder.getFramesPerDataCallback();
@@ -179,7 +178,7 @@
// Did we get a valid track?
status_t status = mAudioTrack->initCheck();
if (status != NO_ERROR) {
- releaseCloseFinal();
+ safeReleaseClose();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -193,12 +192,9 @@
setSamplesPerFrame(mAudioTrack->channelCount());
setFormat(mAudioTrack->format());
setDeviceFormat(mAudioTrack->format());
-
- int32_t actualSampleRate = mAudioTrack->getSampleRate();
- ALOGW_IF(actualSampleRate != getSampleRate(),
- "open() sampleRate changed from %d to %d",
- getSampleRate(), actualSampleRate);
- setSampleRate(actualSampleRate);
+ setSampleRate(mAudioTrack->getSampleRate());
+ setBufferCapacity(getBufferCapacityFromDevice());
+ setFramesPerBurst(getFramesPerBurstFromDevice());
// We may need to pass the data through a block size adapter to guarantee constant size.
if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -221,10 +217,7 @@
: (aaudio_session_id_t) mAudioTrack->getSessionId();
setSessionId(actualSessionId);
- mInitialBufferCapacity = getBufferCapacity();
- mInitialFramesPerBurst = getFramesPerBurst();
-
- mAudioTrack->addAudioDeviceCallback(mDeviceCallback);
+ mAudioTrack->addAudioDeviceCallback(this);
// Update performance mode based on the actual stream flags.
// For example, if the sample rate is not allowed then you won't get a FAST track.
@@ -240,11 +233,11 @@
setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
- // Log warning if we did not get what we asked for.
- ALOGW_IF(actualFlags != flags,
+ // Log if we did not get what we asked for.
+ ALOGD_IF(actualFlags != flags,
"open() flags changed from 0x%08X to 0x%08X",
flags, actualFlags);
- ALOGW_IF(actualPerformanceMode != perfMode,
+ ALOGD_IF(actualPerformanceMode != perfMode,
"open() perfMode changed from %d to %d",
perfMode, actualPerformanceMode);
@@ -253,19 +246,26 @@
aaudio_result_t AudioStreamTrack::release_l() {
if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
- mAudioTrack->removeAudioDeviceCallback(mDeviceCallback);
- logBufferState();
- // TODO Investigate why clear() causes a hang in test_various.cpp
- // if I call close() from a data callback.
- // But the same thing in AudioRecord is OK!
- // mAudioTrack.clear();
- mFixedBlockReader.close();
+ status_t err = mAudioTrack->removeAudioDeviceCallback(this);
+ ALOGE_IF(err, "%s() removeAudioDeviceCallback returned %d", __func__, err);
+ logReleaseBufferState();
+ // Data callbacks may still be running!
return AudioStream::release_l();
} else {
return AAUDIO_OK; // already released
}
}
+void AudioStreamTrack::close_l() {
+ // Stop callbacks before deleting mFixedBlockReader memory.
+ mAudioTrack.clear();
+ // Do not close mFixedBlockReader because a data callback
+ // thread might still be running if someone else has a reference
+ // to mAudioTrack.
+ // It has a unique_ptr to its buffer so it will clean up by itself.
+ AudioStream::close_l();
+}
+
void AudioStreamTrack::processCallback(int event, void *info) {
switch (event) {
@@ -281,8 +281,8 @@
|| mAudioTrack->format() != getFormat()
|| mAudioTrack->getSampleRate() != getSampleRate()
|| mAudioTrack->getRoutedDeviceId() != getDeviceId()
- || getBufferCapacity() != mInitialBufferCapacity
- || getFramesPerBurst() != mInitialFramesPerBurst) {
+ || getBufferCapacityFromDevice() != getBufferCapacity()
+ || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
processCallbackCommon(AAUDIO_CALLBACK_OPERATION_DISCONNECTED, info);
}
break;
@@ -293,7 +293,7 @@
return;
}
-aaudio_result_t AudioStreamTrack::requestStart() {
+aaudio_result_t AudioStreamTrack::requestStart_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("requestStart() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
@@ -307,16 +307,20 @@
// Enable callback before starting AudioTrack to avoid shutting
// down because of a race condition.
mCallbackEnabled.store(true);
+ aaudio_stream_state_t originalState = getState();
+ // Set before starting the callback so that we are in the correct state
+ // before updateStateMachine() can be called by the callback.
+ setState(AAUDIO_STREAM_STATE_STARTING);
err = mAudioTrack->start();
if (err != OK) {
+ mCallbackEnabled.store(false);
+ setState(originalState);
return AAudioConvert_androidToAAudioResult(err);
- } else {
- setState(AAUDIO_STREAM_STATE_STARTING);
}
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::requestPause() {
+aaudio_result_t AudioStreamTrack::requestPause_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -332,7 +336,7 @@
return checkForDisconnectRequest(false);
}
-aaudio_result_t AudioStreamTrack::requestFlush() {
+aaudio_result_t AudioStreamTrack::requestFlush_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -346,7 +350,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::requestStop() {
+aaudio_result_t AudioStreamTrack::requestStop_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -467,7 +471,7 @@
return static_cast<int32_t>(mAudioTrack->getBufferSizeInFrames());
}
-int32_t AudioStreamTrack::getBufferCapacity() const
+int32_t AudioStreamTrack::getBufferCapacityFromDevice() const
{
return static_cast<int32_t>(mAudioTrack->frameCount());
}
@@ -477,8 +481,7 @@
return static_cast<int32_t>(mAudioTrack->getUnderrunCount());
}
-int32_t AudioStreamTrack::getFramesPerBurst() const
-{
+int32_t AudioStreamTrack::getFramesPerBurstFromDevice() const {
return static_cast<int32_t>(mAudioTrack->getNotificationPeriodInFrames());
}
@@ -555,7 +558,7 @@
if (status < 0) { // a non-negative value is the volume shaper id.
ALOGE("applyVolumeShaper() failed with status %d", status);
}
- return binder::Status::fromStatusT(status);
+ return aidl_utils::binderStatusFromStatusT(status);
} else {
ALOGD("applyVolumeShaper()"
" no AudioTrack for volume control from IPlayer");
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 93a1ff4..f604871 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -42,12 +42,15 @@
aaudio_result_t open(const AudioStreamBuilder & builder) override;
aaudio_result_t release_l() override;
+ void close_l() override;
- aaudio_result_t requestStart() override;
- aaudio_result_t requestPause() override;
- aaudio_result_t requestFlush() override;
- aaudio_result_t requestStop() override;
+protected:
+ aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t requestPause_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+public:
bool isFlushSupported() const override {
// Only implement FLUSH for OUTPUT streams.
return true;
@@ -68,8 +71,6 @@
aaudio_result_t setBufferSize(int32_t requestedFrames) override;
int32_t getBufferSize() const override;
- int32_t getBufferCapacity() const override;
- int32_t getFramesPerBurst()const override;
int32_t getXRunCount() const override;
int64_t getFramesRead() override;
@@ -95,6 +96,11 @@
const android::media::VolumeShaper::Operation& operation) override;
#endif
+protected:
+
+ int32_t getFramesPerBurstFromDevice() const override;
+ int32_t getBufferCapacityFromDevice() const override;
+
private:
android::sp<android::AudioTrack> mAudioTrack;
@@ -104,10 +110,6 @@
// TODO add 64-bit position reporting to AudioTrack and use it.
aaudio_wrapping_frames_t mPositionWhenPausing = 0;
-
- // initial AudioTrack frame count and notification period
- int32_t mInitialBufferCapacity = 0;
- int32_t mInitialFramesPerBurst = 0;
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 9007b10..d795725 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,7 +27,7 @@
#include "core/AudioGlobal.h"
#include <aaudio/AAudioTesting.h>
#include <math.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
#include <assert.h>
#include "utility/AAudioUtilities.h"
@@ -134,6 +134,12 @@
case AAUDIO_FORMAT_PCM_FLOAT:
androidFormat = AUDIO_FORMAT_PCM_FLOAT;
break;
+ case AAUDIO_FORMAT_PCM_I24_PACKED:
+ androidFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ break;
+ case AAUDIO_FORMAT_PCM_I32:
+ androidFormat = AUDIO_FORMAT_PCM_32_BIT;
+ break;
default:
androidFormat = AUDIO_FORMAT_INVALID;
ALOGE("%s() 0x%08X unrecognized", __func__, aaudioFormat);
@@ -154,6 +160,12 @@
case AUDIO_FORMAT_PCM_FLOAT:
aaudioFormat = AAUDIO_FORMAT_PCM_FLOAT;
break;
+ case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+ aaudioFormat = AAUDIO_FORMAT_PCM_I24_PACKED;
+ break;
+ case AUDIO_FORMAT_PCM_32_BIT:
+ aaudioFormat = AAUDIO_FORMAT_PCM_I32;
+ break;
default:
aaudioFormat = AAUDIO_FORMAT_INVALID;
ALOGE("%s() 0x%08X unrecognized", __func__, androidFormat);
@@ -231,7 +243,8 @@
case AAUDIO_ALLOW_CAPTURE_BY_SYSTEM:
return AUDIO_FLAG_NO_MEDIA_PROJECTION;
case AAUDIO_ALLOW_CAPTURE_BY_NONE:
- return AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+ return static_cast<audio_flags_mask_t>(
+ AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE);
default:
ALOGE("%s() 0x%08X unrecognized", __func__, policy);
return AUDIO_FLAG_NONE; //
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index a6e5f70..95d6543 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -11,10 +11,12 @@
defaults: ["libaaudio_tests_defaults"],
srcs: ["test_marshalling.cpp"],
shared_libs: [
+ "aaudio-aidl-cpp",
"libaaudio_internal",
"libbinder",
"libcutils",
"libutils",
+ "shared-file-region-aidl-unstable-cpp",
],
}
@@ -233,6 +235,7 @@
srcs: ["test_steal_exclusive.cpp"],
shared_libs: [
"libaaudio",
+ "liblog",
"libbinder",
"libcutils",
"libutils",
diff --git a/media/libaaudio/tests/test_aaudio_monkey.cpp b/media/libaaudio/tests/test_aaudio_monkey.cpp
index be54835..cc29678 100644
--- a/media/libaaudio/tests/test_aaudio_monkey.cpp
+++ b/media/libaaudio/tests/test_aaudio_monkey.cpp
@@ -46,11 +46,10 @@
int32_t numFrames);
void AAudioMonkeyErrorCallbackProc(
- AAudioStream *stream __unused,
- void *userData __unused,
- aaudio_result_t error) {
- printf("Error Callback, error: %d\n",(int)error);
-}
+ AAudioStream * /* stream */,
+ void *userData,
+ aaudio_result_t error);
+
// This function is not thread safe. Only use this from a single thread.
double nextRandomDouble() {
@@ -99,6 +98,10 @@
aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
aaudio_result_t result = AAudioStream_waitForStateChange(getStream(),
AAUDIO_STREAM_STATE_UNKNOWN, &state, 0);
+ if (result == AAUDIO_ERROR_DISCONNECTED) {
+ printf("WARNING - AAudioStream_waitForStateChange returned DISCONNECTED\n");
+ return true; // OK
+ }
if (result != AAUDIO_OK) {
printf("ERROR - AAudioStream_waitForStateChange returned %d\n", result);
return false;
@@ -114,7 +117,7 @@
(unsigned long long) framesRead,
xRuns);
- if (framesWritten < framesRead) {
+ if (state != AAUDIO_STREAM_STATE_STARTING && framesWritten < framesRead) {
printf("WARNING - UNDERFLOW - diff = %d !!!!!!!!!!!!\n",
(int) (framesWritten - framesRead));
}
@@ -132,8 +135,23 @@
return -1;
}
+ // update and query stream state
+ aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ state = AAudioStream_getState(getStream());
+ if (state < 0) {
+ printf("ERROR - AAudioStream_getState returned %d\n", state);
+ return state;
+ }
+
+ if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ printf("#%d, Closing disconnected stream.\n", getIndex());
+ result = close();
+ return result;
+ }
+
double dice = nextRandomDouble();
// Select an action based on a weighted probability.
+ printf(" "); // indent action
if (dice < PROB_START) {
printf("start\n");
result = AAudioStream_requestStart(getStream());
@@ -200,6 +218,10 @@
return AAUDIO_CALLBACK_RESULT_CONTINUE;
}
+ int getIndex() const {
+ return mIndex;
+ }
+
private:
const AAudioArgsParser *mArgParser;
const int mIndex;
@@ -223,6 +245,13 @@
return monkey->renderAudio(stream, audioData, numFrames);
}
+void AAudioMonkeyErrorCallbackProc(
+ AAudioStream * /* stream */,
+ void *userData,
+ aaudio_result_t error) {
+ AAudioMonkey *monkey = (AAudioMonkey *) userData;
+ printf("#%d, Error Callback, error: %d\n", monkey->getIndex(), (int)error);
+}
static void usage() {
AAudioArgsParser::usage();
diff --git a/media/libaaudio/tests/test_atomic_fifo.cpp b/media/libaaudio/tests/test_atomic_fifo.cpp
index 130ef43..4dbb219 100644
--- a/media/libaaudio/tests/test_atomic_fifo.cpp
+++ b/media/libaaudio/tests/test_atomic_fifo.cpp
@@ -26,6 +26,7 @@
using android::fifo_counter_t;
using android::FifoController;
using android::FifoBuffer;
+using android::FifoBufferIndirect;
using android::WrappingBuffer;
TEST(test_fifo_controller, fifo_indices) {
@@ -325,7 +326,7 @@
verifyStorageIntegrity();
}
- FifoBuffer mFifoBuffer;
+ FifoBufferIndirect mFifoBuffer;
fifo_frames_t mNextWriteIndex = 0;
fifo_frames_t mNextVerifyIndex = 0;
fifo_frames_t mThreshold;
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index c51fbce..49213dc 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -33,6 +33,29 @@
using namespace android;
using namespace aaudio;
+template<typename T>
+T copy(const T& object) {
+ return T(object);
+}
+
+template<>
+SharedMemoryParcelable copy<SharedMemoryParcelable>(const SharedMemoryParcelable& object) {
+ return object.dup();
+}
+
+template<typename T>
+void writeToParcel(const T& object, Parcel* parcel) {
+ copy(object).parcelable().writeToParcel(parcel);
+}
+
+template<typename T>
+T readFromParcel(const Parcel& parcel) {
+ using ParcelType = std::decay_t<decltype(std::declval<T>().parcelable())>;
+ ParcelType parcelable;
+ parcelable.readFromParcel(&parcel);
+ return T(std::move(parcelable));
+}
+
// Test adding one value.
TEST(test_marshalling, aaudio_one_read_write) {
Parcel parcel;
@@ -48,7 +71,6 @@
// Test SharedMemoryParcel.
TEST(test_marshalling, aaudio_shared_memory) {
SharedMemoryParcelable sharedMemoryA;
- SharedMemoryParcelable sharedMemoryB;
const size_t memSizeBytes = 840;
unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
ASSERT_LE(0, fd);
@@ -63,10 +85,10 @@
Parcel parcel;
size_t pos = parcel.dataPosition();
- sharedMemoryA.writeToParcel(&parcel);
+ writeToParcel(sharedMemoryA, &parcel);
parcel.setDataPosition(pos);
- sharedMemoryB.readFromParcel(&parcel);
+ SharedMemoryParcelable sharedMemoryB = readFromParcel<SharedMemoryParcelable>(parcel);
EXPECT_EQ(sharedMemoryA.getSizeInBytes(), sharedMemoryB.getSizeInBytes());
// should see same value at two different addresses
@@ -81,7 +103,6 @@
TEST(test_marshalling, aaudio_shared_region) {
SharedMemoryParcelable sharedMemories[2];
SharedRegionParcelable sharedRegionA;
- SharedRegionParcelable sharedRegionB;
const size_t memSizeBytes = 840;
unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
ASSERT_LE(0, fd);
@@ -97,10 +118,10 @@
Parcel parcel;
size_t pos = parcel.dataPosition();
- sharedRegionA.writeToParcel(&parcel);
+ writeToParcel(sharedRegionA, &parcel);
parcel.setDataPosition(pos);
- sharedRegionB.readFromParcel(&parcel);
+ SharedRegionParcelable sharedRegionB = readFromParcel<SharedRegionParcelable>(parcel);
// should see same value
void *region2;
@@ -113,7 +134,6 @@
TEST(test_marshalling, aaudio_ring_buffer_parcelable) {
SharedMemoryParcelable sharedMemories[2];
RingBufferParcelable ringBufferA;
- RingBufferParcelable ringBufferB;
const size_t bytesPerFrame = 8;
const size_t framesPerBurst = 32;
@@ -147,11 +167,11 @@
// write A to parcel
Parcel parcel;
size_t pos = parcel.dataPosition();
- ringBufferA.writeToParcel(&parcel);
+ writeToParcel(ringBufferA, &parcel);
// read B from parcel
parcel.setDataPosition(pos);
- ringBufferB.readFromParcel(&parcel);
+ RingBufferParcelable ringBufferB = readFromParcel<RingBufferParcelable>(parcel);
RingBufferDescriptor descriptorB;
EXPECT_EQ(AAUDIO_OK, ringBufferB.resolve(sharedMemories, &descriptorB));
diff --git a/media/libaaudio/tests/test_steal_exclusive.cpp b/media/libaaudio/tests/test_steal_exclusive.cpp
index 2a05910..05c560d 100644
--- a/media/libaaudio/tests/test_steal_exclusive.cpp
+++ b/media/libaaudio/tests/test_steal_exclusive.cpp
@@ -47,137 +47,271 @@
*/
#include <atomic>
+#include <mutex>
#include <stdio.h>
#include <thread>
#include <unistd.h>
+#include <android/log.h>
+
#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
#define DEFAULT_TIMEOUT_NANOS ((int64_t)1000000000)
#define SOLO_DURATION_MSEC 2000
#define DUET_DURATION_MSEC 8000
#define SLEEP_DURATION_MSEC 500
+#define MODULE_NAME "stealAudio"
+#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, MODULE_NAME, __VA_ARGS__)
+
static const char * s_sharingModeToText(aaudio_sharing_mode_t mode) {
return (mode == AAUDIO_SHARING_MODE_EXCLUSIVE) ? "EXCLUSIVE"
: ((mode == AAUDIO_SHARING_MODE_SHARED) ? "SHARED"
: AAudio_convertResultToText(mode));
}
+static const char * s_performanceModeToText(aaudio_performance_mode_t mode) {
+ return (mode == AAUDIO_PERFORMANCE_MODE_LOW_LATENCY) ? "LOWLAT"
+ : ((mode == AAUDIO_PERFORMANCE_MODE_NONE) ? "NONE"
+ : AAudio_convertResultToText(mode));
+}
+
+static aaudio_data_callback_result_t s_myDataCallbackProc(
+ AAudioStream * /* stream */,
+ void *userData,
+ void *audioData,
+ int32_t numFrames);
+
static void s_myErrorCallbackProc(
AAudioStream *stream,
void *userData,
aaudio_result_t error);
-struct AudioEngine {
- AAudioStream *stream = nullptr;
- std::thread *thread = nullptr;
- aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT;
+class AudioEngine {
+public:
+
+ AudioEngine(const char *name) {
+ mName = name;
+ }
// These counters are read and written by the callback and the main thread.
- std::atomic<int32_t> framesRead{};
std::atomic<int32_t> framesCalled{};
std::atomic<int32_t> callbackCount{};
+ std::atomic<aaudio_sharing_mode_t> sharingMode{};
+ std::atomic<aaudio_performance_mode_t> performanceMode{};
+ std::atomic<bool> isMMap{false};
+ void setMaxRetries(int maxRetries) {
+ mMaxRetries = maxRetries;
+ }
+
+ void setOpenDelayMillis(int openDelayMillis) {
+ mOpenDelayMillis = openDelayMillis;
+ }
+
+ void restartStream() {
+ int retriesLeft = mMaxRetries;
+ aaudio_result_t result;
+ do {
+ closeAudioStream();
+ if (mOpenDelayMillis) usleep(mOpenDelayMillis * 1000);
+ openAudioStream(mDirection, mRequestedSharingMode);
+ // It is possible for the stream to be disconnected, or stolen between the time
+ // it is opened and when it is started. If that happens then try again.
+ // If it was stolen then it should succeed the second time because there will already be
+ // a SHARED stream, which will not get stolen.
+ result = AAudioStream_requestStart(mStream);
+ printf("%s: AAudioStream_requestStart() returns %s\n",
+ mName.c_str(),
+ AAudio_convertResultToText(result));
+ } while (retriesLeft-- > 0 && result != AAUDIO_OK);
+ }
+
+ aaudio_data_callback_result_t onAudioReady(
+ void * /*audioData */,
+ int32_t numFrames) {
+ callbackCount++;
+ framesCalled += numFrames;
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+ }
+
+ aaudio_result_t openAudioStream(aaudio_direction_t direction,
+ aaudio_sharing_mode_t requestedSharingMode) {
+ std::lock_guard<std::mutex> lock(mLock);
+
+ AAudioStreamBuilder *builder = nullptr;
+ mDirection = direction;
+ mRequestedSharingMode = requestedSharingMode;
+
+ // Use an AAudioStreamBuilder to contain requested parameters.
+ aaudio_result_t result = AAudio_createStreamBuilder(&builder);
+ if (result != AAUDIO_OK) {
+ printf("AAudio_createStreamBuilder returned %s",
+ AAudio_convertResultToText(result));
+ return result;
+ }
+
+ // Request stream properties.
+ AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_FLOAT);
+ AAudioStreamBuilder_setPerformanceMode(builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+ AAudioStreamBuilder_setSharingMode(builder, mRequestedSharingMode);
+ AAudioStreamBuilder_setDirection(builder, direction);
+ AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+ AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+ // Create an AAudioStream using the Builder.
+ result = AAudioStreamBuilder_openStream(builder, &mStream);
+ AAudioStreamBuilder_delete(builder);
+ builder = nullptr;
+ if (result != AAUDIO_OK) {
+ printf("AAudioStreamBuilder_openStream returned %s",
+ AAudio_convertResultToText(result));
+ }
+
+ // See what kind of stream we actually opened.
+ int32_t deviceId = AAudioStream_getDeviceId(mStream);
+ sharingMode = AAudioStream_getSharingMode(mStream);
+ performanceMode = AAudioStream_getPerformanceMode(mStream);
+ isMMap = AAudioStream_isMMapUsed(mStream);
+ printf("%s: opened: deviceId = %3d, sharingMode = %s, perf = %s, %s --------\n",
+ mName.c_str(),
+ deviceId,
+ s_sharingModeToText(sharingMode),
+ s_performanceModeToText(performanceMode),
+ (isMMap ? "MMAP" : "Legacy")
+ );
+
+ return result;
+ }
+
+ aaudio_result_t closeAudioStream() {
+ std::lock_guard<std::mutex> lock(mLock);
+ aaudio_result_t result = AAUDIO_OK;
+ if (mStream != nullptr) {
+ result = AAudioStream_close(mStream);
+ if (result != AAUDIO_OK) {
+ printf("AAudioStream_close returned %s\n",
+ AAudio_convertResultToText(result));
+ }
+ mStream = nullptr;
+ }
+ return result;
+ }
+
+ /**
+ * @return 0 is OK, -1 for error
+ */
+ int checkEnginePositions() {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStream == nullptr) return 0;
+
+ const int64_t framesRead = AAudioStream_getFramesRead(mStream);
+ const int64_t framesWritten = AAudioStream_getFramesWritten(mStream);
+ const int32_t delta = (int32_t)(framesWritten - framesRead);
+ printf("%s: playing framesRead = %7d, framesWritten = %7d"
+ ", delta = %4d, framesCalled = %6d, callbackCount = %4d\n",
+ mName.c_str(),
+ (int32_t) framesRead,
+ (int32_t) framesWritten,
+ delta,
+ framesCalled.load(),
+ callbackCount.load()
+ );
+ if (delta > AAudioStream_getBufferCapacityInFrames(mStream)) {
+ printf("ERROR - delta > capacity\n");
+ return -1;
+ }
+ return 0;
+ }
+
+ aaudio_result_t start() {
+ std::lock_guard<std::mutex> lock(mLock);
+ reset();
+ if (mStream == nullptr) return 0;
+ return AAudioStream_requestStart(mStream);
+ }
+
+ aaudio_result_t stop() {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStream == nullptr) return 0;
+ return AAudioStream_requestStop(mStream);
+ }
+
+ bool hasAdvanced() {
+ std::lock_guard<std::mutex> lock(mLock);
+ if (mStream == nullptr) return 0;
+ if (mDirection == AAUDIO_DIRECTION_OUTPUT) {
+ return AAudioStream_getFramesRead(mStream) > 0;
+ } else {
+ return AAudioStream_getFramesWritten(mStream) > 0;
+ }
+ }
+
+ aaudio_result_t verify() {
+ int errorCount = 0;
+ if (hasAdvanced()) {
+ printf("%s: stream is running => PASS\n", mName.c_str());
+ } else {
+ errorCount++;
+ printf("%s: stream should be running => FAIL!!\n", mName.c_str());
+ }
+
+ if (isMMap) {
+ printf("%s: data path is MMAP => PASS\n", mName.c_str());
+ } else {
+ errorCount++;
+ printf("%s: data path is Legacy! => FAIL\n", mName.c_str());
+ }
+
+ // Check for PASS/FAIL
+ if (sharingMode == AAUDIO_SHARING_MODE_SHARED) {
+ printf("%s: mode is SHARED => PASS\n", mName.c_str());
+ } else {
+ errorCount++;
+ printf("%s: modes is EXCLUSIVE => FAIL!!\n", mName.c_str());
+ }
+ return errorCount ? AAUDIO_ERROR_INVALID_FORMAT : AAUDIO_OK;
+ }
+
+private:
void reset() {
- framesRead.store(0);
framesCalled.store(0);
callbackCount.store(0);
}
+
+ AAudioStream *mStream = nullptr;
+ aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
+ aaudio_sharing_mode_t mRequestedSharingMode = AAUDIO_UNSPECIFIED;
+ std::mutex mLock;
+ std::string mName;
+ int mMaxRetries = 1;
+ int mOpenDelayMillis = 0;
};
// Callback function that fills the audio output buffer.
static aaudio_data_callback_result_t s_myDataCallbackProc(
- AAudioStream *stream,
+ AAudioStream * /* stream */,
void *userData,
void *audioData,
int32_t numFrames
) {
- (void) audioData;
- (void) numFrames;
- AudioEngine *engine = (struct AudioEngine *)userData;
- engine->callbackCount++;
-
- engine->framesRead = (int32_t)AAudioStream_getFramesRead(stream);
- engine->framesCalled += numFrames;
- return AAUDIO_CALLBACK_RESULT_CONTINUE;
-}
-
-static aaudio_result_t s_OpenAudioStream(struct AudioEngine *engine,
- aaudio_direction_t direction) {
- AAudioStreamBuilder *builder = nullptr;
- engine->direction = direction;
-
- // Use an AAudioStreamBuilder to contain requested parameters.
- aaudio_result_t result = AAudio_createStreamBuilder(&builder);
- if (result != AAUDIO_OK) {
- printf("AAudio_createStreamBuilder returned %s",
- AAudio_convertResultToText(result));
- return result;
- }
-
- // Request stream properties.
- AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_FLOAT);
- AAudioStreamBuilder_setPerformanceMode(builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
- AAudioStreamBuilder_setSharingMode(builder, AAUDIO_SHARING_MODE_EXCLUSIVE);
- AAudioStreamBuilder_setDirection(builder, direction);
- AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, engine);
- AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, engine);
-
- // Create an AAudioStream using the Builder.
- result = AAudioStreamBuilder_openStream(builder, &engine->stream);
- AAudioStreamBuilder_delete(builder);
- builder = nullptr;
- if (result != AAUDIO_OK) {
- printf("AAudioStreamBuilder_openStream returned %s",
- AAudio_convertResultToText(result));
- }
-
- // See see what kind of stream we actually opened.
- int32_t deviceId = AAudioStream_getDeviceId(engine->stream);
- aaudio_sharing_mode_t actualSharingMode = AAudioStream_getSharingMode(engine->stream);
- printf("-------- opened: deviceId = %3d, actualSharingMode = %s\n",
- deviceId,
- s_sharingModeToText(actualSharingMode));
-
- return result;
-}
-
-static aaudio_result_t s_CloseAudioStream(struct AudioEngine *engine) {
- aaudio_result_t result = AAUDIO_OK;
- if (engine->stream != nullptr) {
- result = AAudioStream_close(engine->stream);
- if (result != AAUDIO_OK) {
- printf("AAudioStream_close returned %s\n",
- AAudio_convertResultToText(result));
- }
- engine->stream = nullptr;
- }
- return result;
+ AudioEngine *engine = (AudioEngine *)userData;
+ return engine->onAudioReady(audioData, numFrames);
}
static void s_myRestartStreamProc(void *userData) {
+ LOGI("%s() called", __func__);
printf("%s() - restart in separate thread\n", __func__);
AudioEngine *engine = (AudioEngine *) userData;
- int retriesLeft = 1;
- aaudio_result_t result;
- do {
- s_CloseAudioStream(engine);
- s_OpenAudioStream(engine, engine->direction);
- // It is possible for the stream to be disconnected, or stolen between the time
- // it is opened and when it is started. If that happens then try again.
- // If it was stolen then it should succeed the second time because there will already be
- // a SHARED stream, which will not get stolen.
- result = AAudioStream_requestStart(engine->stream);
- printf("%s() - AAudioStream_requestStart() returns %s\n", __func__,
- AAudio_convertResultToText(result));
- } while (retriesLeft-- > 0 && result != AAUDIO_OK);
+ engine->restartStream();
}
static void s_myErrorCallbackProc(
AAudioStream * /* stream */,
void *userData,
aaudio_result_t error) {
+ LOGI("%s() called", __func__);
printf("%s() - error = %s\n", __func__, AAudio_convertResultToText(error));
// Handle error on a separate thread.
std::thread t(s_myRestartStreamProc, userData);
@@ -185,48 +319,28 @@
}
static void s_usage() {
- printf("test_steal_exclusive [-i]\n");
+ printf("test_steal_exclusive [-i] [-r{maxRetries}] [-d{delay}] -s\n");
printf(" -i direction INPUT, otherwise OUTPUT\n");
+ printf(" -d delay open by milliseconds, default = 0\n");
+ printf(" -r max retries in the error callback, default = 1\n");
+ printf(" -s try to open in SHARED mode\n");
}
-/**
- * @return 0 is OK, -1 for error
- */
-static int s_checkEnginePositions(AudioEngine *engine) {
- if (engine->stream == nullptr) return 0; // race condition with onError procs!
-
- const int64_t framesRead = AAudioStream_getFramesRead(engine->stream);
- const int64_t framesWritten = AAudioStream_getFramesWritten(engine->stream);
- const int32_t delta = (int32_t)(framesWritten - framesRead);
- printf("playing framesRead = %7d, framesWritten = %7d"
- ", delta = %4d, framesCalled = %6d, callbackCount = %4d\n",
- (int32_t) framesRead,
- (int32_t) framesWritten,
- delta,
- engine->framesCalled.load(),
- engine->callbackCount.load()
- );
- if (delta > AAudioStream_getBufferCapacityInFrames(engine->stream)) {
- printf("ERROR - delta > capacity\n");
- return -1;
- }
- return 0;
-}
-
-int main(int argc, char **argv) {
- (void) argc;
- (void) argv;
- struct AudioEngine victim;
- struct AudioEngine thief;
+int main(int argc, char ** argv) {
+ AudioEngine victim("victim");
+ AudioEngine thief("thief");
aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT;
aaudio_result_t result = AAUDIO_OK;
int errorCount = 0;
+ int maxRetries = 1;
+ int openDelayMillis = 0;
+ aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
// Make printf print immediately so that debug info is not stuck
// in a buffer if we hang or crash.
setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
- printf("Test Stealing an EXCLUSIVE stream V1.0\n");
+ printf("Test interaction between streams V1.1\n");
printf("\n");
for (int i = 1; i < argc; i++) {
@@ -234,9 +348,18 @@
if (arg[0] == '-') {
char option = arg[1];
switch (option) {
+ case 'd':
+ openDelayMillis = atoi(&arg[2]);
+ break;
case 'i':
direction = AAUDIO_DIRECTION_INPUT;
break;
+ case 'r':
+ maxRetries = atoi(&arg[2]);
+ break;
+ case 's':
+ requestedSharingMode = AAUDIO_SHARING_MODE_SHARED;
+ break;
default:
s_usage();
exit(EXIT_FAILURE);
@@ -249,16 +372,35 @@
}
}
- result = s_OpenAudioStream(&victim, direction);
+ victim.setOpenDelayMillis(openDelayMillis);
+ thief.setOpenDelayMillis(openDelayMillis);
+ victim.setMaxRetries(maxRetries);
+ thief.setMaxRetries(maxRetries);
+
+ result = victim.openAudioStream(direction, requestedSharingMode);
if (result != AAUDIO_OK) {
printf("s_OpenAudioStream victim returned %s\n",
AAudio_convertResultToText(result));
errorCount++;
}
- victim.reset();
+
+ if (victim.sharingMode == requestedSharingMode) {
+ printf("Victim modes is %s => OK\n", s_sharingModeToText(requestedSharingMode));
+ } else {
+ printf("Victim modes should be %s => test not valid!\n",
+ s_sharingModeToText(requestedSharingMode));
+ goto onerror;
+ }
+
+ if (victim.isMMap) {
+ printf("Victim data path is MMAP => OK\n");
+ } else {
+ printf("Victim data path is Legacy! => test not valid\n");
+ goto onerror;
+ }
// Start stream.
- result = AAudioStream_requestStart(victim.stream);
+ result = victim.start();
printf("AAudioStream_requestStart(VICTIM) returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
if (result != AAUDIO_OK) {
errorCount++;
@@ -267,77 +409,69 @@
if (result == AAUDIO_OK) {
const int watchLoops = SOLO_DURATION_MSEC / SLEEP_DURATION_MSEC;
for (int i = watchLoops; i > 0; i--) {
- errorCount += s_checkEnginePositions(&victim) ? 1 : 0;
+ errorCount += victim.checkEnginePositions() ? 1 : 0;
usleep(SLEEP_DURATION_MSEC * 1000);
}
}
- printf("Try to start the THIEF stream that may steal the VICTIM MMAP resource -----\n");
- result = s_OpenAudioStream(&thief, direction);
+ printf("Trying to start the THIEF stream, which may steal the VICTIM MMAP resource -----\n");
+ result = thief.openAudioStream(direction, requestedSharingMode);
if (result != AAUDIO_OK) {
printf("s_OpenAudioStream victim returned %s\n",
AAudio_convertResultToText(result));
errorCount++;
}
- thief.reset();
// Start stream.
- result = AAudioStream_requestStart(thief.stream);
+ result = thief.start();
printf("AAudioStream_requestStart(THIEF) returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
if (result != AAUDIO_OK) {
errorCount++;
}
- printf("You might enjoy plugging in a headset now to see what happens...\n");
+
+ // Give stream time to advance.
+ usleep(SLEEP_DURATION_MSEC * 1000);
+
+ if (victim.verify()) {
+ errorCount++;
+ goto onerror;
+ }
+ if (thief.verify()) {
+ errorCount++;
+ goto onerror;
+ }
+
+ LOGI("Both streams running. Ask user to plug in headset. ====");
+ printf("\n====\nPlease PLUG IN A HEADSET now!\n====\n\n");
if (result == AAUDIO_OK) {
const int watchLoops = DUET_DURATION_MSEC / SLEEP_DURATION_MSEC;
for (int i = watchLoops; i > 0; i--) {
- printf("victim: ");
- errorCount += s_checkEnginePositions(&victim) ? 1 : 0;
- printf(" thief: ");
- errorCount += s_checkEnginePositions(&thief) ? 1 : 0;
+ errorCount += victim.checkEnginePositions() ? 1 : 0;
+ errorCount += thief.checkEnginePositions() ? 1 : 0;
usleep(SLEEP_DURATION_MSEC * 1000);
}
}
- // Check for PASS/FAIL
- aaudio_sharing_mode_t victimSharingMode = AAudioStream_getSharingMode(victim.stream);
- aaudio_sharing_mode_t thiefSharingMode = AAudioStream_getSharingMode(thief.stream);
- printf("victimSharingMode = %s, thiefSharingMode = %s, - ",
- s_sharingModeToText(victimSharingMode),
- s_sharingModeToText(thiefSharingMode));
- if ((victimSharingMode == AAUDIO_SHARING_MODE_SHARED)
- && (thiefSharingMode == AAUDIO_SHARING_MODE_SHARED)) {
- printf("Both modes are SHARED => PASS\n");
- } else {
- errorCount++;
- printf("Both modes should be SHARED => FAIL!!\n");
- }
+ errorCount += victim.verify() ? 1 : 0;
+ errorCount += thief.verify() ? 1 : 0;
- const int64_t victimFramesRead = AAudioStream_getFramesRead(victim.stream);
- const int64_t thiefFramesRead = AAudioStream_getFramesRead(thief.stream);
- printf("victimFramesRead = %d, thiefFramesRead = %d, - ",
- (int)victimFramesRead, (int)thiefFramesRead);
- if (victimFramesRead > 0 && thiefFramesRead > 0) {
- printf("Both streams are running => PASS\n");
- } else {
- errorCount++;
- printf("Both streams should be running => FAIL!!\n");
- }
-
- result = AAudioStream_requestStop(victim.stream);
+ result = victim.stop();
printf("AAudioStream_requestStop() returned %d <<<<<<<<<<<<<<<<<<<<<\n", result);
if (result != AAUDIO_OK) {
+ printf("stop result = %d = %s\n", result, AAudio_convertResultToText(result));
errorCount++;
}
- result = AAudioStream_requestStop(thief.stream);
+ result = thief.stop();
printf("AAudioStream_requestStop() returned %d <<<<<<<<<<<<<<<<<<<<<\n", result);
if (result != AAUDIO_OK) {
+ printf("stop result = %d = %s\n", result, AAudio_convertResultToText(result));
errorCount++;
}
- s_CloseAudioStream(&victim);
- s_CloseAudioStream(&thief);
+onerror:
+ victim.closeAudioStream();
+ thief.closeAudioStream();
printf("aaudio result = %d = %s\n", result, AAudio_convertResultToText(result));
printf("test %s\n", errorCount ? "FAILED" : "PASSED");
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
index 2397b6c..982ff4a 100644
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ b/media/libaaudio/tests/test_stop_hang.cpp
@@ -45,7 +45,7 @@
{
// Will block if the thread is running.
// This mutex is used to close() immediately after the callback returns
- // and before the requestStop() is called.
+ // and before the requestStop_l() is called.
std::lock_guard<std::mutex> lock(doneLock);
if (done) break;
}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index 1c26615..cbf863f 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -33,6 +33,11 @@
void *audioData,
int32_t numFrames
) {
+ aaudio_direction_t direction = AAudioStream_getDirection(stream);
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+ }
+ // Check to make sure the buffer is initialized to all zeros.
int channels = AAudioStream_getChannelCount(stream);
int numSamples = channels * numFrames;
bool allZeros = true;
@@ -48,7 +53,8 @@
constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
- aaudio_sharing_mode_t sharingMode) {
+ aaudio_sharing_mode_t sharingMode,
+ aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
AAudioStreamBuilder* aaudioBuilder = nullptr;
AAudioStream* aaudioStream = nullptr;
@@ -61,6 +67,7 @@
nullptr);
AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
+ AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
// Create an AAudioStream using the Builder.
@@ -88,14 +95,28 @@
// We should NOT be able to start or change a stream after it has been released.
EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStart(aaudioStream));
EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
- EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestPause(aaudioStream));
+ // Pause is only implemented for OUTPUT.
+ if (direction == AAUDIO_DIRECTION_OUTPUT) {
+ EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE,
+ AAudioStream_requestPause(aaudioStream));
+ }
EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStop(aaudioStream));
EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
// Does this crash?
- EXPECT_LT(0, AAudioStream_getFramesRead(aaudioStream));
- EXPECT_LT(0, AAudioStream_getFramesWritten(aaudioStream));
+ EXPECT_GT(AAudioStream_getFramesRead(aaudioStream), 0);
+ EXPECT_GT(AAudioStream_getFramesWritten(aaudioStream), 0);
+ EXPECT_GT(AAudioStream_getFramesPerBurst(aaudioStream), 0);
+ EXPECT_GE(AAudioStream_getXRunCount(aaudioStream), 0);
+ EXPECT_GT(AAudioStream_getBufferCapacityInFrames(aaudioStream), 0);
+ EXPECT_GT(AAudioStream_getBufferSizeInFrames(aaudioStream), 0);
+
+ int64_t timestampFrames = 0;
+ int64_t timestampNanos = 0;
+ aaudio_result_t result = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+ &timestampFrames, &timestampNanos);
+ EXPECT_TRUE(result == AAUDIO_ERROR_INVALID_STATE || result == AAUDIO_ERROR_UNIMPLEMENTED);
// Verify Closing State. Does this crash?
aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
@@ -107,24 +128,46 @@
EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
}
-TEST(test_various, aaudio_release_close_none) {
+TEST(test_various, aaudio_release_close_none_output) {
checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
- AAUDIO_SHARING_MODE_SHARED);
+ AAUDIO_SHARING_MODE_SHARED,
+ AAUDIO_DIRECTION_OUTPUT);
// No EXCLUSIVE streams with MODE_NONE.
}
-TEST(test_various, aaudio_release_close_low_shared) {
- checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
- AAUDIO_SHARING_MODE_SHARED);
+TEST(test_various, aaudio_release_close_none_input) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
+ AAUDIO_SHARING_MODE_SHARED,
+ AAUDIO_DIRECTION_INPUT);
+ // No EXCLUSIVE streams with MODE_NONE.
}
-TEST(test_various, aaudio_release_close_low_exclusive) {
+TEST(test_various, aaudio_release_close_low_shared_output) {
checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
- AAUDIO_SHARING_MODE_EXCLUSIVE);
+ AAUDIO_SHARING_MODE_SHARED,
+ AAUDIO_DIRECTION_OUTPUT);
+}
+
+// LOW_LATENCY + SHARED input: release-then-close must succeed for capture streams too.
+TEST(test_various, aaudio_release_close_low_shared_input) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_SHARED,
+ AAUDIO_DIRECTION_INPUT);
+}
+
+// LOW_LATENCY + EXCLUSIVE output: EXCLUSIVE is only attempted with LOW_LATENCY.
+TEST(test_various, aaudio_release_close_low_exclusive_output) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE,
+ AAUDIO_DIRECTION_OUTPUT);
+}
+
+// LOW_LATENCY + EXCLUSIVE input variant of the same release-then-close check.
+TEST(test_various, aaudio_release_close_low_exclusive_input) {
+ checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+ AAUDIO_SHARING_MODE_EXCLUSIVE,
+ AAUDIO_DIRECTION_INPUT);
}
enum FunctionToCall {
- CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH
+ CALL_START, CALL_STOP, CALL_PAUSE, CALL_FLUSH, CALL_RELEASE
};
void checkStateTransition(aaudio_performance_mode_t perfMode,
@@ -177,11 +220,27 @@
} else if (originalState == AAUDIO_STREAM_STATE_PAUSED) {
ASSERT_EQ(AAUDIO_OK, AAudioStream_requestPause(aaudioStream));
inputState = AAUDIO_STREAM_STATE_PAUSING;
+ } else if (originalState == AAUDIO_STREAM_STATE_FLUSHING) {
+ ASSERT_EQ(AAUDIO_OK, AAudioStream_requestPause(aaudioStream));
+ // We can only flush() after pause is complete.
+ ASSERT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+ AAUDIO_STREAM_STATE_PAUSING,
+ &state,
+ 1000 * NANOS_PER_MILLISECOND));
+ ASSERT_EQ(AAUDIO_STREAM_STATE_PAUSED, state);
+ ASSERT_EQ(AAUDIO_OK, AAudioStream_requestFlush(aaudioStream));
+ // That will put the stream into the FLUSHING state.
+ // The FLUSHING state will persist until we process functionToCall.
+ // That is because the transition to FLUSHED is caused by the callback,
+ // or by calling write() or waitForStateChange(). But those will not
+ // occur.
+ } else if (originalState == AAUDIO_STREAM_STATE_CLOSING) {
+ ASSERT_EQ(AAUDIO_OK, AAudioStream_release(aaudioStream));
}
}
}
- // Wait until past transitional state.
+ // Wait until we get past the transitional state if requested.
if (inputState != AAUDIO_STREAM_STATE_UNINITIALIZED) {
ASSERT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
inputState,
@@ -208,12 +267,20 @@
EXPECT_EQ(expectedResult, AAudioStream_requestFlush(aaudioStream));
transitionalState = AAUDIO_STREAM_STATE_FLUSHING;
break;
+ case FunctionToCall::CALL_RELEASE:
+ EXPECT_EQ(expectedResult, AAudioStream_release(aaudioStream));
+ // Set to UNINITIALIZED so the waitForStateChange() below will
+ // will return immediately with the current state.
+ transitionalState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+ break;
}
- EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
- transitionalState,
- &state,
- 1000 * NANOS_PER_MILLISECOND));
+ EXPECT_EQ(AAUDIO_OK,
+ AAudioStream_waitForStateChange(aaudioStream,
+ transitionalState,
+ &state,
+ 1000 * NANOS_PER_MILLISECOND));
+
// We should not change state when a function fails.
if (expectedResult != AAUDIO_OK) {
ASSERT_EQ(originalState, expectedState);
@@ -493,6 +560,88 @@
AAUDIO_STREAM_STATE_FLUSHED);
}
+// FLUSHING ================================================================
+// FLUSHING -> start() is legal and should move the stream to STARTED.
+// NOTE: test bodies below are indented to match the rest of this file's
+// test style (cf. the release/close tests above); the previous revision
+// left checkStateTransition() calls flush-left.
+TEST(test_various, aaudio_state_lowlat_flushing_start) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+                         AAUDIO_STREAM_STATE_FLUSHING,
+                         FunctionToCall::CALL_START,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_STARTED);
+}
+
+TEST(test_various, aaudio_state_none_flushing_start) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+                         AAUDIO_STREAM_STATE_FLUSHING,
+                         FunctionToCall::CALL_START,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_STARTED);
+}
+
+// FLUSHING -> release() is legal and should move the stream to CLOSING.
+TEST(test_various, aaudio_state_lowlat_flushing_release) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+                         AAUDIO_STREAM_STATE_FLUSHING,
+                         FunctionToCall::CALL_RELEASE,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_flushing_release) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+                         AAUDIO_STREAM_STATE_FLUSHING,
+                         FunctionToCall::CALL_RELEASE,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+// STARTING -> release() is legal and should move the stream to CLOSING.
+TEST(test_various, aaudio_state_lowlat_starting_release) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+                         AAUDIO_STREAM_STATE_STARTING,
+                         FunctionToCall::CALL_RELEASE,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_starting_release) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+                         AAUDIO_STREAM_STATE_STARTING,
+                         FunctionToCall::CALL_RELEASE,
+                         AAUDIO_OK,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+// CLOSING ================================================================
+// Once released (CLOSING), start() and stop() must fail with INVALID_STATE
+// and must not change the state.
+TEST(test_various, aaudio_state_lowlat_closing_start) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+                         AAUDIO_STREAM_STATE_CLOSING,
+                         FunctionToCall::CALL_START,
+                         AAUDIO_ERROR_INVALID_STATE,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_closing_start) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+                         AAUDIO_STREAM_STATE_CLOSING,
+                         FunctionToCall::CALL_START,
+                         AAUDIO_ERROR_INVALID_STATE,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_lowlat_closing_stop) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+                         AAUDIO_STREAM_STATE_CLOSING,
+                         FunctionToCall::CALL_STOP,
+                         AAUDIO_ERROR_INVALID_STATE,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+TEST(test_various, aaudio_state_none_closing_stop) {
+    checkStateTransition(AAUDIO_PERFORMANCE_MODE_NONE,
+                         AAUDIO_STREAM_STATE_CLOSING,
+                         FunctionToCall::CALL_STOP,
+                         AAUDIO_ERROR_INVALID_STATE,
+                         AAUDIO_STREAM_STATE_CLOSING);
+}
+
+
// ==========================================================================
TEST(test_various, aaudio_set_buffer_size) {
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
new file mode 100644
index 0000000..31c071e
--- /dev/null
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -0,0 +1,2200 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlConversion"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "media/AidlConversion.h"
+
+#include <media/ShmemCompat.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities
+
+namespace android {
+
+using base::unexpected;
+
+namespace {
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_T<T>).
+
+// Primary template: for integral T, IntegralTypeOf<T> is T itself.
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+ using Type = T;
+};
+
+// Partial specialization: for enum T, IntegralTypeOf<T> is the underlying integral type.
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+ using Type = std::underlying_type_t<T>;
+};
+
+// Convenience alias over IntegralTypeOfStruct (see banner comment above).
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+// Maps a bit index to an enum whose enumerators are consecutive indices (0, 1, 2, ...).
+template<typename Enum>
+Enum index2enum_index(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(index);
+}
+
+// Maps a bit index to an enum whose enumerators are single-bit masks (1 << index).
+template<typename Enum>
+Enum index2enum_bitmask(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(1 << index);
+}
+
+// For mask-valued enums: the enum value IS the mask, so this is a plain cast.
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(e);
+}
+
+// For index-valued enums: shift 1 left by the enum's integral value to form the mask.
+// The shift is done in the unsigned integral type of Mask to avoid UB on the sign bit.
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+ << static_cast<int>(e));
+}
+
+// Converts a bitmask bit-by-bit: for each set bit in src, translate the source
+// enum via enumConversion and OR the destination mask bit into the result.
+// Fails (propagates the error) if any set bit has no valid destination mapping.
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+ SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+ const std::function<SrcEnum(int)>& srcIndexToEnum,
+ const std::function<DestMask(DestEnum)>& destEnumToMask) {
+ using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+ using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+ UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+ UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+ int srcBitIndex = 0;
+ while (usrc != 0) {
+ if (usrc & 1) {
+ SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+ DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+ DestMask destMask = destEnumToMask(destEnum);
+ dest |= destMask;
+ }
+ ++srcBitIndex;
+ usrc >>= 1;
+ }
+ return static_cast<DestMask>(dest);
+}
+
+// True if the bit addressed by index-valued enum `index` is set in `mask`.
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+ return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Data-flow direction of an audio port, derived from its role and type.
+enum class Direction {
+ INPUT, OUTPUT
+};
+
+// AIDL overload: DEVICE/SOURCE and MIX/SINK are INPUT; DEVICE/SINK and
+// MIX/SOURCE are OUTPUT. Any other role/type combination is invalid.
+ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
+ switch (type) {
+ case media::AudioPortType::NONE:
+ case media::AudioPortType::SESSION:
+ break; // must be listed -Werror,-Wswitch
+ case media::AudioPortType::DEVICE:
+ switch (role) {
+ case media::AudioPortRole::NONE:
+ break; // must be listed -Werror,-Wswitch
+ case media::AudioPortRole::SOURCE:
+ return Direction::INPUT;
+ case media::AudioPortRole::SINK:
+ return Direction::OUTPUT;
+ }
+ break;
+ case media::AudioPortType::MIX:
+ switch (role) {
+ case media::AudioPortRole::NONE:
+ break; // must be listed -Werror,-Wswitch
+ case media::AudioPortRole::SOURCE:
+ return Direction::OUTPUT;
+ case media::AudioPortRole::SINK:
+ return Direction::INPUT;
+ }
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Legacy overload: mirror of the AIDL overload above for audio_port_role_t /
+// audio_port_type_t. Keep the two switch tables in sync.
+ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ case AUDIO_PORT_TYPE_SESSION:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_TYPE_DEVICE:
+ switch (role) {
+ case AUDIO_PORT_ROLE_NONE:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_ROLE_SOURCE:
+ return Direction::INPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return Direction::OUTPUT;
+ }
+ break;
+ case AUDIO_PORT_TYPE_MIX:
+ switch (role) {
+ case AUDIO_PORT_ROLE_NONE:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_ROLE_SOURCE:
+ return Direction::OUTPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return Direction::INPUT;
+ }
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+} // namespace
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+// Copies an AIDL string into a fixed-size legacy char buffer and NUL-terminates it.
+// Returns BAD_VALUE if the string (plus terminator) does not fit.
+// BUGFIX: the previous check `aidl.size() > maxSize - 1` underflowed when
+// maxSize == 0 (size_t wrap), which would allow an out-of-bounds write of the
+// terminator at dest[0]. Reject maxSize == 0 explicitly.
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
+    if (maxSize == 0 || aidl.size() >= maxSize) {
+        return BAD_VALUE;
+    }
+    aidl.copy(dest, aidl.size());
+    dest[aidl.size()] = '\0';
+    return OK;
+}
+
+// Converts a legacy char buffer to an AIDL std::string.
+// Fails on nullptr or if no NUL terminator is found within maxSize bytes
+// (strnlen == maxSize means the buffer is not properly terminated).
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize) {
+ if (legacy == nullptr) {
+ return unexpected(BAD_VALUE);
+ }
+ if (strnlen(legacy, maxSize) == maxSize) {
+ // No null-terminator.
+ return unexpected(BAD_VALUE);
+ }
+ return std::string(legacy);
+}
+
+// The following pairs are 1:1 numeric conversions between AIDL int32_t and
+// legacy handle/id typedefs. convertReinterpret preserves the bit pattern.
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl) {
+ return convertReinterpret<audio_module_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl) {
+ return convertReinterpret<audio_io_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl) {
+ return convertReinterpret<audio_port_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl) {
+ return convertReinterpret<audio_patch_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl) {
+ return convertReinterpret<audio_unique_id_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl) {
+ return convertReinterpret<audio_hw_sync_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl) {
+ return convertReinterpret<pid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl) {
+ return convertReinterpret<uid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy) {
+ return convertReinterpret<int32_t>(legacy);
+}
+
+// String conversions between std::string_view/std::string and the legacy
+// String16/String8 types. String16 conversion goes through String8 for UTF-8.
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl) {
+ return String16(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy) {
+ return std::string(String8(legacy).c_str());
+}
+
+ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl) {
+ return String8(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy) {
+ return std::string(legacy.c_str());
+}
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+// Maps one AIDL AudioPortConfigType enumerator to its AUDIO_PORT_CONFIG_* mask bit.
+ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
+ media::AudioPortConfigType aidl) {
+ switch (aidl) {
+ case media::AudioPortConfigType::SAMPLE_RATE:
+ return AUDIO_PORT_CONFIG_SAMPLE_RATE;
+ case media::AudioPortConfigType::CHANNEL_MASK:
+ return AUDIO_PORT_CONFIG_CHANNEL_MASK;
+ case media::AudioPortConfigType::FORMAT:
+ return AUDIO_PORT_CONFIG_FORMAT;
+ case media::AudioPortConfigType::GAIN:
+ return AUDIO_PORT_CONFIG_GAIN;
+ case media::AudioPortConfigType::FLAGS:
+ return AUDIO_PORT_CONFIG_FLAGS;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+// Inverse of aidl2legacy_AudioPortConfigType_int32_t.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
+ int32_t legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_CONFIG_SAMPLE_RATE:
+ return media::AudioPortConfigType::SAMPLE_RATE;
+ case AUDIO_PORT_CONFIG_CHANNEL_MASK:
+ return media::AudioPortConfigType::CHANNEL_MASK;
+ case AUDIO_PORT_CONFIG_FORMAT:
+ return media::AudioPortConfigType::FORMAT;
+ case AUDIO_PORT_CONFIG_GAIN:
+ return media::AudioPortConfigType::GAIN;
+ case AUDIO_PORT_CONFIG_FLAGS:
+ return media::AudioPortConfigType::FLAGS;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Whole-mask conversions built on convertBitmask: the AIDL side is
+// index-based (bit i <-> enumerator i), the legacy side is mask-based.
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl) {
+ return convertBitmask<unsigned int, int32_t, int, media::AudioPortConfigType>(
+ aidl, aidl2legacy_AudioPortConfigType_int32_t,
+ // AudioPortConfigType enum is index-based.
+ index2enum_index<media::AudioPortConfigType>,
+ // AUDIO_PORT_CONFIG_* flags are mask-based.
+ enumToMask_bitmask<unsigned int, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy) {
+ return convertBitmask<int32_t, unsigned int, media::AudioPortConfigType, int>(
+ legacy, legacy2aidl_int32_t_AudioPortConfigType,
+ // AUDIO_PORT_CONFIG_* flags are mask-based.
+ index2enum_bitmask<unsigned>,
+ // AudioPortConfigType enum is index-based.
+ enumToMask_index<int32_t, media::AudioPortConfigType>);
+}
+
+// Channel masks are passed through bit-for-bit in both directions; the
+// representation is opaque and context-dependent (see TODOs below).
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl) {
+ // TODO(ytai): should we convert bit-by-bit?
+ // One problem here is that the representation is both opaque and is different based on the
+ // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+ return convertReinterpret<audio_channel_mask_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy) {
+ // TODO(ytai): should we convert bit-by-bit?
+ // One problem here is that the representation is both opaque and is different based on the
+ // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+ return convertReinterpret<int32_t>(legacy);
+}
+
+// 1:1 enum mapping between media::AudioIoConfigEvent and audio_io_config_event.
+// Both switches must stay exhaustive; unmapped values yield BAD_VALUE.
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ media::AudioIoConfigEvent aidl) {
+ switch (aidl) {
+ case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+ return AUDIO_OUTPUT_REGISTERED;
+ case media::AudioIoConfigEvent::OUTPUT_OPENED:
+ return AUDIO_OUTPUT_OPENED;
+ case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+ return AUDIO_OUTPUT_CLOSED;
+ case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+ return AUDIO_OUTPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::INPUT_REGISTERED:
+ return AUDIO_INPUT_REGISTERED;
+ case media::AudioIoConfigEvent::INPUT_OPENED:
+ return AUDIO_INPUT_OPENED;
+ case media::AudioIoConfigEvent::INPUT_CLOSED:
+ return AUDIO_INPUT_CLOSED;
+ case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+ return AUDIO_INPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::CLIENT_STARTED:
+ return AUDIO_CLIENT_STARTED;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Inverse of the mapping above.
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+ audio_io_config_event legacy) {
+ switch (legacy) {
+ case AUDIO_OUTPUT_REGISTERED:
+ return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+ case AUDIO_OUTPUT_OPENED:
+ return media::AudioIoConfigEvent::OUTPUT_OPENED;
+ case AUDIO_OUTPUT_CLOSED:
+ return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+ case AUDIO_OUTPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+ case AUDIO_INPUT_REGISTERED:
+ return media::AudioIoConfigEvent::INPUT_REGISTERED;
+ case AUDIO_INPUT_OPENED:
+ return media::AudioIoConfigEvent::INPUT_OPENED;
+ case AUDIO_INPUT_CLOSED:
+ return media::AudioIoConfigEvent::INPUT_CLOSED;
+ case AUDIO_INPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+ case AUDIO_CLIENT_STARTED:
+ return media::AudioIoConfigEvent::CLIENT_STARTED;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// 1:1 enum mappings for audio port role and type, both directions.
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+ media::AudioPortRole aidl) {
+ switch (aidl) {
+ case media::AudioPortRole::NONE:
+ return AUDIO_PORT_ROLE_NONE;
+ case media::AudioPortRole::SOURCE:
+ return AUDIO_PORT_ROLE_SOURCE;
+ case media::AudioPortRole::SINK:
+ return AUDIO_PORT_ROLE_SINK;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+ audio_port_role_t legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_ROLE_NONE:
+ return media::AudioPortRole::NONE;
+ case AUDIO_PORT_ROLE_SOURCE:
+ return media::AudioPortRole::SOURCE;
+ case AUDIO_PORT_ROLE_SINK:
+ return media::AudioPortRole::SINK;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+ media::AudioPortType aidl) {
+ switch (aidl) {
+ case media::AudioPortType::NONE:
+ return AUDIO_PORT_TYPE_NONE;
+ case media::AudioPortType::DEVICE:
+ return AUDIO_PORT_TYPE_DEVICE;
+ case media::AudioPortType::MIX:
+ return AUDIO_PORT_TYPE_MIX;
+ case media::AudioPortType::SESSION:
+ return AUDIO_PORT_TYPE_SESSION;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+ audio_port_type_t legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_TYPE_NONE:
+ return media::AudioPortType::NONE;
+ case AUDIO_PORT_TYPE_DEVICE:
+ return media::AudioPortType::DEVICE;
+ case AUDIO_PORT_TYPE_MIX:
+ return media::AudioPortType::MIX;
+ case AUDIO_PORT_TYPE_SESSION:
+ return media::AudioPortType::SESSION;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// AudioFormat is value-compatible with audio_format_t by design, so these are
+// plain casts; the static_asserts only guard against size divergence.
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+ media::audio::common::AudioFormat aidl) {
+ // This relies on AudioFormat being kept in sync with audio_format_t.
+ static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+ return static_cast<audio_format_t>(aidl);
+}
+
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+ audio_format_t legacy) {
+ // This relies on AudioFormat being kept in sync with audio_format_t.
+ static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+ return static_cast<media::audio::common::AudioFormat>(legacy);
+}
+
+// Per-enumerator gain-mode mapping (JOINT/CHANNELS/RAMP), both directions.
+ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl) {
+ switch (aidl) {
+ case media::AudioGainMode::JOINT:
+ return AUDIO_GAIN_MODE_JOINT;
+ case media::AudioGainMode::CHANNELS:
+ return AUDIO_GAIN_MODE_CHANNELS;
+ case media::AudioGainMode::RAMP:
+ return AUDIO_GAIN_MODE_RAMP;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy) {
+ switch (legacy) {
+ case AUDIO_GAIN_MODE_JOINT:
+ return media::AudioGainMode::JOINT;
+ case AUDIO_GAIN_MODE_CHANNELS:
+ return media::AudioGainMode::CHANNELS;
+ case AUDIO_GAIN_MODE_RAMP:
+ return media::AudioGainMode::RAMP;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Whole-mask gain-mode conversions: AIDL side index-based, legacy side mask-based.
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl) {
+ return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, media::AudioGainMode>(
+ aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
+ // AudioGainMode is index-based.
+ index2enum_index<media::AudioGainMode>,
+ // AUDIO_GAIN_MODE_* constants are mask-based.
+ enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
+ return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, audio_gain_mode_t>(
+ legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
+ // AUDIO_GAIN_MODE_* constants are mask-based.
+ index2enum_bitmask<audio_gain_mode_t>,
+ // AudioGainMode is index-based.
+ enumToMask_index<int32_t, media::AudioGainMode>);
+}
+
+// Device masks are passed through bit-for-bit; per the TODOs, a per-bit
+// conversion may be introduced later.
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl) {
+ // TODO(ytai): bitfield?
+ return convertReinterpret<audio_devices_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy) {
+ // TODO(ytai): bitfield?
+ return convertReinterpret<int32_t>(legacy);
+}
+
+// Converts an AIDL AudioGainConfig to legacy audio_gain_config. role/type are
+// needed to derive the port direction, which determines how many gain values
+// the channel mask implies (JOINT mode always carries exactly one value).
+// Fails if the AIDL value count disagrees with the mask or exceeds the
+// fixed-size legacy values[] array.
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+ const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type) {
+ audio_gain_config legacy;
+ legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+ legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
+ legacy.channel_mask =
+ VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+ const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+ const bool isJoint = bitmaskIsSet(aidl.mode, media::AudioGainMode::JOINT);
+ size_t numValues = isJoint ? 1
+ : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+ : audio_channel_count_from_out_mask(legacy.channel_mask);
+ if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
+ return unexpected(BAD_VALUE);
+ }
+ for (size_t i = 0; i < numValues; ++i) {
+ legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
+ }
+ legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.rampDurationMs));
+ return legacy;
+}
+
+// Inverse conversion. The AIDL values vector is sized from the legacy mode and
+// channel mask using the same direction-dependent rule as above.
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+ const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type) {
+ media::AudioGainConfig aidl;
+ aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
+ aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
+ aidl.channelMask =
+ VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+ const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+ const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
+ size_t numValues = isJoint ? 1
+ : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+ : audio_channel_count_from_out_mask(legacy.channel_mask);
+ aidl.values.resize(numValues);
+ for (size_t i = 0; i < numValues; ++i) {
+ aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
+ }
+ aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
+ return aidl;
+}
+
+// Per-flag input-flag mapping. The aidl2legacy direction takes a single
+// index-based enumerator and returns the corresponding single-bit legacy flag.
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+ media::AudioInputFlags aidl) {
+ switch (aidl) {
+ case media::AudioInputFlags::FAST:
+ return AUDIO_INPUT_FLAG_FAST;
+ case media::AudioInputFlags::HW_HOTWORD:
+ return AUDIO_INPUT_FLAG_HW_HOTWORD;
+ case media::AudioInputFlags::RAW:
+ return AUDIO_INPUT_FLAG_RAW;
+ case media::AudioInputFlags::SYNC:
+ return AUDIO_INPUT_FLAG_SYNC;
+ case media::AudioInputFlags::MMAP_NOIRQ:
+ return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+ case media::AudioInputFlags::VOIP_TX:
+ return AUDIO_INPUT_FLAG_VOIP_TX;
+ case media::AudioInputFlags::HW_AV_SYNC:
+ return AUDIO_INPUT_FLAG_HW_AV_SYNC;
+ case media::AudioInputFlags::DIRECT:
+ return AUDIO_INPUT_FLAG_DIRECT;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Inverse per-flag mapping. FLAG_NONE (0) has no single-bit representation,
+// so it intentionally falls through to BAD_VALUE.
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ audio_input_flags_t legacy) {
+ switch (legacy) {
+ case AUDIO_INPUT_FLAG_NONE:
+ break; // shouldn't get here. must be listed -Werror,-Wswitch
+ case AUDIO_INPUT_FLAG_FAST:
+ return media::AudioInputFlags::FAST;
+ case AUDIO_INPUT_FLAG_HW_HOTWORD:
+ return media::AudioInputFlags::HW_HOTWORD;
+ case AUDIO_INPUT_FLAG_RAW:
+ return media::AudioInputFlags::RAW;
+ case AUDIO_INPUT_FLAG_SYNC:
+ return media::AudioInputFlags::SYNC;
+ case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
+ return media::AudioInputFlags::MMAP_NOIRQ;
+ case AUDIO_INPUT_FLAG_VOIP_TX:
+ return media::AudioInputFlags::VOIP_TX;
+ case AUDIO_INPUT_FLAG_HW_AV_SYNC:
+ return media::AudioInputFlags::HW_AV_SYNC;
+ case AUDIO_INPUT_FLAG_DIRECT:
+ return media::AudioInputFlags::DIRECT;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Per-flag output-flag mapping, analogous to the input-flag converters above.
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+ media::AudioOutputFlags aidl) {
+ switch (aidl) {
+ case media::AudioOutputFlags::DIRECT:
+ return AUDIO_OUTPUT_FLAG_DIRECT;
+ case media::AudioOutputFlags::PRIMARY:
+ return AUDIO_OUTPUT_FLAG_PRIMARY;
+ case media::AudioOutputFlags::FAST:
+ return AUDIO_OUTPUT_FLAG_FAST;
+ case media::AudioOutputFlags::DEEP_BUFFER:
+ return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+ return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ case media::AudioOutputFlags::NON_BLOCKING:
+ return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
+ case media::AudioOutputFlags::HW_AV_SYNC:
+ return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+ case media::AudioOutputFlags::TTS:
+ return AUDIO_OUTPUT_FLAG_TTS;
+ case media::AudioOutputFlags::RAW:
+ return AUDIO_OUTPUT_FLAG_RAW;
+ case media::AudioOutputFlags::SYNC:
+ return AUDIO_OUTPUT_FLAG_SYNC;
+ case media::AudioOutputFlags::IEC958_NONAUDIO:
+ return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+ case media::AudioOutputFlags::DIRECT_PCM:
+ return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
+ case media::AudioOutputFlags::MMAP_NOIRQ:
+ return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ case media::AudioOutputFlags::VOIP_RX:
+ return AUDIO_OUTPUT_FLAG_VOIP_RX;
+ case media::AudioOutputFlags::INCALL_MUSIC:
+ return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
+ case media::AudioOutputFlags::GAPLESS_OFFLOAD:
+ return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Inverse per-flag mapping; FLAG_NONE (0) maps to BAD_VALUE as above.
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ audio_output_flags_t legacy) {
+ switch (legacy) {
+ case AUDIO_OUTPUT_FLAG_NONE:
+ break; // shouldn't get here. must be listed -Werror,-Wswitch
+ case AUDIO_OUTPUT_FLAG_DIRECT:
+ return media::AudioOutputFlags::DIRECT;
+ case AUDIO_OUTPUT_FLAG_PRIMARY:
+ return media::AudioOutputFlags::PRIMARY;
+ case AUDIO_OUTPUT_FLAG_FAST:
+ return media::AudioOutputFlags::FAST;
+ case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
+ return media::AudioOutputFlags::DEEP_BUFFER;
+ case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
+ return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+ case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
+ return media::AudioOutputFlags::NON_BLOCKING;
+ case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
+ return media::AudioOutputFlags::HW_AV_SYNC;
+ case AUDIO_OUTPUT_FLAG_TTS:
+ return media::AudioOutputFlags::TTS;
+ case AUDIO_OUTPUT_FLAG_RAW:
+ return media::AudioOutputFlags::RAW;
+ case AUDIO_OUTPUT_FLAG_SYNC:
+ return media::AudioOutputFlags::SYNC;
+ case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
+ return media::AudioOutputFlags::IEC958_NONAUDIO;
+ case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
+ return media::AudioOutputFlags::DIRECT_PCM;
+ case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
+ return media::AudioOutputFlags::MMAP_NOIRQ;
+ case AUDIO_OUTPUT_FLAG_VOIP_RX:
+ return media::AudioOutputFlags::VOIP_RX;
+ case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
+ return media::AudioOutputFlags::INCALL_MUSIC;
+ case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
+ return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+// Converts an AIDL input-flags bitmask (bit index i set <=> i-th AIDL enum
+// value present) into a legacy audio_input_flags_t bitmask.
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl) {
+    // Accumulate in the enum's underlying integer type: OR-ing several flags
+    // can produce a value outside the range of any single enumerator.
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+                    aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
+                    index2enum_index<media::AudioInputFlags>,
+                    enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
+    return static_cast<audio_input_flags_t>(converted);
+}
+
+// Converts a legacy audio_input_flags_t bitmask into the AIDL index-based
+// bitmask representation (bit i <=> i-th media::AudioInputFlags value).
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    // Widen to the underlying type first so bit iteration is well-defined.
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+            legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
+            index2enum_bitmask<audio_input_flags_t>,
+            enumToMask_index<int32_t, media::AudioInputFlags>);
+}
+
+// Converts an AIDL output-flags bitmask (bit index i set <=> i-th AIDL enum
+// value present) into a legacy audio_output_flags_t bitmask.
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl) {
+    // Mirror aidl2legacy_int32_t_audio_input_flags_t_mask(): accumulate the
+    // mask in the enum's underlying integer type rather than in the enum type
+    // itself, since OR-ing several flags can produce a value outside the
+    // range of any single enumerator.
+    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_output_flags_t, media::AudioOutputFlags>(
+                    aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
+                    index2enum_index<media::AudioOutputFlags>,
+                    enumToMask_bitmask<LegacyMask, audio_output_flags_t>)));
+    return static_cast<audio_output_flags_t>(converted);
+}
+
+// Converts a legacy audio_output_flags_t bitmask into the AIDL index-based
+// bitmask representation (bit i <=> i-th media::AudioOutputFlags value).
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+    // Widen to the underlying type first so bit iteration is well-defined.
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+            legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
+            index2enum_bitmask<audio_output_flags_t>,
+            enumToMask_index<int32_t, media::AudioOutputFlags>);
+}
+
+// Converts the AIDL AudioIoFlags union into the legacy audio_io_flags union.
+// The port role/type pair determines the I/O direction and therefore which
+// union member (input or output flags) is active.
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+    audio_io_flags legacy;
+    // direction() fails for role/type combinations with no defined direction,
+    // so exactly one union member is written on every successful path.
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT: {
+            legacy.input = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_input_flags_t_mask(
+                            VALUE_OR_RETURN(UNION_GET(aidl, input))));
+        }
+            break;
+
+        case Direction::OUTPUT: {
+            legacy.output = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_output_flags_t_mask(
+                            VALUE_OR_RETURN(UNION_GET(aidl, output))));
+        }
+            break;
+    }
+
+    return legacy;
+}
+
+// Converts the legacy audio_io_flags union into the AIDL AudioIoFlags union.
+// The port role/type pair selects the direction and thus which legacy union
+// member is meaningful.
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
+    media::AudioIoFlags aidl;
+
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT:
+            UNION_SET(aidl, input,
+                      VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(
+                              legacy.input)));
+            break;
+        case Direction::OUTPUT:
+            UNION_SET(aidl, output,
+                      VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(
+                              legacy.output)));
+            break;
+    }
+    return aidl;
+}
+
+// Converts an AIDL AudioPortConfigDeviceExt into the legacy
+// audio_port_config_device_ext representation, bailing out on the first
+// field that fails to convert.
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+        const media::AudioPortConfigDeviceExt& aidl) {
+    audio_port_config_device_ext legacy;
+    const audio_module_handle_t hwModule = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.hw_module = hwModule;
+    const audio_devices_t deviceType = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_devices_t(aidl.type));
+    legacy.type = deviceType;
+    // The legacy address is a fixed-size char array; the helper enforces the
+    // length bound.
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.address, legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return legacy;
+}
+
+// Mirror of the converter above: packs a legacy device ext struct into its
+// AIDL parcelable form.
+ConversionResult<media::AudioPortConfigDeviceExt>
+legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+        const audio_port_config_device_ext& legacy) {
+    media::AudioPortConfigDeviceExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
+    // Convert the bounded char array into a string before storing it.
+    const std::string address = VALUE_OR_RETURN(
+            legacy2aidl_string(legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    aidl.address = address;
+    return aidl;
+}
+
+// Maps an AIDL AudioStreamType to the legacy audio_stream_type_t enumerator.
+// No 'default:' on purpose — with -Werror,-Wswitch a newly added AIDL value
+// must be handled here before the code compiles.
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        media::AudioStreamType aidl) {
+    switch (aidl) {
+        case media::AudioStreamType::DEFAULT:
+            return AUDIO_STREAM_DEFAULT;
+        case media::AudioStreamType::VOICE_CALL:
+            return AUDIO_STREAM_VOICE_CALL;
+        case media::AudioStreamType::SYSTEM:
+            return AUDIO_STREAM_SYSTEM;
+        case media::AudioStreamType::RING:
+            return AUDIO_STREAM_RING;
+        case media::AudioStreamType::MUSIC:
+            return AUDIO_STREAM_MUSIC;
+        case media::AudioStreamType::ALARM:
+            return AUDIO_STREAM_ALARM;
+        case media::AudioStreamType::NOTIFICATION:
+            return AUDIO_STREAM_NOTIFICATION;
+        case media::AudioStreamType::BLUETOOTH_SCO:
+            return AUDIO_STREAM_BLUETOOTH_SCO;
+        case media::AudioStreamType::ENFORCED_AUDIBLE:
+            return AUDIO_STREAM_ENFORCED_AUDIBLE;
+        case media::AudioStreamType::DTMF:
+            return AUDIO_STREAM_DTMF;
+        case media::AudioStreamType::TTS:
+            return AUDIO_STREAM_TTS;
+        case media::AudioStreamType::ACCESSIBILITY:
+            return AUDIO_STREAM_ACCESSIBILITY;
+        case media::AudioStreamType::ASSISTANT:
+            return AUDIO_STREAM_ASSISTANT;
+        case media::AudioStreamType::REROUTING:
+            return AUDIO_STREAM_REROUTING;
+        case media::AudioStreamType::PATCH:
+            return AUDIO_STREAM_PATCH;
+        case media::AudioStreamType::CALL_ASSISTANT:
+            return AUDIO_STREAM_CALL_ASSISTANT;
+    }
+    // Unrecognized (e.g. out-of-range) values from the remote side.
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a legacy audio_stream_type_t enumerator to its AIDL counterpart.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_STREAM_DEFAULT:
+            return media::AudioStreamType::DEFAULT;
+        case AUDIO_STREAM_VOICE_CALL:
+            return media::AudioStreamType::VOICE_CALL;
+        case AUDIO_STREAM_SYSTEM:
+            return media::AudioStreamType::SYSTEM;
+        case AUDIO_STREAM_RING:
+            return media::AudioStreamType::RING;
+        case AUDIO_STREAM_MUSIC:
+            return media::AudioStreamType::MUSIC;
+        case AUDIO_STREAM_ALARM:
+            return media::AudioStreamType::ALARM;
+        case AUDIO_STREAM_NOTIFICATION:
+            return media::AudioStreamType::NOTIFICATION;
+        case AUDIO_STREAM_BLUETOOTH_SCO:
+            return media::AudioStreamType::BLUETOOTH_SCO;
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+            return media::AudioStreamType::ENFORCED_AUDIBLE;
+        case AUDIO_STREAM_DTMF:
+            return media::AudioStreamType::DTMF;
+        case AUDIO_STREAM_TTS:
+            return media::AudioStreamType::TTS;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            return media::AudioStreamType::ACCESSIBILITY;
+        case AUDIO_STREAM_ASSISTANT:
+            return media::AudioStreamType::ASSISTANT;
+        case AUDIO_STREAM_REROUTING:
+            return media::AudioStreamType::REROUTING;
+        case AUDIO_STREAM_PATCH:
+            return media::AudioStreamType::PATCH;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            return media::AudioStreamType::CALL_ASSISTANT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps an AIDL AudioSourceType to the legacy audio_source_t enumerator.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+        media::AudioSourceType aidl) {
+    switch (aidl) {
+        case media::AudioSourceType::INVALID:
+            // This value does not have an enum
+            return AUDIO_SOURCE_INVALID;
+        case media::AudioSourceType::DEFAULT:
+            return AUDIO_SOURCE_DEFAULT;
+        case media::AudioSourceType::MIC:
+            return AUDIO_SOURCE_MIC;
+        case media::AudioSourceType::VOICE_UPLINK:
+            return AUDIO_SOURCE_VOICE_UPLINK;
+        case media::AudioSourceType::VOICE_DOWNLINK:
+            return AUDIO_SOURCE_VOICE_DOWNLINK;
+        case media::AudioSourceType::VOICE_CALL:
+            return AUDIO_SOURCE_VOICE_CALL;
+        case media::AudioSourceType::CAMCORDER:
+            return AUDIO_SOURCE_CAMCORDER;
+        case media::AudioSourceType::VOICE_RECOGNITION:
+            return AUDIO_SOURCE_VOICE_RECOGNITION;
+        case media::AudioSourceType::VOICE_COMMUNICATION:
+            return AUDIO_SOURCE_VOICE_COMMUNICATION;
+        case media::AudioSourceType::REMOTE_SUBMIX:
+            return AUDIO_SOURCE_REMOTE_SUBMIX;
+        case media::AudioSourceType::UNPROCESSED:
+            return AUDIO_SOURCE_UNPROCESSED;
+        case media::AudioSourceType::VOICE_PERFORMANCE:
+            return AUDIO_SOURCE_VOICE_PERFORMANCE;
+        case media::AudioSourceType::ECHO_REFERENCE:
+            return AUDIO_SOURCE_ECHO_REFERENCE;
+        case media::AudioSourceType::FM_TUNER:
+            return AUDIO_SOURCE_FM_TUNER;
+        case media::AudioSourceType::HOTWORD:
+            return AUDIO_SOURCE_HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a legacy audio_source_t enumerator to its AIDL counterpart.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+        audio_source_t legacy) {
+    switch (legacy) {
+        case AUDIO_SOURCE_INVALID:
+            return media::AudioSourceType::INVALID;
+        case AUDIO_SOURCE_DEFAULT:
+            return media::AudioSourceType::DEFAULT;
+        case AUDIO_SOURCE_MIC:
+            return media::AudioSourceType::MIC;
+        case AUDIO_SOURCE_VOICE_UPLINK:
+            return media::AudioSourceType::VOICE_UPLINK;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            return media::AudioSourceType::VOICE_DOWNLINK;
+        case AUDIO_SOURCE_VOICE_CALL:
+            return media::AudioSourceType::VOICE_CALL;
+        case AUDIO_SOURCE_CAMCORDER:
+            return media::AudioSourceType::CAMCORDER;
+        case AUDIO_SOURCE_VOICE_RECOGNITION:
+            return media::AudioSourceType::VOICE_RECOGNITION;
+        case AUDIO_SOURCE_VOICE_COMMUNICATION:
+            return media::AudioSourceType::VOICE_COMMUNICATION;
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return media::AudioSourceType::REMOTE_SUBMIX;
+        case AUDIO_SOURCE_UNPROCESSED:
+            return media::AudioSourceType::UNPROCESSED;
+        case AUDIO_SOURCE_VOICE_PERFORMANCE:
+            return media::AudioSourceType::VOICE_PERFORMANCE;
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+            return media::AudioSourceType::ECHO_REFERENCE;
+        case AUDIO_SOURCE_FM_TUNER:
+            return media::AudioSourceType::FM_TUNER;
+        case AUDIO_SOURCE_HOTWORD:
+            return media::AudioSourceType::HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// audio_session_t is an integer handle; the conversion is a checked
+// reinterpretation, not a value mapping.
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl) {
+    return convertReinterpret<audio_session_t>(aidl);
+}
+
+// Inverse of aidl2legacy_int32_t_audio_session_t: checked reinterpretation
+// of the session handle back to its wire representation.
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+
+// Converts the AIDL mix-usecase union into the legacy (unnamed) usecase
+// union. The port role selects the active member: a SOURCE mix carries a
+// stream type, a SINK mix carries a source type.
+ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortConfigMixExtUseCase(
+        const media::AudioPortConfigMixExtUseCase& aidl, media::AudioPortRole role) {
+    // Value-initialize so the NONE path does not return indeterminate bytes;
+    // this struct may be copied verbatim across the binder boundary.
+    audio_port_config_mix_ext_usecase legacy{};
+
+    switch (role) {
+        case media::AudioPortRole::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+            return legacy;
+
+        case media::AudioPortRole::SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, stream))));
+            return legacy;
+
+        case media::AudioPortRole::SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, source))));
+            return legacy;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+// Converts the legacy mix-usecase union into the AIDL union. The port role
+// selects which legacy member is meaningful (SOURCE -> stream, SINK ->
+// source); for NONE the AIDL union is explicitly marked unspecified.
+ConversionResult<media::AudioPortConfigMixExtUseCase> legacy2aidl_AudioPortConfigMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExtUseCase aidl;
+
+    switch (role) {
+        case AUDIO_PORT_ROLE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_ROLE_SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            UNION_SET(aidl, stream, VALUE_OR_RETURN(
+                    legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
+            return aidl;
+        case AUDIO_PORT_ROLE_SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            UNION_SET(aidl, source,
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source)));
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+// Converts an AIDL AudioPortConfigMixExt into the legacy mix ext struct.
+// The port role is forwarded so the embedded usecase union can be decoded.
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role) {
+    audio_port_config_mix_ext legacy;
+    const audio_module_handle_t hwModule = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.hw_module = hwModule;
+    const audio_io_handle_t ioHandle = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    legacy.handle = ioHandle;
+    legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExtUseCase(aidl.usecase, role));
+    return legacy;
+}
+
+// Mirror of the converter above: packs a legacy mix ext struct into its
+// AIDL parcelable form, using the role to encode the usecase union.
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExt aidl;
+    const int32_t hwModule = VALUE_OR_RETURN(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.hwModule = hwModule;
+    const int32_t ioHandle = VALUE_OR_RETURN(
+            legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl.handle = ioHandle;
+    aidl.usecase = VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExtUseCase(legacy.usecase, role));
+    return aidl;
+}
+
+// Converts an AIDL AudioPortConfigSessionExt (a single session handle) into
+// the legacy audio_port_config_session_ext struct.
+ConversionResult<audio_port_config_session_ext>
+aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+        const media::AudioPortConfigSessionExt& aidl) {
+    audio_port_config_session_ext legacy;
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    return legacy;
+}
+
+// Converts the legacy session ext struct back to its AIDL parcelable form.
+ConversionResult<media::AudioPortConfigSessionExt>
+legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+        const audio_port_config_session_ext& legacy) {
+    media::AudioPortConfigSessionExt aidl;
+    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+// Converts the AIDL AudioPortConfigExt union into the legacy (unnamed) ext
+// union of audio_port_config. The port type selects the active member; the
+// role is forwarded for decoding the mix usecase union.
+ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortConfigExt(
+        const media::AudioPortConfigExt& aidl, media::AudioPortType type,
+        media::AudioPortRole role) {
+    // Value-initialize so the NONE path does not return indeterminate bytes;
+    // this union may be copied verbatim across the binder boundary.
+    audio_port_config_ext legacy{};
+    switch (type) {
+        case media::AudioPortType::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+            return legacy;
+        case media::AudioPortType::DEVICE:
+            legacy.device = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, device))));
+            return legacy;
+        case media::AudioPortType::MIX:
+            legacy.mix = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigMixExt(VALUE_OR_RETURN(UNION_GET(aidl, mix)), role));
+            return legacy;
+        case media::AudioPortType::SESSION:
+            legacy.session = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+            return legacy;
+
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+// Converts the legacy ext union of audio_port_config into the AIDL
+// AudioPortConfigExt union. The port type selects which legacy member is
+// read; for NONE the AIDL union is explicitly marked unspecified.
+ConversionResult<media::AudioPortConfigExt> legacy2aidl_AudioPortConfigExt(
+        const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role) {
+    media::AudioPortConfigExt aidl;
+
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_TYPE_DEVICE:
+            UNION_SET(aidl, device,
+                      VALUE_OR_RETURN(
+                              legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+                                      legacy.device)));
+            return aidl;
+        case AUDIO_PORT_TYPE_MIX:
+            UNION_SET(aidl, mix,
+                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExt(legacy.mix, role)));
+            return aidl;
+        case AUDIO_PORT_TYPE_SESSION:
+            UNION_SET(aidl, session,
+                      VALUE_OR_RETURN(
+                              legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+                                      legacy.session)));
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+// Converts an AIDL AudioPortConfig parcelable into the legacy
+// audio_port_config struct. Optional fields are only populated when the
+// corresponding bit is set in configMask, matching the legacy contract.
+// NOTE(review): fields whose configMask bit is clear are left unwritten
+// (indeterminate) — presumably consumers only read mask-covered fields;
+// confirm against callers.
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::AudioPortConfig& aidl) {
+    audio_port_config legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
+    legacy.config_mask = VALUE_OR_RETURN(aidl2legacy_int32_t_config_mask(aidl.configMask));
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::SAMPLE_RATE)) {
+        legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sampleRate));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::CHANNEL_MASK)) {
+        legacy.channel_mask =
+                VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FORMAT)) {
+        legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::GAIN)) {
+        // Gain and flags conversions need role/type to resolve direction.
+        legacy.gain = VALUE_OR_RETURN(
+                aidl2legacy_AudioGainConfig_audio_gain_config(aidl.gain, aidl.role, aidl.type));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FLAGS)) {
+        legacy.flags = VALUE_OR_RETURN(
+                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags, aidl.role, aidl.type));
+    }
+    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigExt(aidl.ext, aidl.type, aidl.role));
+    return legacy;
+}
+
+// Converts a legacy audio_port_config struct into the AIDL AudioPortConfig
+// parcelable. Only fields covered by config_mask are converted; the rest
+// keep the parcelable's default values.
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy) {
+    media::AudioPortConfig aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    aidl.configMask = VALUE_OR_RETURN(legacy2aidl_config_mask_int32_t(legacy.config_mask));
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+        aidl.channelMask =
+                VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        // Gain and flags conversions need role/type to resolve direction.
+        aidl.gain = VALUE_OR_RETURN(legacy2aidl_audio_gain_config_AudioGainConfig(
+                legacy.gain, legacy.role, legacy.type));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+        aidl.flags = VALUE_OR_RETURN(
+                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
+    }
+    aidl.ext =
+            VALUE_OR_RETURN(legacy2aidl_AudioPortConfigExt(legacy.ext, legacy.type, legacy.role));
+    return aidl;
+}
+
+// Converts an AIDL AudioPatch parcelable into the legacy audio_patch struct.
+// Rejects patches whose sink/source counts exceed the fixed-size legacy
+// arrays (AUDIO_PATCH_PORTS_MAX).
+// NOTE(review): array entries beyond num_sinks/num_sources are left
+// unwritten — presumably consumers only read the counted entries; confirm.
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+        const media::AudioPatch& aidl) {
+    struct audio_patch legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
+    legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sinks; ++i) {
+        legacy.sinks[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+    }
+    legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sources; ++i) {
+        legacy.sources[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+    }
+    return legacy;
+}
+
+// Converts a legacy audio_patch struct into the AIDL AudioPatch parcelable.
+// Counts beyond AUDIO_PATCH_PORTS_MAX indicate a corrupt struct (the legacy
+// arrays cannot hold more entries) and are rejected.
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const struct audio_patch& legacy) {
+    media::AudioPatch aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
+
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.sinks.reserve(legacy.num_sinks);
+    for (unsigned int sinkIdx = 0; sinkIdx < legacy.num_sinks; ++sinkIdx) {
+        aidl.sinks.push_back(VALUE_OR_RETURN(
+                legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[sinkIdx])));
+    }
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.sources.reserve(legacy.num_sources);
+    for (unsigned int srcIdx = 0; srcIdx < legacy.num_sources; ++srcIdx) {
+        aidl.sources.push_back(VALUE_OR_RETURN(
+                legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[srcIdx])));
+    }
+    return aidl;
+}
+
+// Converts an AIDL AudioIoDescriptor parcelable into a ref-counted legacy
+// AudioIoDescriptor object, converting each member field in turn.
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+        const media::AudioIoDescriptor& aidl) {
+    sp<AudioIoDescriptor> legacy(new AudioIoDescriptor());
+    legacy->mIoHandle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
+    legacy->mPatch = VALUE_OR_RETURN(aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+    legacy->mSamplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
+    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy->mChannelMask =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy->mFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy->mFrameCountHAL = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
+    legacy->mLatency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
+    legacy->mPortId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
+
+// Converts a legacy AudioIoDescriptor object into the AIDL parcelable.
+// NOTE(review): 'legacy' is dereferenced without a null check — presumably
+// callers guarantee a non-null descriptor; confirm.
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+        const sp<AudioIoDescriptor>& legacy) {
+    media::AudioIoDescriptor aidl;
+    aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->mIoHandle));
+    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->mPatch));
+    aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mSamplingRate));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy->mFormat));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy->mChannelMask));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCount));
+    aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCountHAL));
+    aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mLatency));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy->mPortId));
+    return aidl;
+}
+
+// Converts the AIDL AudioClient parcelable into the legacy AudioClient
+// struct (uid/pid/tid handles plus the client package name).
+ConversionResult<AudioClient> aidl2legacy_AudioClient_AudioClient(
+        const media::AudioClient& aidl) {
+    const uid_t uid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.clientUid));
+    const pid_t pid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientPid));
+    const pid_t tid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientTid));
+    AudioClient legacy;
+    legacy.clientUid = uid;
+    legacy.clientPid = pid;
+    legacy.clientTid = tid;
+    legacy.packageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.packageName));
+    return legacy;
+}
+
+// Mirror of the converter above: packs a legacy AudioClient struct into the
+// AIDL parcelable.
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
+        const AudioClient& legacy) {
+    const int32_t uid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.clientUid));
+    const int32_t pid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientPid));
+    const int32_t tid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientTid));
+    media::AudioClient aidl;
+    aidl.clientUid = uid;
+    aidl.clientPid = pid;
+    aidl.clientTid = tid;
+    aidl.packageName = VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.packageName));
+    return aidl;
+}
+
+// Maps an AIDL AudioContentType to the legacy audio_content_type_t value.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl) {
+    switch (aidl) {
+        case media::AudioContentType::UNKNOWN:
+            return AUDIO_CONTENT_TYPE_UNKNOWN;
+        case media::AudioContentType::SPEECH:
+            return AUDIO_CONTENT_TYPE_SPEECH;
+        case media::AudioContentType::MUSIC:
+            return AUDIO_CONTENT_TYPE_MUSIC;
+        case media::AudioContentType::MOVIE:
+            return AUDIO_CONTENT_TYPE_MOVIE;
+        case media::AudioContentType::SONIFICATION:
+            return AUDIO_CONTENT_TYPE_SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a legacy audio_content_type_t value to its AIDL counterpart.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_CONTENT_TYPE_UNKNOWN:
+            return media::AudioContentType::UNKNOWN;
+        case AUDIO_CONTENT_TYPE_SPEECH:
+            return media::AudioContentType::SPEECH;
+        case AUDIO_CONTENT_TYPE_MUSIC:
+            return media::AudioContentType::MUSIC;
+        case AUDIO_CONTENT_TYPE_MOVIE:
+            return media::AudioContentType::MOVIE;
+        case AUDIO_CONTENT_TYPE_SONIFICATION:
+            return media::AudioContentType::SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps an AIDL AudioUsage to the legacy audio_usage_t value.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl) {
+    switch (aidl) {
+        case media::AudioUsage::UNKNOWN:
+            return AUDIO_USAGE_UNKNOWN;
+        case media::AudioUsage::MEDIA:
+            return AUDIO_USAGE_MEDIA;
+        case media::AudioUsage::VOICE_COMMUNICATION:
+            return AUDIO_USAGE_VOICE_COMMUNICATION;
+        case media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
+            return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+        case media::AudioUsage::ALARM:
+            return AUDIO_USAGE_ALARM;
+        case media::AudioUsage::NOTIFICATION:
+            return AUDIO_USAGE_NOTIFICATION;
+        case media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
+            return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
+        case media::AudioUsage::NOTIFICATION_EVENT:
+            return AUDIO_USAGE_NOTIFICATION_EVENT;
+        case media::AudioUsage::ASSISTANCE_ACCESSIBILITY:
+            return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+        case media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
+            return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
+        case media::AudioUsage::ASSISTANCE_SONIFICATION:
+            return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+        case media::AudioUsage::GAME:
+            return AUDIO_USAGE_GAME;
+        case media::AudioUsage::VIRTUAL_SOURCE:
+            return AUDIO_USAGE_VIRTUAL_SOURCE;
+        case media::AudioUsage::ASSISTANT:
+            return AUDIO_USAGE_ASSISTANT;
+        case media::AudioUsage::CALL_ASSISTANT:
+            return AUDIO_USAGE_CALL_ASSISTANT;
+        case media::AudioUsage::EMERGENCY:
+            return AUDIO_USAGE_EMERGENCY;
+        case media::AudioUsage::SAFETY:
+            return AUDIO_USAGE_SAFETY;
+        case media::AudioUsage::VEHICLE_STATUS:
+            return AUDIO_USAGE_VEHICLE_STATUS;
+        case media::AudioUsage::ANNOUNCEMENT:
+            return AUDIO_USAGE_ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a legacy audio_usage_t value to its AIDL counterpart.
+// Exhaustive switch without 'default:' so -Werror,-Wswitch flags new values.
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
+    switch (legacy) {
+        case AUDIO_USAGE_UNKNOWN:
+            return media::AudioUsage::UNKNOWN;
+        case AUDIO_USAGE_MEDIA:
+            return media::AudioUsage::MEDIA;
+        case AUDIO_USAGE_VOICE_COMMUNICATION:
+            return media::AudioUsage::VOICE_COMMUNICATION;
+        case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+            return media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
+        case AUDIO_USAGE_ALARM:
+            return media::AudioUsage::ALARM;
+        case AUDIO_USAGE_NOTIFICATION:
+            return media::AudioUsage::NOTIFICATION;
+        case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+            return media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED;
+        case AUDIO_USAGE_NOTIFICATION_EVENT:
+            return media::AudioUsage::NOTIFICATION_EVENT;
+        case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+            return media::AudioUsage::ASSISTANCE_ACCESSIBILITY;
+        case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+            return media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
+        case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+            return media::AudioUsage::ASSISTANCE_SONIFICATION;
+        case AUDIO_USAGE_GAME:
+            return media::AudioUsage::GAME;
+        case AUDIO_USAGE_VIRTUAL_SOURCE:
+            return media::AudioUsage::VIRTUAL_SOURCE;
+        case AUDIO_USAGE_ASSISTANT:
+            return media::AudioUsage::ASSISTANT;
+        case AUDIO_USAGE_CALL_ASSISTANT:
+            return media::AudioUsage::CALL_ASSISTANT;
+        case AUDIO_USAGE_EMERGENCY:
+            return media::AudioUsage::EMERGENCY;
+        case AUDIO_USAGE_SAFETY:
+            return media::AudioUsage::SAFETY;
+        case AUDIO_USAGE_VEHICLE_STATUS:
+            return media::AudioUsage::VEHICLE_STATUS;
+        case AUDIO_USAGE_ANNOUNCEMENT:
+            return media::AudioUsage::ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a single AIDL AudioFlag enumerator to its legacy single-bit mask
+// value. Combined masks go through aidl2legacy_int32_t_audio_flags_mask_t_mask().
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl) {
+    switch (aidl) {
+        case media::AudioFlag::AUDIBILITY_ENFORCED:
+            return AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        case media::AudioFlag::SECURE:
+            return AUDIO_FLAG_SECURE;
+        case media::AudioFlag::SCO:
+            return AUDIO_FLAG_SCO;
+        case media::AudioFlag::BEACON:
+            return AUDIO_FLAG_BEACON;
+        case media::AudioFlag::HW_AV_SYNC:
+            return AUDIO_FLAG_HW_AV_SYNC;
+        case media::AudioFlag::HW_HOTWORD:
+            return AUDIO_FLAG_HW_HOTWORD;
+        case media::AudioFlag::BYPASS_INTERRUPTION_POLICY:
+            return AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
+        case media::AudioFlag::BYPASS_MUTE:
+            return AUDIO_FLAG_BYPASS_MUTE;
+        case media::AudioFlag::LOW_LATENCY:
+            return AUDIO_FLAG_LOW_LATENCY;
+        case media::AudioFlag::DEEP_BUFFER:
+            return AUDIO_FLAG_DEEP_BUFFER;
+        case media::AudioFlag::NO_MEDIA_PROJECTION:
+            return AUDIO_FLAG_NO_MEDIA_PROJECTION;
+        case media::AudioFlag::MUTE_HAPTIC:
+            return AUDIO_FLAG_MUTE_HAPTIC;
+        case media::AudioFlag::NO_SYSTEM_CAPTURE:
+            return AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+        case media::AudioFlag::CAPTURE_PRIVATE:
+            return AUDIO_FLAG_CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Maps a single-bit legacy audio flag to its AIDL enumerator. NONE has no
+// AIDL representation (an empty mask) and is rejected, as are values with
+// multiple bits set.
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy) {
+    switch (legacy) {
+        case AUDIO_FLAG_NONE:
+            return unexpected(BAD_VALUE);
+        case AUDIO_FLAG_AUDIBILITY_ENFORCED:
+            return media::AudioFlag::AUDIBILITY_ENFORCED;
+        case AUDIO_FLAG_SECURE:
+            return media::AudioFlag::SECURE;
+        case AUDIO_FLAG_SCO:
+            return media::AudioFlag::SCO;
+        case AUDIO_FLAG_BEACON:
+            return media::AudioFlag::BEACON;
+        case AUDIO_FLAG_HW_AV_SYNC:
+            return media::AudioFlag::HW_AV_SYNC;
+        case AUDIO_FLAG_HW_HOTWORD:
+            return media::AudioFlag::HW_HOTWORD;
+        case AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY:
+            return media::AudioFlag::BYPASS_INTERRUPTION_POLICY;
+        case AUDIO_FLAG_BYPASS_MUTE:
+            return media::AudioFlag::BYPASS_MUTE;
+        case AUDIO_FLAG_LOW_LATENCY:
+            return media::AudioFlag::LOW_LATENCY;
+        case AUDIO_FLAG_DEEP_BUFFER:
+            return media::AudioFlag::DEEP_BUFFER;
+        case AUDIO_FLAG_NO_MEDIA_PROJECTION:
+            return media::AudioFlag::NO_MEDIA_PROJECTION;
+        case AUDIO_FLAG_MUTE_HAPTIC:
+            return media::AudioFlag::MUTE_HAPTIC;
+        case AUDIO_FLAG_NO_SYSTEM_CAPTURE:
+            return media::AudioFlag::NO_SYSTEM_CAPTURE;
+        case AUDIO_FLAG_CAPTURE_PRIVATE:
+            return media::AudioFlag::CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// Converts an AIDL index-based audio-flags bitmask into the legacy
+// audio_flags_mask_t bitmask.
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl) {
+    return convertBitmask<audio_flags_mask_t, int32_t, audio_flags_mask_t, media::AudioFlag>(
+            aidl, aidl2legacy_AudioFlag_audio_flags_mask_t, index2enum_index<media::AudioFlag>,
+            enumToMask_bitmask<audio_flags_mask_t, audio_flags_mask_t>);
+}
+
+// Converts a legacy audio_flags_mask_t bitmask into the AIDL index-based
+// bitmask representation (bit i <=> i-th media::AudioFlag value).
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy) {
+    return convertBitmask<int32_t, audio_flags_mask_t, media::AudioFlag, audio_flags_mask_t>(
+            legacy, legacy2aidl_audio_flags_mask_t_AudioFlag,
+            index2enum_bitmask<audio_flags_mask_t>,
+            enumToMask_index<int32_t, media::AudioFlag>);
+}
+
+// Converts the AIDL AudioAttributesInternal parcelable into the legacy
+// audio_attributes_t struct (content type, usage, source, flags, tags).
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl) {
+    audio_attributes_t legacy;
+    legacy.content_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
+    // tags is a fixed-size char array; the helper enforces the length bound.
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.tags, legacy.tags, sizeof(legacy.tags)));
+    return legacy;
+}
+
+// Converts a legacy audio_attributes_t struct into the AIDL parcelable.
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy) {
+    media::AudioAttributesInternal aidl;
+    aidl.contentType = VALUE_OR_RETURN(
+            legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
+    return aidl;
+}
+
// Maps the AIDL AudioEncapsulationMode enum to the legacy constant.
// Any value not listed falls through to BAD_VALUE.
ConversionResult<audio_encapsulation_mode_t>
aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl) {
    switch (aidl) {
        case media::AudioEncapsulationMode::NONE:
            return AUDIO_ENCAPSULATION_MODE_NONE;
        case media::AudioEncapsulationMode::ELEMENTARY_STREAM:
            return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
        case media::AudioEncapsulationMode::HANDLE:
            return AUDIO_ENCAPSULATION_MODE_HANDLE;
    }
    return unexpected(BAD_VALUE);
}

// Inverse mapping: legacy audio_encapsulation_mode_t to the AIDL enum.
ConversionResult<media::AudioEncapsulationMode>
legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy) {
    switch (legacy) {
        case AUDIO_ENCAPSULATION_MODE_NONE:
            return media::AudioEncapsulationMode::NONE;
        case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
            return media::AudioEncapsulationMode::ELEMENTARY_STREAM;
        case AUDIO_ENCAPSULATION_MODE_HANDLE:
            return media::AudioEncapsulationMode::HANDLE;
    }
    return unexpected(BAD_VALUE);
}
+
// Converts an AIDL AudioOffloadInfo to the legacy audio_offload_info_t.
// Note: legacy.size is always set to the full size of the current struct
// definition, independent of aidl.version; the nested AudioConfigBase is
// flattened into the legacy sample_rate/channel_mask/format fields.
ConversionResult<audio_offload_info_t>
aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl) {
    audio_offload_info_t legacy;
    legacy.version = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.version));
    legacy.size = sizeof(audio_offload_info_t);
    audio_config_base_t config = VALUE_OR_RETURN(
            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
    legacy.sample_rate = config.sample_rate;
    legacy.channel_mask = config.channel_mask;
    legacy.format = config.format;
    legacy.stream_type = VALUE_OR_RETURN(
            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRate));
    legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
    legacy.has_video = aidl.hasVideo;
    legacy.is_streaming = aidl.isStreaming;
    legacy.bit_width = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitWidth));
    legacy.offload_buffer_size = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.offloadBufferSize));
    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
    legacy.encapsulation_mode = VALUE_OR_RETURN(
            aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(aidl.encapsulationMode));
    legacy.content_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.contentId));
    legacy.sync_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.syncId));
    return legacy;
}

// Converts a legacy audio_offload_info_t to AIDL. The legacy struct is
// versioned and size-tagged, so before reading any field group the declared
// size is checked (via offsetof) to ensure the struct actually contains it;
// v0.2 fields are additionally gated on the version stamp.
ConversionResult<media::AudioOffloadInfo>
legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
    media::AudioOffloadInfo aidl;
    // Version 0.1 fields: reject structs too small to contain them.
    if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
        return unexpected(BAD_VALUE);
    }
    aidl.version = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.version));
    aidl.config.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
    aidl.config.channelMask = VALUE_OR_RETURN(
            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
    aidl.config.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
    aidl.streamType = VALUE_OR_RETURN(
            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
    aidl.bitRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
    aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
    aidl.hasVideo = legacy.has_video;
    aidl.isStreaming = legacy.is_streaming;
    aidl.bitWidth = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_width));
    aidl.offloadBufferSize = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.offload_buffer_size));
    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));

    // Version 0.2 fields: only present when both version and size say so.
    if (legacy.version >= AUDIO_OFFLOAD_INFO_VERSION_0_2) {
        if (legacy.size <
            offsetof(audio_offload_info_t, sync_id) + sizeof(audio_offload_info_t::sync_id)) {
            return unexpected(BAD_VALUE);
        }
        aidl.encapsulationMode = VALUE_OR_RETURN(
                legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(
                        legacy.encapsulation_mode));
        aidl.contentId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.content_id));
        aidl.syncId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.sync_id));
    }
    return aidl;
}
+
// Converts an AIDL AudioConfig (full config, including offload info) to the
// legacy audio_config_t; each field failure propagates as an error.
ConversionResult<audio_config_t>
aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl) {
    audio_config_t legacy;
    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
    legacy.channel_mask = VALUE_OR_RETURN(
            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
    legacy.offload_info = VALUE_OR_RETURN(
            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
    legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
    return legacy;
}

// Inverse conversion: legacy audio_config_t to AIDL AudioConfig.
// frameCount is widened to int64_t on the AIDL side.
ConversionResult<media::AudioConfig>
legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy) {
    media::AudioConfig aidl;
    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
    aidl.channelMask = VALUE_OR_RETURN(
            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
    aidl.offloadInfo = VALUE_OR_RETURN(
            legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
    return aidl;
}
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl) {
+ audio_config_base_t legacy;
+ legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+ legacy.channel_mask = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+ legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+ return legacy;
+}
+
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy) {
+ media::AudioConfigBase aidl;
+ aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+ aidl.channelMask = VALUE_OR_RETURN(
+ legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+ aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+ return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl) {
+ sp<IMemory> legacy;
+ if (!convertSharedFileRegionToIMemory(aidl, &legacy)) {
+ return unexpected(BAD_VALUE);
+ }
+ return legacy;
+}
+
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+ media::SharedFileRegion aidl;
+ if (!convertIMemoryToSharedFileRegion(legacy, &aidl)) {
+ return unexpected(BAD_VALUE);
+ }
+ return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl) {
+ sp<IMemory> legacy;
+ if (!convertNullableSharedFileRegionToIMemory(aidl, &legacy)) {
+ return unexpected(BAD_VALUE);
+ }
+ return legacy;
+}
+
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+ std::optional<media::SharedFileRegion> aidl;
+ if (!convertNullableIMemoryToSharedFileRegion(legacy, &aidl)) {
+ return unexpected(BAD_VALUE);
+ }
+ return aidl;
+}
+
+ConversionResult<AudioTimestamp>
+aidl2legacy_AudioTimestampInternal_AudioTimestamp(const media::AudioTimestampInternal& aidl) {
+ AudioTimestamp legacy;
+ legacy.mPosition = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.position));
+ legacy.mTime.tv_sec = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sec));
+ legacy.mTime.tv_nsec = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.nsec));
+ return legacy;
+}
+
// Converts a legacy AudioTimestamp to AIDL. The timespec seconds are widened
// to int64_t (tv_sec may exceed 32 bits); nanoseconds fit in int32_t.
ConversionResult<media::AudioTimestampInternal>
legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy) {
    media::AudioTimestampInternal aidl;
    aidl.position = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.mPosition));
    aidl.sec = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.mTime.tv_sec));
    aidl.nsec = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.mTime.tv_nsec));
    return aidl;
}
+
// Converts an AIDL AudioUuid to the legacy audio_uuid_t.
// The variable-length AIDL node vector must match the fixed-size legacy node
// array exactly, otherwise the conversion fails with BAD_VALUE.
ConversionResult<audio_uuid_t>
aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl) {
    audio_uuid_t legacy;
    legacy.timeLow = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.timeLow));
    legacy.timeMid = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeMid));
    legacy.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeHiAndVersion));
    legacy.clockSeq = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.clockSeq));
    if (aidl.node.size() != std::size(legacy.node)) {
        return unexpected(BAD_VALUE);
    }
    std::copy(aidl.node.begin(), aidl.node.end(), legacy.node);
    return legacy;
}

// Inverse conversion: legacy audio_uuid_t to AIDL; the fixed-size node array
// is appended into the AIDL vector.
ConversionResult<media::AudioUuid>
legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy) {
    media::AudioUuid aidl;
    aidl.timeLow = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.timeLow));
    aidl.timeMid = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeMid));
    aidl.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeHiAndVersion));
    aidl.clockSeq = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.clockSeq));
    std::copy(legacy.node, legacy.node + std::size(legacy.node), std::back_inserter(aidl.node));
    return aidl;
}
+
// Converts an AIDL EffectDescriptor to the legacy effect_descriptor_t.
// name/implementor are copied into the fixed-size legacy char buffers;
// apiVersion and flags are opaque bit patterns (reinterpret, not range-checked).
ConversionResult<effect_descriptor_t>
aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl) {
    effect_descriptor_t legacy;
    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.type));
    legacy.uuid = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.uuid));
    legacy.apiVersion = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.apiVersion));
    legacy.flags = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.flags));
    legacy.cpuLoad = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.cpuLoad));
    legacy.memoryUsage = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.memoryUsage));
    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
    RETURN_IF_ERROR(
            aidl2legacy_string(aidl.implementor, legacy.implementor, sizeof(legacy.implementor)));
    return legacy;
}

// Inverse conversion: legacy effect_descriptor_t to AIDL.
ConversionResult<media::EffectDescriptor>
legacy2aidl_effect_descriptor_t_EffectDescriptor(const effect_descriptor_t& legacy) {
    media::EffectDescriptor aidl;
    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.type));
    aidl.uuid = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.uuid));
    aidl.apiVersion = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.apiVersion));
    aidl.flags = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.flags));
    aidl.cpuLoad = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.cpuLoad));
    aidl.memoryUsage = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.memoryUsage));
    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
    aidl.implementor = VALUE_OR_RETURN(
            legacy2aidl_string(legacy.implementor, sizeof(legacy.implementor)));
    return aidl;
}
+
// Maps the AIDL AudioEncapsulationMetadataType enum to the legacy constant;
// unlisted values fall through to BAD_VALUE.
ConversionResult<audio_encapsulation_metadata_type_t>
aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
        media::AudioEncapsulationMetadataType aidl) {
    switch (aidl) {
        case media::AudioEncapsulationMetadataType::NONE:
            return AUDIO_ENCAPSULATION_METADATA_TYPE_NONE;
        case media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
            return AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER;
        case media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
            return AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR;
    }
    return unexpected(BAD_VALUE);
}

// Inverse mapping: legacy audio_encapsulation_metadata_type_t to the AIDL enum.
ConversionResult<media::AudioEncapsulationMetadataType>
legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
        audio_encapsulation_metadata_type_t legacy) {
    switch (legacy) {
        case AUDIO_ENCAPSULATION_METADATA_TYPE_NONE:
            return media::AudioEncapsulationMetadataType::NONE;
        case AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER:
            return media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
        case AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR:
            return media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
    }
    return unexpected(BAD_VALUE);
}
+
// Bitmask conversion for encapsulation modes. Both directions use index-based
// packing (index2enum_index / enumToMask_index), i.e. bit N of the mask
// corresponds to the N-th enum value on both the AIDL and legacy sides; only
// the per-bit enum translation differs between directions.
ConversionResult<uint32_t>
aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl) {
    return convertBitmask<uint32_t,
            int32_t,
            audio_encapsulation_mode_t,
            media::AudioEncapsulationMode>(
            aidl, aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t,
            index2enum_index<media::AudioEncapsulationMode>,
            enumToMask_index<uint32_t, audio_encapsulation_mode_t>);
}

// Inverse of aidl2legacy_AudioEncapsulationMode_mask.
ConversionResult<int32_t>
legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy) {
    return convertBitmask<int32_t,
            uint32_t,
            media::AudioEncapsulationMode,
            audio_encapsulation_mode_t>(
            legacy, legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode,
            index2enum_index<audio_encapsulation_mode_t>,
            enumToMask_index<int32_t, media::AudioEncapsulationMode>);
}

// Bitmask conversion for encapsulation metadata types; same index-based
// packing scheme as the encapsulation-mode masks above.
ConversionResult<uint32_t>
aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl) {
    return convertBitmask<uint32_t,
            int32_t,
            audio_encapsulation_metadata_type_t,
            media::AudioEncapsulationMetadataType>(
            aidl, aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t,
            index2enum_index<media::AudioEncapsulationMetadataType>,
            enumToMask_index<uint32_t, audio_encapsulation_metadata_type_t>);
}

// Inverse of aidl2legacy_AudioEncapsulationMetadataType_mask.
ConversionResult<int32_t>
legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy) {
    return convertBitmask<int32_t,
            uint32_t,
            media::AudioEncapsulationMetadataType,
            audio_encapsulation_metadata_type_t>(
            legacy, legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType,
            index2enum_index<audio_encapsulation_metadata_type_t>,
            enumToMask_index<int32_t, media::AudioEncapsulationMetadataType>);
}
+
// Maps the AIDL AudioMixLatencyClass enum to the legacy latency class;
// unlisted values fall through to BAD_VALUE.
ConversionResult<audio_mix_latency_class_t>
aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
        media::AudioMixLatencyClass aidl) {
    switch (aidl) {
        case media::AudioMixLatencyClass::LOW:
            return AUDIO_LATENCY_LOW;
        case media::AudioMixLatencyClass::NORMAL:
            return AUDIO_LATENCY_NORMAL;
    }
    return unexpected(BAD_VALUE);
}

// Inverse mapping: legacy audio_mix_latency_class_t to the AIDL enum.
ConversionResult<media::AudioMixLatencyClass>
legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
        audio_mix_latency_class_t legacy) {
    switch (legacy) {
        case AUDIO_LATENCY_LOW:
            return media::AudioMixLatencyClass::LOW;
        case AUDIO_LATENCY_NORMAL:
            return media::AudioMixLatencyClass::NORMAL;
    }
    return unexpected(BAD_VALUE);
}
+
// Converts an AIDL AudioPortDeviceExt to the legacy audio_port_device_ext.
// The nested AIDL device descriptor (type + address) is flattened into the
// legacy type/address fields; the address is bounded by sizeof(legacy.address).
ConversionResult<audio_port_device_ext>
aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl) {
    audio_port_device_ext legacy;
    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.device.type));
    RETURN_IF_ERROR(
            aidl2legacy_string(aidl.device.address, legacy.address, sizeof(legacy.address)));
    legacy.encapsulation_modes = VALUE_OR_RETURN(
            aidl2legacy_AudioEncapsulationMode_mask(aidl.encapsulationModes));
    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
            aidl2legacy_AudioEncapsulationMetadataType_mask(aidl.encapsulationMetadataTypes));
    return legacy;
}

// Inverse conversion: legacy audio_port_device_ext to AIDL.
ConversionResult<media::AudioPortDeviceExt>
legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy) {
    media::AudioPortDeviceExt aidl;
    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
    aidl.device.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
    aidl.device.address = VALUE_OR_RETURN(
            legacy2aidl_string(legacy.address, sizeof(legacy.address)));
    aidl.encapsulationModes = VALUE_OR_RETURN(
            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
    aidl.encapsulationMetadataTypes = VALUE_OR_RETURN(
            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
    return aidl;
}
+
// Converts an AIDL AudioPortMixExt (module handle, io handle, latency class)
// to the legacy audio_port_mix_ext.
ConversionResult<audio_port_mix_ext>
aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl) {
    audio_port_mix_ext legacy;
    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
    legacy.latency_class = VALUE_OR_RETURN(
            aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(aidl.latencyClass));
    return legacy;
}

// Inverse conversion: legacy audio_port_mix_ext to AIDL.
ConversionResult<media::AudioPortMixExt>
legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy) {
    media::AudioPortMixExt aidl;
    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
    aidl.latencyClass = VALUE_OR_RETURN(
            legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(legacy.latency_class));
    return aidl;
}
+
+ConversionResult<audio_port_session_ext>
+aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl) {
+ audio_port_session_ext legacy;
+ legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+ return legacy;
+}
+
+ConversionResult<media::AudioPortSessionExt>
+legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy) {
+ media::AudioPortSessionExt aidl;
+ aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+ return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_v7_ext = decltype(audio_port_v7::ext);
+
+ConversionResult<audio_port_v7_ext> aidl2legacy_AudioPortExt(
+ const media::AudioPortExt& aidl, media::AudioPortType type) {
+ audio_port_v7_ext legacy;
+ switch (type) {
+ case media::AudioPortType::NONE:
+ // Just verify that the union is empty.
+ VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+ return legacy;
+ case media::AudioPortType::DEVICE:
+ legacy.device = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ VALUE_OR_RETURN(UNION_GET(aidl, device))));
+ return legacy;
+ case media::AudioPortType::MIX:
+ legacy.mix = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+ VALUE_OR_RETURN(UNION_GET(aidl, mix))));
+ return legacy;
+ case media::AudioPortType::SESSION:
+ legacy.session = VALUE_OR_RETURN(aidl2legacy_AudioPortSessionExt_audio_port_session_ext(
+ VALUE_OR_RETURN(UNION_GET(aidl, session))));
+ return legacy;
+
+ }
+ LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
// Converts the legacy ext union to the AIDL AudioPortExt union, selecting the
// member to read from the legacy port type. UNION_SET tags the AIDL union
// with the corresponding member; NONE maps to the "unspecified" member.
ConversionResult<media::AudioPortExt> legacy2aidl_AudioPortExt(
        const audio_port_v7_ext& legacy, audio_port_type_t type) {
    media::AudioPortExt aidl;
    switch (type) {
        case AUDIO_PORT_TYPE_NONE:
            UNION_SET(aidl, unspecified, false);
            return aidl;
        case AUDIO_PORT_TYPE_DEVICE:
            UNION_SET(aidl, device,
                      VALUE_OR_RETURN(
                              legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy.device)));
            return aidl;
        case AUDIO_PORT_TYPE_MIX:
            UNION_SET(aidl, mix,
                      VALUE_OR_RETURN(legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy.mix)));
            return aidl;
        case AUDIO_PORT_TYPE_SESSION:
            UNION_SET(aidl, session,
                      VALUE_OR_RETURN(legacy2aidl_audio_port_session_ext_AudioPortSessionExt(
                              legacy.session)));
            return aidl;
    }
    LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
}
+
// Converts an AIDL AudioProfile to the legacy audio_profile. The legacy struct
// stores rates and masks in fixed-size arrays with explicit counts, so each
// AIDL vector is length-checked against the array capacity before copying.
ConversionResult<audio_profile>
aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl) {
    audio_profile legacy;
    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));

    if (aidl.samplingRates.size() > std::size(legacy.sample_rates)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(aidl.samplingRates.begin(), aidl.samplingRates.end(), legacy.sample_rates,
                         convertIntegral<int32_t, unsigned int>));
    legacy.num_sample_rates = aidl.samplingRates.size();

    if (aidl.channelMasks.size() > std::size(legacy.channel_masks)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
                         aidl2legacy_int32_t_audio_channel_mask_t));
    legacy.num_channel_masks = aidl.channelMasks.size();
    return legacy;
}

// Inverse conversion: legacy audio_profile to AIDL. The legacy counts are
// validated against the array capacities (an out-of-range count would mean a
// corrupt struct) before the valid prefixes are appended to the AIDL vectors.
ConversionResult<media::AudioProfile>
legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy) {
    media::AudioProfile aidl;
    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));

    if (legacy.num_sample_rates > std::size(legacy.sample_rates)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(legacy.sample_rates, legacy.sample_rates + legacy.num_sample_rates,
                         std::back_inserter(aidl.samplingRates),
                         convertIntegral<unsigned int, int32_t>));

    if (legacy.num_channel_masks > std::size(legacy.channel_masks)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
                         std::back_inserter(aidl.channelMasks),
                         legacy2aidl_audio_channel_mask_t_int32_t));
    return aidl;
}
+
// Converts an AIDL AudioGain to the legacy audio_gain. Field-for-field
// conversion; the mode field is a gain-mode bitmask, the rest are scalars.
ConversionResult<audio_gain>
aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl) {
    audio_gain legacy;
    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
    legacy.channel_mask = VALUE_OR_RETURN(
            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
    legacy.min_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.minValue));
    legacy.max_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.maxValue));
    legacy.default_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.defaultValue));
    legacy.step_value = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.stepValue));
    legacy.min_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.minRampMs));
    legacy.max_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.maxRampMs));
    return legacy;
}

// Inverse conversion: legacy audio_gain to AIDL.
ConversionResult<media::AudioGain>
legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy) {
    media::AudioGain aidl;
    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
    aidl.channelMask = VALUE_OR_RETURN(
            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
    aidl.minValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_value));
    aidl.maxValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_value));
    aidl.defaultValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.default_value));
    aidl.stepValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.step_value));
    aidl.minRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_ramp_ms));
    aidl.maxRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_ramp_ms));
    return aidl;
}
+
// Converts an AIDL AudioPort to the legacy audio_port_v7.
// Profiles and gains live in fixed-size legacy arrays with explicit counts,
// so each AIDL vector is length-checked against the array capacity before
// element-wise conversion. The ext union is converted according to aidl.type.
ConversionResult<audio_port_v7>
aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
    audio_port_v7 legacy;
    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));

    if (aidl.profiles.size() > std::size(legacy.audio_profiles)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(convertRange(aidl.profiles.begin(), aidl.profiles.end(), legacy.audio_profiles,
                                 aidl2legacy_AudioProfile_audio_profile));
    legacy.num_audio_profiles = aidl.profiles.size();

    if (aidl.gains.size() > std::size(legacy.gains)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(convertRange(aidl.gains.begin(), aidl.gains.end(), legacy.gains,
                                 aidl2legacy_AudioGain_audio_gain));
    legacy.num_gains = aidl.gains.size();

    legacy.active_config = VALUE_OR_RETURN(
            aidl2legacy_AudioPortConfig_audio_port_config(aidl.activeConfig));
    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortExt(aidl.ext, aidl.type));
    return legacy;
}

// Inverse conversion: legacy audio_port_v7 to AIDL. The legacy counts are
// validated against the array capacities before the valid prefixes are
// converted into the AIDL vectors.
ConversionResult<media::AudioPort>
legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
    media::AudioPort aidl;
    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));

    if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
                         std::back_inserter(aidl.profiles),
                         legacy2aidl_audio_profile_AudioProfile));

    if (legacy.num_gains > std::size(legacy.gains)) {
        return unexpected(BAD_VALUE);
    }
    RETURN_IF_ERROR(
            convertRange(legacy.gains, legacy.gains + legacy.num_gains,
                         std::back_inserter(aidl.gains),
                         legacy2aidl_audio_gain_AudioGain));

    aidl.activeConfig = VALUE_OR_RETURN(
            legacy2aidl_audio_port_config_AudioPortConfig(legacy.active_config));
    aidl.ext = VALUE_OR_RETURN(legacy2aidl_AudioPortExt(legacy.ext, legacy.type));
    return aidl;
}
+
// Maps the AIDL AudioMode enum to the legacy audio_mode_t; unlisted values
// fall through to BAD_VALUE.
ConversionResult<audio_mode_t>
aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl) {
    switch (aidl) {
        case media::AudioMode::INVALID:
            return AUDIO_MODE_INVALID;
        case media::AudioMode::CURRENT:
            return AUDIO_MODE_CURRENT;
        case media::AudioMode::NORMAL:
            return AUDIO_MODE_NORMAL;
        case media::AudioMode::RINGTONE:
            return AUDIO_MODE_RINGTONE;
        case media::AudioMode::IN_CALL:
            return AUDIO_MODE_IN_CALL;
        case media::AudioMode::IN_COMMUNICATION:
            return AUDIO_MODE_IN_COMMUNICATION;
        case media::AudioMode::CALL_SCREEN:
            return AUDIO_MODE_CALL_SCREEN;
    }
    return unexpected(BAD_VALUE);
}

// Inverse mapping: legacy audio_mode_t to the AIDL enum. AUDIO_MODE_CNT is a
// count sentinel, not a real mode, and is explicitly rejected with BAD_VALUE.
ConversionResult<media::AudioMode>
legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy) {
    switch (legacy) {
        case AUDIO_MODE_INVALID:
            return media::AudioMode::INVALID;
        case AUDIO_MODE_CURRENT:
            return media::AudioMode::CURRENT;
        case AUDIO_MODE_NORMAL:
            return media::AudioMode::NORMAL;
        case AUDIO_MODE_RINGTONE:
            return media::AudioMode::RINGTONE;
        case AUDIO_MODE_IN_CALL:
            return media::AudioMode::IN_CALL;
        case AUDIO_MODE_IN_COMMUNICATION:
            return media::AudioMode::IN_COMMUNICATION;
        case AUDIO_MODE_CALL_SCREEN:
            return media::AudioMode::CALL_SCREEN;
        case AUDIO_MODE_CNT:
            break;
    }
    return unexpected(BAD_VALUE);
}
+
// Maps the AIDL AudioUniqueIdUse enum to the legacy audio_unique_id_use_t;
// unlisted values fall through to BAD_VALUE.
ConversionResult<audio_unique_id_use_t>
aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl) {
    switch (aidl) {
        case media::AudioUniqueIdUse::UNSPECIFIED:
            return AUDIO_UNIQUE_ID_USE_UNSPECIFIED;
        case media::AudioUniqueIdUse::SESSION:
            return AUDIO_UNIQUE_ID_USE_SESSION;
        case media::AudioUniqueIdUse::MODULE:
            return AUDIO_UNIQUE_ID_USE_MODULE;
        case media::AudioUniqueIdUse::EFFECT:
            return AUDIO_UNIQUE_ID_USE_EFFECT;
        case media::AudioUniqueIdUse::PATCH:
            return AUDIO_UNIQUE_ID_USE_PATCH;
        case media::AudioUniqueIdUse::OUTPUT:
            return AUDIO_UNIQUE_ID_USE_OUTPUT;
        case media::AudioUniqueIdUse::INPUT:
            return AUDIO_UNIQUE_ID_USE_INPUT;
        case media::AudioUniqueIdUse::CLIENT:
            return AUDIO_UNIQUE_ID_USE_CLIENT;
    }
    return unexpected(BAD_VALUE);
}

// Inverse mapping: legacy audio_unique_id_use_t to the AIDL enum.
// AUDIO_UNIQUE_ID_USE_MAX is a count sentinel and is rejected with BAD_VALUE.
ConversionResult<media::AudioUniqueIdUse>
legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(audio_unique_id_use_t legacy) {
    switch (legacy) {
        case AUDIO_UNIQUE_ID_USE_UNSPECIFIED:
            return media::AudioUniqueIdUse::UNSPECIFIED;
        case AUDIO_UNIQUE_ID_USE_SESSION:
            return media::AudioUniqueIdUse::SESSION;
        case AUDIO_UNIQUE_ID_USE_MODULE:
            return media::AudioUniqueIdUse::MODULE;
        case AUDIO_UNIQUE_ID_USE_EFFECT:
            return media::AudioUniqueIdUse::EFFECT;
        case AUDIO_UNIQUE_ID_USE_PATCH:
            return media::AudioUniqueIdUse::PATCH;
        case AUDIO_UNIQUE_ID_USE_OUTPUT:
            return media::AudioUniqueIdUse::OUTPUT;
        case AUDIO_UNIQUE_ID_USE_INPUT:
            return media::AudioUniqueIdUse::INPUT;
        case AUDIO_UNIQUE_ID_USE_CLIENT:
            return media::AudioUniqueIdUse::CLIENT;
        case AUDIO_UNIQUE_ID_USE_MAX:
            break;
    }
    return unexpected(BAD_VALUE);
}
+
// Volume group IDs are opaque integer handles on both sides, so conversion is
// a bit-preserving reinterpret in each direction (no range validation).
ConversionResult<volume_group_t>
aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
    return convertReinterpret<volume_group_t>(aidl);
}

ConversionResult<int32_t>
legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
    return convertReinterpret<int32_t>(legacy);
}
+
+} // namespace android
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 6d14954..81394cb 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,6 +1,9 @@
cc_library_headers {
name: "libaudioclient_headers",
vendor_available: true,
+ min_sdk_version: "29",
+ host_supported: true,
+
header_libs: [
"libaudiofoundation_headers",
],
@@ -10,7 +13,16 @@
export_header_lib_headers: [
"libaudiofoundation_headers",
],
- host_supported: true,
+ static_libs: [
+ "audioflinger-aidl-unstable-cpp",
+ "audiopolicy-aidl-unstable-cpp",
+ "av-types-aidl-unstable-cpp",
+ ],
+ export_static_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
+ "audiopolicy-aidl-unstable-cpp",
+ "av-types-aidl-unstable-cpp",
+ ],
target: {
darwin: {
enabled: false,
@@ -27,6 +39,7 @@
"AudioVolumeGroup.cpp",
],
shared_libs: [
+ "audioflinger-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
"libaudiofoundation",
"libaudioutils",
@@ -42,8 +55,10 @@
include_dirs: ["system/media/audio_utils/include"],
export_include_dirs: ["include"],
export_shared_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
],
+ header_libs: ["libaudioclient_headers"],
}
cc_library_shared {
@@ -53,7 +68,7 @@
export_aidl_headers: true,
local_include_dirs: ["aidl"],
include_dirs: [
- "frameworks/av/media/libaudioclient/aidl",
+ "frameworks/av/aidl",
],
},
@@ -61,8 +76,6 @@
// AIDL files for audioclient interfaces
// The headers for these interfaces will be available to any modules that
// include libaudioclient, at the path "aidl/package/path/BnFoo.h"
- ":libaudioclient_aidl_callback",
- ":libaudioclient_aidl_private",
":libaudioclient_aidl",
"AudioEffect.cpp",
@@ -71,19 +84,19 @@
"AudioTrack.cpp",
"AudioTrackShared.cpp",
"IAudioFlinger.cpp",
- "IAudioFlingerClient.cpp",
"IAudioPolicyService.cpp",
- "IAudioPolicyServiceClient.cpp",
- "IAudioTrack.cpp",
- "IEffect.cpp",
- "IEffectClient.cpp",
"ToneGenerator.cpp",
"PlayerBase.cpp",
"RecordingActivityTracker.cpp",
"TrackPlayerBase.cpp",
],
shared_libs: [
+ "audioclient-types-aidl-unstable-cpp",
+ "audioflinger-aidl-unstable-cpp",
+ "audiopolicy-aidl-unstable-cpp",
+ "av-types-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudioutils",
"libaudiopolicy",
@@ -97,16 +110,22 @@
"libmediautils",
"libnblog",
"libprocessgroup",
+ "libshmemcompat",
"libutils",
"libvibrator",
],
- export_shared_lib_headers: ["libbinder"],
+ export_shared_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
+ "audiopolicy-aidl-unstable-cpp",
+ "libbinder",
+ ],
include_dirs: [
"frameworks/av/media/libnbaio/include_mono/",
],
local_include_dirs: [
- "include/media", "aidl"
+ "include/media",
+ "aidl",
],
header_libs: [
"libaudioclient_headers",
@@ -114,10 +133,16 @@
"libmedia_headers",
],
export_header_lib_headers: ["libaudioclient_headers"],
+ export_static_lib_headers: [
+ "effect-aidl-unstable-cpp",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
- // for memory heap analysis
static_libs: [
+ "effect-aidl-unstable-cpp",
+ // for memory heap analysis
"libc_malloc_debug_backtrace",
+ "shared-file-region-aidl-unstable-cpp",
],
cflags: [
"-Wall",
@@ -125,13 +150,90 @@
"-Wno-error=deprecated-declarations",
],
sanitize: {
- misc_undefined : [
+ misc_undefined: [
"unsigned-integer-overflow",
"signed-integer-overflow",
],
},
}
+// This is intended for clients needing to include AidlConversionUtil.h, without dragging in a lot of extra
+// dependencies.
+cc_library_headers {
+ name: "libaudioclient_aidl_conversion_util",
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ min_sdk_version: "29",
+ export_include_dirs: [
+ "include",
+ ],
+ header_libs: [
+ "libbase_headers",
+ ],
+ export_header_lib_headers: [
+ "libbase_headers",
+ ],
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth.updatable",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+cc_library {
+ name: "libaudioclient_aidl_conversion",
+ srcs: ["AidlConversion.cpp"],
+ export_include_dirs: ["include"],
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ min_sdk_version: "29",
+ header_libs: [
+ "libaudioclient_aidl_conversion_util",
+ "libaudio_system_headers",
+ ],
+ export_header_lib_headers: [
+ "libaudioclient_aidl_conversion_util",
+ ],
+ shared_libs: [
+ "audioclient-types-aidl-unstable-cpp",
+ "libbase",
+ "libbinder",
+ "liblog",
+ "libshmemcompat",
+ "libutils",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ export_shared_lib_headers: [
+ "audioclient-types-aidl-unstable-cpp",
+ "libbase",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ ],
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
// AIDL interface between libaudioclient and framework.jar
filegroup {
name: "libaudioclient_aidl",
@@ -141,25 +243,6 @@
path: "aidl",
}
-// Used to strip the "aidl/" from the path, so the build system can predict the
-// output filename.
-filegroup {
- name: "libaudioclient_aidl_private",
- srcs: [
- "aidl/android/media/IAudioRecord.aidl",
- ],
- path: "aidl",
-}
-
-// AIDL interface for audio track callback
-filegroup {
- name: "libaudioclient_aidl_callback",
- srcs: [
- "aidl/android/media/IAudioTrackCallback.aidl",
- ],
- path: "aidl",
-}
-
aidl_interface {
name: "capture_state_listener-aidl",
unstable: true,
@@ -168,3 +251,155 @@
"aidl/android/media/ICaptureStateListener.aidl",
],
}
+
+aidl_interface {
+ name: "effect-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ host_supported: true,
+ double_loadable: true,
+ vendor_available: true,
+ srcs: [
+ "aidl/android/media/IEffect.aidl",
+ "aidl/android/media/IEffectClient.aidl",
+ ],
+ imports: [
+ "shared-file-region-aidl",
+ ],
+}
+
+aidl_interface {
+ name: "audioclient-types-aidl",
+ unstable: true,
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/android/media/AudioAttributesInternal.aidl",
+ "aidl/android/media/AudioClient.aidl",
+ "aidl/android/media/AudioConfig.aidl",
+ "aidl/android/media/AudioConfigBase.aidl",
+ "aidl/android/media/AudioContentType.aidl",
+ "aidl/android/media/AudioDevice.aidl",
+ "aidl/android/media/AudioEncapsulationMode.aidl",
+ "aidl/android/media/AudioEncapsulationMetadataType.aidl",
+ "aidl/android/media/AudioFlag.aidl",
+ "aidl/android/media/AudioGain.aidl",
+ "aidl/android/media/AudioGainConfig.aidl",
+ "aidl/android/media/AudioGainMode.aidl",
+ "aidl/android/media/AudioInputFlags.aidl",
+ "aidl/android/media/AudioIoConfigEvent.aidl",
+ "aidl/android/media/AudioIoDescriptor.aidl",
+ "aidl/android/media/AudioIoFlags.aidl",
+ "aidl/android/media/AudioMixLatencyClass.aidl",
+ "aidl/android/media/AudioMode.aidl",
+ "aidl/android/media/AudioOffloadInfo.aidl",
+ "aidl/android/media/AudioOutputFlags.aidl",
+ "aidl/android/media/AudioPatch.aidl",
+ "aidl/android/media/AudioPort.aidl",
+ "aidl/android/media/AudioPortConfig.aidl",
+ "aidl/android/media/AudioPortConfigType.aidl",
+ "aidl/android/media/AudioPortConfigDeviceExt.aidl",
+ "aidl/android/media/AudioPortConfigExt.aidl",
+ "aidl/android/media/AudioPortConfigMixExt.aidl",
+ "aidl/android/media/AudioPortConfigMixExtUseCase.aidl",
+ "aidl/android/media/AudioPortConfigSessionExt.aidl",
+ "aidl/android/media/AudioPortDeviceExt.aidl",
+ "aidl/android/media/AudioPortExt.aidl",
+ "aidl/android/media/AudioPortMixExt.aidl",
+ "aidl/android/media/AudioPortRole.aidl",
+ "aidl/android/media/AudioPortSessionExt.aidl",
+ "aidl/android/media/AudioPortType.aidl",
+ "aidl/android/media/AudioProfile.aidl",
+ "aidl/android/media/AudioSourceType.aidl",
+ "aidl/android/media/AudioStreamType.aidl",
+ "aidl/android/media/AudioTimestampInternal.aidl",
+ "aidl/android/media/AudioUniqueIdUse.aidl",
+ "aidl/android/media/AudioUsage.aidl",
+ "aidl/android/media/AudioUuid.aidl",
+ "aidl/android/media/EffectDescriptor.aidl",
+ ],
+ imports: [
+ "audio_common-aidl",
+ ],
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ },
+ },
+}
+
+aidl_interface {
+ name: "audioflinger-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ host_supported: true,
+ vendor_available: true,
+ srcs: [
+ "aidl/android/media/CreateEffectRequest.aidl",
+ "aidl/android/media/CreateEffectResponse.aidl",
+ "aidl/android/media/CreateRecordRequest.aidl",
+ "aidl/android/media/CreateRecordResponse.aidl",
+ "aidl/android/media/CreateTrackRequest.aidl",
+ "aidl/android/media/CreateTrackResponse.aidl",
+ "aidl/android/media/OpenInputRequest.aidl",
+ "aidl/android/media/OpenInputResponse.aidl",
+ "aidl/android/media/OpenOutputRequest.aidl",
+ "aidl/android/media/OpenOutputResponse.aidl",
+ "aidl/android/media/RenderPosition.aidl",
+
+ "aidl/android/media/IAudioFlingerService.aidl",
+ "aidl/android/media/IAudioFlingerClient.aidl",
+ "aidl/android/media/IAudioRecord.aidl",
+ "aidl/android/media/IAudioTrack.aidl",
+ "aidl/android/media/IAudioTrackCallback.aidl",
+ ],
+ imports: [
+ "audio_common-aidl",
+ "audioclient-types-aidl",
+ "av-types-aidl",
+ "effect-aidl",
+ "shared-file-region-aidl",
+ ],
+ double_loadable: true,
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ },
+ },
+}
+
+aidl_interface {
+ name: "audiopolicy-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ host_supported: true,
+ vendor_available: true,
+ srcs: [
+ "aidl/android/media/RecordClientInfo.aidl",
+
+ "aidl/android/media/IAudioPolicyServiceClient.aidl",
+ ],
+ imports: [
+ "audioclient-types-aidl",
+ ],
+ double_loadable: true,
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ },
+ },
+}
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 3ead6cb..79ea1bb 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -23,77 +23,35 @@
#include <sys/types.h>
#include <limits.h>
-#include <private/media/AudioEffectShared.h>
-#include <media/AudioEffect.h>
-
-#include <utils/Log.h>
#include <binder/IPCThreadState.h>
-
-
+#include <media/AudioEffect.h>
+#include <media/ShmemCompat.h>
+#include <private/media/AudioEffectShared.h>
+#include <utils/Log.h>
namespace android {
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
+
+namespace {
+
+// Copy from a raw pointer + size into a vector of bytes.
+void appendToBuffer(const void* data,
+ size_t size,
+ std::vector<uint8_t>* buffer) {
+ const uint8_t* p = reinterpret_cast<const uint8_t*>(data);
+ buffer->insert(buffer->end(), p, p + size);
+}
+
+} // namespace
// ---------------------------------------------------------------------------
AudioEffect::AudioEffect(const String16& opPackageName)
- : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
+ : mOpPackageName(opPackageName)
{
}
-
-AudioEffect::AudioEffect(const effect_uuid_t *type,
- const String16& opPackageName,
- const effect_uuid_t *uuid,
- int32_t priority,
- effect_callback_t cbf,
- void* user,
- audio_session_t sessionId,
- audio_io_handle_t io,
- const AudioDeviceTypeAddr& device,
- bool probe
- )
- : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
- AutoMutex lock(mConstructLock);
- mStatus = set(type, uuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
-AudioEffect::AudioEffect(const char *typeStr,
- const String16& opPackageName,
- const char *uuidStr,
- int32_t priority,
- effect_callback_t cbf,
- void* user,
- audio_session_t sessionId,
- audio_io_handle_t io,
- const AudioDeviceTypeAddr& device,
- bool probe
- )
- : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
- effect_uuid_t type;
- effect_uuid_t *pType = NULL;
- effect_uuid_t uuid;
- effect_uuid_t *pUuid = NULL;
-
- ALOGV("Constructor string\n - type: %s\n - uuid: %s", typeStr, uuidStr);
-
- if (typeStr != NULL) {
- if (stringToGuid(typeStr, &type) == NO_ERROR) {
- pType = &type;
- }
- }
-
- if (uuidStr != NULL) {
- if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
- pUuid = &uuid;
- }
- }
-
- AutoMutex lock(mConstructLock);
- mStatus = set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
status_t AudioEffect::set(const effect_uuid_t *type,
const effect_uuid_t *uuid,
int32_t priority,
@@ -104,7 +62,7 @@
const AudioDeviceTypeAddr& device,
bool probe)
{
- sp<IEffect> iEffect;
+ sp<media::IEffect> iEffect;
sp<IMemory> cblk;
int enabled;
@@ -143,9 +101,29 @@
mClientPid = IPCThreadState::self()->getCallingPid();
mClientUid = IPCThreadState::self()->getCallingUid();
- iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
- mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
- probe, &mStatus, &mId, &enabled);
+ media::CreateEffectRequest request;
+ request.desc = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_effect_descriptor_t_EffectDescriptor(mDescriptor));
+ request.client = mIEffectClient;
+ request.priority = priority;
+ request.output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(io));
+ request.sessionId = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(mSessionId));
+ request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
+ request.opPackageName = VALUE_OR_RETURN_STATUS(legacy2aidl_String16_string(mOpPackageName));
+ request.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(mClientPid));
+ request.probe = probe;
+
+ media::CreateEffectResponse response;
+
+ mStatus = audioFlinger->createEffect(request, &response);
+
+ if (mStatus == OK) {
+ mId = response.id;
+ enabled = response.enabled;
+ iEffect = response.effect;
+ mDescriptor = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_EffectDescriptor_effect_descriptor_t(response.desc));
+ }
// In probe mode, we stop here and return the status: the IEffect interface to
// audio flinger will not be retained. initCheck() will return the creation status
@@ -166,8 +144,10 @@
mEnabled = (volatile int32_t)enabled;
- cblk = iEffect->getCblk();
- if (cblk == 0) {
+ if (media::SharedFileRegion shmem;
+ !iEffect->getCblk(&shmem).isOk()
+ || !convertSharedFileRegionToIMemory(shmem, &cblk)
+ || cblk == 0) {
mStatus = NO_INIT;
ALOGE("Could not get control block");
return mStatus;
@@ -194,6 +174,34 @@
return mStatus;
}
+status_t AudioEffect::set(const char *typeStr,
+ const char *uuidStr,
+ int32_t priority,
+ effect_callback_t cbf,
+ void* user,
+ audio_session_t sessionId,
+ audio_io_handle_t io,
+ const AudioDeviceTypeAddr& device,
+ bool probe)
+{
+ effect_uuid_t type;
+ effect_uuid_t *pType = nullptr;
+ effect_uuid_t uuid;
+ effect_uuid_t *pUuid = nullptr;
+
+ ALOGV("AudioEffect::set string\n - type: %s\n - uuid: %s",
+ typeStr ? typeStr : "nullptr", uuidStr ? uuidStr : "nullptr");
+
+ if (stringToGuid(typeStr, &type) == NO_ERROR) {
+ pType = &type;
+ }
+ if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
+ pUuid = &uuid;
+ }
+
+ return set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
+}
+
AudioEffect::~AudioEffect()
{
@@ -242,15 +250,19 @@
}
status_t status = NO_ERROR;
-
AutoMutex lock(mLock);
if (enabled != mEnabled) {
+ Status bs;
+
if (enabled) {
ALOGV("enable %p", this);
- status = mIEffect->enable();
+ bs = mIEffect->enable(&status);
} else {
ALOGV("disable %p", this);
- status = mIEffect->disable();
+ bs = mIEffect->disable(&status);
+ }
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
}
if (status == NO_ERROR) {
mEnabled = enabled;
@@ -283,7 +295,20 @@
mLock.lock();
}
- status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
+ std::vector<uint8_t> data;
+ appendToBuffer(cmdData, cmdSize, &data);
+
+ status_t status;
+ std::vector<uint8_t> response;
+
+ Status bs = mIEffect->command(cmdCode, data, *replySize, &response, &status);
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
+ }
+ if (status == NO_ERROR) {
+ memcpy(replyData, response.data(), response.size());
+ *replySize = response.size();
+ }
if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
if (status == NO_ERROR) {
@@ -298,7 +323,6 @@
return status;
}
-
status_t AudioEffect::setParameter(effect_param_t *param)
{
if (mProbe) {
@@ -312,14 +336,27 @@
return BAD_VALUE;
}
- uint32_t size = sizeof(int);
uint32_t psize = ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize;
ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data,
(param->psize == 8) ? *((int *)param->data + 1): -1);
- return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size,
- ¶m->status);
+ std::vector<uint8_t> cmd;
+ appendToBuffer(param, sizeof(effect_param_t) + psize, &cmd);
+ std::vector<uint8_t> response;
+ status_t status;
+ Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM,
+ cmd,
+ sizeof(int),
+ &response,
+ &status);
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
+ return status;
+ }
+ assert(response.size() == sizeof(int));
+ memcpy(¶m->status, response.data(), response.size());
+ return status;
}
status_t AudioEffect::setParameterDeferred(effect_param_t *param)
@@ -364,8 +401,18 @@
if (mCblk->clientIndex == 0) {
return INVALID_OPERATION;
}
- uint32_t size = 0;
- return mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT, 0, NULL, &size, NULL);
+ std::vector<uint8_t> cmd;
+ std::vector<uint8_t> response;
+ status_t status;
+ Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT,
+ cmd,
+ 0,
+ &response,
+ &status);
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
+ }
+ return status;
}
status_t AudioEffect::getParameter(effect_param_t *param)
@@ -387,8 +434,18 @@
uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) +
param->vsize;
- return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param,
- &psize, param);
+ status_t status;
+ std::vector<uint8_t> cmd;
+ std::vector<uint8_t> response;
+ appendToBuffer(param, sizeof(effect_param_t) + param->psize, &cmd);
+
+ Status bs = mIEffect->command(EFFECT_CMD_GET_PARAM, cmd, psize, &response, &status);
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
+ return status;
+ }
+ memcpy(param, response.data(), response.size());
+ return status;
}
@@ -436,19 +493,18 @@
}
}
-void AudioEffect::commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize __unused,
- void *cmdData,
- uint32_t replySize __unused,
- void *replyData)
+void AudioEffect::commandExecuted(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ const std::vector<uint8_t>& replyData)
{
- if (cmdData == NULL || replyData == NULL) {
+ if (cmdData.empty() || replyData.empty()) {
return;
}
if (mCbf != NULL && cmdCode == EFFECT_CMD_SET_PARAM) {
- effect_param_t *cmd = (effect_param_t *)cmdData;
- cmd->status = *(int32_t *)replyData;
+ std::vector<uint8_t> cmdDataCopy(cmdData);
+ effect_param_t* cmd = reinterpret_cast<effect_param_t *>(cmdDataCopy.data());
+ cmd->status = *reinterpret_cast<const int32_t *>(replyData.data());
mCbf(EVENT_PARAMETER_CHANGED, mUserData, cmd);
}
}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index d4c421a..112cb67 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -35,9 +35,20 @@
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
+#define VALUE_OR_FATAL(result) \
+ ({ \
+ auto _tmp = (result); \
+ LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
+ "Failed result (%d)", \
+ _tmp.error()); \
+ std::move(_tmp.value()); \
+ })
+
#define WAIT_PERIOD_MS 10
namespace android {
+using aidl_utils::statusTFromBinderStatus;
+
// ---------------------------------------------------------------------------
// static
@@ -279,7 +290,8 @@
mAttributes.source = inputSource;
if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION
|| inputSource == AUDIO_SOURCE_CAMCORDER) {
- mAttributes.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+ mAttributes.flags = static_cast<audio_flags_mask_t>(
+ mAttributes.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
}
} else {
// stream type shouldn't be looked at, this track has audio attributes
@@ -440,7 +452,7 @@
mActive = true;
if (!(flags & CBLK_INVALID)) {
- status = mAudioRecord->start(event, triggerSession).transactionError();
+ status = statusTFromBinderStatus(mAudioRecord->start(event, triggerSession));
if (status == DEAD_OBJECT) {
flags |= CBLK_INVALID;
}
@@ -738,12 +750,11 @@
IAudioFlinger::CreateRecordInput input;
IAudioFlinger::CreateRecordOutput output;
audio_session_t originalSessionId;
- sp<media::IAudioRecord> record;
void *iMemPointer;
audio_track_cblk_t* cblk;
status_t status;
- std::string flagsAsString;
- std::string originalFlagsAsString;
+ static const int32_t kMaxCreateAttempts = 3;
+ int32_t remainingAttempts = kMaxCreateAttempts;
if (audioFlinger == 0) {
ALOGE("%s(%d): Could not get audioflinger", __func__, mPortId);
@@ -773,7 +784,7 @@
// use case 3: obtain/release mode
(mTransfer == TRANSFER_OBTAIN);
if (!useCaseAllowed) {
- ALOGW("%s(%d): AUDIO_INPUT_FLAG_FAST denied, incompatible transfer = %s",
+ ALOGD("%s(%d): AUDIO_INPUT_FLAG_FAST denied, incompatible transfer = %s",
__func__, mPortId,
convertTransferToText(mTransfer));
mFlags = (audio_input_flags_t) (mFlags & ~(AUDIO_INPUT_FLAG_FAST |
@@ -805,15 +816,26 @@
input.sessionId = mSessionId;
originalSessionId = mSessionId;
- record = audioFlinger->createRecord(input,
- output,
- &status);
+ do {
+ media::CreateRecordResponse response;
+ status = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response);
+ output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+ if (status == NO_ERROR) {
+ break;
+ }
+ if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
+ ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
+ __func__, mPortId, status);
+ goto exit;
+ }
+ // FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
+ // between audio policy manager and audio flinger during the input stream open sequence
+ // and can be recovered by retrying.
+ // Leave time for race condition to clear before retrying and randomize delay
+ // to reduce the probability of concurrent retries in locked steps.
+ usleep((20 + rand() % 30) * 10000);
+ } while (1);
- if (status != NO_ERROR) {
- ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
- __func__, mPortId, status);
- goto exit;
- }
ALOG_ASSERT(record != 0);
// AudioFlinger now owns the reference to the I/O handle,
@@ -872,7 +894,7 @@
IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
mDeathNotifier.clear();
}
- mAudioRecord = record;
+ mAudioRecord = output.audioRecord;
mCblkMemory = output.cblk;
mBufferMemory = output.buffers;
IPCThreadState::self()->flushCommands();
@@ -922,15 +944,13 @@
mDeathNotifier = new DeathNotifier(this);
IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
- InputFlagConverter::toString(mFlags, flagsAsString);
- InputFlagConverter::toString(mOrigFlags, originalFlagsAsString);
mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD) + std::to_string(mPortId);
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
.set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
// the following are immutable (at least until restore)
- .set(AMEDIAMETRICS_PROP_FLAGS, flagsAsString.c_str())
- .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, originalFlagsAsString.c_str())
+ .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
+ .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
.set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
.set(AMEDIAMETRICS_PROP_TRACKID, mPortId)
.set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
@@ -1096,7 +1116,7 @@
}
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
- // sanity-check. user is most-likely passing an error code, and it would
+ // Validation: the user is most likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
ALOGE("%s(%d) (buffer=%p, size=%zu (%zu)",
__func__, mPortId, buffer, userSize, userSize);
@@ -1323,7 +1343,7 @@
mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
size_t readSize = audioBuffer.size;
- // Sanity check on returned size
+ // Validate the returned size
if (ssize_t(readSize) < 0 || readSize > reqSize) {
ALOGE("%s(%d): EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
__func__, mPortId, reqSize, ssize_t(readSize));
@@ -1421,8 +1441,8 @@
if (mActive) {
// callback thread or sync event hasn't changed
// FIXME this fails if we have a new AudioFlinger instance
- result = mAudioRecord->start(
- AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE).transactionError();
+ result = statusTFromBinderStatus(mAudioRecord->start(
+ AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE));
}
mFramesReadServerOffset = mFramesRead; // server resets to zero so we need an offset.
}
@@ -1512,7 +1532,13 @@
status_t AudioRecord::getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones)
{
AutoMutex lock(mLock);
- return mAudioRecord->getActiveMicrophones(activeMicrophones).transactionError();
+ std::vector<media::MicrophoneInfoData> mics;
+ status_t status = statusTFromBinderStatus(mAudioRecord->getActiveMicrophones(&mics));
+ activeMicrophones->resize(mics.size());
+ for (size_t i = 0; status == OK && i < mics.size(); ++i) {
+ status = activeMicrophones->at(i).readFromParcelable(mics[i]);
+ }
+ return status;
}
status_t AudioRecord::setPreferredMicrophoneDirection(audio_microphone_direction_t direction)
@@ -1528,7 +1554,7 @@
// the internal AudioRecord hasn't be created yet, so just stash the attribute.
return OK;
} else {
- return mAudioRecord->setPreferredMicrophoneDirection(direction).transactionError();
+ return statusTFromBinderStatus(mAudioRecord->setPreferredMicrophoneDirection(direction));
}
}
@@ -1544,7 +1570,7 @@
// the internal AudioRecord hasn't be created yet, so just stash the attribute.
return OK;
} else {
- return mAudioRecord->setPreferredMicrophoneFieldDimension(zoom).transactionError();
+ return statusTFromBinderStatus(mAudioRecord->setPreferredMicrophoneFieldDimension(zoom));
}
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index f621aa5..84a75dd 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -23,6 +23,7 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>
@@ -32,10 +33,17 @@
#include <system/audio.h>
+#define VALUE_OR_RETURN_BINDER_STATUS(x) \
+ ({ auto _tmp = (x); \
+ if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
+ std::move(_tmp.value()); })
+
// ----------------------------------------------------------------------------
namespace android {
+using binder::Status;
+
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
Mutex AudioSystem::gLockErrorCallbacks;
@@ -47,8 +55,9 @@
record_config_callback AudioSystem::gRecordConfigCallback = NULL;
// Required to be held while calling into gSoundTriggerCaptureStateListener.
+class CaptureStateListenerImpl;
Mutex gSoundTriggerCaptureStateListenerLock;
-sp<AudioSystem::CaptureStateListener> gSoundTriggerCaptureStateListener = nullptr;
+sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
// establish binder interface to AudioFlinger service
const sp<IAudioFlinger> AudioSystem::get_audio_flinger()
@@ -62,7 +71,7 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
do {
- binder = sm->getService(String16("media.audio_flinger"));
+ binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
if (binder != 0)
break;
ALOGW("AudioFlinger not published, waiting...");
@@ -74,7 +83,8 @@
reportNoError = true;
}
binder->linkToDeath(gAudioFlingerClient);
- gAudioFlinger = interface_cast<IAudioFlinger>(binder);
+ gAudioFlinger = new AudioFlingerClientAdapter(
+ interface_cast<media::IAudioFlingerService>(binder));
LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
afc = gAudioFlingerClient;
// Make sure callbacks can be received by gAudioFlingerClient
@@ -520,11 +530,17 @@
ALOGW("AudioFlinger server died!");
}
-void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc) {
+Status AudioSystem::AudioFlingerClient::ioConfigChanged(
+ media::AudioIoConfigEvent _event,
+ const media::AudioIoDescriptor& _ioDesc) {
+ audio_io_config_event event = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event));
+ sp<AudioIoDescriptor> ioDesc(
+ VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(_ioDesc)));
+
ALOGV("ioConfigChanged() event %d", event);
- if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
+ if (ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return Status::ok();
audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
std::vector<sp<AudioDeviceCallback>> callbacksToCall;
@@ -639,6 +655,8 @@
// If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
}
+
+ return Status::ok();
}
status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
@@ -1170,18 +1188,18 @@
return aps->setAllowedCapturePolicy(uid, flags);
}
-bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
+audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info)
{
- ALOGV("isOffloadSupported()");
+ ALOGV("%s", __func__);
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return false;
- return aps->isOffloadSupported(info);
+ if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
+ return aps->getOffloadSupport(info);
}
status_t AudioSystem::listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
@@ -1189,7 +1207,7 @@
return aps->listAudioPorts(role, type, num_ports, ports, generation);
}
-status_t AudioSystem::getAudioPort(struct audio_port *port)
+status_t AudioSystem::getAudioPort(struct audio_port_v7 *port)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -1362,7 +1380,7 @@
return aps->registerPolicyMixes(mixes, registration);
}
-status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -1376,7 +1394,7 @@
}
status_t AudioSystem::setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices)
+ const AudioDeviceTypeAddrVector& devices)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -1603,74 +1621,141 @@
return aps->isCallScreenModeSupported();
}
-status_t AudioSystem::setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device)
+status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
- return aps->setPreferredDeviceForStrategy(strategy, device);
+ return aps->setDevicesRoleForStrategy(strategy, role, devices);
}
-status_t AudioSystem::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
- return aps->removePreferredDeviceForStrategy(strategy);
+ return aps->removeDevicesRoleForStrategy(strategy, role);
}
-status_t AudioSystem::getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device)
+status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
- return aps->getPreferredDeviceForStrategy(strategy, device);
+ return aps->getDevicesForRoleAndStrategy(strategy, role, devices);
+}
+
+status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) {
+ return PERMISSION_DENIED;
+ }
+ return aps->setDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) {
+ return PERMISSION_DENIED;
+ }
+ return aps->addDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) {
+ return PERMISSION_DENIED;
+ }
+ return aps->removeDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) {
+ return PERMISSION_DENIED;
+ }
+ return aps->clearDevicesRoleForCapturePreset(audioSource, role);
+}
+
+status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) {
+ return PERMISSION_DENIED;
+ }
+ return aps->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
}
class CaptureStateListenerImpl : public media::BnCaptureStateListener,
public IBinder::DeathRecipient {
public:
+ CaptureStateListenerImpl(
+ const sp<IAudioPolicyService>& aps,
+ const sp<AudioSystem::CaptureStateListener>& listener)
+ : mAps(aps), mListener(listener) {}
+
+ void init() {
+ bool active;
+ status_t status = mAps->registerSoundTriggerCaptureStateListener(this, &active);
+ if (status != NO_ERROR) {
+ mListener->onServiceDied();
+ return;
+ }
+ mListener->onStateChanged(active);
+ IInterface::asBinder(mAps)->linkToDeath(this);
+ }
+
binder::Status setCaptureState(bool active) override {
Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
- gSoundTriggerCaptureStateListener->onStateChanged(active);
+ mListener->onStateChanged(active);
return binder::Status::ok();
}
void binderDied(const wp<IBinder>&) override {
Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
- gSoundTriggerCaptureStateListener->onServiceDied();
+ mListener->onServiceDied();
gSoundTriggerCaptureStateListener = nullptr;
}
+
+private:
+ // Need this in order to keep the death recipient alive.
+ sp<IAudioPolicyService> mAps;
+ sp<AudioSystem::CaptureStateListener> mListener;
};
status_t AudioSystem::registerSoundTriggerCaptureStateListener(
const sp<CaptureStateListener>& listener) {
+ LOG_ALWAYS_FATAL_IF(listener == nullptr);
+
const sp<IAudioPolicyService>& aps =
AudioSystem::get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
- sp<CaptureStateListenerImpl> wrapper = new CaptureStateListenerImpl();
-
Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+ gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
+ gSoundTriggerCaptureStateListener->init();
- bool active;
- status_t status =
- aps->registerSoundTriggerCaptureStateListener(wrapper, &active);
- if (status != NO_ERROR) {
- listener->onServiceDied();
- return NO_ERROR;
- }
- gSoundTriggerCaptureStateListener = listener;
- listener->onStateChanged(active);
- sp<IBinder> binder = IInterface::asBinder(aps);
- binder->linkToDeath(wrapper);
return NO_ERROR;
}
@@ -1707,20 +1792,22 @@
}
-void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
+Status AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
{
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
mAudioPortCallbacks[i]->onAudioPortListUpdate();
}
+ return Status::ok();
}
-void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
+Status AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
{
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
mAudioPortCallbacks[i]->onAudioPatchListUpdate();
}
+ return Status::ok();
}
// ----------------------------------------------------------------------------
@@ -1754,20 +1841,26 @@
return mAudioVolumeGroupCallback.size();
}
-void AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(volume_group_t group,
- int flags)
-{
+Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
+ int32_t flags) {
+ volume_group_t groupLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_volume_group_t(group));
+ int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
+
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
- mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(group, flags);
+ mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
}
+ return Status::ok();
}
// ----------------------------------------------------------------------------
-void AudioSystem::AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(
- String8 regId, int32_t state)
-{
- ALOGV("AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(%s, %d)", regId.string(), state);
+Status AudioSystem::AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(
+ const ::std::string& regId, int32_t state) {
+ ALOGV("AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(%s, %d)", regId.c_str(), state);
+
+ String8 regIdLegacy = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_string_view_String8(regId));
+ int stateLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(state));
dynamic_policy_callback cb = NULL;
{
Mutex::Autolock _l(AudioSystem::gLock);
@@ -1775,19 +1868,20 @@
}
if (cb != NULL) {
- cb(DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE, regId, state);
+ cb(DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE, regIdLegacy, stateLegacy);
}
+ return Status::ok();
}
-void AudioSystem::AudioPolicyServiceClient::onRecordingConfigurationUpdate(
- int event,
- const record_client_info_t *clientInfo,
- const audio_config_base_t *clientConfig,
- std::vector<effect_descriptor_t> clientEffects,
- const audio_config_base_t *deviceConfig,
- std::vector<effect_descriptor_t> effects,
- audio_patch_handle_t patchHandle,
- audio_source_t source) {
+Status AudioSystem::AudioPolicyServiceClient::onRecordingConfigurationUpdate(
+ int32_t event,
+ const media::RecordClientInfo& clientInfo,
+ const media::AudioConfigBase& clientConfig,
+ const std::vector<media::EffectDescriptor>& clientEffects,
+ const media::AudioConfigBase& deviceConfig,
+ const std::vector<media::EffectDescriptor>& effects,
+ int32_t patchHandle,
+ media::AudioSourceType source) {
record_config_callback cb = NULL;
{
Mutex::Autolock _l(AudioSystem::gLock);
@@ -1795,9 +1889,29 @@
}
if (cb != NULL) {
- cb(event, clientInfo, clientConfig, clientEffects,
- deviceConfig, effects, patchHandle, source);
+ int eventLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(event));
+ record_client_info_t clientInfoLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_RecordClientInfo_record_client_info_t(clientInfo));
+ audio_config_base_t clientConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioConfigBase_audio_config_base_t(clientConfig));
+ std::vector<effect_descriptor_t> clientEffectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<effect_descriptor_t>>(
+ clientEffects,
+ aidl2legacy_EffectDescriptor_effect_descriptor_t));
+ audio_config_base_t deviceConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioConfigBase_audio_config_base_t(deviceConfig));
+ std::vector<effect_descriptor_t> effectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<effect_descriptor_t>>(
+ effects,
+ aidl2legacy_EffectDescriptor_effect_descriptor_t));
+ audio_patch_handle_t patchHandleLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_patch_handle_t(patchHandle));
+ audio_source_t sourceLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioSourceType_audio_source_t(source));
+ cb(eventLegacy, &clientInfoLegacy, &clientConfigLegacy, clientEffectsLegacy,
+ &deviceConfigLegacy, effectsLegacy, patchHandleLegacy, sourceLegacy);
}
+ return Status::ok();
}
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
@@ -1819,4 +1933,28 @@
ALOGW("AudioPolicyService server died!");
}
+ConversionResult<record_client_info_t>
+aidl2legacy_RecordClientInfo_record_client_info_t(const media::RecordClientInfo& aidl) {
+ record_client_info_t legacy;
+ legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+ legacy.uid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.uid));
+ legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+ legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+ legacy.port_id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+ legacy.silenced = aidl.silenced;
+ return legacy;
+}
+
+ConversionResult<media::RecordClientInfo>
+legacy2aidl_record_client_info_t_RecordClientInfo(const record_client_info_t& legacy) {
+ media::RecordClientInfo aidl;
+ aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(legacy.riid));
+ aidl.uid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.uid));
+ aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+ aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+ aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.port_id));
+ aidl.silenced = legacy.silenced;
+ return aidl;
+}
+
} // namespace android
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 011b0fa..1b1e143 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -38,6 +38,15 @@
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
+#define VALUE_OR_FATAL(result) \
+ ({ \
+ auto _tmp = (result); \
+ LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
+ "Failed result (%d)", \
+ _tmp.error()); \
+ std::move(_tmp.value()); \
+ })
+
#define WAIT_PERIOD_MS 10
#define WAIT_STREAM_END_TIMEOUT_SEC 120
static const int kMaxLoopCountNotifications = 32;
@@ -210,7 +219,11 @@
return NO_ERROR;
}
-AudioTrack::AudioTrack()
+AudioTrack::AudioTrack() : AudioTrack("" /*opPackageName*/)
+{
+}
+
+AudioTrack::AudioTrack(const std::string& opPackageName)
: mStatus(NO_INIT),
mState(STATE_STOPPED),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -218,11 +231,12 @@
mPausedPosition(0),
mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+ mOpPackageName(opPackageName),
mAudioTrackCallback(new AudioTrackCallback())
{
mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
mAttributes.usage = AUDIO_USAGE_UNKNOWN;
- mAttributes.flags = 0x0;
+ mAttributes.flags = AUDIO_FLAG_NONE;
strcpy(mAttributes.tags, "");
}
@@ -244,12 +258,14 @@
const audio_attributes_t* pAttributes,
bool doNotReconnect,
float maxRequiredSpeed,
- audio_port_handle_t selectedDeviceId)
+ audio_port_handle_t selectedDeviceId,
+ const std::string& opPackageName)
: mStatus(NO_INIT),
mState(STATE_STOPPED),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
mPausedPosition(0),
+ mOpPackageName(opPackageName),
mAudioTrackCallback(new AudioTrackCallback())
{
mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -277,13 +293,15 @@
pid_t pid,
const audio_attributes_t* pAttributes,
bool doNotReconnect,
- float maxRequiredSpeed)
+ float maxRequiredSpeed,
+ const std::string& opPackageName)
: mStatus(NO_INIT),
mState(STATE_STOPPED),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
mPausedPosition(0),
mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+ mOpPackageName(opPackageName),
mAudioTrackCallback(new AudioTrackCallback())
{
mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -458,7 +476,7 @@
if (format == AUDIO_FORMAT_DEFAULT) {
format = AUDIO_FORMAT_PCM_16_BIT;
} else if (format == AUDIO_FORMAT_IEC61937) { // HDMI pass-through?
- mAttributes.flags |= AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+ flags = static_cast<audio_output_flags_t>(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
}
// validate parameters
@@ -529,6 +547,7 @@
} else {
mOffloadInfo = NULL;
memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
+ mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
}
mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -635,13 +654,51 @@
return status;
}
+
+status_t AudioTrack::set(
+ audio_stream_type_t streamType,
+ uint32_t sampleRate,
+ audio_format_t format,
+ uint32_t channelMask,
+ size_t frameCount,
+ audio_output_flags_t flags,
+ callback_t cbf,
+ void* user,
+ int32_t notificationFrames,
+ const sp<IMemory>& sharedBuffer,
+ bool threadCanCallJava,
+ audio_session_t sessionId,
+ transfer_type transferType,
+ const audio_offload_info_t *offloadInfo,
+ uid_t uid,
+ pid_t pid,
+ const audio_attributes_t* pAttributes,
+ bool doNotReconnect,
+ float maxRequiredSpeed,
+ audio_port_handle_t selectedDeviceId)
+{
+ return set(streamType, sampleRate, format,
+ static_cast<audio_channel_mask_t>(channelMask),
+ frameCount, flags, cbf, user, notificationFrames, sharedBuffer,
+ threadCanCallJava, sessionId, transferType, offloadInfo, uid, pid,
+ pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+}
+
// -------------------------------------------------------------------------
status_t AudioTrack::start()
{
- const int64_t beginNs = systemTime();
AutoMutex lock(mLock);
+ if (mState == STATE_ACTIVE) {
+ return INVALID_OPERATION;
+ }
+
+ ALOGV("%s(%d): prior state:%s", __func__, mPortId, stateToString(mState));
+
+ // Defer logging here due to OpenSL ES repeated start calls.
+ // TODO(b/154868033) after fix, restore this logging back to the beginning of start().
+ const int64_t beginNs = systemTime();
status_t status = NO_ERROR; // logged: make sure to set this before returning.
mediametrics::Defer defer([&] {
mediametrics::LogItem(mMetricsId)
@@ -655,12 +712,6 @@
.set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
.record(); });
- ALOGV("%s(%d): prior state:%s", __func__, mPortId, stateToString(mState));
-
- if (mState == STATE_ACTIVE) {
- status = INVALID_OPERATION;
- return status;
- }
mInUnderrun = true;
@@ -733,7 +784,7 @@
int32_t flags = android_atomic_and(~(CBLK_STREAM_END_DONE | CBLK_DISABLED), &mCblk->mFlags);
if (!(flags & CBLK_INVALID)) {
- status = mAudioTrack->start();
+ mAudioTrack->start(&status);
if (status == DEAD_OBJECT) {
flags |= CBLK_INVALID;
}
@@ -785,8 +836,9 @@
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
.set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
.set(AMEDIAMETRICS_PROP_STATE, stateToString(mState))
+ .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, (int32_t)mProxy->getBufferSizeInFrames())
+ .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getUnderrunCount_l())
.record();
- logBufferSizeUnderruns();
});
ALOGV("%s(%d): prior state:%s", __func__, mPortId, stateToString(mState));
@@ -1139,16 +1191,6 @@
return NO_ERROR;
}
-void AudioTrack::logBufferSizeUnderruns() {
- LOG_ALWAYS_FATAL_IF(mMetricsId.size() == 0, "mMetricsId is empty!");
- ALOGD("%s(), mMetricsId = %s", __func__, mMetricsId.c_str());
- // FIXME THis hangs! Why?
-// android::mediametrics::LogItem(mMetricsId)
-// .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, (int32_t) getBufferSizeInFrames())
-// .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getUnderrunCount())
-// .record();
-}
-
ssize_t AudioTrack::setBufferSizeInFrames(size_t bufferSizeInFrames)
{
AutoMutex lock(mLock);
@@ -1163,7 +1205,11 @@
ssize_t originalBufferSize = mProxy->getBufferSizeInFrames();
ssize_t finalBufferSize = mProxy->setBufferSizeInFrames((uint32_t) bufferSizeInFrames);
if (originalBufferSize != finalBufferSize) {
- logBufferSizeUnderruns();
+ android::mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE)
+ .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, (int32_t)mProxy->getBufferSizeInFrames())
+ .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t)getUnderrunCount_l())
+ .record();
}
return finalBufferSize;
}
@@ -1431,7 +1477,8 @@
status_t AudioTrack::attachAuxEffect(int effectId)
{
AutoMutex lock(mLock);
- status_t status = mAudioTrack->attachAuxEffect(effectId);
+ status_t status;
+ mAudioTrack->attachAuxEffect(effectId, &status);
if (status == NO_ERROR) {
mAuxEffectId = effectId;
}
@@ -1558,12 +1605,13 @@
input.selectedDeviceId = mSelectedDeviceId;
input.sessionId = mSessionId;
input.audioTrackCallback = mAudioTrackCallback;
+ input.opPackageName = mOpPackageName;
- IAudioFlinger::CreateTrackOutput output;
-
- sp<IAudioTrack> track = audioFlinger->createTrack(input,
- output,
- &status);
+ media::CreateTrackResponse response;
+ status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
+ IAudioFlinger::CreateTrackOutput output = VALUE_OR_FATAL(
+ IAudioFlinger::CreateTrackOutput::fromAidl(
+ response));
if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("%s(%d): AudioFlinger could not create track, status: %d output %d",
@@ -1573,7 +1621,7 @@
}
goto exit;
}
- ALOG_ASSERT(track != 0);
+ ALOG_ASSERT(output.audioTrack != 0);
mFrameCount = output.frameCount;
mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
@@ -1595,7 +1643,9 @@
// so we are no longer responsible for releasing it.
// FIXME compare to AudioRecord
- sp<IMemory> iMem = track->getCblk();
+ std::optional<media::SharedFileRegion> sfr;
+ output.audioTrack->getCblk(&sfr);
+ sp<IMemory> iMem = VALUE_OR_FATAL(aidl2legacy_NullableSharedFileRegion_IMemory(sfr));
if (iMem == 0) {
ALOGE("%s(%d): Could not get control block", __func__, mPortId);
status = NO_INIT;
@@ -1616,7 +1666,7 @@
IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
mDeathNotifier.clear();
}
- mAudioTrack = track;
+ mAudioTrack = output.audioTrack;
mCblkMemory = iMem;
IPCThreadState::self()->flushCommands();
@@ -1632,7 +1682,7 @@
mAwaitBoost = true;
}
} else {
- ALOGW("%s(%d): AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu",
+ ALOGD("%s(%d): AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu",
__func__, mPortId, mReqFrameCount, mFrameCount);
}
}
@@ -1672,7 +1722,7 @@
}
}
- mAudioTrack->attachAuxEffect(mAuxEffectId);
+ mAudioTrack->attachAuxEffect(mAuxEffectId, &status);
// If IAudioTrack is re-created, don't let the requested frameCount
// decrease. This can confuse clients that cache frameCount().
@@ -1716,16 +1766,12 @@
// is the first log of the AudioTrack and must be present before
// any AudioTrack client logs will be accepted.
- std::string flagsAsString;
- OutputFlagConverter::toString(mFlags, flagsAsString);
- std::string originalFlagsAsString;
- OutputFlagConverter::toString(mOrigFlags, originalFlagsAsString);
mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) + std::to_string(mPortId);
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
// the following are immutable
- .set(AMEDIAMETRICS_PROP_FLAGS, flagsAsString.c_str())
- .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, originalFlagsAsString.c_str())
+ .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
+ .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
.set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
.set(AMEDIAMETRICS_PROP_TRACKID, mPortId) // dup from key
.set(AMEDIAMETRICS_PROP_CONTENTTYPE, toString(mAttributes.content_type).c_str())
@@ -1920,7 +1966,8 @@
ALOGW("%s(%d): releaseBuffer() track %p disabled due to previous underrun, restarting",
__func__, mPortId, this);
// FIXME ignoring status
- mAudioTrack->start();
+ status_t status;
+ mAudioTrack->start(&status);
}
}
@@ -1943,7 +1990,7 @@
}
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
- // Sanity-check: user is most-likely passing an error code, and it would
+ // Validation: user is most likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
ALOGE("%s(%d): AudioTrack::write(buffer=%p, size=%zu (%zd)",
__func__, mPortId, buffer, userSize, userSize);
@@ -2333,7 +2380,7 @@
mUserData, &audioBuffer);
size_t writtenSize = audioBuffer.size;
- // Sanity check on returned size
+ // Validate the returned size
if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
ALOGE("%s(%d): EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
__func__, mPortId, reqSize, ssize_t(writtenSize));
@@ -2528,11 +2575,17 @@
if (shaper.isStarted()) {
operationToEnd->setNormalizedTime(1.f);
}
- return mAudioTrack->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
+ media::VolumeShaperConfiguration config;
+ shaper.mConfiguration->writeToParcelable(&config);
+ media::VolumeShaperOperation operation;
+ operationToEnd->writeToParcelable(&operation);
+ status_t status;
+ mAudioTrack->applyVolumeShaper(config, operation, &status);
+ return status;
});
if (mState == STATE_ACTIVE) {
- result = mAudioTrack->start();
+ mAudioTrack->start(&result);
}
// server resets to zero so we offset
mFramesWrittenServerOffset =
@@ -2602,7 +2655,9 @@
status_t AudioTrack::setParameters(const String8& keyValuePairs)
{
AutoMutex lock(mLock);
- return mAudioTrack->setParameters(keyValuePairs);
+ status_t status;
+ mAudioTrack->setParameters(keyValuePairs.c_str(), &status);
+ return status;
}
status_t AudioTrack::selectPresentation(int presentationId, int programId)
@@ -2614,7 +2669,9 @@
ALOGV("%s(%d): PresentationId/ProgramId[%s]",
__func__, mPortId, param.toString().string());
- return mAudioTrack->setParameters(param.toString());
+ status_t status;
+ mAudioTrack->setParameters(param.toString().c_str(), &status);
+ return status;
}
VolumeShaper::Status AudioTrack::applyVolumeShaper(
@@ -2623,11 +2680,16 @@
{
AutoMutex lock(mLock);
mVolumeHandler->setIdIfNecessary(configuration);
- VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(configuration, operation);
+ media::VolumeShaperConfiguration config;
+ configuration->writeToParcelable(&config);
+ media::VolumeShaperOperation op;
+ operation->writeToParcelable(&op);
+ VolumeShaper::Status status;
+ mAudioTrack->applyVolumeShaper(config, op, &status);
if (status == DEAD_OBJECT) {
if (restoreTrack_l("applyVolumeShaper") == OK) {
- status = mAudioTrack->applyVolumeShaper(configuration, operation);
+ mAudioTrack->applyVolumeShaper(config, op, &status);
}
}
if (status >= 0) {
@@ -2647,10 +2709,20 @@
sp<VolumeShaper::State> AudioTrack::getVolumeShaperState(int id)
{
AutoMutex lock(mLock);
- sp<VolumeShaper::State> state = mAudioTrack->getVolumeShaperState(id);
+ std::optional<media::VolumeShaperState> vss;
+ mAudioTrack->getVolumeShaperState(id, &vss);
+ sp<VolumeShaper::State> state;
+ if (vss.has_value()) {
+ state = new VolumeShaper::State();
+ state->readFromParcelable(vss.value());
+ }
if (state.get() == nullptr && (mCblk->mFlags & CBLK_INVALID) != 0) {
if (restoreTrack_l("getVolumeShaperState") == OK) {
- state = mAudioTrack->getVolumeShaperState(id);
+ mAudioTrack->getVolumeShaperState(id, &vss);
+ if (vss.has_value()) {
+ state = new VolumeShaper::State();
+ state->readFromParcelable(vss.value());
+ }
}
}
return state;
@@ -2744,7 +2816,11 @@
status_t status;
if (isOffloadedOrDirect_l()) {
// use Binder to get timestamp
- status = mAudioTrack->getTimestamp(timestamp);
+ media::AudioTimestampInternal ts;
+ mAudioTrack->getTimestamp(&ts, &status);
+ if (status == OK) {
+ timestamp = VALUE_OR_FATAL(aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts));
+ }
} else {
// read timestamp from shared memory
ExtendedTimestamp ets;
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index f1f8f9c..e2c9698 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -900,11 +900,8 @@
}
audio_track_cblk_t* cblk = mCblk;
- int32_t flush = cblk->u.mStreaming.mFlush;
- if (flush != mFlush) {
- // FIXME should return an accurate value, but over-estimate is better than under-estimate
- return mFrameCount;
- }
+ flushBufferIfNeeded();
+
const int32_t rear = getRear();
ssize_t filled = audio_utils::safe_sub_overflow(rear, cblk->u.mStreaming.mFront);
// pipe should not already be overfull
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 16d2232..20124df 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -24,1588 +24,1158 @@
#include <binder/IPCThreadState.h>
#include <binder/Parcel.h>
-#include <mediautils/ServiceUtilities.h>
-#include <mediautils/TimeCheck.h>
#include "IAudioFlinger.h"
namespace android {
-enum {
- CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION,
- CREATE_RECORD,
- SAMPLE_RATE,
- RESERVED, // obsolete, was CHANNEL_COUNT
- FORMAT,
- FRAME_COUNT,
- LATENCY,
- SET_MASTER_VOLUME,
- SET_MASTER_MUTE,
- MASTER_VOLUME,
- MASTER_MUTE,
- SET_STREAM_VOLUME,
- SET_STREAM_MUTE,
- STREAM_VOLUME,
- STREAM_MUTE,
- SET_MODE,
- SET_MIC_MUTE,
- GET_MIC_MUTE,
- SET_RECORD_SILENCED,
- SET_PARAMETERS,
- GET_PARAMETERS,
- REGISTER_CLIENT,
- GET_INPUTBUFFERSIZE,
- OPEN_OUTPUT,
- OPEN_DUPLICATE_OUTPUT,
- CLOSE_OUTPUT,
- SUSPEND_OUTPUT,
- RESTORE_OUTPUT,
- OPEN_INPUT,
- CLOSE_INPUT,
- INVALIDATE_STREAM,
- SET_VOICE_VOLUME,
- GET_RENDER_POSITION,
- GET_INPUT_FRAMES_LOST,
- NEW_AUDIO_UNIQUE_ID,
- ACQUIRE_AUDIO_SESSION_ID,
- RELEASE_AUDIO_SESSION_ID,
- QUERY_NUM_EFFECTS,
- QUERY_EFFECT,
- GET_EFFECT_DESCRIPTOR,
- CREATE_EFFECT,
- MOVE_EFFECTS,
- LOAD_HW_MODULE,
- GET_PRIMARY_OUTPUT_SAMPLING_RATE,
- GET_PRIMARY_OUTPUT_FRAME_COUNT,
- SET_LOW_RAM_DEVICE,
- LIST_AUDIO_PORTS,
- GET_AUDIO_PORT,
- CREATE_AUDIO_PATCH,
- RELEASE_AUDIO_PATCH,
- LIST_AUDIO_PATCHES,
- SET_AUDIO_PORT_CONFIG,
- GET_AUDIO_HW_SYNC_FOR_SESSION,
- SYSTEM_READY,
- FRAME_COUNT_HAL,
- GET_MICROPHONES,
- SET_MASTER_BALANCE,
- GET_MASTER_BALANCE,
- SET_EFFECT_SUSPENDED,
- SET_AUDIO_HAL_PIDS
-};
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
#define MAX_ITEMS_PER_LIST 1024
+#define VALUE_OR_RETURN_BINDER(x) \
+ ({ \
+ auto _tmp = (x); \
+ if (!_tmp.ok()) return Status::fromStatusT(_tmp.error()); \
+ std::move(_tmp.value()); \
+ })
-class BpAudioFlinger : public BpInterface<IAudioFlinger>
-{
-public:
- explicit BpAudioFlinger(const sp<IBinder>& impl)
- : BpInterface<IAudioFlinger>(impl)
- {
+#define RETURN_STATUS_IF_ERROR(x) \
+ { \
+ auto _tmp = (x); \
+ if (_tmp != OK) return _tmp; \
}
- virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
- CreateTrackOutput& output,
- status_t *status)
- {
- Parcel data, reply;
- sp<IAudioTrack> track;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-
- if (status == nullptr) {
- return track;
- }
-
- input.writeToParcel(&data);
-
- status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
- if (lStatus != NO_ERROR) {
- ALOGE("createTrack transaction error %d", lStatus);
- *status = DEAD_OBJECT;
- return track;
- }
- *status = reply.readInt32();
- if (*status != NO_ERROR) {
- ALOGE("createTrack returned error %d", *status);
- return track;
- }
- track = interface_cast<IAudioTrack>(reply.readStrongBinder());
- if (track == 0) {
- ALOGE("createTrack returned an NULL IAudioTrack with status OK");
- *status = DEAD_OBJECT;
- return track;
- }
- output.readFromParcel(&reply);
- return track;
+#define RETURN_BINDER_IF_ERROR(x) \
+ { \
+ auto _tmp = (x); \
+ if (_tmp != OK) return Status::fromStatusT(_tmp); \
}
- virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
- CreateRecordOutput& output,
- status_t *status)
- {
- Parcel data, reply;
- sp<media::IAudioRecord> record;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-
- if (status == nullptr) {
- return record;
- }
-
- input.writeToParcel(&data);
-
- status_t lStatus = remote()->transact(CREATE_RECORD, data, &reply);
- if (lStatus != NO_ERROR) {
- ALOGE("createRecord transaction error %d", lStatus);
- *status = DEAD_OBJECT;
- return record;
- }
- *status = reply.readInt32();
- if (*status != NO_ERROR) {
- ALOGE("createRecord returned error %d", *status);
- return record;
- }
-
- record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
- if (record == 0) {
- ALOGE("createRecord returned a NULL IAudioRecord with status OK");
- *status = DEAD_OBJECT;
- return record;
- }
- output.readFromParcel(&reply);
- return record;
- }
-
- virtual uint32_t sampleRate(audio_io_handle_t ioHandle) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- remote()->transact(SAMPLE_RATE, data, &reply);
- return reply.readInt32();
- }
-
- // RESERVED for channelCount()
-
- virtual audio_format_t format(audio_io_handle_t output) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(FORMAT, data, &reply);
- return (audio_format_t) reply.readInt32();
- }
-
- virtual size_t frameCount(audio_io_handle_t ioHandle) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- remote()->transact(FRAME_COUNT, data, &reply);
- return reply.readInt64();
- }
-
- virtual uint32_t latency(audio_io_handle_t output) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(LATENCY, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t setMasterVolume(float value)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeFloat(value);
- remote()->transact(SET_MASTER_VOLUME, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t setMasterMute(bool muted)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(muted);
- remote()->transact(SET_MASTER_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- virtual float masterVolume() const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(MASTER_VOLUME, data, &reply);
- return reply.readFloat();
- }
-
- virtual bool masterMute() const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(MASTER_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- status_t setMasterBalance(float balance) override
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeFloat(balance);
- status_t status = remote()->transact(SET_MASTER_BALANCE, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- return reply.readInt32();
- }
-
- status_t getMasterBalance(float *balance) const override
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_MASTER_BALANCE, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- status = (status_t)reply.readInt32();
- if (status != NO_ERROR) {
- return status;
- }
- *balance = reply.readFloat();
- return NO_ERROR;
- }
-
- virtual status_t setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) stream);
- data.writeFloat(value);
- data.writeInt32((int32_t) output);
- remote()->transact(SET_STREAM_VOLUME, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t setStreamMute(audio_stream_type_t stream, bool muted)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) stream);
- data.writeInt32(muted);
- remote()->transact(SET_STREAM_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- virtual float streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) stream);
- data.writeInt32((int32_t) output);
- remote()->transact(STREAM_VOLUME, data, &reply);
- return reply.readFloat();
- }
-
- virtual bool streamMute(audio_stream_type_t stream) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) stream);
- remote()->transact(STREAM_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t setMode(audio_mode_t mode)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(mode);
- remote()->transact(SET_MODE, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t setMicMute(bool state)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(state);
- remote()->transact(SET_MIC_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- virtual bool getMicMute() const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(GET_MIC_MUTE, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: silences/unsilences capture for a given port via SET_RECORD_SILENCED.
- // Void method: the reply carries no payload and transact status is ignored.
- virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(portId);
- data.writeInt32(silenced ? 1 : 0);
- remote()->transact(SET_RECORD_SILENCED, data, &reply);
- }
-
- // Proxy: sends key/value parameter pairs for an I/O handle via SET_PARAMETERS.
- virtual status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- data.writeString8(keyValuePairs);
- remote()->transact(SET_PARAMETERS, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: queries parameter values (for the given keys) of an I/O handle
- // via GET_PARAMETERS; returns the remote's key/value string.
- virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- data.writeString8(keys);
- remote()->transact(GET_PARAMETERS, data, &reply);
- return reply.readString8();
- }
-
- // Proxy: registers a callback client (for I/O config change notifications)
- // by passing its binder via REGISTER_CLIENT. Void; no reply payload.
- virtual void registerClient(const sp<IAudioFlingerClient>& client)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(client));
- remote()->transact(REGISTER_CLIENT, data, &reply);
- }
-
- // Proxy: queries the minimum input buffer size for the given PCM
- // configuration via GET_INPUTBUFFERSIZE. The size travels as int64
- // (matching the Bn side's writeInt64).
- virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
- audio_channel_mask_t channelMask) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(sampleRate);
- data.writeInt32(format);
- data.writeInt32(channelMask);
- remote()->transact(GET_INPUTBUFFERSIZE, data, &reply);
- return reply.readInt64();
- }
-
- // Proxy: opens an output stream on a HW module via OPEN_OUTPUT.
- // In: module, *config (raw struct write — both processes must agree on the
- // audio_config_t layout), device, flags.
- // Out on success: *output handle, updated *config, *latencyMs.
- // On any failure *output is reset to AUDIO_IO_HANDLE_NONE.
- virtual status_t openOutput(audio_module_handle_t module,
- audio_io_handle_t *output,
- audio_config_t *config,
- const sp<DeviceDescriptorBase>& device,
- uint32_t *latencyMs,
- audio_output_flags_t flags)
- {
- if (output == nullptr || config == nullptr || device == nullptr || latencyMs == nullptr) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(module);
- data.write(config, sizeof(audio_config_t));
- data.writeParcelable(*device);
- data.writeInt32((int32_t) flags);
- status_t status = remote()->transact(OPEN_OUTPUT, data, &reply);
- if (status != NO_ERROR) {
- *output = AUDIO_IO_HANDLE_NONE;
- return status;
- }
- // First reply int32 is the remote's own status; payload follows only on success.
- status = (status_t)reply.readInt32();
- if (status != NO_ERROR) {
- *output = AUDIO_IO_HANDLE_NONE;
- return status;
- }
- *output = (audio_io_handle_t)reply.readInt32();
- ALOGV("openOutput() returned output, %d", *output);
- reply.read(config, sizeof(audio_config_t));
- *latencyMs = reply.readInt32();
- return NO_ERROR;
- }
-
- // Proxy: creates a duplicating output feeding two existing outputs via
- // OPEN_DUPLICATE_OUTPUT; returns the new output handle.
- virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
- audio_io_handle_t output2)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output1);
- data.writeInt32((int32_t) output2);
- remote()->transact(OPEN_DUPLICATE_OUTPUT, data, &reply);
- return (audio_io_handle_t) reply.readInt32();
- }
-
- // Proxy: closes an output stream via CLOSE_OUTPUT; returns remote status.
- virtual status_t closeOutput(audio_io_handle_t output)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(CLOSE_OUTPUT, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: suspends processing on an output via SUSPEND_OUTPUT.
- virtual status_t suspendOutput(audio_io_handle_t output)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(SUSPEND_OUTPUT, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: resumes a previously suspended output via RESTORE_OUTPUT.
- virtual status_t restoreOutput(audio_io_handle_t output)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(RESTORE_OUTPUT, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: opens an input stream on a HW module via OPEN_INPUT.
- // *input, *config and *device are in/out: their current values are sent,
- // and on success they are overwritten with the remote's results.
- // config/device travel as raw struct writes — layouts must match across
- // the binder boundary. On failure *input is reset to AUDIO_IO_HANDLE_NONE.
- virtual status_t openInput(audio_module_handle_t module,
- audio_io_handle_t *input,
- audio_config_t *config,
- audio_devices_t *device,
- const String8& address,
- audio_source_t source,
- audio_input_flags_t flags)
- {
- if (input == NULL || config == NULL || device == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(module);
- data.writeInt32(*input);
- data.write(config, sizeof(audio_config_t));
- data.writeInt32(*device);
- data.writeString8(address);
- data.writeInt32(source);
- data.writeInt32(flags);
- status_t status = remote()->transact(OPEN_INPUT, data, &reply);
- if (status != NO_ERROR) {
- *input = AUDIO_IO_HANDLE_NONE;
- return status;
- }
- // First reply int32 is the remote's own status; payload follows only on success.
- status = (status_t)reply.readInt32();
- if (status != NO_ERROR) {
- *input = AUDIO_IO_HANDLE_NONE;
- return status;
- }
- *input = (audio_io_handle_t)reply.readInt32();
- reply.read(config, sizeof(audio_config_t));
- *device = (audio_devices_t)reply.readInt32();
- return NO_ERROR;
- }
-
- // Proxy: closes an input stream via CLOSE_INPUT.
- // NOTE(review): parameter is declared `int` rather than audio_io_handle_t —
- // kept for ABI/interface compatibility.
- virtual status_t closeInput(int input)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(input);
- remote()->transact(CLOSE_INPUT, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: invalidates all tracks of a stream type via INVALIDATE_STREAM.
- virtual status_t invalidateStream(audio_stream_type_t stream)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) stream);
- remote()->transact(INVALIDATE_STREAM, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: sets the in-call voice volume via SET_VOICE_VOLUME.
- virtual status_t setVoiceVolume(float volume)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeFloat(volume);
- remote()->transact(SET_VOICE_VOLUME, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: queries render position of an output via GET_RENDER_POSITION.
- // halFrames/dspFrames are optional out-params (NULL allowed); they are
- // filled only when the remote status is NO_ERROR.
- virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
- audio_io_handle_t output) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) output);
- remote()->transact(GET_RENDER_POSITION, data, &reply);
- status_t status = reply.readInt32();
- if (status == NO_ERROR) {
- // Both counters are always read to keep the parcel cursor in sync,
- // even when the corresponding out-param is NULL.
- uint32_t tmp = reply.readInt32();
- if (halFrames != NULL) {
- *halFrames = tmp;
- }
- tmp = reply.readInt32();
- if (dspFrames != NULL) {
- *dspFrames = tmp;
- }
- }
- return status;
- }
-
- // Proxy: queries frames lost by an input stream via GET_INPUT_FRAMES_LOST.
- // Returns 0 when the binder transaction itself fails.
- virtual uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- status_t status = remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
- if (status != NO_ERROR) {
- return 0;
- }
- return (uint32_t) reply.readInt32();
- }
-
- // Proxy: allocates a new unique audio id for the given use case via
- // NEW_AUDIO_UNIQUE_ID. On transaction failure returns the sentinel
- // AUDIO_UNIQUE_ID_ALLOCATE.
- virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) use);
- status_t status = remote()->transact(NEW_AUDIO_UNIQUE_ID, data, &reply);
- audio_unique_id_t id = AUDIO_UNIQUE_ID_ALLOCATE;
- if (status == NO_ERROR) {
- id = reply.readInt32();
- }
- return id;
- }
-
- // Proxy: takes a reference on an audio session on behalf of (pid, uid)
- // via ACQUIRE_AUDIO_SESSION_ID. Void; no reply payload.
- void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(audioSession);
- data.writeInt32((int32_t)pid);
- data.writeInt32((int32_t)uid);
- remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
- }
-
- // Proxy: releases a previously acquired audio session reference via
- // RELEASE_AUDIO_SESSION_ID. Void; no reply payload.
- virtual void releaseAudioSessionId(audio_session_t audioSession, int pid)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(audioSession);
- data.writeInt32(pid);
- remote()->transact(RELEASE_AUDIO_SESSION_ID, data, &reply);
- }
-
- // Proxy: queries the number of available effects via QUERY_NUM_EFFECTS.
- // numEffects is optional (NULL allowed); filled only on full success.
- virtual status_t queryNumberEffects(uint32_t *numEffects) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- status_t status = remote()->transact(QUERY_NUM_EFFECTS, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- // The remote's own status precedes the payload.
- status = reply.readInt32();
- if (status != NO_ERROR) {
- return status;
- }
- if (numEffects != NULL) {
- *numEffects = (uint32_t)reply.readInt32();
- }
- return NO_ERROR;
- }
-
- // Proxy: fetches the descriptor of the effect at `index` via QUERY_EFFECT.
- // pDescriptor is required and is filled by a raw struct read on success.
- virtual status_t queryEffect(uint32_t index, effect_descriptor_t *pDescriptor) const
- {
- if (pDescriptor == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(index);
- status_t status = remote()->transact(QUERY_EFFECT, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- status = reply.readInt32();
- if (status != NO_ERROR) {
- return status;
- }
- // NOTE: read() result is unchecked; a short reply leaves *pDescriptor partial.
- reply.read(pDescriptor, sizeof(effect_descriptor_t));
- return NO_ERROR;
- }
-
- // Proxy: resolves an effect descriptor by UUID and/or type via
- // GET_EFFECT_DESCRIPTOR. preferredTypeFlag guides matching when only the
- // type is specified. All three pointers are required.
- virtual status_t getEffectDescriptor(const effect_uuid_t *pUuid,
- const effect_uuid_t *pType,
- uint32_t preferredTypeFlag,
- effect_descriptor_t *pDescriptor) const
- {
- if (pUuid == NULL || pType == NULL || pDescriptor == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(pUuid, sizeof(effect_uuid_t));
- data.write(pType, sizeof(effect_uuid_t));
- data.writeUint32(preferredTypeFlag);
- status_t status = remote()->transact(GET_EFFECT_DESCRIPTOR, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- status = reply.readInt32();
- if (status != NO_ERROR) {
- return status;
- }
- reply.read(pDescriptor, sizeof(effect_descriptor_t));
- return NO_ERROR;
- }
-
- // Proxy: creates an effect instance on the server via CREATE_EFFECT.
- // In: *pDesc (also updated on return), callback client, priority, output,
- // session, device, package, pid, probe flag.
- // Out: *status (overall result), *id, *enabled — each optional (NULL allowed).
- // Returns the remote IEffect on success, nullptr on argument/marshalling error.
- virtual sp<IEffect> createEffect(
- effect_descriptor_t *pDesc,
- const sp<IEffectClient>& client,
- int32_t priority,
- audio_io_handle_t output,
- audio_session_t sessionId,
- const AudioDeviceTypeAddr& device,
- const String16& opPackageName,
- pid_t pid,
- bool probe,
- status_t *status,
- int *id,
- int *enabled)
- {
- Parcel data, reply;
- sp<IEffect> effect;
- if (pDesc == NULL) {
- if (status != NULL) {
- *status = BAD_VALUE;
- }
- return nullptr;
- }
-
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(pDesc, sizeof(effect_descriptor_t));
- data.writeStrongBinder(IInterface::asBinder(client));
- data.writeInt32(priority);
- data.writeInt32((int32_t) output);
- data.writeInt32(sessionId);
- if (data.writeParcelable(device) != NO_ERROR) {
- if (status != NULL) {
- *status = NO_INIT;
- }
- return nullptr;
- }
- data.writeString16(opPackageName);
- data.writeInt32((int32_t) pid);
- data.writeInt32(probe ? 1 : 0);
-
- status_t lStatus = remote()->transact(CREATE_EFFECT, data, &reply);
- if (lStatus != NO_ERROR) {
- ALOGE("createEffect error: %s", strerror(-lStatus));
- } else {
- // Reply layout: status, id, enabled, effect binder, descriptor —
- // id/enabled are always read to keep the parcel cursor in sync,
- // even when the matching out-param is NULL.
- lStatus = reply.readInt32();
- int tmp = reply.readInt32();
- if (id != NULL) {
- *id = tmp;
- }
- tmp = reply.readInt32();
- if (enabled != NULL) {
- *enabled = tmp;
- }
- effect = interface_cast<IEffect>(reply.readStrongBinder());
- reply.read(pDesc, sizeof(effect_descriptor_t));
- }
- if (status != NULL) {
- *status = lStatus;
- }
-
- return effect;
- }
-
- // Proxy: moves a session's effect chain between outputs via MOVE_EFFECTS.
- virtual status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
- audio_io_handle_t dstOutput)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(session);
- data.writeInt32((int32_t) srcOutput);
- data.writeInt32((int32_t) dstOutput);
- remote()->transact(MOVE_EFFECTS, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: suspends/restores an effect instance within a session via
- // SET_EFFECT_SUSPENDED. Void; no reply payload.
- virtual void setEffectSuspended(int effectId,
- audio_session_t sessionId,
- bool suspended)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(effectId);
- data.writeInt32(sessionId);
- data.writeInt32(suspended ? 1 : 0);
- remote()->transact(SET_EFFECT_SUSPENDED, data, &reply);
- }
-
- // Proxy: loads an audio HAL module by name via LOAD_HW_MODULE and returns
- // its handle.
- virtual audio_module_handle_t loadHwModule(const char *name)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeCString(name);
- remote()->transact(LOAD_HW_MODULE, data, &reply);
- return (audio_module_handle_t) reply.readInt32();
- }
-
- // Proxy: queries the primary output's sample rate via
- // GET_PRIMARY_OUTPUT_SAMPLING_RATE.
- virtual uint32_t getPrimaryOutputSamplingRate()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(GET_PRIMARY_OUTPUT_SAMPLING_RATE, data, &reply);
- return reply.readInt32();
- }
-
- // Proxy: queries the primary output's frame count via
- // GET_PRIMARY_OUTPUT_FRAME_COUNT (int64 on the wire).
- virtual size_t getPrimaryOutputFrameCount()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- remote()->transact(GET_PRIMARY_OUTPUT_FRAME_COUNT, data, &reply);
- return reply.readInt64();
- }
-
- // Proxy: informs AudioFlinger of low-RAM status and total memory via
- // SET_LOW_RAM_DEVICE.
- virtual status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override
- {
- Parcel data, reply;
-
- // The GNU "?:" chains below short-circuit on the first non-zero
- // (error) status; this only works because NO_ERROR == 0.
- static_assert(NO_ERROR == 0, "NO_ERROR must be 0");
- return data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor())
- ?: data.writeInt32((int) isLowRamDevice)
- ?: data.writeInt64(totalMemory)
- ?: remote()->transact(SET_LOW_RAM_DEVICE, data, &reply)
- ?: reply.readInt32();
- }
-
- // Proxy: lists audio ports via LIST_AUDIO_PORTS.
- // *num_ports is in/out: the caller's capacity in, the actual count out.
- // ports must hold at least the incoming *num_ports entries.
- virtual status_t listAudioPorts(unsigned int *num_ports,
- struct audio_port *ports)
- {
- if (num_ports == NULL || *num_ports == 0 || ports == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(*num_ports);
- status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
- return status;
- }
- *num_ports = (unsigned int)reply.readInt32();
- reply.read(ports, *num_ports * sizeof(struct audio_port));
- return status;
- }
- // Proxy: fills in the full description of the port identified by *port
- // via GET_AUDIO_PORT (raw struct round-trip).
- virtual status_t getAudioPort(struct audio_port *port)
- {
- if (port == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(port, sizeof(struct audio_port));
- status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
- return status;
- }
- reply.read(port, sizeof(struct audio_port));
- return status;
- }
- // Proxy: creates an audio patch via CREATE_AUDIO_PATCH.
- // *handle is in/out: an existing handle may be passed for update, and on
- // success it is overwritten with the (possibly new) patch handle.
- virtual status_t createAudioPatch(const struct audio_patch *patch,
- audio_patch_handle_t *handle)
- {
- if (patch == NULL || handle == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(patch, sizeof(struct audio_patch));
- data.write(handle, sizeof(audio_patch_handle_t));
- status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
- return status;
- }
- reply.read(handle, sizeof(audio_patch_handle_t));
- return status;
- }
- // Proxy: tears down a previously created audio patch via RELEASE_AUDIO_PATCH.
- virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(&handle, sizeof(audio_patch_handle_t));
- status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
- // FIX: the original tested `status != NO_ERROR`, so a successful
- // transaction always returned NO_ERROR (the remote's real status was
- // dropped) while a failed transaction read from an empty reply.
- // Read the remote status only when the transaction itself succeeded.
- if (status == NO_ERROR) {
- status = (status_t)reply.readInt32();
- }
- return status;
- }
- // Proxy: lists established audio patches via LIST_AUDIO_PATCHES.
- // *num_patches is in/out: the caller's capacity in, the actual count out.
- // patches must hold at least the incoming *num_patches entries.
- virtual status_t listAudioPatches(unsigned int *num_patches,
- struct audio_patch *patches)
- {
- if (num_patches == NULL || *num_patches == 0 || patches == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(*num_patches);
- status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
- return status;
- }
- *num_patches = (unsigned int)reply.readInt32();
- reply.read(patches, *num_patches * sizeof(struct audio_patch));
- return status;
- }
- // Proxy: applies a port configuration via SET_AUDIO_PORT_CONFIG.
- virtual status_t setAudioPortConfig(const struct audio_port_config *config)
- {
- if (config == NULL) {
- return BAD_VALUE;
- }
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.write(config, sizeof(struct audio_port_config));
- status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
- // FIX: the original tested `status != NO_ERROR`, so a successful
- // transaction always returned NO_ERROR (the remote's real status was
- // dropped) while a failed transaction read from an empty reply.
- // Read the remote status only when the transaction itself succeeded.
- if (status == NO_ERROR) {
- status = (status_t)reply.readInt32();
- }
- return status;
- }
- // Proxy: queries the HW sync source for a session via
- // GET_AUDIO_HW_SYNC_FOR_SESSION; returns AUDIO_HW_SYNC_INVALID on
- // transaction failure.
- virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(sessionId);
- status_t status = remote()->transact(GET_AUDIO_HW_SYNC_FOR_SESSION, data, &reply);
- if (status != NO_ERROR) {
- return AUDIO_HW_SYNC_INVALID;
- }
- return (audio_hw_sync_t)reply.readInt32();
- }
- // Proxy: one-way notification that the system is fully booted; returns
- // only the local transact status (FLAG_ONEWAY — no reply payload).
- virtual status_t systemReady()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- return remote()->transact(SYSTEM_READY, data, &reply, IBinder::FLAG_ONEWAY);
- }
- // Proxy: queries the HAL frame count of an I/O handle via FRAME_COUNT_HAL
- // (int64 on the wire); returns 0 on transaction failure.
- virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32((int32_t) ioHandle);
- status_t status = remote()->transact(FRAME_COUNT_HAL, data, &reply);
- if (status != NO_ERROR) {
- return 0;
- }
- return reply.readInt64();
- }
- // Proxy: fetches descriptions of all microphones via GET_MICROPHONES;
- // the result vector is unparceled with readParcelableVector.
- virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_MICROPHONES, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
- return status;
- }
- status = reply.readParcelableVector(microphones);
- return status;
- }
- // Proxy: reports the pids of audio HAL processes via SET_AUDIO_HAL_PIDS.
- // The vector is marshalled manually as a count followed by int32 pids.
- virtual status_t setAudioHalPids(const std::vector<pid_t>& pids)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(pids.size());
- for (auto pid : pids) {
- data.writeInt32(pid);
- }
- status_t status = remote()->transact(SET_AUDIO_HAL_PIDS, data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- return static_cast <status_t> (reply.readInt32());
- }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlinger::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- // make sure transactions reserved to AudioPolicyManager do not come from other processes
- switch (code) {
- case SET_STREAM_VOLUME:
- case SET_STREAM_MUTE:
- case OPEN_OUTPUT:
- case OPEN_DUPLICATE_OUTPUT:
- case CLOSE_OUTPUT:
- case SUSPEND_OUTPUT:
- case RESTORE_OUTPUT:
- case OPEN_INPUT:
- case CLOSE_INPUT:
- case INVALIDATE_STREAM:
- case SET_VOICE_VOLUME:
- case MOVE_EFFECTS:
- case SET_EFFECT_SUSPENDED:
- case LOAD_HW_MODULE:
- case LIST_AUDIO_PORTS:
- case GET_AUDIO_PORT:
- case CREATE_AUDIO_PATCH:
- case RELEASE_AUDIO_PATCH:
- case LIST_AUDIO_PATCHES:
- case SET_AUDIO_PORT_CONFIG:
- case SET_RECORD_SILENCED:
- ALOGW("%s: transaction %d received from PID %d",
- __func__, code, IPCThreadState::self()->getCallingPid());
- // return status only for non void methods
- switch (code) {
- case SET_RECORD_SILENCED:
- case SET_EFFECT_SUSPENDED:
- break;
- default:
- reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
- break;
- }
- return OK;
- default:
- break;
- }
-
- // make sure the following transactions come from system components
- switch (code) {
- case SET_MASTER_VOLUME:
- case SET_MASTER_MUTE:
- case SET_MODE:
- case SET_MIC_MUTE:
- case SET_LOW_RAM_DEVICE:
- case SYSTEM_READY:
- case SET_AUDIO_HAL_PIDS: {
- if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
- ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
- __func__, code, IPCThreadState::self()->getCallingPid(),
- IPCThreadState::self()->getCallingUid());
- // return status only for non void methods
- switch (code) {
- case SYSTEM_READY:
- break;
- default:
- reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
- break;
- }
- return OK;
- }
- } break;
- default:
- break;
- }
-
- // Whitelist of relevant events to trigger log merging.
- // Log merging should activate during audio activity of any kind. This are considered the
- // most relevant events.
- // TODO should select more wisely the items from the list
- switch (code) {
- case CREATE_TRACK:
- case CREATE_RECORD:
- case SET_MASTER_VOLUME:
- case SET_MASTER_MUTE:
- case SET_MIC_MUTE:
- case SET_PARAMETERS:
- case CREATE_EFFECT:
- case SYSTEM_READY: {
- requestLogMerge();
- break;
- }
- default:
- break;
- }
-
- std::string tag("IAudioFlinger command " + std::to_string(code));
- TimeCheck check(tag.c_str());
-
- switch (code) {
- case CREATE_TRACK: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
-
- CreateTrackInput input;
- if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
- reply->writeInt32(DEAD_OBJECT);
- return NO_ERROR;
- }
-
- status_t status;
- CreateTrackOutput output;
-
- sp<IAudioTrack> track= createTrack(input,
- output,
- &status);
-
- LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
- reply->writeInt32(status);
- if (status != NO_ERROR) {
- return NO_ERROR;
- }
- reply->writeStrongBinder(IInterface::asBinder(track));
- output.writeToParcel(reply);
- return NO_ERROR;
- } break;
- case CREATE_RECORD: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
-
- CreateRecordInput input;
- if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
- reply->writeInt32(DEAD_OBJECT);
- return NO_ERROR;
- }
-
- status_t status;
- CreateRecordOutput output;
-
- sp<media::IAudioRecord> record = createRecord(input,
- output,
- &status);
-
- LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
- reply->writeInt32(status);
- if (status != NO_ERROR) {
- return NO_ERROR;
- }
- reply->writeStrongBinder(IInterface::asBinder(record));
- output.writeToParcel(reply);
- return NO_ERROR;
- } break;
- case SAMPLE_RATE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( sampleRate((audio_io_handle_t) data.readInt32()) );
- return NO_ERROR;
- } break;
-
- // RESERVED for channelCount()
-
- case FORMAT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( format((audio_io_handle_t) data.readInt32()) );
- return NO_ERROR;
- } break;
- case FRAME_COUNT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt64( frameCount((audio_io_handle_t) data.readInt32()) );
- return NO_ERROR;
- } break;
- case LATENCY: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( latency((audio_io_handle_t) data.readInt32()) );
- return NO_ERROR;
- } break;
- case SET_MASTER_VOLUME: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( setMasterVolume(data.readFloat()) );
- return NO_ERROR;
- } break;
- case SET_MASTER_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( setMasterMute(data.readInt32()) );
- return NO_ERROR;
- } break;
- case MASTER_VOLUME: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeFloat( masterVolume() );
- return NO_ERROR;
- } break;
- case MASTER_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( masterMute() );
- return NO_ERROR;
- } break;
- case SET_MASTER_BALANCE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( setMasterBalance(data.readFloat()) );
- return NO_ERROR;
- } break;
- case GET_MASTER_BALANCE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- float f;
- const status_t status = getMasterBalance(&f);
- reply->writeInt32((int32_t)status);
- if (status == NO_ERROR) {
- (void)reply->writeFloat(f);
- }
- return NO_ERROR;
- } break;
- case SET_STREAM_VOLUME: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int stream = data.readInt32();
- float volume = data.readFloat();
- audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- reply->writeInt32( setStreamVolume((audio_stream_type_t) stream, volume, output) );
- return NO_ERROR;
- } break;
- case SET_STREAM_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int stream = data.readInt32();
- reply->writeInt32( setStreamMute((audio_stream_type_t) stream, data.readInt32()) );
- return NO_ERROR;
- } break;
- case STREAM_VOLUME: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int stream = data.readInt32();
- int output = data.readInt32();
- reply->writeFloat( streamVolume((audio_stream_type_t) stream, output) );
- return NO_ERROR;
- } break;
- case STREAM_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int stream = data.readInt32();
- reply->writeInt32( streamMute((audio_stream_type_t) stream) );
- return NO_ERROR;
- } break;
- case SET_MODE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_mode_t mode = (audio_mode_t) data.readInt32();
- reply->writeInt32( setMode(mode) );
- return NO_ERROR;
- } break;
- case SET_MIC_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int state = data.readInt32();
- reply->writeInt32( setMicMute(state) );
- return NO_ERROR;
- } break;
- case GET_MIC_MUTE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( getMicMute() );
- return NO_ERROR;
- } break;
- case SET_RECORD_SILENCED: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_port_handle_t portId = data.readInt32();
- bool silenced = data.readInt32() == 1;
- setRecordSilenced(portId, silenced);
- return NO_ERROR;
- } break;
- case SET_PARAMETERS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
- String8 keyValuePairs(data.readString8());
- reply->writeInt32(setParameters(ioHandle, keyValuePairs));
- return NO_ERROR;
- } break;
- case GET_PARAMETERS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
- String8 keys(data.readString8());
- reply->writeString8(getParameters(ioHandle, keys));
- return NO_ERROR;
- } break;
-
- case REGISTER_CLIENT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(
- data.readStrongBinder());
- registerClient(client);
- return NO_ERROR;
- } break;
- case GET_INPUTBUFFERSIZE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- uint32_t sampleRate = data.readInt32();
- audio_format_t format = (audio_format_t) data.readInt32();
- audio_channel_mask_t channelMask = data.readInt32();
- reply->writeInt64( getInputBufferSize(sampleRate, format, channelMask) );
- return NO_ERROR;
- } break;
- case OPEN_OUTPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
- audio_config_t config = {};
- if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
- ALOGE("b/23905951");
- }
- sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
- status_t status = NO_ERROR;
- if ((status = data.readParcelable(device.get())) != NO_ERROR) {
- reply->writeInt32((int32_t)status);
- return NO_ERROR;
- }
- audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
- uint32_t latencyMs = 0;
- audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- status = openOutput(module, &output, &config, device, &latencyMs, flags);
- ALOGV("OPEN_OUTPUT output, %d", output);
- reply->writeInt32((int32_t)status);
- if (status == NO_ERROR) {
- reply->writeInt32((int32_t)output);
- reply->write(&config, sizeof(audio_config_t));
- reply->writeInt32(latencyMs);
- }
- return NO_ERROR;
- } break;
- case OPEN_DUPLICATE_OUTPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_io_handle_t output1 = (audio_io_handle_t) data.readInt32();
- audio_io_handle_t output2 = (audio_io_handle_t) data.readInt32();
- reply->writeInt32((int32_t) openDuplicateOutput(output1, output2));
- return NO_ERROR;
- } break;
- case CLOSE_OUTPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(closeOutput((audio_io_handle_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case SUSPEND_OUTPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(suspendOutput((audio_io_handle_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case RESTORE_OUTPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(restoreOutput((audio_io_handle_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case OPEN_INPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
- audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
- audio_config_t config = {};
- if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
- ALOGE("b/23905951");
- }
- audio_devices_t device = (audio_devices_t)data.readInt32();
- String8 address(data.readString8());
- audio_source_t source = (audio_source_t)data.readInt32();
- audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
-
- status_t status = openInput(module, &input, &config,
- &device, address, source, flags);
- reply->writeInt32((int32_t) status);
- if (status == NO_ERROR) {
- reply->writeInt32((int32_t) input);
- reply->write(&config, sizeof(audio_config_t));
- reply->writeInt32(device);
- }
- return NO_ERROR;
- } break;
- case CLOSE_INPUT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(closeInput((audio_io_handle_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case INVALIDATE_STREAM: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
- reply->writeInt32(invalidateStream(stream));
- return NO_ERROR;
- } break;
- case SET_VOICE_VOLUME: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- float volume = data.readFloat();
- reply->writeInt32( setVoiceVolume(volume) );
- return NO_ERROR;
- } break;
- case GET_RENDER_POSITION: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- uint32_t halFrames = 0;
- uint32_t dspFrames = 0;
- status_t status = getRenderPosition(&halFrames, &dspFrames, output);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32(halFrames);
- reply->writeInt32(dspFrames);
- }
- return NO_ERROR;
- }
- case GET_INPUT_FRAMES_LOST: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
- reply->writeInt32((int32_t) getInputFramesLost(ioHandle));
- return NO_ERROR;
- } break;
- case NEW_AUDIO_UNIQUE_ID: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(newAudioUniqueId((audio_unique_id_use_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case ACQUIRE_AUDIO_SESSION_ID: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_session_t audioSession = (audio_session_t) data.readInt32();
- const pid_t pid = (pid_t)data.readInt32();
- const uid_t uid = (uid_t)data.readInt32();
- acquireAudioSessionId(audioSession, pid, uid);
- return NO_ERROR;
- } break;
- case RELEASE_AUDIO_SESSION_ID: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_session_t audioSession = (audio_session_t) data.readInt32();
- int pid = data.readInt32();
- releaseAudioSessionId(audioSession, pid);
- return NO_ERROR;
- } break;
- case QUERY_NUM_EFFECTS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- uint32_t numEffects = 0;
- status_t status = queryNumberEffects(&numEffects);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32((int32_t)numEffects);
- }
- return NO_ERROR;
- }
- case QUERY_EFFECT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- effect_descriptor_t desc = {};
- status_t status = queryEffect(data.readInt32(), &desc);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->write(&desc, sizeof(effect_descriptor_t));
- }
- return NO_ERROR;
- }
- case GET_EFFECT_DESCRIPTOR: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- effect_uuid_t uuid = {};
- if (data.read(&uuid, sizeof(effect_uuid_t)) != NO_ERROR) {
- android_errorWriteLog(0x534e4554, "139417189");
- }
- effect_uuid_t type = {};
- if (data.read(&type, sizeof(effect_uuid_t)) != NO_ERROR) {
- android_errorWriteLog(0x534e4554, "139417189");
- }
- uint32_t preferredTypeFlag = data.readUint32();
- effect_descriptor_t desc = {};
- status_t status = getEffectDescriptor(&uuid, &type, preferredTypeFlag, &desc);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->write(&desc, sizeof(effect_descriptor_t));
- }
- return NO_ERROR;
- }
- case CREATE_EFFECT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- effect_descriptor_t desc = {};
- if (data.read(&desc, sizeof(effect_descriptor_t)) != NO_ERROR) {
- ALOGE("b/23905951");
- }
- sp<IEffectClient> client = interface_cast<IEffectClient>(data.readStrongBinder());
- int32_t priority = data.readInt32();
- audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- audio_session_t sessionId = (audio_session_t) data.readInt32();
- AudioDeviceTypeAddr device;
- status_t status = NO_ERROR;
- if ((status = data.readParcelable(&device)) != NO_ERROR) {
- return status;
- }
- const String16 opPackageName = data.readString16();
- pid_t pid = (pid_t)data.readInt32();
- bool probe = data.readInt32() == 1;
-
- int id = 0;
- int enabled = 0;
-
- sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, device,
- opPackageName, pid, probe, &status, &id, &enabled);
- reply->writeInt32(status);
- reply->writeInt32(id);
- reply->writeInt32(enabled);
- reply->writeStrongBinder(IInterface::asBinder(effect));
- reply->write(&desc, sizeof(effect_descriptor_t));
- return NO_ERROR;
- } break;
- case MOVE_EFFECTS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_session_t session = (audio_session_t) data.readInt32();
- audio_io_handle_t srcOutput = (audio_io_handle_t) data.readInt32();
- audio_io_handle_t dstOutput = (audio_io_handle_t) data.readInt32();
- reply->writeInt32(moveEffects(session, srcOutput, dstOutput));
- return NO_ERROR;
- } break;
- case SET_EFFECT_SUSPENDED: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int effectId = data.readInt32();
- audio_session_t sessionId = (audio_session_t) data.readInt32();
- bool suspended = data.readInt32() == 1;
- setEffectSuspended(effectId, sessionId, suspended);
- return NO_ERROR;
- } break;
- case LOAD_HW_MODULE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(loadHwModule(data.readCString()));
- return NO_ERROR;
- } break;
- case GET_PRIMARY_OUTPUT_SAMPLING_RATE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(getPrimaryOutputSamplingRate());
- return NO_ERROR;
- } break;
- case GET_PRIMARY_OUTPUT_FRAME_COUNT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt64(getPrimaryOutputFrameCount());
- return NO_ERROR;
- } break;
- case SET_LOW_RAM_DEVICE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int32_t isLowRamDevice;
- int64_t totalMemory;
- const status_t status =
- data.readInt32(&isLowRamDevice) ?:
- data.readInt64(&totalMemory) ?:
- setLowRamDevice(isLowRamDevice != 0, totalMemory);
- (void)reply->writeInt32(status);
- return NO_ERROR;
- } break;
- case LIST_AUDIO_PORTS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- unsigned int numPortsReq = data.readInt32();
- if (numPortsReq > MAX_ITEMS_PER_LIST) {
- numPortsReq = MAX_ITEMS_PER_LIST;
- }
- unsigned int numPorts = numPortsReq;
- struct audio_port *ports =
- (struct audio_port *)calloc(numPortsReq,
- sizeof(struct audio_port));
- if (ports == NULL) {
- reply->writeInt32(NO_MEMORY);
- reply->writeInt32(0);
- return NO_ERROR;
- }
- status_t status = listAudioPorts(&numPorts, ports);
- reply->writeInt32(status);
- reply->writeInt32(numPorts);
- if (status == NO_ERROR) {
- if (numPortsReq > numPorts) {
- numPortsReq = numPorts;
- }
- reply->write(ports, numPortsReq * sizeof(struct audio_port));
- }
- free(ports);
- return NO_ERROR;
- } break;
- case GET_AUDIO_PORT: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- struct audio_port port = {};
- if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
- ALOGE("b/23905951");
- }
- status_t status = getAudioPort(&port);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->write(&port, sizeof(struct audio_port));
- }
- return NO_ERROR;
- } break;
- case CREATE_AUDIO_PATCH: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- struct audio_patch patch;
- data.read(&patch, sizeof(struct audio_patch));
- audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
- if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
- ALOGE("b/23905951");
- }
- status_t status = createAudioPatch(&patch, &handle);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->write(&handle, sizeof(audio_patch_handle_t));
- }
- return NO_ERROR;
- } break;
- case RELEASE_AUDIO_PATCH: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- audio_patch_handle_t handle;
- data.read(&handle, sizeof(audio_patch_handle_t));
- status_t status = releaseAudioPatch(handle);
- reply->writeInt32(status);
- return NO_ERROR;
- } break;
- case LIST_AUDIO_PATCHES: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- unsigned int numPatchesReq = data.readInt32();
- if (numPatchesReq > MAX_ITEMS_PER_LIST) {
- numPatchesReq = MAX_ITEMS_PER_LIST;
- }
- unsigned int numPatches = numPatchesReq;
- struct audio_patch *patches =
- (struct audio_patch *)calloc(numPatchesReq,
- sizeof(struct audio_patch));
- if (patches == NULL) {
- reply->writeInt32(NO_MEMORY);
- reply->writeInt32(0);
- return NO_ERROR;
- }
- status_t status = listAudioPatches(&numPatches, patches);
- reply->writeInt32(status);
- reply->writeInt32(numPatches);
- if (status == NO_ERROR) {
- if (numPatchesReq > numPatches) {
- numPatchesReq = numPatches;
- }
- reply->write(patches, numPatchesReq * sizeof(struct audio_patch));
- }
- free(patches);
- return NO_ERROR;
- } break;
- case SET_AUDIO_PORT_CONFIG: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- struct audio_port_config config;
- data.read(&config, sizeof(struct audio_port_config));
- status_t status = setAudioPortConfig(&config);
- reply->writeInt32(status);
- return NO_ERROR;
- } break;
- case GET_AUDIO_HW_SYNC_FOR_SESSION: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(getAudioHwSyncForSession((audio_session_t) data.readInt32()));
- return NO_ERROR;
- } break;
- case SYSTEM_READY: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- systemReady();
- return NO_ERROR;
- } break;
- case FRAME_COUNT_HAL: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt64( frameCountHAL((audio_io_handle_t) data.readInt32()) );
- return NO_ERROR;
- } break;
- case GET_MICROPHONES: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- std::vector<media::MicrophoneInfo> microphones;
-            status_t status = getMicrophones(&microphones);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeParcelableVector(microphones);
- }
- return NO_ERROR;
- }
- case SET_AUDIO_HAL_PIDS: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- std::vector<pid_t> pids;
- int32_t size;
- status_t status = data.readInt32(&size);
- if (status != NO_ERROR) {
- return status;
- }
- if (size < 0) {
- return BAD_VALUE;
- }
- if (size > MAX_ITEMS_PER_LIST) {
- size = MAX_ITEMS_PER_LIST;
- }
- for (int32_t i = 0; i < size; i++) {
- int32_t pid;
- status = data.readInt32(&pid);
- if (status != NO_ERROR) {
- return status;
- }
- pids.push_back(pid);
- }
- reply->writeInt32(setAudioHalPids(pids));
- return NO_ERROR;
- }
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
+ConversionResult<media::CreateTrackRequest> IAudioFlinger::CreateTrackInput::toAidl() const {
+ media::CreateTrackRequest aidl;
+ aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+ aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(config));
+ aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
+ aidl.sharedBuffer = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(sharedBuffer));
+ aidl.notificationsPerBuffer = VALUE_OR_RETURN(convertIntegral<int32_t>(notificationsPerBuffer));
+ aidl.speed = speed;
+ aidl.audioTrackCallback = audioTrackCallback;
+ aidl.opPackageName = opPackageName;
+ aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+ aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+ aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+ aidl.selectedDeviceId = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+ return aidl;
}
-// ----------------------------------------------------------------------------
+ConversionResult<IAudioFlinger::CreateTrackInput>
+IAudioFlinger::CreateTrackInput::fromAidl(const media::CreateTrackRequest& aidl) {
+ IAudioFlinger::CreateTrackInput legacy;
+ legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+ legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.config));
+ legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
+ legacy.sharedBuffer = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.sharedBuffer));
+ legacy.notificationsPerBuffer = VALUE_OR_RETURN(
+ convertIntegral<uint32_t>(aidl.notificationsPerBuffer));
+ legacy.speed = aidl.speed;
+ legacy.audioTrackCallback = aidl.audioTrackCallback;
+ legacy.opPackageName = aidl.opPackageName;
+ legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.flags));
+ legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+ legacy.notificationFrameCount = VALUE_OR_RETURN(
+ convertIntegral<size_t>(aidl.notificationFrameCount));
+ legacy.selectedDeviceId = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+ legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+ return legacy;
+}
+
+ConversionResult<media::CreateTrackResponse>
+IAudioFlinger::CreateTrackOutput::toAidl() const {
+ media::CreateTrackResponse aidl;
+ aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+ aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+ aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+ aidl.selectedDeviceId = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+ aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+ aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
+ aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
+ aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
+ aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
+ aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+ aidl.audioTrack = audioTrack;
+ return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateTrackOutput>
+IAudioFlinger::CreateTrackOutput::fromAidl(
+ const media::CreateTrackResponse& aidl) {
+ IAudioFlinger::CreateTrackOutput legacy;
+ legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.flags));
+ legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+ legacy.notificationFrameCount = VALUE_OR_RETURN(
+ convertIntegral<size_t>(aidl.notificationFrameCount));
+ legacy.selectedDeviceId = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+ legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+ legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+ legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
+ legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
+ legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
+ legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
+ legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+ legacy.audioTrack = aidl.audioTrack;
+ return legacy;
+}
+
+ConversionResult<media::CreateRecordRequest>
+IAudioFlinger::CreateRecordInput::toAidl() const {
+ media::CreateRecordRequest aidl;
+ aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+ aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+ aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
+ aidl.opPackageName = VALUE_OR_RETURN(legacy2aidl_String16_string(opPackageName));
+ aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
+ aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+ aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+ aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+ aidl.selectedDeviceId = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+ return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordInput>
+IAudioFlinger::CreateRecordInput::fromAidl(
+ const media::CreateRecordRequest& aidl) {
+ IAudioFlinger::CreateRecordInput legacy;
+ legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+ legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+ legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
+ legacy.opPackageName = VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.opPackageName));
+ legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+ legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
+ legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+ legacy.notificationFrameCount = VALUE_OR_RETURN(
+ convertIntegral<size_t>(aidl.notificationFrameCount));
+ legacy.selectedDeviceId = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+ legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+ return legacy;
+}
+
+ConversionResult<media::CreateRecordResponse>
+IAudioFlinger::CreateRecordOutput::toAidl() const {
+ media::CreateRecordResponse aidl;
+ aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+ aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+ aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+ aidl.selectedDeviceId = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+ aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+ aidl.inputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(inputId));
+ aidl.cblk = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(cblk));
+ aidl.buffers = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(buffers));
+ aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+ aidl.audioRecord = audioRecord;
+ return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordOutput>
+IAudioFlinger::CreateRecordOutput::fromAidl(
+ const media::CreateRecordResponse& aidl) {
+ IAudioFlinger::CreateRecordOutput legacy;
+ legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
+ legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+ legacy.notificationFrameCount = VALUE_OR_RETURN(
+ convertIntegral<size_t>(aidl.notificationFrameCount));
+ legacy.selectedDeviceId = VALUE_OR_RETURN(
+ aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+ legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+ legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+ legacy.inputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.inputId));
+ legacy.cblk = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.cblk));
+ legacy.buffers = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.buffers));
+ legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+ legacy.audioRecord = aidl.audioRecord;
+ return legacy;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AudioFlingerClientAdapter
+
+AudioFlingerClientAdapter::AudioFlingerClientAdapter(
+ const sp<media::IAudioFlingerService> delegate) : mDelegate(delegate) {}
+
+status_t AudioFlingerClientAdapter::createTrack(const media::CreateTrackRequest& input,
+ media::CreateTrackResponse& output) {
+ return statusTFromBinderStatus(mDelegate->createTrack(input, &output));
+}
+
+status_t AudioFlingerClientAdapter::createRecord(const media::CreateRecordRequest& input,
+ media::CreateRecordResponse& output) {
+ return statusTFromBinderStatus(mDelegate->createRecord(input, &output));
+}
+
+uint32_t AudioFlingerClientAdapter::sampleRate(audio_io_handle_t ioHandle) const {
+ auto result = [&]() -> ConversionResult<uint32_t> {
+ int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->sampleRate(ioHandleAidl, &aidlRet)));
+ return convertIntegral<uint32_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+audio_format_t AudioFlingerClientAdapter::format(audio_io_handle_t output) const {
+ auto result = [&]() -> ConversionResult<audio_format_t> {
+ int32_t outputAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output));
+ media::audio::common::AudioFormat aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->format(outputAidl, &aidlRet)));
+ return aidl2legacy_AudioFormat_audio_format_t(aidlRet);
+ }();
+ return result.value_or(AUDIO_FORMAT_INVALID);
+}
+
+size_t AudioFlingerClientAdapter::frameCount(audio_io_handle_t ioHandle) const {
+ auto result = [&]() -> ConversionResult<size_t> {
+ int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ int64_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->frameCount(ioHandleAidl, &aidlRet)));
+ return convertIntegral<size_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+uint32_t AudioFlingerClientAdapter::latency(audio_io_handle_t output) const {
+ auto result = [&]() -> ConversionResult<uint32_t> {
+ int32_t outputAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->latency(outputAidl, &aidlRet)));
+ return convertIntegral<uint32_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::setMasterVolume(float value) {
+ return statusTFromBinderStatus(mDelegate->setMasterVolume(value));
+}
+
+status_t AudioFlingerClientAdapter::setMasterMute(bool muted) {
+ return statusTFromBinderStatus(mDelegate->setMasterMute(muted));
+}
+
+float AudioFlingerClientAdapter::masterVolume() const {
+ auto result = [&]() -> ConversionResult<float> {
+ float aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->masterVolume(&aidlRet)));
+ return aidlRet;
+ }();
+ // Failure is ignored.
+ return result.value_or(0.f);
+}
+
+bool AudioFlingerClientAdapter::masterMute() const {
+ auto result = [&]() -> ConversionResult<bool> {
+ bool aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->masterMute(&aidlRet)));
+ return aidlRet;
+ }();
+ // Failure is ignored.
+ return result.value_or(false);
+}
+
+status_t AudioFlingerClientAdapter::setMasterBalance(float balance) {
+ return statusTFromBinderStatus(mDelegate->setMasterBalance(balance));
+}
+
+status_t AudioFlingerClientAdapter::getMasterBalance(float* balance) const{
+ return statusTFromBinderStatus(mDelegate->getMasterBalance(balance));
+}
+
+status_t AudioFlingerClientAdapter::setStreamVolume(audio_stream_type_t stream, float value,
+ audio_io_handle_t output) {
+ media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ return statusTFromBinderStatus(mDelegate->setStreamVolume(streamAidl, value, outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::setStreamMute(audio_stream_type_t stream, bool muted) {
+ media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+ return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
+}
+
+float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
+ audio_io_handle_t output) const {
+ auto result = [&]() -> ConversionResult<float> {
+ media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ float aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
+ return aidlRet;
+ }();
+ // Failure is ignored.
+ return result.value_or(0.f);
+}
+
+bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
+ auto result = [&]() -> ConversionResult<bool> {
+ media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+ bool aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->streamMute(streamAidl, &aidlRet)));
+ return aidlRet;
+ }();
+ // Failure is ignored.
+ return result.value_or(false);
+}
+
+status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
+ media::AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
+ return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
+}
+
+status_t AudioFlingerClientAdapter::setMicMute(bool state) {
+ return statusTFromBinderStatus(mDelegate->setMicMute(state));
+}
+
+bool AudioFlingerClientAdapter::getMicMute() const {
+ auto result = [&]() -> ConversionResult<bool> {
+ bool aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getMicMute(&aidlRet)));
+ return aidlRet;
+ }();
+ // Failure is ignored.
+ return result.value_or(false);
+}
+
+void AudioFlingerClientAdapter::setRecordSilenced(audio_port_handle_t portId, bool silenced) {
+ auto result = [&]() -> status_t {
+ int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_handle_t_int32_t(portId));
+ return statusTFromBinderStatus(mDelegate->setRecordSilenced(portIdAidl, silenced));
+ }();
+ // Failure is ignored.
+ (void) result;
+}
+
+status_t AudioFlingerClientAdapter::setParameters(audio_io_handle_t ioHandle,
+ const String8& keyValuePairs) {
+ int32_t ioHandleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ std::string keyValuePairsAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_String8_string(keyValuePairs));
+ return statusTFromBinderStatus(mDelegate->setParameters(ioHandleAidl, keyValuePairsAidl));
+}
+
+String8 AudioFlingerClientAdapter::getParameters(audio_io_handle_t ioHandle, const String8& keys)
+const {
+ auto result = [&]() -> ConversionResult<String8> {
+ int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ std::string keysAidl = VALUE_OR_RETURN(legacy2aidl_String8_string(keys));
+ std::string aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getParameters(ioHandleAidl, keysAidl, &aidlRet)));
+ return aidl2legacy_string_view_String8(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(String8());
+}
+
+void AudioFlingerClientAdapter::registerClient(const sp<media::IAudioFlingerClient>& client) {
+ mDelegate->registerClient(client);
+ // Failure is ignored.
+}
+
+size_t AudioFlingerClientAdapter::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
+ audio_channel_mask_t channelMask) const {
+ auto result = [&]() -> ConversionResult<size_t> {
+ int32_t sampleRateAidl = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+ media::audio::common::AudioFormat formatAidl = VALUE_OR_RETURN(
+ legacy2aidl_audio_format_t_AudioFormat(format));
+ int32_t channelMaskAidl = VALUE_OR_RETURN(
+ legacy2aidl_audio_channel_mask_t_int32_t(channelMask));
+ int64_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getInputBufferSize(sampleRateAidl, formatAidl, channelMaskAidl,
+ &aidlRet)));
+ return convertIntegral<size_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* response) {
+ return statusTFromBinderStatus(mDelegate->openOutput(request, response));
+}
+
+audio_io_handle_t AudioFlingerClientAdapter::openDuplicateOutput(audio_io_handle_t output1,
+ audio_io_handle_t output2) {
+ auto result = [&]() -> ConversionResult<audio_io_handle_t> {
+ int32_t output1Aidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output1));
+ int32_t output2Aidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output2));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->openDuplicateOutput(output1Aidl, output2Aidl, &aidlRet)));
+ return aidl2legacy_int32_t_audio_io_handle_t(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::closeOutput(audio_io_handle_t output) {
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ return statusTFromBinderStatus(mDelegate->closeOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::suspendOutput(audio_io_handle_t output) {
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ return statusTFromBinderStatus(mDelegate->suspendOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::restoreOutput(audio_io_handle_t output) {
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ return statusTFromBinderStatus(mDelegate->restoreOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* response) {
+ return statusTFromBinderStatus(mDelegate->openInput(request, response));
+}
+
+status_t AudioFlingerClientAdapter::closeInput(audio_io_handle_t input) {
+ int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
+ return statusTFromBinderStatus(mDelegate->closeInput(inputAidl));
+}
+
+status_t AudioFlingerClientAdapter::invalidateStream(audio_stream_type_t stream) {
+ media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+ return statusTFromBinderStatus(mDelegate->invalidateStream(streamAidl));
+}
+
+status_t AudioFlingerClientAdapter::setVoiceVolume(float volume) {
+ return statusTFromBinderStatus(mDelegate->setVoiceVolume(volume));
+}
+
+status_t AudioFlingerClientAdapter::getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
+ audio_io_handle_t output) const {
+ int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ media::RenderPosition aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getRenderPosition(outputAidl, &aidlRet)));
+ if (halFrames != nullptr) {
+ *halFrames = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet.halFrames));
+ }
+ if (dspFrames != nullptr) {
+ *dspFrames = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet.dspFrames));
+ }
+ return OK;
+}
+
+uint32_t AudioFlingerClientAdapter::getInputFramesLost(audio_io_handle_t ioHandle) const {
+ auto result = [&]() -> ConversionResult<uint32_t> {
+ int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getInputFramesLost(ioHandleAidl, &aidlRet)));
+ return convertIntegral<uint32_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+audio_unique_id_t AudioFlingerClientAdapter::newAudioUniqueId(audio_unique_id_use_t use) {
+ auto result = [&]() -> ConversionResult<audio_unique_id_t> {
+ media::AudioUniqueIdUse useAidl = VALUE_OR_RETURN(
+ legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(use));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->newAudioUniqueId(useAidl, &aidlRet)));
+ return aidl2legacy_int32_t_audio_unique_id_t(aidlRet);
+ }();
+ return result.value_or(AUDIO_UNIQUE_ID_ALLOCATE);
+}
+
+void AudioFlingerClientAdapter::acquireAudioSessionId(audio_session_t audioSession, pid_t pid,
+ uid_t uid) {
+ [&]() -> status_t {
+ int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_session_t_int32_t(audioSession));
+ int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+ int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(uid));
+ return statusTFromBinderStatus(
+ mDelegate->acquireAudioSessionId(audioSessionAidl, pidAidl, uidAidl));
+ }();
+ // Failure is ignored.
+}
+
+void AudioFlingerClientAdapter::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
+ [&]() -> status_t {
+ int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_session_t_int32_t(audioSession));
+ int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+ return statusTFromBinderStatus(
+ mDelegate->releaseAudioSessionId(audioSessionAidl, pidAidl));
+ }();
+ // Failure is ignored.
+}
+
+status_t AudioFlingerClientAdapter::queryNumberEffects(uint32_t* numEffects) const {
+ int32_t aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->queryNumberEffects(&aidlRet)));
+ if (numEffects != nullptr) {
+ *numEffects = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet));
+ }
+ return OK;
+}
+
+status_t
+AudioFlingerClientAdapter::queryEffect(uint32_t index, effect_descriptor_t* pDescriptor) const {
+ int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
+ media::EffectDescriptor aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->queryEffect(indexAidl, &aidlRet)));
+ if (pDescriptor != nullptr) {
+ *pDescriptor = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_EffectDescriptor_effect_descriptor_t(aidlRet));
+ }
+ return OK;
+}
+
+status_t AudioFlingerClientAdapter::getEffectDescriptor(const effect_uuid_t* pEffectUUID,
+ const effect_uuid_t* pTypeUUID,
+ uint32_t preferredTypeFlag,
+ effect_descriptor_t* pDescriptor) const {
+ media::AudioUuid effectUuidAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_uuid_t_AudioUuid(*pEffectUUID));
+ media::AudioUuid typeUuidAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_uuid_t_AudioUuid(*pTypeUUID));
+ int32_t preferredTypeFlagAidl = VALUE_OR_RETURN_STATUS(
+ convertReinterpret<int32_t>(preferredTypeFlag));
+ media::EffectDescriptor aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getEffectDescriptor(effectUuidAidl, typeUuidAidl, preferredTypeFlagAidl,
+ &aidlRet)));
+ if (pDescriptor != nullptr) {
+ *pDescriptor = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_EffectDescriptor_effect_descriptor_t(aidlRet));
+ }
+ return OK;
+}
+
+status_t AudioFlingerClientAdapter::createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* response) {
+ return statusTFromBinderStatus(mDelegate->createEffect(request, response));
+}
+
+status_t
+AudioFlingerClientAdapter::moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
+ audio_io_handle_t dstOutput) {
+ int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+ int32_t srcOutputAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_io_handle_t_int32_t(srcOutput));
+ int32_t dstOutputAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_io_handle_t_int32_t(dstOutput));
+ return statusTFromBinderStatus(
+ mDelegate->moveEffects(sessionAidl, srcOutputAidl, dstOutputAidl));
+}
+
+void AudioFlingerClientAdapter::setEffectSuspended(int effectId,
+                                                   audio_session_t sessionId,
+                                                   bool suspended) {
+    [&]() -> status_t { // immediately-invoked lambda so the RETURN_STATUS macros can be used in a void method
+        int32_t effectIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(effectId));
+        int32_t sessionIdAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_session_t_int32_t(sessionId));
+        return statusTFromBinderStatus(
+                mDelegate->setEffectSuspended(effectIdAidl, sessionIdAidl, suspended));
+    }();
+    // Failure is ignored: the legacy interface is void, so there is no way to report it.
+}
+
+audio_module_handle_t AudioFlingerClientAdapter::loadHwModule(const char* name) { // legacy -> AIDL shim
+    auto result = [&]() -> ConversionResult<audio_module_handle_t> { // IIFE for RETURN_IF_ERROR in a non-status method
+        std::string nameAidl(name); // NOTE(review): assumes name != nullptr (std::string ctor would be UB) -- confirm callers
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->loadHwModule(nameAidl, &aidlRet)));
+        return aidl2legacy_int32_t_audio_module_handle_t(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0); // 0 on any error -- presumably the invalid-handle sentinel; verify
+}
+
+uint32_t AudioFlingerClientAdapter::getPrimaryOutputSamplingRate() {
+ auto result = [&]() -> ConversionResult<uint32_t> {
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getPrimaryOutputSamplingRate(&aidlRet)));
+ return convertIntegral<uint32_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+size_t AudioFlingerClientAdapter::getPrimaryOutputFrameCount() {
+ auto result = [&]() -> ConversionResult<size_t> {
+ int64_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getPrimaryOutputFrameCount(&aidlRet)));
+ return convertIntegral<size_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
+ return statusTFromBinderStatus(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
+}
+
+status_t AudioFlingerClientAdapter::getAudioPort(struct audio_port_v7* port) {
+ media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
+ media::AudioPort aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getAudioPort(portAidl, &aidlRet)));
+ *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(aidlRet));
+ return OK;
+}
+
+status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
+ audio_patch_handle_t* handle) {
+ media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
+ int32_t aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->createAudioPatch(patchAidl, &aidlRet)));
+ if (handle != nullptr) {
+ *handle = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_patch_handle_t(aidlRet));
+ }
+ return OK;
+}
+
+status_t AudioFlingerClientAdapter::releaseAudioPatch(audio_patch_handle_t handle) {
+ int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
+ return statusTFromBinderStatus(mDelegate->releaseAudioPatch(handleAidl));
+}
+
+status_t AudioFlingerClientAdapter::listAudioPatches(unsigned int* num_patches,
+                                                     struct audio_patch* patches) { // num_patches: in = capacity of patches[], out = entries written
+    std::vector<media::AudioPatch> aidlRet;
+    int32_t maxPatches = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->listAudioPatches(maxPatches, &aidlRet)));
+    *num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(aidlRet.size())); // NOTE(review): assumes server honors maxPatches so patches[] is not overrun -- confirm
+    return convertRange(aidlRet.begin(), aidlRet.end(), patches,
+                        aidl2legacy_AudioPatch_audio_patch);
+}
+
+status_t AudioFlingerClientAdapter::setAudioPortConfig(const struct audio_port_config* config) {
+ media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_config_AudioPortConfig(*config));
+ return statusTFromBinderStatus(mDelegate->setAudioPortConfig(configAidl));
+}
+
+audio_hw_sync_t AudioFlingerClientAdapter::getAudioHwSyncForSession(audio_session_t sessionId) {
+ auto result = [&]() -> ConversionResult<audio_hw_sync_t> {
+ int32_t sessionIdAidl = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+ int32_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getAudioHwSyncForSession(sessionIdAidl, &aidlRet)));
+ return aidl2legacy_int32_t_audio_hw_sync_t(aidlRet);
+ }();
+ return result.value_or(AUDIO_HW_SYNC_INVALID);
+}
+
+status_t AudioFlingerClientAdapter::systemReady() {
+ return statusTFromBinderStatus(mDelegate->systemReady());
+}
+
+size_t AudioFlingerClientAdapter::frameCountHAL(audio_io_handle_t ioHandle) const {
+ auto result = [&]() -> ConversionResult<size_t> {
+ int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+ int64_t aidlRet;
+ RETURN_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->frameCountHAL(ioHandleAidl, &aidlRet)));
+ return convertIntegral<size_t>(aidlRet);
+ }();
+ // Failure is ignored.
+ return result.value_or(0);
+}
+
+status_t
+AudioFlingerClientAdapter::getMicrophones(std::vector<media::MicrophoneInfo>* microphones) {
+ std::vector<media::MicrophoneInfoData> aidlRet;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mDelegate->getMicrophones(&aidlRet)));
+ if (microphones != nullptr) {
+ *microphones = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<media::MicrophoneInfo>>(aidlRet,
+ media::aidl2legacy_MicrophoneInfo));
+ }
+ return OK;
+}
+
+status_t AudioFlingerClientAdapter::setAudioHalPids(const std::vector<pid_t>& pids) {
+ std::vector<int32_t> pidsAidl = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<int32_t>>(pids, legacy2aidl_pid_t_int32_t));
+ return statusTFromBinderStatus(mDelegate->setAudioHalPids(pidsAidl));
+}
+
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AudioFlingerServerAdapter
+AudioFlingerServerAdapter::AudioFlingerServerAdapter(
+ const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {}
+
+status_t AudioFlingerServerAdapter::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+                                               uint32_t flags) {
+    return mDelegate->onPreTransact(static_cast<Delegate::TransactionCode>(code), data, flags)
+            ?: BnAudioFlingerService::onTransact(code, data, reply, flags); // GNU "elvis" ?: -- a nonzero pre-transact status short-circuits normal dispatch
+}
+
+status_t AudioFlingerServerAdapter::dump(int fd, const Vector<String16>& args) {
+ return mDelegate->dump(fd, args);
+}
+
+Status AudioFlingerServerAdapter::createTrack(const media::CreateTrackRequest& request,
+ media::CreateTrackResponse* _aidl_return) {
+ return Status::fromStatusT(mDelegate->createTrack(request, *_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::createRecord(const media::CreateRecordRequest& request,
+ media::CreateRecordResponse* _aidl_return) {
+ return Status::fromStatusT(mDelegate->createRecord(request, *_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::sampleRate(int32_t ioHandle, int32_t* _aidl_return) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ convertIntegral<int32_t>(mDelegate->sampleRate(ioHandleLegacy)));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::format(int32_t output,
+ media::audio::common::AudioFormat* _aidl_return) {
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ legacy2aidl_audio_format_t_AudioFormat(mDelegate->format(outputLegacy)));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::frameCount(int32_t ioHandle, int64_t* _aidl_return) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ convertIntegral<int64_t>(mDelegate->frameCount(ioHandleLegacy)));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::latency(int32_t output, int32_t* _aidl_return) {
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ convertIntegral<int32_t>(mDelegate->latency(outputLegacy)));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMasterVolume(float value) {
+ return Status::fromStatusT(mDelegate->setMasterVolume(value));
+}
+
+Status AudioFlingerServerAdapter::setMasterMute(bool muted) {
+ return Status::fromStatusT(mDelegate->setMasterMute(muted));
+}
+
+Status AudioFlingerServerAdapter::masterVolume(float* _aidl_return) {
+ *_aidl_return = mDelegate->masterVolume();
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::masterMute(bool* _aidl_return) {
+ *_aidl_return = mDelegate->masterMute();
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMasterBalance(float balance) {
+ return Status::fromStatusT(mDelegate->setMasterBalance(balance));
+}
+
+Status AudioFlingerServerAdapter::getMasterBalance(float* _aidl_return) {
+ return Status::fromStatusT(mDelegate->getMasterBalance(_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::setStreamVolume(media::AudioStreamType stream, float value,
+ int32_t output) {
+ audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ return Status::fromStatusT(mDelegate->setStreamVolume(streamLegacy, value, outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::setStreamMute(media::AudioStreamType stream, bool muted) {
+ audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+ return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
+}
+
+Status AudioFlingerServerAdapter::streamVolume(media::AudioStreamType stream, int32_t output,
+ float* _aidl_return) {
+ audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::streamMute(media::AudioStreamType stream, bool* _aidl_return) {
+ audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+ *_aidl_return = mDelegate->streamMute(streamLegacy);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMode(media::AudioMode mode) {
+ audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
+ return Status::fromStatusT(mDelegate->setMode(modeLegacy));
+}
+
+Status AudioFlingerServerAdapter::setMicMute(bool state) {
+ return Status::fromStatusT(mDelegate->setMicMute(state));
+}
+
+Status AudioFlingerServerAdapter::getMicMute(bool* _aidl_return) {
+ *_aidl_return = mDelegate->getMicMute();
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setRecordSilenced(int32_t portId, bool silenced) {
+ audio_port_handle_t portIdLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_port_handle_t(portId));
+ mDelegate->setRecordSilenced(portIdLegacy, silenced);
+ return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::setParameters(int32_t ioHandle, const std::string& keyValuePairs) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ String8 keyValuePairsLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_string_view_String8(keyValuePairs));
+ return Status::fromStatusT(mDelegate->setParameters(ioHandleLegacy, keyValuePairsLegacy));
+}
+
+Status AudioFlingerServerAdapter::getParameters(int32_t ioHandle, const std::string& keys,
+ std::string* _aidl_return) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ String8 keysLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_string_view_String8(keys));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ legacy2aidl_String8_string(mDelegate->getParameters(ioHandleLegacy, keysLegacy)));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::registerClient(const sp<media::IAudioFlingerClient>& client) {
+ mDelegate->registerClient(client);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getInputBufferSize(int32_t sampleRate,
+ media::audio::common::AudioFormat format,
+ int32_t channelMask, int64_t* _aidl_return) {
+ uint32_t sampleRateLegacy = VALUE_OR_RETURN_BINDER(convertIntegral<uint32_t>(sampleRate));
+ audio_format_t formatLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioFormat_audio_format_t(format));
+ audio_channel_mask_t channelMaskLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_channel_mask_t(channelMask));
+ size_t size = mDelegate->getInputBufferSize(sampleRateLegacy, formatLegacy, channelMaskLegacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int64_t>(size));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* _aidl_return) {
+ return Status::fromStatusT(mDelegate->openOutput(request, _aidl_return));
+}
+
+Status AudioFlingerServerAdapter::openDuplicateOutput(int32_t output1, int32_t output2,
+ int32_t* _aidl_return) {
+ audio_io_handle_t output1Legacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output1));
+ audio_io_handle_t output2Legacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output2));
+ audio_io_handle_t result = mDelegate->openDuplicateOutput(output1Legacy, output2Legacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_io_handle_t_int32_t(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::closeOutput(int32_t output) {
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ return Status::fromStatusT(mDelegate->closeOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::suspendOutput(int32_t output) {
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ return Status::fromStatusT(mDelegate->suspendOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::restoreOutput(int32_t output) {
+ audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(output));
+ return Status::fromStatusT(mDelegate->restoreOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* _aidl_return) {
+ return Status::fromStatusT(mDelegate->openInput(request, _aidl_return));
+}
+
+Status AudioFlingerServerAdapter::closeInput(int32_t input) {
+ audio_io_handle_t inputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(input));
+ return Status::fromStatusT(mDelegate->closeInput(inputLegacy));
+}
+
+Status AudioFlingerServerAdapter::invalidateStream(media::AudioStreamType stream) {
+ audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+ return Status::fromStatusT(mDelegate->invalidateStream(streamLegacy));
+}
+
+Status AudioFlingerServerAdapter::setVoiceVolume(float volume) {
+ return Status::fromStatusT(mDelegate->setVoiceVolume(volume));
+}
+
+Status
+AudioFlingerServerAdapter::getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) { // AIDL -> legacy shim
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    uint32_t halFramesLegacy;
+    uint32_t dspFramesLegacy;
+    RETURN_BINDER_IF_ERROR(
+            mDelegate->getRenderPosition(&halFramesLegacy, &dspFramesLegacy, outputLegacy));
+    _aidl_return->halFrames = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(halFramesLegacy)); // range-checked: errors if count > INT32_MAX
+    _aidl_return->dspFrames = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(dspFramesLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ uint32_t result = mDelegate->getInputFramesLost(ioHandleLegacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(result));
+ return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::newAudioUniqueId(media::AudioUniqueIdUse use, int32_t* _aidl_return) {
+ audio_unique_id_use_t useLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(use));
+ audio_unique_id_t result = mDelegate->newAudioUniqueId(useLegacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_unique_id_t_int32_t(result));
+ return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::acquireAudioSessionId(int32_t audioSession, int32_t pid, int32_t uid) {
+ audio_session_t audioSessionLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_session_t(audioSession));
+ pid_t pidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_pid_t(pid));
+ uid_t uidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_uid_t(uid));
+ mDelegate->acquireAudioSessionId(audioSessionLegacy, pidLegacy, uidLegacy);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::releaseAudioSessionId(int32_t audioSession, int32_t pid) {
+ audio_session_t audioSessionLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_session_t(audioSession));
+ pid_t pidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_pid_t(pid));
+ mDelegate->releaseAudioSessionId(audioSessionLegacy, pidLegacy);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::queryNumberEffects(int32_t* _aidl_return) { // AIDL -> legacy shim; mirrors the client adapter's uint32<->int32 conversion
+    uint32_t result;
+    RETURN_BINDER_IF_ERROR(mDelegate->queryNumberEffects(&result));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(result)); // was convertIntegral<uint32_t>: a no-op that skipped the range check before the int32 store
+    return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::queryEffect(int32_t index, media::EffectDescriptor* _aidl_return) {
+ uint32_t indexLegacy = VALUE_OR_RETURN_BINDER(convertIntegral<uint32_t>(index));
+ effect_descriptor_t result;
+ RETURN_BINDER_IF_ERROR(mDelegate->queryEffect(indexLegacy, &result));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ legacy2aidl_effect_descriptor_t_EffectDescriptor(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getEffectDescriptor(const media::AudioUuid& effectUUID,
+ const media::AudioUuid& typeUUID,
+ int32_t preferredTypeFlag,
+ media::EffectDescriptor* _aidl_return) {
+ effect_uuid_t effectUuidLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioUuid_audio_uuid_t(effectUUID));
+ effect_uuid_t typeUuidLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioUuid_audio_uuid_t(typeUUID));
+ uint32_t preferredTypeFlagLegacy = VALUE_OR_RETURN_BINDER(
+ convertReinterpret<uint32_t>(preferredTypeFlag));
+ effect_descriptor_t result;
+ RETURN_BINDER_IF_ERROR(mDelegate->getEffectDescriptor(&effectUuidLegacy, &typeUuidLegacy,
+ preferredTypeFlagLegacy, &result));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ legacy2aidl_effect_descriptor_t_EffectDescriptor(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* _aidl_return) {
+ return Status::fromStatusT(mDelegate->createEffect(request, _aidl_return));
+}
+
+Status
+AudioFlingerServerAdapter::moveEffects(int32_t session, int32_t srcOutput, int32_t dstOutput) {
+ audio_session_t sessionLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_session_t(session));
+ audio_io_handle_t srcOutputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(srcOutput));
+ audio_io_handle_t dstOutputLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(dstOutput));
+ return Status::fromStatusT(
+ mDelegate->moveEffects(sessionLegacy, srcOutputLegacy, dstOutputLegacy));
+}
+
+Status AudioFlingerServerAdapter::setEffectSuspended(int32_t effectId, int32_t sessionId,
+ bool suspended) {
+ int effectIdLegacy = VALUE_OR_RETURN_BINDER(convertReinterpret<int>(effectId));
+ audio_session_t sessionIdLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_session_t(sessionId));
+ mDelegate->setEffectSuspended(effectIdLegacy, sessionIdLegacy, suspended);
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::loadHwModule(const std::string& name, int32_t* _aidl_return) {
+ audio_module_handle_t result = mDelegate->loadHwModule(name.c_str());
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_module_handle_t_int32_t(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getPrimaryOutputSamplingRate(int32_t* _aidl_return) {
+ *_aidl_return = VALUE_OR_RETURN_BINDER(
+ convertIntegral<int32_t>(mDelegate->getPrimaryOutputSamplingRate()));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getPrimaryOutputFrameCount(int64_t* _aidl_return) { // AIDL -> legacy shim
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int64_t>(mDelegate->getPrimaryOutputFrameCount()));
+    return Status::ok();
+    // NOTE(review): stray blank line below kept intact to preserve patch line accounting
+}
+
+Status AudioFlingerServerAdapter::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
+ return Status::fromStatusT(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
+}
+
+Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPort& port,
+ media::AudioPort* _aidl_return) {
+ audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+ RETURN_BINDER_IF_ERROR(mDelegate->getAudioPort(&portLegacy));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPort(portLegacy));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
+ int32_t* _aidl_return) {
+ audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
+ audio_patch_handle_t handleLegacy;
+ RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_patch_handle_t_int32_t(handleLegacy));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::releaseAudioPatch(int32_t handle) {
+ audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_patch_handle_t(handle));
+ return Status::fromStatusT(mDelegate->releaseAudioPatch(handleLegacy));
+}
+
+Status AudioFlingerServerAdapter::listAudioPatches(int32_t maxCount,
+                                                   std::vector<media::AudioPatch>* _aidl_return) { // AIDL -> legacy shim
+    unsigned int count = VALUE_OR_RETURN_BINDER(convertIntegral<unsigned int>(maxCount));
+    count = std::min(count, static_cast<unsigned int>(MAX_ITEMS_PER_LIST)); // clamp untrusted count before allocating
+    std::unique_ptr<audio_patch[]> patchesLegacy(new audio_patch[count]);
+    RETURN_BINDER_IF_ERROR(mDelegate->listAudioPatches(&count, patchesLegacy.get())); // delegate rewrites count in place; assumed <= capacity -- confirm
+    RETURN_BINDER_IF_ERROR(convertRange(&patchesLegacy[0],
+                                        &patchesLegacy[count],
+                                        std::back_inserter(*_aidl_return),
+                                        legacy2aidl_audio_patch_AudioPatch));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfig& config) {
+ audio_port_config configLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_AudioPortConfig_audio_port_config(config));
+ return Status::fromStatusT(mDelegate->setAudioPortConfig(&configLegacy));
+}
+
+Status AudioFlingerServerAdapter::getAudioHwSyncForSession(int32_t sessionId,
+ int32_t* _aidl_return) {
+ audio_session_t sessionIdLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_session_t(sessionId));
+ audio_hw_sync_t result = mDelegate->getAudioHwSyncForSession(sessionIdLegacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_hw_sync_t_int32_t(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::systemReady() {
+ return Status::fromStatusT(mDelegate->systemReady());
+}
+
+Status AudioFlingerServerAdapter::frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) {
+ audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+ size_t result = mDelegate->frameCountHAL(ioHandleLegacy);
+ *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int64_t>(result));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getMicrophones(
+ std::vector<media::MicrophoneInfoData>* _aidl_return) {
+ std::vector<media::MicrophoneInfo> resultLegacy;
+ RETURN_BINDER_IF_ERROR(mDelegate->getMicrophones(&resultLegacy));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(convertContainer<std::vector<media::MicrophoneInfoData>>(
+ resultLegacy, media::legacy2aidl_MicrophoneInfo));
+ return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setAudioHalPids(const std::vector<int32_t>& pids) {
+ std::vector<pid_t> pidsLegacy = VALUE_OR_RETURN_BINDER(
+ convertContainer<std::vector<pid_t>>(pids, aidl2legacy_int32_t_pid_t));
+ RETURN_BINDER_IF_ERROR(mDelegate->setAudioHalPids(pidsLegacy));
+ return Status::ok();
+}
} // namespace android
diff --git a/media/libaudioclient/IAudioFlingerClient.cpp b/media/libaudioclient/IAudioFlingerClient.cpp
deleted file mode 100644
index 47eb7dc..0000000
--- a/media/libaudioclient/IAudioFlingerClient.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioFlingerClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioFlingerClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
- IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION
-};
-
-class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient>
-{
-public:
- explicit BpAudioFlingerClient(const sp<IBinder>& impl)
- : BpInterface<IAudioFlingerClient>(impl)
- {
- }
-
- void ioConfigChanged(audio_io_config_event event, const sp<AudioIoDescriptor>& ioDesc)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
- data.writeInt32(event);
- data.writeInt32((int32_t)ioDesc->mIoHandle);
- data.write(&ioDesc->mPatch, sizeof(struct audio_patch));
- data.writeInt32(ioDesc->mSamplingRate);
- data.writeInt32(ioDesc->mFormat);
- data.writeInt32(ioDesc->mChannelMask);
- data.writeInt64(ioDesc->mFrameCount);
- data.writeInt64(ioDesc->mFrameCountHAL);
- data.writeInt32(ioDesc->mLatency);
- data.writeInt32(ioDesc->mPortId);
- remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlingerClient::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case IO_CONFIG_CHANGED: {
- CHECK_INTERFACE(IAudioFlingerClient, data, reply);
- audio_io_config_event event = (audio_io_config_event)data.readInt32();
- sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
- ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32();
- data.read(&ioDesc->mPatch, sizeof(struct audio_patch));
- ioDesc->mSamplingRate = data.readInt32();
- ioDesc->mFormat = (audio_format_t) data.readInt32();
- ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
- ioDesc->mFrameCount = data.readInt64();
- ioDesc->mFrameCountHAL = data.readInt64();
- ioDesc->mLatency = data.readInt32();
- ioDesc->mPortId = data.readInt32();
- ioConfigChanged(event, ioDesc);
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 60af84b..0849e61 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -26,6 +26,7 @@
#include <binder/IPCThreadState.h>
#include <binder/Parcel.h>
#include <media/AudioEffect.h>
+#include <media/AudioValidator.h>
#include <media/IAudioPolicyService.h>
#include <mediautils/ServiceUtilities.h>
#include <mediautils/TimeCheck.h>
@@ -68,7 +69,7 @@
QUERY_DEFAULT_PRE_PROCESSING,
SET_EFFECT_ENABLED,
IS_STREAM_ACTIVE_REMOTELY,
- IS_OFFLOAD_SUPPORTED,
+ GET_OFFLOAD_MODE_SUPPORTED,
IS_DIRECT_OUTPUT_SUPPORTED,
LIST_AUDIO_PORTS,
GET_AUDIO_PORT,
@@ -112,13 +113,18 @@
MOVE_EFFECTS_TO_IO,
SET_RTT_ENABLED,
IS_CALL_SCREEN_MODE_SUPPORTED,
- SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
- REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
- GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+ SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
+ REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
+ GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY,
GET_DEVICES_FOR_ATTRIBUTES,
AUDIO_MODULES_UPDATED, // oneway
SET_CURRENT_IME_UID,
REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER,
+ SET_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET,
};
#define MAX_ITEMS_PER_LIST 1024
@@ -523,7 +529,11 @@
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.write(desc, sizeof(effect_descriptor_t));
- remote()->transact(GET_OUTPUT_FOR_EFFECT, data, &reply);
+ status_t status = remote()->transact(GET_OUTPUT_FOR_EFFECT, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return AUDIO_IO_HANDLE_NONE;
+ }
return static_cast <audio_io_handle_t> (reply.readInt32());
}
@@ -656,13 +666,13 @@
return reply.readInt32();
}
- virtual bool isOffloadSupported(const audio_offload_info_t& info)
+ virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& info)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.write(&info, sizeof(audio_offload_info_t));
- remote()->transact(IS_OFFLOAD_SUPPORTED, data, &reply);
- return reply.readInt32();
+ remote()->transact(GET_OFFLOAD_MODE_SUPPORTED, data, &reply);
+ return static_cast<audio_offload_mode_t>(reply.readInt32());
}
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
@@ -678,7 +688,7 @@
virtual status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation)
{
if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
@@ -701,27 +711,27 @@
numPortsReq = *num_ports;
}
if (numPortsReq > 0) {
- reply.read(ports, numPortsReq * sizeof(struct audio_port));
+ reply.read(ports, numPortsReq * sizeof(struct audio_port_v7));
}
*generation = reply.readInt32();
}
return status;
}
- virtual status_t getAudioPort(struct audio_port *port)
+ virtual status_t getAudioPort(struct audio_port_v7 *port)
{
if (port == NULL) {
return BAD_VALUE;
}
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.write(port, sizeof(struct audio_port));
+ data.write(port, sizeof(struct audio_port_v7));
status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
if (status != NO_ERROR ||
(status = (status_t)reply.readInt32()) != NO_ERROR) {
return status;
}
- reply.read(port, sizeof(struct audio_port));
+ reply.read(port, sizeof(struct audio_port_v7));
return status;
}
@@ -800,7 +810,7 @@
return status;
}
- virtual void registerClient(const sp<IAudioPolicyServiceClient>& client)
+ virtual void registerClient(const sp<media::IAudioPolicyServiceClient>& client)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
@@ -1173,31 +1183,18 @@
return reply.readBool();
}
- virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32((int32_t) uid);
- size_t size = devices.size();
- size_t sizePosition = data.dataPosition();
- data.writeInt32((int32_t) size);
- size_t finalSize = size;
- for (size_t i = 0; i < size; i++) {
- size_t position = data.dataPosition();
- if (devices[i].writeToParcel(&data) != NO_ERROR) {
- data.setDataPosition(position);
- finalSize--;
- }
- }
- if (size != finalSize) {
- size_t position = data.dataPosition();
- data.setDataPosition(sizePosition);
- data.writeInt32(finalSize);
- data.setDataPosition(position);
+ status_t status = data.writeParcelableVector(devices);
+ if (status != NO_ERROR) {
+ return status;
}
- status_t status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
+ status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
}
@@ -1218,51 +1215,37 @@
return status;
}
- virtual status_t setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ virtual status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32((int32_t) userId);
- size_t size = devices.size();
- size_t sizePosition = data.dataPosition();
- data.writeInt32((int32_t) size);
- size_t finalSize = size;
- for (size_t i = 0; i < size; i++) {
- size_t position = data.dataPosition();
- if (devices[i].writeToParcel(&data) != NO_ERROR) {
- data.setDataPosition(position);
- finalSize--;
- }
- }
- if (size != finalSize) {
- size_t position = data.dataPosition();
- data.setDataPosition(sizePosition);
- data.writeInt32(finalSize);
- data.setDataPosition(position);
- }
-
- status_t status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
- if (status == NO_ERROR) {
- status = (status_t)reply.readInt32();
- }
+ data.writeInt32((int32_t) userId);
+ status_t status = data.writeParcelableVector(devices);
+ if (status != NO_ERROR) {
return status;
}
- virtual status_t removeUserIdDeviceAffinities(int userId) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
- data.writeInt32((int32_t) userId);
-
- status_t status =
- remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
- if (status == NO_ERROR) {
- status = (status_t) reply.readInt32();
- }
- return status;
+ status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
}
+ return status;
+ }
+
+ virtual status_t removeUserIdDeviceAffinities(int userId) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+ data.writeInt32((int32_t) userId);
+
+ status_t status =
+ remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t) reply.readInt32();
+ }
+ return status;
+ }
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
{
@@ -1384,17 +1367,31 @@
return reply.readBool();
}
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device)
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeUint32(static_cast<uint32_t>(strategy));
- status_t status = device.writeToParcel(&data);
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = data.writeParcelableVector(devices);
if (status != NO_ERROR) {
return BAD_VALUE;
}
- status = remote()->transact(SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+ status = remote()->transact(SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(strategy));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = remote()->transact(REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY,
data, &reply);
if (status != NO_ERROR) {
return status;
@@ -1402,31 +1399,108 @@
return static_cast<status_t>(reply.readInt32());
}
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy)
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role, AudioDeviceTypeAddrVector &devices)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeUint32(static_cast<uint32_t>(strategy));
- status_t status = remote()->transact(REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
- data, &reply);
- if (status != NO_ERROR) {
- return status;
- }
- return static_cast<status_t>(reply.readInt32());
- }
-
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeUint32(static_cast<uint32_t>(strategy));
- status_t status = remote()->transact(GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = remote()->transact(GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY,
data, &reply);
if (status != NO_ERROR) {
return status;
}
- status = device.readFromParcel(&reply);
+ status = reply.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(audioSource));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = data.writeParcelableVector(devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = remote()->transact(SET_DEVICES_ROLE_FOR_CAPTURE_PRESET, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(audioSource));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = data.writeParcelableVector(devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = remote()->transact(ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(audioSource));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = data.writeParcelableVector(devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = remote()->transact(REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t clearDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(audioSource));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = remote()->transact(CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET,
+ data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return static_cast<status_t>(reply.readInt32());
+ }
+
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role, AudioDeviceTypeAddrVector &devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeUint32(static_cast<uint32_t>(audioSource));
+ data.writeUint32(static_cast<uint32_t>(role));
+ status_t status = remote()->transact(GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET,
+ data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = reply.readParcelableVector(&devices);
if (status != NO_ERROR) {
return status;
}
@@ -1517,6 +1591,7 @@
case REGISTER_EFFECT:
case UNREGISTER_EFFECT:
case SET_EFFECT_ENABLED:
+ case GET_STRATEGY_FOR_STREAM:
case GET_OUTPUT_FOR_ATTR:
case MOVE_EFFECTS_TO_IO:
ALOGW("%s: transaction %d received from PID %d",
@@ -1544,6 +1619,15 @@
// case SET_FORCE_USE:
case INIT_STREAM_VOLUME:
case SET_STREAM_VOLUME:
+ case SET_VOLUME_ATTRIBUTES:
+ case GET_STREAM_VOLUME:
+ case GET_VOLUME_ATTRIBUTES:
+ case GET_MIN_VOLUME_FOR_ATTRIBUTES:
+ case GET_MAX_VOLUME_FOR_ATTRIBUTES:
+ case IS_STREAM_ACTIVE:
+ case IS_STREAM_ACTIVE_REMOTELY:
+ case IS_SOURCE_ACTIVE:
+ case GET_DEVICES_FOR_STREAM:
case REGISTER_POLICY_MIXES:
case SET_MASTER_MONO:
case GET_SURROUND_FORMATS:
@@ -1561,15 +1645,20 @@
case RELEASE_SOUNDTRIGGER_SESSION:
case SET_RTT_ENABLED:
case IS_CALL_SCREEN_MODE_SUPPORTED:
- case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+ case SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY:
case SET_SUPPORTED_SYSTEM_USAGES:
- case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
- case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+ case REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY:
+ case GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY:
case GET_DEVICES_FOR_ATTRIBUTES:
case SET_ALLOWED_CAPTURE_POLICY:
case AUDIO_MODULES_UPDATED:
case SET_CURRENT_IME_UID:
- case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER: {
+ case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER:
+ case SET_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+ case ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+ case REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+ case CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET:
+ case GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1685,7 +1774,6 @@
if (status != NO_ERROR) {
return status;
}
- sanetizeAudioAttributes(&attr);
audio_session_t session = (audio_session_t)data.readInt32();
audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
bool hasStream = data.readInt32() != 0;
@@ -1703,10 +1791,16 @@
audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
audio_io_handle_t output = 0;
std::vector<audio_io_handle_t> secondaryOutputs;
+
+ status = AudioValidator::validateAudioAttributes(attr, "68953950");
+ if (status != NO_ERROR) {
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
status = getOutputForAttr(&attr,
- &output, session, &stream, pid, uid,
- &config,
- flags, &selectedDeviceId, &portId, &secondaryOutputs);
+ &output, session, &stream, pid, uid,
+ &config,
+ flags, &selectedDeviceId, &portId, &secondaryOutputs);
reply->writeInt32(status);
status = reply->write(&attr, sizeof(audio_attributes_t));
if (status != NO_ERROR) {
@@ -1745,8 +1839,11 @@
case GET_INPUT_FOR_ATTR: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_attributes_t attr = {};
- data.read(&attr, sizeof(audio_attributes_t));
- sanetizeAudioAttributes(&attr);
+ status_t status = data.read(&attr, sizeof(audio_attributes_t));
+ if (status != NO_ERROR) {
+ return status;
+ }
+
audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
audio_unique_id_t riid = (audio_unique_id_t)data.readInt32();
audio_session_t session = (audio_session_t)data.readInt32();
@@ -1759,9 +1856,13 @@
audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
- status_t status = getInputForAttr(&attr, &input, riid, session, pid, uid,
- opPackageName, &config,
- flags, &selectedDeviceId, &portId);
+
+ status = AudioValidator::validateAudioAttributes(attr, "68953950");
+ if (status == NO_ERROR) {
+ status = getInputForAttr(&attr, &input, riid, session, pid, uid,
+ opPackageName, &config,
+ flags, &selectedDeviceId, &portId);
+ }
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->writeInt32(input);
@@ -1842,11 +1943,15 @@
if (status != NO_ERROR) {
return status;
}
+
int index = data.readInt32();
audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
- reply->writeInt32(static_cast <uint32_t>(setVolumeIndexForAttributes(attributes,
- index, device)));
+ status = AudioValidator::validateAudioAttributes(attributes, "169572641");
+ if (status == NO_ERROR) {
+ status = setVolumeIndexForAttributes(attributes, index, device);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
return NO_ERROR;
} break;
@@ -1860,8 +1965,11 @@
audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
int index = 0;
- status = getVolumeIndexForAttributes(attributes, index, device);
- reply->writeInt32(static_cast <uint32_t>(status));
+ status = AudioValidator::validateAudioAttributes(attributes, "169572641");
+ if (status == NO_ERROR) {
+ status = getVolumeIndexForAttributes(attributes, index, device);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
if (status == NO_ERROR) {
reply->writeInt32(index);
}
@@ -1877,8 +1985,11 @@
}
int index = 0;
- status = getMinVolumeIndexForAttributes(attributes, index);
- reply->writeInt32(static_cast <uint32_t>(status));
+ status = AudioValidator::validateAudioAttributes(attributes, "169572641");
+ if (status == NO_ERROR) {
+ status = getMinVolumeIndexForAttributes(attributes, index);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
if (status == NO_ERROR) {
reply->writeInt32(index);
}
@@ -1894,8 +2005,11 @@
}
int index = 0;
- status = getMaxVolumeIndexForAttributes(attributes, index);
- reply->writeInt32(static_cast <uint32_t>(status));
+ status = AudioValidator::validateAudioAttributes(attributes, "169572641");
+ if (status == NO_ERROR) {
+ status = getMaxVolumeIndexForAttributes(attributes, index);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
if (status == NO_ERROR) {
reply->writeInt32(index);
}
@@ -1913,31 +2027,37 @@
case GET_OUTPUT_FOR_EFFECT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
effect_descriptor_t desc = {};
- if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
+ status_t status = data.read(&desc, sizeof(desc));
+ if (status != NO_ERROR) {
android_errorWriteLog(0x534e4554, "73126106");
+ return status;
}
- (void)sanitizeEffectDescriptor(&desc);
- audio_io_handle_t output = getOutputForEffect(&desc);
- reply->writeInt32(static_cast <int>(output));
+ status = AudioValidator::validateEffectDescriptor(desc, "73126106");
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ audio_io_handle_t output = getOutputForEffect(&desc);
+ reply->writeInt32(static_cast <int32_t>(output));
+ }
return NO_ERROR;
} break;
case REGISTER_EFFECT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
effect_descriptor_t desc = {};
- if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
+ status_t status = data.read(&desc, sizeof(desc));
+ if (status != NO_ERROR) {
android_errorWriteLog(0x534e4554, "73126106");
+ return status;
}
- (void)sanitizeEffectDescriptor(&desc);
audio_io_handle_t io = data.readInt32();
uint32_t strategy = data.readInt32();
audio_session_t session = (audio_session_t) data.readInt32();
int id = data.readInt32();
- reply->writeInt32(static_cast <int32_t>(registerEffect(&desc,
- io,
- strategy,
- session,
- id)));
+ status = AudioValidator::validateEffectDescriptor(desc, "73126106");
+ if (status == NO_ERROR) {
+ status = registerEffect(&desc, io, strategy, session, id);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
return NO_ERROR;
} break;
@@ -2029,12 +2149,11 @@
return status;
}
- case IS_OFFLOAD_SUPPORTED: {
+ case GET_OFFLOAD_MODE_SUPPORTED: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_offload_info_t info = {};
data.read(&info, sizeof(audio_offload_info_t));
- bool isSupported = isOffloadSupported(info);
- reply->writeInt32(isSupported);
+ reply->writeInt32(static_cast<int32_t>(getOffloadSupport(info)));
return NO_ERROR;
}
@@ -2046,7 +2165,11 @@
if (status != NO_ERROR) return status;
status = data.read(&attributes, sizeof(audio_attributes_t));
if (status != NO_ERROR) return status;
- reply->writeInt32(isDirectOutputSupported(config, attributes));
+ status = AudioValidator::validateAudioAttributes(attributes, "169572641");
+ if (status == NO_ERROR) {
+ status = isDirectOutputSupported(config, attributes);
+ }
+ reply->writeInt32(static_cast <int32_t>(status));
return NO_ERROR;
}
@@ -2059,8 +2182,8 @@
numPortsReq = MAX_ITEMS_PER_LIST;
}
unsigned int numPorts = numPortsReq;
- struct audio_port *ports =
- (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
+ struct audio_port_v7 *ports =
+ (struct audio_port_v7 *)calloc(numPortsReq, sizeof(struct audio_port_v7));
if (ports == NULL) {
reply->writeInt32(NO_MEMORY);
reply->writeInt32(0);
@@ -2075,7 +2198,7 @@
if (numPortsReq > numPorts) {
numPortsReq = numPorts;
}
- reply->write(ports, numPortsReq * sizeof(struct audio_port));
+ reply->write(ports, numPortsReq * sizeof(struct audio_port_v7));
reply->writeInt32(generation);
}
free(ports);
@@ -2084,14 +2207,19 @@
case GET_AUDIO_PORT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- struct audio_port port = {};
- if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
+ struct audio_port_v7 port = {};
+ status_t status = data.read(&port, sizeof(struct audio_port_v7));
+ if (status != NO_ERROR) {
ALOGE("b/23912202");
+ return status;
}
- status_t status = getAudioPort(&port);
+ status = AudioValidator::validateAudioPort(port);
+ if (status == NO_ERROR) {
+ status = getAudioPort(&port);
+ }
reply->writeInt32(status);
if (status == NO_ERROR) {
- reply->write(&port, sizeof(struct audio_port));
+ reply->write(&port, sizeof(struct audio_port_v7));
}
return NO_ERROR;
}
@@ -2099,12 +2227,20 @@
case CREATE_AUDIO_PATCH: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
struct audio_patch patch = {};
- data.read(&patch, sizeof(struct audio_patch));
- audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
- if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
- ALOGE("b/23912202");
+ status_t status = data.read(&patch, sizeof(struct audio_patch));
+ if (status != NO_ERROR) {
+ return status;
}
- status_t status = createAudioPatch(&patch, &handle);
+ audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+ status = data.read(&handle, sizeof(audio_patch_handle_t));
+ if (status != NO_ERROR) {
+ ALOGE("b/23912202");
+ return status;
+ }
+ status = AudioValidator::validateAudioPatch(patch);
+ if (status == NO_ERROR) {
+ status = createAudioPatch(&patch, &handle);
+ }
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->write(&handle, sizeof(audio_patch_handle_t));
@@ -2154,17 +2290,22 @@
case SET_AUDIO_PORT_CONFIG: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
struct audio_port_config config = {};
- data.read(&config, sizeof(struct audio_port_config));
- (void)sanitizeAudioPortConfig(&config);
- status_t status = setAudioPortConfig(&config);
+ status_t status = data.read(&config, sizeof(struct audio_port_config));
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = AudioValidator::validateAudioPortConfig(config);
+ if (status == NO_ERROR) {
+ status = setAudioPortConfig(&config);
+ }
reply->writeInt32(status);
return NO_ERROR;
}
case REGISTER_CLIENT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
- data.readStrongBinder());
+ sp<media::IAudioPolicyServiceClient> client =
+ interface_cast<media::IAudioPolicyServiceClient>(data.readStrongBinder());
registerClient(client);
return NO_ERROR;
} break;
@@ -2232,13 +2373,25 @@
case START_AUDIO_SOURCE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
struct audio_port_config source = {};
- data.read(&source, sizeof(struct audio_port_config));
- (void)sanitizeAudioPortConfig(&source);
+ status_t status = data.read(&source, sizeof(struct audio_port_config));
+ if (status != NO_ERROR) {
+ return status;
+ }
audio_attributes_t attributes = {};
- data.read(&attributes, sizeof(audio_attributes_t));
- sanetizeAudioAttributes(&attributes);
+ status = data.read(&attributes, sizeof(audio_attributes_t));
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = AudioValidator::validateAudioPortConfig(source);
+ if (status == NO_ERROR) {
+ // OK to not always sanitize attributes as startAudioSource() is not called if
+ // the port config is invalid.
+ status = AudioValidator::validateAudioAttributes(attributes, "68953950");
+ }
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- status_t status = startAudioSource(&source, &attributes, &portId);
+ if (status == NO_ERROR) {
+ status = startAudioSource(&source, &attributes, &portId);
+ }
reply->writeInt32(status);
reply->writeInt32(portId);
return NO_ERROR;
@@ -2460,15 +2613,12 @@
case SET_UID_DEVICE_AFFINITY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
const uid_t uid = (uid_t) data.readInt32();
- Vector<AudioDeviceTypeAddr> devices;
- size_t size = (size_t)data.readInt32();
- for (size_t i = 0; i < size; i++) {
- AudioDeviceTypeAddr device;
- if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
- devices.add(device);
- }
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
}
- status_t status = setUidDeviceAffinities(uid, devices);
+ status = setUidDeviceAffinities(uid, devices);
reply->writeInt32(status);
return NO_ERROR;
}
@@ -2484,15 +2634,12 @@
case SET_USERID_DEVICE_AFFINITY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
const int userId = (int) data.readInt32();
- Vector<AudioDeviceTypeAddr> devices;
- size_t size = (size_t)data.readInt32();
- for (size_t i = 0; i < size; i++) {
- AudioDeviceTypeAddr device;
- if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
- devices.add(device);
- }
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
}
- status_t status = setUserIdDeviceAffinities(userId, devices);
+ status = setUserIdDeviceAffinities(userId, devices);
reply->writeInt32(status);
return NO_ERROR;
}
@@ -2628,7 +2775,7 @@
case SET_ALLOWED_CAPTURE_POLICY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
uid_t uid = data.readInt32();
- audio_flags_mask_t flags = data.readInt32();
+ audio_flags_mask_t flags = static_cast<audio_flags_mask_t>(data.readInt32());
status_t status = setAllowedCapturePolicy(uid, flags);
reply->writeInt32(status);
return NO_ERROR;
@@ -2649,33 +2796,36 @@
return NO_ERROR;
}
- case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+ case SET_DEVICES_ROLE_FOR_PRODUCT_STRATEGY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
product_strategy_t strategy = (product_strategy_t) data.readUint32();
- AudioDeviceTypeAddr device;
- status_t status = device.readFromParcel((Parcel*)&data);
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
if (status != NO_ERROR) {
return status;
}
- status = setPreferredDeviceForStrategy(strategy, device);
+ status = setDevicesRoleForStrategy(strategy, role, devices);
reply->writeInt32(status);
return NO_ERROR;
}
- case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+ case REMOVE_DEVICES_ROLE_FOR_PRODUCT_STRATEGY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
product_strategy_t strategy = (product_strategy_t) data.readUint32();
- status_t status = removePreferredDeviceForStrategy(strategy);
+ device_role_t role = (device_role_t) data.readUint32();
+ status_t status = removeDevicesRoleForStrategy(strategy, role);
reply->writeInt32(status);
return NO_ERROR;
}
- case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+ case GET_DEVICES_FOR_ROLE_AND_PRODUCT_STRATEGY: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
product_strategy_t strategy = (product_strategy_t) data.readUint32();
- AudioDeviceTypeAddr device;
- status_t status = getPreferredDeviceForStrategy(strategy, device);
- status_t marshall_status = device.writeToParcel(reply);
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = getDevicesForRoleAndStrategy(strategy, role, devices);
+ status_t marshall_status = reply->writeParcelableVector(devices);
if (marshall_status != NO_ERROR) {
return marshall_status;
}
@@ -2757,49 +2907,76 @@
return NO_ERROR;
} break;
+ case SET_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_source_t audioSource = (audio_source_t) data.readUint32();
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = setDevicesRoleForCapturePreset(audioSource, role, devices);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case ADD_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_source_t audioSource = (audio_source_t) data.readUint32();
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = addDevicesRoleForCapturePreset(audioSource, role, devices);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case REMOVE_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_source_t audioSource = (audio_source_t) data.readUint32();
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = data.readParcelableVector(&devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = removeDevicesRoleForCapturePreset(audioSource, role, devices);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case CLEAR_DEVICES_ROLE_FOR_CAPTURE_PRESET: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_source_t audioSource = (audio_source_t) data.readUint32();
+ device_role_t role = (device_role_t) data.readUint32();
+ status_t status = clearDevicesRoleForCapturePreset(audioSource, role);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case GET_DEVICES_FOR_ROLE_AND_CAPTURE_PRESET: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_source_t audioSource = (audio_source_t) data.readUint32();
+ device_role_t role = (device_role_t) data.readUint32();
+ AudioDeviceTypeAddrVector devices;
+ status_t status = getDevicesForRoleAndCapturePreset(audioSource, role, devices);
+ status_t marshall_status = reply->writeParcelableVector(devices);
+ if (marshall_status != NO_ERROR) {
+ return marshall_status;
+ }
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
-/** returns true if string overflow was prevented by zero termination */
-template <size_t size>
-static bool preventStringOverflow(char (&s)[size]) {
- if (strnlen(s, size) < size) return false;
- s[size - 1] = '\0';
- return true;
-}
-
-void BnAudioPolicyService::sanetizeAudioAttributes(audio_attributes_t* attr)
-{
- const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
- if (strnlen(attr->tags, tagsMaxSize) >= tagsMaxSize) {
- android_errorWriteLog(0x534e4554, "68953950"); // SafetyNet logging
- }
- attr->tags[tagsMaxSize - 1] = '\0';
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeEffectDescriptor(effect_descriptor_t* desc)
-{
- if (preventStringOverflow(desc->name)
- | /* always */ preventStringOverflow(desc->implementor)) {
- android_errorWriteLog(0x534e4554, "73126106"); // SafetyNet logging
- return BAD_VALUE;
- }
- return NO_ERROR;
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeAudioPortConfig(struct audio_port_config* config)
-{
- if (config->type == AUDIO_PORT_TYPE_DEVICE &&
- preventStringOverflow(config->ext.device.address)) {
- return BAD_VALUE;
- }
- return NO_ERROR;
-}
-
// ----------------------------------------------------------------------------
} // namespace android
diff --git a/media/libaudioclient/IAudioPolicyServiceClient.cpp b/media/libaudioclient/IAudioPolicyServiceClient.cpp
deleted file mode 100644
index 0f9580c..0000000
--- a/media/libaudioclient/IAudioPolicyServiceClient.cpp
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioPolicyServiceClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioPolicyServiceClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
- PORT_LIST_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
- PATCH_LIST_UPDATE,
- MIX_STATE_UPDATE,
- RECORDING_CONFIGURATION_UPDATE,
- VOLUME_GROUP_CHANGED,
-};
-
-// ----------------------------------------------------------------------
-inline void readAudioConfigBaseFromParcel(const Parcel& data, audio_config_base_t *config) {
- config->sample_rate = data.readUint32();
- config->channel_mask = (audio_channel_mask_t) data.readInt32();
- config->format = (audio_format_t) data.readInt32();
-}
-
-inline void writeAudioConfigBaseToParcel(Parcel& data, const audio_config_base_t *config)
-{
- data.writeUint32(config->sample_rate);
- data.writeInt32((int32_t) config->channel_mask);
- data.writeInt32((int32_t) config->format);
-}
-
-inline void readRecordClientInfoFromParcel(const Parcel& data, record_client_info_t *clientInfo) {
- clientInfo->riid = (audio_unique_id_t) data.readInt32();
- clientInfo->uid = (uid_t) data.readUint32();
- clientInfo->session = (audio_session_t) data.readInt32();
- clientInfo->source = (audio_source_t) data.readInt32();
- data.read(&clientInfo->port_id, sizeof(audio_port_handle_t));
- clientInfo->silenced = data.readBool();
-}
-
-inline void writeRecordClientInfoToParcel(Parcel& data, const record_client_info_t *clientInfo) {
- data.writeInt32((int32_t) clientInfo->riid);
- data.writeUint32((uint32_t) clientInfo->uid);
- data.writeInt32((int32_t) clientInfo->session);
- data.writeInt32((int32_t) clientInfo->source);
- data.write(&clientInfo->port_id, sizeof(audio_port_handle_t));
- data.writeBool(clientInfo->silenced);
-}
-
-inline void readEffectVectorFromParcel(const Parcel& data,
- std::vector<effect_descriptor_t> *effects) {
- int32_t numEffects = data.readInt32();
- for (int32_t i = 0; i < numEffects; i++) {
- effect_descriptor_t effect;
- if (data.read(&effect, sizeof(effect_descriptor_t)) != NO_ERROR) {
- break;
- }
- (*effects).push_back(effect);
- }
-}
-
-inline void writeEffectVectorToParcel(Parcel& data, std::vector<effect_descriptor_t> effects) {
- data.writeUint32((uint32_t) effects.size());
- for (const auto& effect : effects) {
- if (data.write(&effect, sizeof(effect_descriptor_t)) != NO_ERROR) {
- break;
- }
- }
-}
-
-// ----------------------------------------------------------------------
-class BpAudioPolicyServiceClient : public BpInterface<IAudioPolicyServiceClient>
-{
-public:
- explicit BpAudioPolicyServiceClient(const sp<IBinder>& impl)
- : BpInterface<IAudioPolicyServiceClient>(impl)
- {
- }
-
- void onAudioPortListUpdate()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
- remote()->transact(PORT_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void onAudioPatchListUpdate()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
- remote()->transact(PATCH_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void onAudioVolumeGroupChanged(volume_group_t group, int flags)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
- data.writeUint32(group);
- data.writeInt32(flags);
- remote()->transact(VOLUME_GROUP_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
- data.writeString8(regId);
- data.writeInt32(state);
- remote()->transact(MIX_STATE_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void onRecordingConfigurationUpdate(int event,
- const record_client_info_t *clientInfo,
- const audio_config_base_t *clientConfig,
- std::vector<effect_descriptor_t> clientEffects,
- const audio_config_base_t *deviceConfig,
- std::vector<effect_descriptor_t> effects,
- audio_patch_handle_t patchHandle,
- audio_source_t source) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
- data.writeInt32(event);
- writeRecordClientInfoToParcel(data, clientInfo);
- writeAudioConfigBaseToParcel(data, clientConfig);
- writeEffectVectorToParcel(data, clientEffects);
- writeAudioConfigBaseToParcel(data, deviceConfig);
- writeEffectVectorToParcel(data, effects);
- data.writeInt32(patchHandle);
- data.writeInt32((int32_t) source);
- remote()->transact(RECORDING_CONFIGURATION_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
- }
-};
-
-IMPLEMENT_META_INTERFACE(AudioPolicyServiceClient, "android.media.IAudioPolicyServiceClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioPolicyServiceClient::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case PORT_LIST_UPDATE: {
- CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
- onAudioPortListUpdate();
- return NO_ERROR;
- } break;
- case PATCH_LIST_UPDATE: {
- CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
- onAudioPatchListUpdate();
- return NO_ERROR;
- } break;
- case VOLUME_GROUP_CHANGED: {
- CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
- volume_group_t group = static_cast<volume_group_t>(data.readUint32());
- int flags = data.readInt32();
- onAudioVolumeGroupChanged(group, flags);
- return NO_ERROR;
- } break;
- case MIX_STATE_UPDATE: {
- CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
- String8 regId = data.readString8();
- int32_t state = data.readInt32();
- onDynamicPolicyMixStateUpdate(regId, state);
- return NO_ERROR;
- } break;
- case RECORDING_CONFIGURATION_UPDATE: {
- CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
- int event = (int) data.readInt32();
- record_client_info_t clientInfo;
- audio_config_base_t clientConfig;
- audio_config_base_t deviceConfig;
- readRecordClientInfoFromParcel(data, &clientInfo);
- readAudioConfigBaseFromParcel(data, &clientConfig);
- std::vector<effect_descriptor_t> clientEffects;
- readEffectVectorFromParcel(data, &clientEffects);
- readAudioConfigBaseFromParcel(data, &deviceConfig);
- std::vector<effect_descriptor_t> effects;
- readEffectVectorFromParcel(data, &effects);
- audio_patch_handle_t patchHandle = (audio_patch_handle_t) data.readInt32();
- audio_source_t source = (audio_source_t) data.readInt32();
- onRecordingConfigurationUpdate(event, &clientInfo, &clientConfig, clientEffects,
- &deviceConfig, effects, patchHandle, source);
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IAudioTrack.cpp b/media/libaudioclient/IAudioTrack.cpp
deleted file mode 100644
index 6219e7a..0000000
--- a/media/libaudioclient/IAudioTrack.cpp
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IAudioTrack"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioTrack.h>
-
-namespace android {
-
-using media::VolumeShaper;
-
-enum {
- GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
- START,
- STOP,
- FLUSH,
- RESERVED, // was MUTE
- PAUSE,
- ATTACH_AUX_EFFECT,
- SET_PARAMETERS,
- SELECT_PRESENTATION,
- GET_TIMESTAMP,
- SIGNAL,
- APPLY_VOLUME_SHAPER,
- GET_VOLUME_SHAPER_STATE,
-};
-
-class BpAudioTrack : public BpInterface<IAudioTrack>
-{
-public:
- explicit BpAudioTrack(const sp<IBinder>& impl)
- : BpInterface<IAudioTrack>(impl)
- {
- }
-
- virtual sp<IMemory> getCblk() const
- {
- Parcel data, reply;
- sp<IMemory> cblk;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_CBLK, data, &reply);
- if (status == NO_ERROR) {
- cblk = interface_cast<IMemory>(reply.readStrongBinder());
- if (cblk != 0 && cblk->unsecurePointer() == NULL) {
- cblk.clear();
- }
- }
- return cblk;
- }
-
- virtual status_t start()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- status_t status = remote()->transact(START, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- } else {
- ALOGW("start() error: %s", strerror(-status));
- }
- return status;
- }
-
- virtual void stop()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- remote()->transact(STOP, data, &reply);
- }
-
- virtual void flush()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- remote()->transact(FLUSH, data, &reply);
- }
-
- virtual void pause()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- remote()->transact(PAUSE, data, &reply);
- }
-
- virtual status_t attachAuxEffect(int effectId)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- data.writeInt32(effectId);
- status_t status = remote()->transact(ATTACH_AUX_EFFECT, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- } else {
- ALOGW("attachAuxEffect() error: %s", strerror(-status));
- }
- return status;
- }
-
- virtual status_t setParameters(const String8& keyValuePairs) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- data.writeString8(keyValuePairs);
- status_t status = remote()->transact(SET_PARAMETERS, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- }
- return status;
- }
-
- /* Selects the presentation (if available) */
- virtual status_t selectPresentation(int presentationId, int programId) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- data.writeInt32(presentationId);
- data.writeInt32(programId);
- status_t status = remote()->transact(SELECT_PRESENTATION, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- }
- return status;
- }
-
- virtual status_t getTimestamp(AudioTimestamp& timestamp) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_TIMESTAMP, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- if (status == NO_ERROR) {
- timestamp.mPosition = reply.readInt32();
- timestamp.mTime.tv_sec = reply.readInt32();
- timestamp.mTime.tv_nsec = reply.readInt32();
- }
- }
- return status;
- }
-
- virtual void signal() {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- remote()->transact(SIGNAL, data, &reply);
- }
-
- virtual VolumeShaper::Status applyVolumeShaper(
- const sp<VolumeShaper::Configuration>& configuration,
- const sp<VolumeShaper::Operation>& operation) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-
- status_t status = configuration.get() == nullptr
- ? data.writeInt32(0)
- : data.writeInt32(1)
- ?: configuration->writeToParcel(&data);
- if (status != NO_ERROR) {
- return VolumeShaper::Status(status);
- }
-
- status = operation.get() == nullptr
- ? status = data.writeInt32(0)
- : data.writeInt32(1)
- ?: operation->writeToParcel(&data);
- if (status != NO_ERROR) {
- return VolumeShaper::Status(status);
- }
-
- int32_t remoteVolumeShaperStatus;
- status = remote()->transact(APPLY_VOLUME_SHAPER, data, &reply)
- ?: reply.readInt32(&remoteVolumeShaperStatus);
-
- return VolumeShaper::Status(status ?: remoteVolumeShaperStatus);
- }
-
- virtual sp<VolumeShaper::State> getVolumeShaperState(int id) {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-
- data.writeInt32(id);
- status_t status = remote()->transact(GET_VOLUME_SHAPER_STATE, data, &reply);
- if (status != NO_ERROR) {
- return nullptr;
- }
- sp<VolumeShaper::State> state = new VolumeShaper::State;
- status = state->readFromParcel(&reply);
- if (status != NO_ERROR) {
- return nullptr;
- }
- return state;
- }
-};
-
-IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioTrack::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case GET_CBLK: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- reply->writeStrongBinder(IInterface::asBinder(getCblk()));
- return NO_ERROR;
- } break;
- case START: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- reply->writeInt32(start());
- return NO_ERROR;
- } break;
- case STOP: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- stop();
- return NO_ERROR;
- } break;
- case FLUSH: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- flush();
- return NO_ERROR;
- } break;
- case PAUSE: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- pause();
- return NO_ERROR;
- }
- case ATTACH_AUX_EFFECT: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- reply->writeInt32(attachAuxEffect(data.readInt32()));
- return NO_ERROR;
- } break;
- case SET_PARAMETERS: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- String8 keyValuePairs(data.readString8());
- reply->writeInt32(setParameters(keyValuePairs));
- return NO_ERROR;
- } break;
- case SELECT_PRESENTATION: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- reply->writeInt32(selectPresentation(data.readInt32(), data.readInt32()));
- return NO_ERROR;
- } break;
- case GET_TIMESTAMP: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- AudioTimestamp timestamp;
- status_t status = getTimestamp(timestamp);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32(timestamp.mPosition);
- reply->writeInt32(timestamp.mTime.tv_sec);
- reply->writeInt32(timestamp.mTime.tv_nsec);
- }
- return NO_ERROR;
- } break;
- case SIGNAL: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- signal();
- return NO_ERROR;
- } break;
- case APPLY_VOLUME_SHAPER: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- sp<VolumeShaper::Configuration> configuration;
- sp<VolumeShaper::Operation> operation;
-
- int32_t present;
- status_t status = data.readInt32(&present);
- if (status == NO_ERROR && present != 0) {
- configuration = new VolumeShaper::Configuration();
- status = configuration->readFromParcel(&data);
- }
- status = status ?: data.readInt32(&present);
- if (status == NO_ERROR && present != 0) {
- operation = new VolumeShaper::Operation();
- status = operation->readFromParcel(&data);
- }
- if (status == NO_ERROR) {
- status = (status_t)applyVolumeShaper(configuration, operation);
- }
- reply->writeInt32(status);
- return NO_ERROR;
- } break;
- case GET_VOLUME_SHAPER_STATE: {
- CHECK_INTERFACE(IAudioTrack, data, reply);
- int id;
- status_t status = data.readInt32(&id);
- if (status == NO_ERROR) {
- sp<VolumeShaper::State> state = getVolumeShaperState(id);
- if (state.get() != nullptr) {
- status = state->writeToParcel(reply);
- }
- }
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-} // namespace android
diff --git a/media/libaudioclient/IEffect.cpp b/media/libaudioclient/IEffect.cpp
deleted file mode 100644
index 5d47dff..0000000
--- a/media/libaudioclient/IEffect.cpp
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffect"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <media/IEffect.h>
-
-namespace android {
-
-// Maximum command/reply size expected
-#define EFFECT_PARAM_SIZE_MAX 65536
-
-enum {
- ENABLE = IBinder::FIRST_CALL_TRANSACTION,
- DISABLE,
- COMMAND,
- DISCONNECT,
- GET_CBLK
-};
-
-class BpEffect: public BpInterface<IEffect>
-{
-public:
- explicit BpEffect(const sp<IBinder>& impl)
- : BpInterface<IEffect>(impl)
- {
- }
-
- status_t enable()
- {
- ALOGV("enable");
- Parcel data, reply;
- data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
- remote()->transact(ENABLE, data, &reply);
- return reply.readInt32();
- }
-
- status_t disable()
- {
- ALOGV("disable");
- Parcel data, reply;
- data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
- remote()->transact(DISABLE, data, &reply);
- return reply.readInt32();
- }
-
- status_t command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *pReplySize,
- void *pReplyData)
- {
- ALOGV("command");
- Parcel data, reply;
- data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
- data.writeInt32(cmdCode);
- int size = cmdSize;
- if (pCmdData == NULL) {
- size = 0;
- }
- data.writeInt32(size);
- if (size) {
- data.write(pCmdData, size);
- }
- if (pReplySize == NULL) {
- size = 0;
- } else {
- size = *pReplySize;
- }
- data.writeInt32(size);
-
- status_t status = remote()->transact(COMMAND, data, &reply);
- if (status == NO_ERROR) {
- status = reply.readInt32();
- }
- if (status != NO_ERROR) {
- if (pReplySize != NULL)
- *pReplySize = 0;
- return status;
- }
-
- size = reply.readInt32();
- if (size != 0 && pReplyData != NULL && pReplySize != NULL) {
- reply.read(pReplyData, size);
- *pReplySize = size;
- }
- return status;
- }
-
- void disconnect()
- {
- ALOGV("disconnect");
- Parcel data, reply;
- data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
- remote()->transact(DISCONNECT, data, &reply);
- return;
- }
-
- virtual sp<IMemory> getCblk() const
- {
- Parcel data, reply;
- sp<IMemory> cblk;
- data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_CBLK, data, &reply);
- if (status == NO_ERROR) {
- cblk = interface_cast<IMemory>(reply.readStrongBinder());
- if (cblk != 0 && cblk->unsecurePointer() == NULL) {
- cblk.clear();
- }
- }
- return cblk;
- }
- };
-
-IMPLEMENT_META_INTERFACE(Effect, "android.media.IEffect");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffect::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case ENABLE: {
- ALOGV("ENABLE");
- CHECK_INTERFACE(IEffect, data, reply);
- reply->writeInt32(enable());
- return NO_ERROR;
- } break;
-
- case DISABLE: {
- ALOGV("DISABLE");
- CHECK_INTERFACE(IEffect, data, reply);
- reply->writeInt32(disable());
- return NO_ERROR;
- } break;
-
- case COMMAND: {
- ALOGV("COMMAND");
- CHECK_INTERFACE(IEffect, data, reply);
- uint32_t cmdCode = data.readInt32();
- uint32_t cmdSize = data.readInt32();
- char *cmd = NULL;
- if (cmdSize) {
- if (cmdSize > EFFECT_PARAM_SIZE_MAX) {
- reply->writeInt32(NO_MEMORY);
- return NO_ERROR;
- }
- cmd = (char *)calloc(cmdSize, 1);
- if (cmd == NULL) {
- reply->writeInt32(NO_MEMORY);
- return NO_ERROR;
- }
- data.read(cmd, cmdSize);
- }
- uint32_t replySize = data.readInt32();
- uint32_t replySz = replySize;
- char *resp = NULL;
- if (replySize) {
- if (replySize > EFFECT_PARAM_SIZE_MAX) {
- free(cmd);
- reply->writeInt32(NO_MEMORY);
- return NO_ERROR;
- }
- resp = (char *)calloc(replySize, 1);
- if (resp == NULL) {
- free(cmd);
- reply->writeInt32(NO_MEMORY);
- return NO_ERROR;
- }
- }
- status_t status = command(cmdCode, cmdSize, cmd, &replySz, resp);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- if (replySz < replySize) {
- replySize = replySz;
- }
- reply->writeInt32(replySize);
- if (replySize) {
- reply->write(resp, replySize);
- }
- }
- if (cmd) {
- free(cmd);
- }
- if (resp) {
- free(resp);
- }
- return NO_ERROR;
- } break;
-
- case DISCONNECT: {
- ALOGV("DISCONNECT");
- CHECK_INTERFACE(IEffect, data, reply);
- disconnect();
- return NO_ERROR;
- } break;
-
- case GET_CBLK: {
- CHECK_INTERFACE(IEffect, data, reply);
- reply->writeStrongBinder(IInterface::asBinder(getCblk()));
- return NO_ERROR;
- } break;
-
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IEffectClient.cpp b/media/libaudioclient/IEffectClient.cpp
deleted file mode 100644
index 3f2c67d..0000000
--- a/media/libaudioclient/IEffectClient.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffectClient"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <media/IEffectClient.h>
-
-namespace android {
-
-enum {
- CONTROL_STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION,
- ENABLE_STATUS_CHANGED,
- COMMAND_EXECUTED
-};
-
-class BpEffectClient: public BpInterface<IEffectClient>
-{
-public:
- explicit BpEffectClient(const sp<IBinder>& impl)
- : BpInterface<IEffectClient>(impl)
- {
- }
-
- void controlStatusChanged(bool controlGranted)
- {
- ALOGV("controlStatusChanged");
- Parcel data, reply;
- data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
- data.writeInt32((uint32_t)controlGranted);
- remote()->transact(CONTROL_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void enableStatusChanged(bool enabled)
- {
- ALOGV("enableStatusChanged");
- Parcel data, reply;
- data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
- data.writeInt32((uint32_t)enabled);
- remote()->transact(ENABLE_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData)
- {
- ALOGV("commandExecuted");
- Parcel data, reply;
- data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
- data.writeInt32(cmdCode);
- int size = cmdSize;
- if (pCmdData == NULL) {
- size = 0;
- }
- data.writeInt32(size);
- if (size) {
- data.write(pCmdData, size);
- }
- size = replySize;
- if (pReplyData == NULL) {
- size = 0;
- }
- data.writeInt32(size);
- if (size) {
- data.write(pReplyData, size);
- }
- remote()->transact(COMMAND_EXECUTED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
-};
-
-IMPLEMENT_META_INTERFACE(EffectClient, "android.media.IEffectClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffectClient::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case CONTROL_STATUS_CHANGED: {
- ALOGV("CONTROL_STATUS_CHANGED");
- CHECK_INTERFACE(IEffectClient, data, reply);
- bool hasControl = (bool)data.readInt32();
- controlStatusChanged(hasControl);
- return NO_ERROR;
- } break;
- case ENABLE_STATUS_CHANGED: {
- ALOGV("ENABLE_STATUS_CHANGED");
- CHECK_INTERFACE(IEffectClient, data, reply);
- bool enabled = (bool)data.readInt32();
- enableStatusChanged(enabled);
- return NO_ERROR;
- } break;
- case COMMAND_EXECUTED: {
- ALOGV("COMMAND_EXECUTED");
- CHECK_INTERFACE(IEffectClient, data, reply);
- uint32_t cmdCode = data.readInt32();
- uint32_t cmdSize = data.readInt32();
- char *cmd = NULL;
- if (cmdSize) {
- cmd = (char *)malloc(cmdSize);
- data.read(cmd, cmdSize);
- }
- uint32_t replySize = data.readInt32();
- char *resp = NULL;
- if (replySize) {
- resp = (char *)malloc(replySize);
- data.read(resp, replySize);
- }
- commandExecuted(cmdCode, cmdSize, cmd, replySize, resp);
- if (cmd) {
- free(cmd);
- }
- if (resp) {
- free(resp);
- }
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/OWNERS b/media/libaudioclient/OWNERS
index 482b9fb..034d161 100644
--- a/media/libaudioclient/OWNERS
+++ b/media/libaudioclient/OWNERS
@@ -1,3 +1,4 @@
gkasten@google.com
+hunga@google.com
jmtrivi@google.com
mnaganov@google.com
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index b0c68e5..9e7d89e 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -15,14 +15,16 @@
*/
#include <binder/IServiceManager.h>
+#include <media/AidlConversionUtil.h>
#include <media/PlayerBase.h>
#define max(a, b) ((a) > (b) ? (a) : (b))
#define min(a, b) ((a) < (b) ? (a) : (b))
namespace android {
-
-using media::VolumeShaper;
+using aidl_utils::binderStatusFromStatusT;
+using media::VolumeShaperConfiguration;
+using media::VolumeShaperOperation;
//--------------------------------------------------------------------------------------------------
PlayerBase::PlayerBase() : BnPlayer(),
@@ -149,7 +151,7 @@
if (status != NO_ERROR) {
ALOGW("PlayerBase::setVolume() error %d", status);
}
- return binder::Status::fromStatusT(status);
+ return binderStatusFromStatusT(status);
}
binder::Status PlayerBase::setPan(float pan) {
@@ -169,7 +171,7 @@
if (status != NO_ERROR) {
ALOGW("PlayerBase::setPan() error %d", status);
}
- return binder::Status::fromStatusT(status);
+ return binderStatusFromStatusT(status);
}
binder::Status PlayerBase::setStartDelayMs(int32_t delayMs __unused) {
@@ -178,8 +180,8 @@
}
binder::Status PlayerBase::applyVolumeShaper(
- const VolumeShaper::Configuration& configuration __unused,
- const VolumeShaper::Operation& operation __unused) {
+ const VolumeShaperConfiguration& configuration __unused,
+ const VolumeShaperOperation& operation __unused) {
ALOGW("applyVolumeShaper() is not supported");
return binder::Status::ok();
}
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 050ad65..c9f3ab9 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -17,6 +17,8 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ToneGenerator"
+#include <utility>
+
#include <math.h>
#include <utils/Log.h>
#include <cutils/properties.h>
@@ -740,6 +742,11 @@
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
.repeatCnt = ToneGenerator::TONEGEN_INF,
.repeatSegment = 0 }, // TONE_JAPAN_RADIO_ACK
+ { .segments = { { .duration = 1000, .waveFreq = { 400, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_JAPAN_RINGTONE
{ .segments = { { .duration = 375, .waveFreq = { 400, 0 }, 0, 0 },
{ .duration = 375, .waveFreq = { 0 }, 0, 0 },
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
@@ -853,6 +860,11 @@
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
.repeatCnt = ToneGenerator::TONEGEN_INF,
.repeatSegment = 0 }, // TONE_INDIA_RINGTONE
+ { .segments = { { .duration = 1000, .waveFreq = { 440, 480, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_TW_RINGTONE
};
// Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -876,7 +888,7 @@
TONE_SUP_RADIO_NOTAVAIL, // TONE_SUP_RADIO_NOTAVAIL
TONE_SUP_ERROR, // TONE_SUP_ERROR
TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
- TONE_SUP_RINGTONE // TONE_SUP_RINGTONE
+ TONE_JAPAN_RINGTONE // TONE_SUP_RINGTONE
},
{ // GB
TONE_ANSI_DIAL, // TONE_SUP_DIAL
@@ -937,6 +949,16 @@
TONE_SUP_ERROR, // TONE_SUP_ERROR
TONE_INDIA_CALL_WAITING, // TONE_SUP_CALL_WAITING
TONE_INDIA_RINGTONE // TONE_SUP_RINGTONE
+ },
+ { // TAIWAN
+ TONE_SUP_DIAL, // TONE_SUP_DIAL
+ TONE_SUP_BUSY, // TONE_SUP_BUSY
+ TONE_SUP_CONGESTION, // TONE_SUP_CONGESTION
+ TONE_SUP_RADIO_ACK, // TONE_SUP_RADIO_ACK
+ TONE_SUP_RADIO_NOTAVAIL, // TONE_SUP_RADIO_NOTAVAIL
+ TONE_SUP_ERROR, // TONE_SUP_ERROR
+ TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
+ TONE_TW_RINGTONE // TONE_SUP_RINGTONE
}
};
@@ -964,7 +986,9 @@
// none
//
////////////////////////////////////////////////////////////////////////////////
-ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava) {
+ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava,
+ std::string opPackageName)
+ : mOpPackageName(std::move(opPackageName)) {
ALOGV("ToneGenerator constructor: streamType=%d, volume=%f", streamType, volume);
@@ -1010,6 +1034,8 @@
mRegion = IRELAND;
} else if (strstr(value, "in") != NULL) {
mRegion = INDIA;
+ } else if (strstr(value, "tw") != NULL) {
+ mRegion = TAIWAN;
} else {
mRegion = CEPT;
}
@@ -1233,7 +1259,7 @@
////////////////////////////////////////////////////////////////////////////////
bool ToneGenerator::initAudioTrack() {
// Open audio track in mono, PCM 16bit, default sampling rate.
- mpAudioTrack = new AudioTrack();
+ mpAudioTrack = new AudioTrack(mOpPackageName);
ALOGV("AudioTrack(%p) created", mpAudioTrack.get());
audio_attributes_t attr;
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 0a914fc..372b7c3 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -17,7 +17,7 @@
#include <media/TrackPlayerBase.h>
namespace android {
-
+using aidl_utils::binderStatusFromStatusT;
using media::VolumeShaper;
//--------------------------------------------------------------------------------------------------
@@ -106,11 +106,17 @@
binder::Status TrackPlayerBase::applyVolumeShaper(
- const VolumeShaper::Configuration& configuration,
- const VolumeShaper::Operation& operation) {
+ const media::VolumeShaperConfiguration& configuration,
+ const media::VolumeShaperOperation& operation) {
- sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration(configuration);
- sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation(operation);
+ sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration();
+ sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation();
+
+ status_t s = spConfiguration->readFromParcelable(configuration)
+ ?: spOperation->readFromParcelable(operation);
+ if (s != OK) {
+ return binderStatusFromStatusT(s);
+ }
if (mAudioTrack != 0) {
ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
@@ -118,7 +124,7 @@
if (status < 0) { // a non-negative value is the volume shaper id.
ALOGE("TrackPlayerBase::applyVolumeShaper() failed with status %d", status);
}
- return binder::Status::fromStatusT(status);
+ return binderStatusFromStatusT(status);
} else {
ALOGD("TrackPlayerBase::applyVolumeShaper()"
" no AudioTrack for volume control from IPlayer");
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
new file mode 100644
index 0000000..699df0a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioContentType;
+import android.media.AudioSourceType;
+import android.media.AudioUsage;
+
+/**
+ * The "Internal" suffix of this type name is to disambiguate it from the
+ * android.media.AudioAttributes SDK type.
+ * {@hide}
+ */
+parcelable AudioAttributesInternal {
+ AudioContentType contentType;
+ AudioUsage usage;
+ AudioSourceType source;
+ // Bitmask, indexed by AudioFlag.
+ int flags;
+ @utf8InCpp String tags; /* UTF8 */
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioClient.aidl b/media/libaudioclient/aidl/android/media/AudioClient.aidl
new file mode 100644
index 0000000..7bff0d6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioClient {
+ /** Interpreted as uid_t. */
+ int clientUid;
+ /** Interpreted as pid_t. */
+ int clientPid;
+ /** Interpreted as pid_t. */
+ int clientTid;
+ @utf8InCpp String packageName;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfig.aidl b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
new file mode 100644
index 0000000..8dc97d3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioOffloadInfo;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfig {
+ int sampleRate;
+ /**
+ * Interpreted as audio_channel_mask_t.
+ * TODO(ytai): Create a designated type.
+ */
+ int channelMask;
+ AudioFormat format;
+ AudioOffloadInfo offloadInfo;
+ long frameCount;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
new file mode 100644
index 0000000..8353c0d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfigBase {
+ int sampleRate;
+ /** Interpreted as audio_channel_mask_t. */
+ int channelMask;
+ AudioFormat format;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioContentType.aidl b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
new file mode 100644
index 0000000..f734fba
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioContentType {
+ UNKNOWN = 0,
+ SPEECH = 1,
+ MUSIC = 2,
+ MOVIE = 3,
+ SONIFICATION = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioDevice.aidl b/media/libaudioclient/aidl/android/media/AudioDevice.aidl
new file mode 100644
index 0000000..b200697
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioDevice.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioDevice {
+ /** Interpreted as audio_devices_t. */
+ int type;
+ @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
new file mode 100644
index 0000000..b03adfe
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationMetadataType {
+ NONE = 0,
+ FRAMEWORK_TUNER = 1,
+ DVB_AD_DESCRIPTOR = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
new file mode 100644
index 0000000..9e04e82
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationMode {
+ NONE = 0,
+ ELEMENTARY_STREAM = 1,
+ HANDLE = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioFlag.aidl b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
new file mode 100644
index 0000000..58b493b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioFlag {
+ AUDIBILITY_ENFORCED = 0,
+ SECURE = 1,
+ SCO = 2,
+ BEACON = 3,
+ HW_AV_SYNC = 4,
+ HW_HOTWORD = 5,
+ BYPASS_INTERRUPTION_POLICY = 6,
+ BYPASS_MUTE = 7,
+ LOW_LATENCY = 8,
+ DEEP_BUFFER = 9,
+ NO_MEDIA_PROJECTION = 10,
+ MUTE_HAPTIC = 11,
+ NO_SYSTEM_CAPTURE = 12,
+ CAPTURE_PRIVATE = 13,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGain.aidl b/media/libaudioclient/aidl/android/media/AudioGain.aidl
new file mode 100644
index 0000000..048b295
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGain.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGain {
+ int index;
+ boolean useInChannelMask;
+ boolean useForVolume;
+ /** Bitmask, indexed by AudioGainMode. */
+ int mode;
+ /** Interpreted as audio_channel_mask_t. */
+ int channelMask;
+ int minValue;
+ int maxValue;
+ int defaultValue;
+ int stepValue;
+ int minRampMs;
+ int maxRampMs;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
new file mode 100644
index 0000000..b93c2dc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGainConfig {
+ /** Index of the corresponding audio_gain in the audio_port gains[] table. */
+ int index;
+
+ /** Mode requested for this command. Bitfield indexed by AudioGainMode. */
+ int mode;
+
+ /**
+ * Channels which gain value follows. N/A in joint mode.
+ * Interpreted as audio_channel_mask_t.
+ */
+ int channelMask;
+
+ /**
+ * Gain values in millibels.
+ * For each channel ordered from LSb to MSb in channel mask. The number of values is 1 in joint
+ * mode, otherwise equals the number of bits implied by channelMask.
+ */
+ int[] values;
+
+ /** Ramp duration in ms. */
+ int rampDurationMs;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
new file mode 100644
index 0000000..e1b9f0b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioGainMode {
+ JOINT = 0,
+ CHANNELS = 1,
+ RAMP = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
new file mode 100644
index 0000000..bfc0eb0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioInputFlags {
+ FAST = 0,
+ HW_HOTWORD = 1,
+ RAW = 2,
+ SYNC = 3,
+ MMAP_NOIRQ = 4,
+ VOIP_TX = 5,
+ HW_AV_SYNC = 6,
+ DIRECT = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
new file mode 100644
index 0000000..d5f23a1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioIoConfigEvent {
+ OUTPUT_REGISTERED = 0,
+ OUTPUT_OPENED = 1,
+ OUTPUT_CLOSED = 2,
+ OUTPUT_CONFIG_CHANGED = 3,
+ INPUT_REGISTERED = 4,
+ INPUT_OPENED = 5,
+ INPUT_CLOSED = 6,
+ INPUT_CONFIG_CHANGED = 7,
+ CLIENT_STARTED = 8,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
new file mode 100644
index 0000000..876ef9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPatch;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioIoDescriptor {
+ /** Interpreted as audio_io_handle_t. */
+ int ioHandle;
+ AudioPatch patch;
+ int samplingRate;
+ AudioFormat format;
+ /** Interpreted as audio_channel_mask_t. */
+ int channelMask;
+ long frameCount;
+ long frameCountHAL;
+ /** Only valid for output. */
+ int latency;
+ /**
+ * Interpreted as audio_port_handle_t.
+ * valid for event AUDIO_CLIENT_STARTED.
+ */
+ int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
new file mode 100644
index 0000000..f9b25bf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+union AudioIoFlags {
+ /** Bitmask indexed by AudioInputFlags. */
+ int input;
+ /** Bitmask indexed by AudioOutputFlags. */
+ int output;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl b/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
new file mode 100644
index 0000000..d70b364
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMixLatencyClass {
+ LOW = 0,
+ NORMAL = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMode.aidl b/media/libaudioclient/aidl/android/media/AudioMode.aidl
new file mode 100644
index 0000000..7067dd3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMode.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMode {
+ INVALID = -2,
+ CURRENT = -1,
+ NORMAL = 0,
+ RINGTONE = 1,
+ IN_CALL = 2,
+ IN_COMMUNICATION = 3,
+ CALL_SCREEN = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
new file mode 100644
index 0000000..c86b3f0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfigBase;
+import android.media.AudioEncapsulationMode;
+import android.media.AudioStreamType;
+import android.media.AudioUsage;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioOffloadInfo {
+ /** Version of the info structure. Interpreted as a uint16_t version constant. */
+ int version;
+ /** Audio configuration. */
+ AudioConfigBase config;
+ /** Stream type. */
+ AudioStreamType streamType;
+ /** Bit rate in bits per second. */
+ int bitRate;
+ /** Duration in microseconds, -1 if unknown. */
+ long durationUs;
+ /** true if stream is tied to a video stream. */
+ boolean hasVideo;
+ /** true if streaming, false if local playback. */
+ boolean isStreaming;
+ int bitWidth;
+ /** Offload fragment size. */
+ int offloadBufferSize;
+ AudioUsage usage;
+ AudioEncapsulationMode encapsulationMode;
+ /** Content id from tuner HAL (0 if none). */
+ int contentId;
+ /** Sync id from tuner HAL (0 if none). */
+ int syncId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
new file mode 100644
index 0000000..cebd8f0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioOutputFlags {
+ DIRECT = 0,
+ PRIMARY = 1,
+ FAST = 2,
+ DEEP_BUFFER = 3,
+ COMPRESS_OFFLOAD = 4,
+ NON_BLOCKING = 5,
+ HW_AV_SYNC = 6,
+ TTS = 7,
+ RAW = 8,
+ SYNC = 9,
+ IEC958_NONAUDIO = 10,
+ DIRECT_PCM = 11,
+ MMAP_NOIRQ = 12,
+ VOIP_RX = 13,
+ INCALL_MUSIC = 14,
+ GAPLESS_OFFLOAD = 15,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
new file mode 100644
index 0000000..8519faf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfig;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPatch {
+ /**
+ * Patch unique ID.
+ * Interpreted as audio_patch_handle_t.
+ */
+ int id;
+ AudioPortConfig[] sources;
+ AudioPortConfig[] sinks;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPort.aidl
new file mode 100644
index 0000000..123aeb0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPort.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGain;
+import android.media.AudioPortConfig;
+import android.media.AudioPortExt;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.AudioProfile;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPort {
+ /** Port unique ID. Interpreted as audio_port_handle_t. */
+ int id;
+ /** Sink or source. */
+ AudioPortRole role;
+ /** Device, mix ... */
+ AudioPortType type;
+ @utf8InCpp String name;
+ /** AudioProfiles supported by this port (format, Rates, Channels). */
+ AudioProfile[] profiles;
+ /** Gain controllers. */
+ AudioGain[] gains;
+ /** Current audio port configuration. */
+ AudioPortConfig activeConfig;
+ AudioPortExt ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
new file mode 100644
index 0000000..2dd30a4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGainConfig;
+import android.media.AudioIoFlags;
+import android.media.AudioPortConfigExt;
+import android.media.AudioPortConfigType;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfig {
+ /**
+ * Port unique ID.
+ * Interpreted as audio_port_handle_t.
+ */
+ int id;
+ /** Sink or source. */
+ AudioPortRole role;
+ /** Device, mix ... */
+ AudioPortType type;
+ /** Bitmask, indexed by AudioPortConfigType. */
+ int configMask;
+ /** Sampling rate in Hz. */
+ int sampleRate;
+ /**
+ * Channel mask, if applicable.
+ * Interpreted as audio_channel_mask_t.
+ * TODO: bitmask?
+ */
+ int channelMask;
+ /**
+ * Format, if applicable.
+ */
+ AudioFormat format;
+ /** Gain to apply, if applicable. */
+ AudioGainConfig gain;
+ /** Framework only: HW_AV_SYNC, DIRECT, ... */
+ AudioIoFlags flags;
+ AudioPortConfigExt ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
new file mode 100644
index 0000000..a99aa9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigDeviceExt {
+ /**
+ * Module the device is attached to.
+ * Interpreted as audio_module_handle_t.
+ */
+ int hwModule;
+ /**
+ * Device type (e.g AUDIO_DEVICE_OUT_SPEAKER).
+ * Interpreted as audio_devices_t.
+ * TODO: Convert to a standalone AIDL representation.
+ */
+ int type;
+ /** Device address. "" if N/A. */
+ @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
new file mode 100644
index 0000000..5d635b6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigDeviceExt;
+import android.media.AudioPortConfigMixExt;
+import android.media.AudioPortConfigSessionExt;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigExt {
+ /**
+ * This represents an empty union. Value is ignored.
+ * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+ * established.
+ */
+ boolean unspecified;
+ /** Device specific info. */
+ AudioPortConfigDeviceExt device;
+ /** Mix specific info. */
+ AudioPortConfigMixExt mix;
+ /** Session specific info. */
+ AudioPortConfigSessionExt session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
new file mode 100644
index 0000000..d3226f2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigMixExtUseCase;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigMixExt {
+ /**
+ * Module the stream is attached to.
+ * Interpreted as audio_module_handle_t.
+ */
+ int hwModule;
+ /**
+ * I/O handle of the input/output stream.
+ * Interpreted as audio_io_handle_t.
+ */
+ int handle;
+ AudioPortConfigMixExtUseCase usecase;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
new file mode 100644
index 0000000..c61f044
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigMixExtUseCase {
+ /**
+ * This to be set if the containing config has the AudioPortRole::NONE role.
+ * This represents an empty value (value is ignored).
+ * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+ * established.
+ */
+ boolean unspecified;
+ /** This to be set if the containing config has the AudioPortRole::SOURCE role. */
+ AudioStreamType stream;
+ /** This to be set if the containing config has the AudioPortRole::SINK role. */
+ AudioSourceType source;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
new file mode 100644
index 0000000..a2cbf62
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigSessionExt {
+ int session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
new file mode 100644
index 0000000..6e22b8d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortConfigType {
+ SAMPLE_RATE = 0,
+ CHANNEL_MASK = 1,
+ FORMAT = 2,
+ GAIN = 3,
+ FLAGS = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
new file mode 100644
index 0000000..b758f23
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioDevice;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortDeviceExt {
+ /** Module the device is attached to. Interpreted as audio_module_handle_t. */
+ int hwModule;
+ AudioDevice device;
+ /** Bitmask, indexed by AudioEncapsulationMode. */
+ int encapsulationModes;
+ /** Bitmask, indexed by AudioEncapsulationMetadataType. */
+ int encapsulationMetadataTypes;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
new file mode 100644
index 0000000..453784b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortDeviceExt;
+import android.media.AudioPortMixExt;
+import android.media.AudioPortSessionExt;
+
+/**
+ * {@hide}
+ */
+union AudioPortExt {
+ /**
+ * This represents an empty union. Value is ignored.
+ * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+ * established.
+ */
+ boolean unspecified;
+ /** Device specific info. */
+ AudioPortDeviceExt device;
+ /** Mix specific info. */
+ AudioPortMixExt mix;
+ /** Session specific info. */
+ AudioPortSessionExt session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
new file mode 100644
index 0000000..62cdb8e
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMixLatencyClass;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortMixExt {
+ /** Module the stream is attached to. Interpreted as audio_module_handle_t. */
+ int hwModule;
+ /** I/O handle of the input/output stream. Interpreted as audio_io_handle_t. */
+ int handle;
+ /** Latency class */
+ AudioMixLatencyClass latencyClass;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortRole.aidl b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
new file mode 100644
index 0000000..ea2ef3a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortRole {
+ NONE = 0,
+ SOURCE = 1,
+ SINK = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
new file mode 100644
index 0000000..dbca168
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortSessionExt {
+ /** Audio session. Interpreted as audio_session_t. */
+ int session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortType.aidl b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
new file mode 100644
index 0000000..9e6af49
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortType {
+ NONE = 0,
+ DEVICE = 1,
+ MIX = 2,
+ SESSION = 3,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioProfile.aidl b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
new file mode 100644
index 0000000..e5e8812
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioProfile {
+ @utf8InCpp String name;
+ /** The format for an audio profile should only be set when initialized. */
+ AudioFormat format;
+ /** Interpreted as audio_channel_mask_t. */
+ int[] channelMasks;
+ int[] samplingRates;
+ boolean isDynamicFormat;
+ boolean isDynamicChannels;
+ boolean isDynamicRate;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
new file mode 100644
index 0000000..8673b92
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioSourceType {
+ INVALID = -1,
+ DEFAULT = 0,
+ MIC = 1,
+ VOICE_UPLINK = 2,
+ VOICE_DOWNLINK = 3,
+ VOICE_CALL = 4,
+ CAMCORDER = 5,
+ VOICE_RECOGNITION = 6,
+ VOICE_COMMUNICATION = 7,
+ REMOTE_SUBMIX = 8,
+ UNPROCESSED = 9,
+ VOICE_PERFORMANCE = 10,
+ ECHO_REFERENCE = 1997,
+ FM_TUNER = 1998,
+ /**
+ * A low-priority, preemptible audio source for background software
+ * hotword detection. Same tuning as VOICE_RECOGNITION.
+ * Used only internally by the framework.
+ */
+ HOTWORD = 1999,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
new file mode 100644
index 0000000..d777882
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioStreamType {
+ DEFAULT = -1,
+ VOICE_CALL = 0,
+ SYSTEM = 1,
+ RING = 2,
+ MUSIC = 3,
+ ALARM = 4,
+ NOTIFICATION = 5,
+ BLUETOOTH_SCO = 6,
+ ENFORCED_AUDIBLE = 7,
+ DTMF = 8,
+ TTS = 9,
+ ACCESSIBILITY = 10,
+ ASSISTANT = 11,
+ /** For dynamic policy output mixes. Only used by the audio policy */
+ REROUTING = 12,
+ /** For audio flinger tracks volume. Only used by the audioflinger */
+ PATCH = 13,
+ /** stream for corresponding to AUDIO_USAGE_CALL_ASSISTANT */
+ CALL_ASSISTANT = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl b/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl
new file mode 100644
index 0000000..8bbfb57
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The "Internal" timestamp is intended to disambiguate from the android.media.AudioTimestamp type.
+ *
+ * {@hide}
+ */
+parcelable AudioTimestampInternal {
+ /** A frame position in AudioTrack::getPosition() units. */
+ int position;
+ /** The corresponding CLOCK_MONOTONIC time when the frame is expected to be presented. */
+ long sec;
+ int nsec;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl b/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl
new file mode 100644
index 0000000..fdb6d2d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioUniqueIdUse {
+ UNSPECIFIED = 0,
+ SESSION = 1, // audio_session_t
+ // for allocated sessions, not special AUDIO_SESSION_*
+ MODULE = 2, // audio_module_handle_t
+ EFFECT = 3, // audio_effect_handle_t
+ PATCH = 4, // audio_patch_handle_t
+ OUTPUT = 5, // audio_io_handle_t
+ INPUT = 6, // audio_io_handle_t
+ CLIENT = 7, // client-side players and recorders
+ // FIXME should move to a separate namespace;
+ // these IDs are allocated by AudioFlinger on client request,
+ // but are never used by AudioFlinger
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUsage.aidl b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
new file mode 100644
index 0000000..66c5c30
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioUsage {
+ UNKNOWN = 0,
+ MEDIA = 1,
+ VOICE_COMMUNICATION = 2,
+ VOICE_COMMUNICATION_SIGNALLING = 3,
+ ALARM = 4,
+ NOTIFICATION = 5,
+ NOTIFICATION_TELEPHONY_RINGTONE = 6,
+ NOTIFICATION_COMMUNICATION_REQUEST = 7,
+ NOTIFICATION_COMMUNICATION_INSTANT = 8,
+ NOTIFICATION_COMMUNICATION_DELAYED = 9,
+ NOTIFICATION_EVENT = 10,
+ ASSISTANCE_ACCESSIBILITY = 11,
+ ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+ ASSISTANCE_SONIFICATION = 13,
+ GAME = 14,
+ VIRTUAL_SOURCE = 15,
+ ASSISTANT = 16,
+ CALL_ASSISTANT = 17,
+ EMERGENCY = 1000,
+ SAFETY = 1001,
+ VEHICLE_STATUS = 1002,
+ ANNOUNCEMENT = 1003,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUuid.aidl b/media/libaudioclient/aidl/android/media/AudioUuid.aidl
new file mode 100644
index 0000000..bba9039
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUuid.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioUuid {
+ int timeLow;
+ int timeMid;
+ int timeHiAndVersion;
+ int clockSeq;
+ byte[] node; // Length = 6
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
new file mode 100644
index 0000000..8368854
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioDevice;
+import android.media.EffectDescriptor;
+import android.media.IEffectClient;
+
+/**
+ * Input arguments of the createEffect() method.
+ *
+ * {@hide}
+ */
+parcelable CreateEffectRequest {
+ EffectDescriptor desc;
+ @nullable IEffectClient client;
+ int priority;
+ /** Interpreted as audio_io_handle_t. */
+ int output;
+ /** Interpreted as audio_session_t. */
+ int sessionId;
+ AudioDevice device;
+ @utf8InCpp String opPackageName;
+ /** Interpreted as pid_t. */
+ int pid;
+ boolean probe;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
new file mode 100644
index 0000000..0aa640a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.EffectDescriptor;
+import android.media.IEffect;
+
+/**
+ * Output arguments of the createEffect() method.
+ *
+ * {@hide}
+ */
+parcelable CreateEffectResponse {
+ int id;
+ boolean enabled;
+ @nullable IEffect effect;
+ EffectDescriptor desc;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
new file mode 100644
index 0000000..6da743a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfigBase;
+
+/**
+ * CreateRecordRequest contains all input arguments sent by AudioRecord to AudioFlinger
+ * when calling createRecord() including arguments that will be updated by AudioFlinger
+ * and returned in CreateRecordResponse object.
+ *
+ * {@hide}
+ */
+parcelable CreateRecordRequest {
+ AudioAttributesInternal attr;
+ AudioConfigBase config;
+ AudioClient clientInfo;
+ @utf8InCpp String opPackageName;
+ /** Interpreted as audio_unique_id_t. */
+ int riid;
+ /** Bitmask, indexed by AudioInputFlags. */
+ int flags;
+ long frameCount;
+ long notificationFrameCount;
+ /** Interpreted as audio_port_handle_t. */
+ int selectedDeviceId;
+ int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
new file mode 100644
index 0000000..d78b3fc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IAudioRecord;
+import android.media.SharedFileRegion;
+
+/**
+ * CreateRecordResponse contains all output arguments returned by AudioFlinger to AudioRecord
+ * when calling createRecord() including arguments that were passed as I/O for update by
+ * CreateRecordRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateRecordResponse {
+ /** Bitmask, indexed by AudioInputFlags. */
+ int flags;
+ long frameCount;
+ long notificationFrameCount;
+ /** Interpreted as audio_port_handle_t. */
+ int selectedDeviceId;
+ int sessionId;
+ int sampleRate;
+ /** Interpreted as audio_io_handle_t. */
+ int inputId;
+ @nullable SharedFileRegion cblk;
+ @nullable SharedFileRegion buffers;
+ /** Interpreted as audio_port_handle_t. */
+ int portId;
+ /** The newly created record. */
+ @nullable IAudioRecord audioRecord;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
new file mode 100644
index 0000000..014b3ca
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfig;
+import android.media.IAudioTrackCallback;
+import android.media.SharedFileRegion;
+
+/**
+ * CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
+ * when calling createTrack() including arguments that will be updated by AudioFlinger
+ * and returned in CreateTrackResponse object.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackRequest {
+ AudioAttributesInternal attr;
+ AudioConfig config;
+ AudioClient clientInfo;
+ @nullable SharedFileRegion sharedBuffer;
+ int notificationsPerBuffer;
+ float speed;
+ IAudioTrackCallback audioTrackCallback;
+ @utf8InCpp String opPackageName;
+ /** Bitmask, indexed by AudioOutputFlags. */
+ int flags;
+ long frameCount;
+ long notificationFrameCount;
+ /** Interpreted as audio_port_handle_t. */
+ int selectedDeviceId;
+ int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
new file mode 100644
index 0000000..6bdd8e4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IAudioTrack;
+
+/**
+ * CreateTrackResponse contains all output arguments returned by AudioFlinger to AudioTrack
+ * when calling createTrack() including arguments that were passed as I/O for update by
+ * CreateTrackRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackResponse {
+ /** Bitmask, indexed by AudioOutputFlags. */
+ int flags;
+ long frameCount;
+ long notificationFrameCount;
+ /** Interpreted as audio_port_handle_t. */
+ int selectedDeviceId;
+ int sessionId;
+ int sampleRate;
+ long afFrameCount;
+ int afSampleRate;
+ int afLatencyMs;
+ /** Interpreted as audio_io_handle_t. */
+ int outputId;
+ /** Interpreted as audio_port_handle_t. */
+ int portId;
+ /** The newly created track. */
+ @nullable IAudioTrack audioTrack;
+}
diff --git a/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
new file mode 100644
index 0000000..35a3d74
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioUuid;
+
+/**
+ * {@hide}
+ */
+parcelable EffectDescriptor {
+ /** UUID of the OpenSL ES interface implemented by this effect. */
+ AudioUuid type;
+ /** UUID for this particular implementation. */
+ AudioUuid uuid;
+ /** Version of the effect control API implemented. */
+ int apiVersion;
+ /** Effect engine capabilities/requirements flags. */
+ int flags;
+ /** CPU load indication. */
+ int cpuLoad;
+ /** Data memory usage. */
+ int memoryUsage;
+ /** Human readable effect name. */
+ @utf8InCpp String name;
+ /** Human readable effect implementor name. */
+ @utf8InCpp String implementor;
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
new file mode 100644
index 0000000..421c31c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioIoConfigEvent;
+import android.media.AudioIoDescriptor;
+
+/**
+ * A callback interface for AudioFlinger.
+ *
+ * {@hide}
+ */
+interface IAudioFlingerClient {
+ oneway void ioConfigChanged(AudioIoConfigEvent event,
+ in AudioIoDescriptor ioDesc);
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
new file mode 100644
index 0000000..e63f391
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMode;
+import android.media.AudioPatch;
+import android.media.AudioPort;
+import android.media.AudioPortConfig;
+import android.media.AudioStreamType;
+import android.media.AudioUniqueIdUse;
+import android.media.AudioUuid;
+import android.media.CreateEffectRequest;
+import android.media.CreateEffectResponse;
+import android.media.CreateRecordRequest;
+import android.media.CreateRecordResponse;
+import android.media.CreateTrackRequest;
+import android.media.CreateTrackResponse;
+import android.media.OpenInputRequest;
+import android.media.OpenInputResponse;
+import android.media.OpenOutputRequest;
+import android.media.OpenOutputResponse;
+import android.media.EffectDescriptor;
+import android.media.IAudioFlingerClient;
+import android.media.IAudioRecord;
+import android.media.IAudioTrack;
+import android.media.MicrophoneInfoData;
+import android.media.RenderPosition;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+interface IAudioFlingerService {
+ /**
+ * Creates an audio track and registers it with AudioFlinger, or null if the track cannot be
+ * created.
+ */
+ CreateTrackResponse createTrack(in CreateTrackRequest request);
+
+ CreateRecordResponse createRecord(in CreateRecordRequest request);
+
+ // FIXME Surprisingly, format/latency don't work for input handles
+
+ /**
+ * Queries the audio hardware state. This state never changes, and therefore can be cached.
+ */
+ int sampleRate(int /* audio_io_handle_t */ ioHandle);
+
+ AudioFormat format(int /* audio_io_handle_t */ output);
+
+ long frameCount(int /* audio_io_handle_t */ ioHandle);
+
+ /**
+ * Return the estimated latency in milliseconds.
+ */
+ int latency(int /* audio_io_handle_t */ output);
+
+ /*
+ * Sets/gets the audio hardware state. This will probably be used by
+ * the preference panel, mostly.
+ */
+ void setMasterVolume(float value);
+ void setMasterMute(boolean muted);
+
+ float masterVolume();
+ boolean masterMute();
+
+ void setMasterBalance(float balance);
+ float getMasterBalance();
+
+ /*
+ * Set/gets stream type state. This will probably be used by
+ * the preference panel, mostly.
+ */
+ void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
+ void setStreamMute(AudioStreamType stream, boolean muted);
+ float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
+ boolean streamMute(AudioStreamType stream);
+
+ // set audio mode.
+ void setMode(AudioMode mode);
+
+ // mic mute/state
+ void setMicMute(boolean state);
+ boolean getMicMute();
+ void setRecordSilenced(int /* audio_port_handle_t */ portId,
+ boolean silenced);
+
+ void setParameters(int /* audio_io_handle_t */ ioHandle,
+ @utf8InCpp String keyValuePairs);
+ @utf8InCpp String getParameters(int /* audio_io_handle_t */ ioHandle,
+ @utf8InCpp String keys);
+
+ // Register an object to receive audio input/output change and track notifications.
+ // For a given calling pid, AudioFlinger disregards any registrations after the first.
+ // Thus the IAudioFlingerClient must be a singleton per process.
+ void registerClient(IAudioFlingerClient client);
+
+ // Retrieve the audio recording buffer size in bytes.
+ // FIXME This API assumes a route, and so should be deprecated.
+ long getInputBufferSize(int sampleRate,
+ AudioFormat format,
+ int /* audio_channel_mask_t */ channelMask);
+
+ OpenOutputResponse openOutput(in OpenOutputRequest request);
+ int /* audio_io_handle_t */ openDuplicateOutput(int /* audio_io_handle_t */ output1,
+ int /* audio_io_handle_t */ output2);
+ void closeOutput(int /* audio_io_handle_t */ output);
+ void suspendOutput(int /* audio_io_handle_t */ output);
+ void restoreOutput(int /* audio_io_handle_t */ output);
+
+ OpenInputResponse openInput(in OpenInputRequest request);
+ void closeInput(int /* audio_io_handle_t */ input);
+
+ void invalidateStream(AudioStreamType stream);
+
+ void setVoiceVolume(float volume);
+
+ RenderPosition getRenderPosition(int /* audio_io_handle_t */ output);
+
+ int getInputFramesLost(int /* audio_io_handle_t */ ioHandle);
+
+ int /* audio_unique_id_t */ newAudioUniqueId(AudioUniqueIdUse use);
+
+ void acquireAudioSessionId(int /* audio_session_t */ audioSession,
+ int /* pid_t */ pid,
+ int /* uid_t */ uid);
+ void releaseAudioSessionId(int /* audio_session_t */ audioSession,
+ int /* pid_t */ pid);
+
+ int queryNumberEffects();
+
+ EffectDescriptor queryEffect(int index);
+
+ /** preferredTypeFlag is interpreted as a uint32_t with the "effect flag" format. */
+ EffectDescriptor getEffectDescriptor(in AudioUuid effectUUID,
+ in AudioUuid typeUUID,
+ int preferredTypeFlag);
+
+ CreateEffectResponse createEffect(in CreateEffectRequest request);
+
+ void moveEffects(int /* audio_session_t */ session,
+ int /* audio_io_handle_t */ srcOutput,
+ int /* audio_io_handle_t */ dstOutput);
+
+ void setEffectSuspended(int effectId,
+ int /* audio_session_t */ sessionId,
+ boolean suspended);
+
+ int /* audio_module_handle_t */ loadHwModule(@utf8InCpp String name);
+
+ // helpers for android.media.AudioManager.getProperty(), see description there for meaning
+ // FIXME move these APIs to AudioPolicy to permit a more accurate implementation
+ // that looks on primary device for a stream with fast flag, primary flag, or first one.
+ int getPrimaryOutputSamplingRate();
+ long getPrimaryOutputFrameCount();
+
+ // Intended for AudioService to inform AudioFlinger of device's low RAM attribute,
+ // and should be called at most once. For a definition of what "low RAM" means, see
+ // android.app.ActivityManager.isLowRamDevice(). The totalMemory parameter
+ // is obtained from android.app.ActivityManager.MemoryInfo.totalMem.
+ void setLowRamDevice(boolean isLowRamDevice, long totalMemory);
+
+ /* Get attributes for a given audio port */
+ AudioPort getAudioPort(in AudioPort port);
+
+ /* Create an audio patch between several source and sink ports */
+ int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch);
+
+ /* Release an audio patch */
+ void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
+
+ /* List existing audio patches */
+ AudioPatch[] listAudioPatches(int maxCount);
+ /* Set audio port configuration */
+ void setAudioPortConfig(in AudioPortConfig config);
+
+ /* Get the HW synchronization source used for an audio session */
+ int /* audio_hw_sync_t */ getAudioHwSyncForSession(int /* audio_session_t */ sessionId);
+
+ /* Indicate JAVA services are ready (scheduling, power management ...) */
+ oneway void systemReady();
+
+ // Returns the number of frames per audio HAL buffer.
+ long frameCountHAL(int /* audio_io_handle_t */ ioHandle);
+
+ /* List available microphones and their characteristics */
+ MicrophoneInfoData[] getMicrophones();
+
+ void setAudioHalPids(in int[] /* pid_t[] */ pids);
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
new file mode 100644
index 0000000..a8d79b5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfigBase;
+import android.media.AudioSourceType;
+import android.media.EffectDescriptor;
+import android.media.RecordClientInfo;
+
+/**
+ * {@hide}
+ */
+oneway interface IAudioPolicyServiceClient {
+ /** Notifies a change of volume group. */
+ void onAudioVolumeGroupChanged(int /* volume_group_t */ group,
+ int flags);
+ /** Notifies a change of audio port configuration. */
+ void onAudioPortListUpdate();
+ /** Notifies a change of audio patch configuration. */
+ void onAudioPatchListUpdate();
+ /** Notifies a change in the mixing state of a specific mix in a dynamic audio policy. */
+ void onDynamicPolicyMixStateUpdate(@utf8InCpp String regId,
+ int state);
+ /** Notifies a change of audio recording configuration. */
+ void onRecordingConfigurationUpdate(int event,
+ in RecordClientInfo clientInfo,
+ in AudioConfigBase clientConfig,
+ in EffectDescriptor[] clientEffects,
+ in AudioConfigBase deviceConfig,
+ in EffectDescriptor[] effects,
+ int /* audio_patch_handle_t */ patchHandle,
+ AudioSourceType source);
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index ecf58b6..1772653 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -16,9 +16,13 @@
package android.media;
-import android.media.MicrophoneInfo;
+import android.media.MicrophoneInfoData;
-/* Native code must specify namespace media (media::IAudioRecord) when referring to this class */
+/**
+ * Native code must specify namespace media (media::IAudioRecord) when referring to this class.
+ *
+ * {@hide}
+ */
interface IAudioRecord {
/* After it's created the track is not active. Call start() to
@@ -35,7 +39,7 @@
/* Get a list of current active microphones.
*/
- void getActiveMicrophones(out MicrophoneInfo[] activeMicrophones);
+ void getActiveMicrophones(out MicrophoneInfoData[] activeMicrophones);
/* Set the microphone direction (for processing).
*/
diff --git a/media/libaudioclient/aidl/android/media/IAudioTrack.aidl b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
new file mode 100644
index 0000000..2b6c362
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioTimestampInternal;
+import android.media.SharedFileRegion;
+import android.media.VolumeShaperConfiguration;
+import android.media.VolumeShaperOperation;
+import android.media.VolumeShaperState;
+
+/**
+ * Unless otherwise noted, methods returning int expect it to be interpreted as a status_t.
+ *
+ * {@hide}
+ */
+interface IAudioTrack {
+ /** Get this track's control block */
+ @nullable SharedFileRegion getCblk();
+
+ /**
+ * After it's created the track is not active. Call start() to
+ * make it active.
+ */
+ int start();
+
+ /**
+ * Stop a track. If set, the callback will cease being called and
+ * obtainBuffer will return an error. Buffers that are already released
+ * will continue to be processed, unless/until flush() is called.
+ */
+ void stop();
+
+ /**
+ * Flush a stopped or paused track. All pending/released buffers are discarded.
+ * This function has no effect if the track is not stopped or paused.
+ */
+ void flush();
+
+ /**
+ * Pause a track. If set, the callback will cease being called and
+ * obtainBuffer will return an error. Buffers that are already released
+ * will continue to be processed, unless/until flush() is called.
+ */
+ void pause();
+
+ /**
+ * Attach track auxiliary output to specified effect. Use effectId = 0
+ * to detach track from effect.
+ */
+ int attachAuxEffect(int effectId);
+
+ /** Send parameters to the audio hardware. */
+ int setParameters(@utf8InCpp String keyValuePairs);
+
+ /** Selects the presentation (if available). */
+ int selectPresentation(int presentationId, int programId);
+
+ /** Return NO_ERROR if timestamp is valid. */
+ int getTimestamp(out AudioTimestampInternal timestamp);
+
+ /** Signal the playback thread for a change in control block. */
+ void signal();
+
+ /** Sets the volume shaper. Returns the volume shaper status. */
+ int applyVolumeShaper(in VolumeShaperConfiguration configuration,
+ in VolumeShaperOperation operation);
+
+ /** Gets the volume shaper state. */
+ @nullable VolumeShaperState getVolumeShaperState(int id);
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl b/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
index 21553b5..f593e22 100644
--- a/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
@@ -17,7 +17,7 @@
package android.media;
/**
- * @hide
+ * {@hide}
*/
interface IAudioTrackCallback {
oneway void onCodecFormatChanged(in byte[] audioMetadata);
diff --git a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
index 8502282..3b2206a 100644
--- a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
+++ b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
@@ -16,6 +16,9 @@
package android.media;
+/**
+ * {@hide}
+ */
interface ICaptureStateListener {
void setCaptureState(boolean active);
}
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
new file mode 100644
index 0000000..813cd5c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SharedFileRegion;
+
+/**
+ * The IEffect interface enables control of the effect module activity and parameters.
+ *
+ * {@hide}
+ */
+interface IEffect {
+ /**
+ * Activates the effect module by connecting it to the audio path.
+ * @return a status_t code.
+ */
+ int enable();
+
+ /**
+ * Deactivates the effect module by disconnecting it from the audio path.
+ * @return a status_t code.
+ */
+ int disable();
+
+ /**
+ * Sends control, reads or writes parameters. Same behavior as the command() method in the
+ * effect control interface.
+ * Refer to system/audio_effect.h for a description of the valid command codes and their
+ * associated parameter and return messages. The cmdData and response parameters are expected to
+ * contain the respective types in a standard C memory layout.
+ *
+ * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+ */
+ int command(int cmdCode, in byte[] cmdData, int maxResponseSize, out byte[] response);
+
+ /**
+ * Disconnects the IEffect interface from the effect module.
+ * This will also delete the effect module and release the effect engine in the library if this
+ * is the last client disconnected. To release control of the effect module, the application can
+ * disconnect or delete the IEffect interface.
+ */
+ void disconnect();
+
+ /**
+ * Returns a pointer to a shared memory area used to pass multiple parameters to the effect
+ * module without multiplying the binder calls.
+ *
+ * TODO(ytai): Explain how this should be used exactly.
+ */
+ SharedFileRegion getCblk();
+}
diff --git a/media/libaudioclient/aidl/android/media/IEffectClient.aidl b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
new file mode 100644
index 0000000..3b6bcf1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A callback interface for getting effect-related notifications.
+ *
+ * {@hide}
+ */
+interface IEffectClient {
+ /**
+ * Called whenever the status of granting control over the effect to the application
+ * has changed.
+ * @param controlGranted true iff the application has the control of the effect module.
+ */
+ oneway void controlStatusChanged(boolean controlGranted);
+
+ /**
+ * Called whenever the effect has been enabled or disabled. Received only if the client is not
+ * currently controlling the effect.
+ * @param enabled true if the effect module has been activated, false if deactivated.
+ */
+ oneway void enableStatusChanged(boolean enabled);
+
+ /**
+ * A command has been sent to the effect engine. Received only if the client is not currently
+ * controlling the effect. See IEffect.command() for a description of buffer contents.
+ *
+ * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+ */
+ oneway void commandExecuted(int cmdCode, in byte[] cmdData, in byte[] replyData);
+}
diff --git a/media/libaudioclient/aidl/android/media/IPlayer.aidl b/media/libaudioclient/aidl/android/media/IPlayer.aidl
index a90fcdd..43bb7f3 100644
--- a/media/libaudioclient/aidl/android/media/IPlayer.aidl
+++ b/media/libaudioclient/aidl/android/media/IPlayer.aidl
@@ -16,11 +16,11 @@
package android.media;
-import android.media.VolumeShaper.Configuration;
-import android.media.VolumeShaper.Operation;
+import android.media.VolumeShaperConfiguration;
+import android.media.VolumeShaperOperation;
/**
- * @hide
+ * {@hide}
*/
interface IPlayer {
oneway void start();
@@ -29,6 +29,6 @@
oneway void setVolume(float vol);
oneway void setPan(float pan);
oneway void setStartDelayMs(int delayMs);
- oneway void applyVolumeShaper(in Configuration configuration,
- in Operation operation);
+ oneway void applyVolumeShaper(in VolumeShaperConfiguration configuration,
+ in VolumeShaperOperation operation);
}
diff --git a/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl b/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl
deleted file mode 100644
index d6e46cb..0000000
--- a/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-parcelable MicrophoneInfo cpp_header "media/MicrophoneInfo.h";
diff --git a/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
new file mode 100644
index 0000000..2e55526
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioDevice;
+import android.media.AudioSourceType;
+
+/**
+ * {@hide}
+ */
+parcelable OpenInputRequest {
+ /** Interpreted as audio_module_handle_t. */
+ int module;
+ /** Interpreted as audio_io_handle_t. */
+ int input;
+ AudioConfig config;
+ AudioDevice device;
+ AudioSourceType source;
+ /** Bitmask, indexed by AudioInputFlag. */
+ int flags;
+}
diff --git a/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
new file mode 100644
index 0000000..b613ba5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioDevice;
+
+/**
+ * {@hide}
+ */
+parcelable OpenInputResponse {
+ /** Interpreted as audio_io_handle_t. */
+ int input;
+ AudioConfig config;
+ AudioDevice device;
+}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
new file mode 100644
index 0000000..06b12e9
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioPort;
+
+/**
+ * {@hide}
+ */
+parcelable OpenOutputRequest {
+ /** Interpreted as audio_module_handle_t. */
+ int module;
+ AudioConfig config;
+ /** Type must be DEVICE. */
+ AudioPort device;
+ /** Bitmask, indexed by AudioOutputFlag. */
+ int flags;
+}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
new file mode 100644
index 0000000..a051969
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+
+/**
+ * {@hide}
+ */
+parcelable OpenOutputResponse {
+ /** Interpreted as audio_io_handle_t. */
+ int output;
+ AudioConfig config;
+ int latencyMs;
+ /** Bitmask, indexed by AudioOutputFlag. */
+ int flags;
+}
diff --git a/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
new file mode 100644
index 0000000..3280460
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+
+/**
+ * {@hide}
+ */
+parcelable RecordClientInfo {
+ /** Interpreted as audio_unique_id_t. */
+ int riid;
+ /** Interpreted as uid_t. */
+ int uid;
+ /** Interpreted as audio_session_t. */
+ int session;
+ AudioSourceType source;
+ /** Interpreted as audio_port_handle_t. */
+ int portId;
+ boolean silenced;
+}
diff --git a/media/libaudioclient/aidl/android/media/RenderPosition.aidl b/media/libaudioclient/aidl/android/media/RenderPosition.aidl
new file mode 100644
index 0000000..98dc17a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/RenderPosition.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable RenderPosition {
+ int halFrames;
+ int dspFrames;
+}
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
deleted file mode 100644
index fd0e60f..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Configuration cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
deleted file mode 100644
index 4290d9d..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Operation cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
deleted file mode 100644
index f6a22b8..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable State cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
new file mode 100644
index 0000000..56afe93
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+
+#include <system/audio.h>
+
+#include <android/media/AudioAttributesInternal.h>
+#include <android/media/AudioClient.h>
+#include <android/media/AudioConfig.h>
+#include <android/media/AudioConfigBase.h>
+#include <android/media/AudioEncapsulationMode.h>
+#include <android/media/AudioEncapsulationMetadataType.h>
+#include <android/media/AudioFlag.h>
+#include <android/media/AudioGain.h>
+#include <android/media/AudioGainMode.h>
+#include <android/media/AudioInputFlags.h>
+#include <android/media/AudioIoConfigEvent.h>
+#include <android/media/AudioIoDescriptor.h>
+#include <android/media/AudioMixLatencyClass.h>
+#include <android/media/AudioMode.h>
+#include <android/media/AudioOutputFlags.h>
+#include <android/media/AudioPort.h>
+#include <android/media/AudioPortConfigType.h>
+#include <android/media/AudioPortDeviceExt.h>
+#include <android/media/AudioPortExt.h>
+#include <android/media/AudioPortMixExt.h>
+#include <android/media/AudioPortSessionExt.h>
+#include <android/media/AudioProfile.h>
+#include <android/media/AudioTimestampInternal.h>
+#include <android/media/AudioUniqueIdUse.h>
+#include <android/media/EffectDescriptor.h>
+
+#include <android/media/SharedFileRegion.h>
+#include <binder/IMemory.h>
+#include <media/AidlConversionUtil.h>
+#include <media/AudioClient.h>
+#include <media/AudioCommonTypes.h>
+#include <media/AudioIoDescriptor.h>
+#include <media/AudioTimestamp.h>
+#include <system/audio_effect.h>
+
+namespace android {
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
+ media::AudioPortConfigType aidl);
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
+ int32_t legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy);
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ media::AudioIoConfigEvent aidl);
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+ audio_io_config_event legacy);
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+ media::AudioPortRole aidl);
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+ audio_port_role_t legacy);
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+ media::AudioPortType aidl);
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+ audio_port_type_t legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+ media::audio::common::AudioFormat aidl);
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+ audio_format_t legacy);
+
+ConversionResult<audio_gain_mode_t>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl);
+ConversionResult<media::AudioGainMode>
+legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+ const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+ const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+ media::AudioInputFlags aidl);
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+ media::AudioOutputFlags aidl);
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+ int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+ audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+ int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+ audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+ const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+ const media::AudioPortConfigDeviceExt& aidl);
+ConversionResult<media::AudioPortConfigDeviceExt>
+legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+ const audio_port_config_device_ext& legacy);
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+ media::AudioStreamType aidl);
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+ audio_stream_type_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+ media::AudioSourceType aidl);
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+ audio_source_t legacy);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+ const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role);
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+ const audio_port_config_mix_ext& legacy, audio_port_role_t role);
+
+ConversionResult<audio_port_config_session_ext>
+aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+ const media::AudioPortConfigSessionExt& aidl);
+ConversionResult<media::AudioPortConfigSessionExt>
+legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+ const audio_port_config_session_ext& legacy);
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+ const media::AudioPortConfig& aidl);
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy);
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+ const media::AudioPatch& aidl);
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ const struct audio_patch& legacy);
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+ const media::AudioIoDescriptor& aidl);
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+ const sp<AudioIoDescriptor>& legacy);
+
+ConversionResult<AudioClient> aidl2legacy_AudioClient_AudioClient(
+ const media::AudioClient& aidl);
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
+ const AudioClient& legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl);
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl);
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl);
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy);
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl);
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl);
+ConversionResult<media::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl);
+ConversionResult<media::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl);
+ConversionResult<media::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl);
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl);
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+ConversionResult<AudioTimestamp>
+aidl2legacy_AudioTimestampInternal_AudioTimestamp(const media::AudioTimestampInternal& aidl);
+ConversionResult<media::AudioTimestampInternal>
+legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy);
+
+ConversionResult<audio_uuid_t>
+aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl);
+ConversionResult<media::AudioUuid>
+legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy);
+
+ConversionResult<effect_descriptor_t>
+aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl);
+ConversionResult<media::EffectDescriptor>
+legacy2aidl_effect_descriptor_t_EffectDescriptor(const effect_descriptor_t& legacy);
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+ media::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+ audio_encapsulation_metadata_type_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+
+ConversionResult<audio_mix_latency_class_t>
+aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
+ media::AudioMixLatencyClass aidl);
+ConversionResult<media::AudioMixLatencyClass>
+legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
+ audio_mix_latency_class_t legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl);
+ConversionResult<media::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_mix_ext>
+aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl);
+ConversionResult<media::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_session_ext>
+aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl);
+ConversionResult<media::AudioPortSessionExt>
+legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy);
+
+ConversionResult<audio_profile>
+aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl);
+ConversionResult<media::AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy);
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl);
+ConversionResult<media::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
+ConversionResult<media::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl);
+ConversionResult<media::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+
+ConversionResult<audio_unique_id_use_t>
+aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl);
+ConversionResult<media::AudioUniqueIdUse>
+legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(audio_unique_id_use_t legacy);
+
+ConversionResult<volume_group_t>
+aidl2legacy_int32_t_volume_group_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
+
+} // namespace android
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
new file mode 100644
index 0000000..9453673
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <binder/Status.h>
+
+namespace android {
+
+template <typename T>
+using ConversionResult = base::expected<T, status_t>;
+
+// Convenience macros for working with ConversionResult, useful for writing converters for
+// aggregate types.
+
+#define VALUE_OR_RETURN(result) \
+ ({ \
+ auto _tmp = (result); \
+ if (!_tmp.ok()) return base::unexpected(_tmp.error()); \
+ std::move(_tmp.value()); \
+ })
+
+#define RETURN_IF_ERROR(result) \
+ if (status_t _tmp = (result); _tmp != OK) return base::unexpected(_tmp);
+
+#define VALUE_OR_RETURN_STATUS(x) \
+ ({ \
+ auto _tmp = (x); \
+ if (!_tmp.ok()) return _tmp.error(); \
+ std::move(_tmp.value()); \
+ })
+
+/**
+ * A generic template to safely cast between integral types, respecting limits of the destination
+ * type.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+ // Special handling is required for signed vs. unsigned comparisons, since otherwise we may
+ // have the signed converted to unsigned and produce wrong results.
+ if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+ if (from < 0 || from > std::numeric_limits<To>::max()) {
+ return base::unexpected(BAD_VALUE);
+ }
+ } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+ if (from > std::numeric_limits<To>::max()) {
+ return base::unexpected(BAD_VALUE);
+ }
+ } else {
+ if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+ return base::unexpected(BAD_VALUE);
+ }
+ }
+ return static_cast<To>(from);
+}
+
+/**
+ * A generic template to safely cast between types, that are intended to be the same size, but
+ * interpreted differently.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+ static_assert(sizeof(From) == sizeof(To));
+ return static_cast<To>(from);
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+status_t convertRange(InputIterator start,
+ InputIterator end,
+ OutputIterator out,
+ const Func& itemConversion) {
+ for (InputIterator iter = start; iter != end; ++iter, ++out) {
+ *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+ }
+ return OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion) {
+ OutputContainer output;
+ auto ins = std::inserter(output, output.begin());
+ for (const auto& item : input) {
+ *ins = VALUE_OR_RETURN(itemConversion(item));
+ }
+ return output;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for working with AIDL unions.
+// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
+// value of the respective field, or BAD_VALUE if the union is not set to the requested field.
+// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
+
+template<typename T, typename T::Tag tag>
+using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
+
+template<typename T, typename T::Tag tag>
+ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
+ if (u.getTag() != tag) {
+ return base::unexpected(BAD_VALUE);
+ }
+ return u.template get<tag>();
+}
+
+#define UNION_GET(u, field) \
+ unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
+
+#define UNION_SET(u, field, value) \
+ (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
+
+namespace aidl_utils {
+
+/**
+ * Return the equivalent Android status_t from a binder exception code.
+ *
+ * Generally one should use statusTFromBinderStatus() instead.
+ *
+ * Exception codes can be generated from a remote Java service exception, translate
+ * them for use on the Native side.
+ *
+ * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
+ * can be found from transactionError() or serviceSpecificErrorCode().
+ */
+static inline status_t statusTFromExceptionCode(int32_t exceptionCode) {
+ using namespace ::android::binder;
+ switch (exceptionCode) {
+ case Status::EX_NONE:
+ return OK;
+ case Status::EX_SECURITY: // Java SecurityException, rethrows locally in Java
+ return PERMISSION_DENIED;
+ case Status::EX_BAD_PARCELABLE: // Java BadParcelableException, rethrows in Java
+ case Status::EX_ILLEGAL_ARGUMENT: // Java IllegalArgumentException, rethrows in Java
+ case Status::EX_NULL_POINTER: // Java NullPointerException, rethrows in Java
+ return BAD_VALUE;
+ case Status::EX_ILLEGAL_STATE: // Java IllegalStateException, rethrows in Java
+ case Status::EX_UNSUPPORTED_OPERATION: // Java UnsupportedOperationException, rethrows
+ return INVALID_OPERATION;
+ case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
+ case Status::EX_PARCELABLE: // Java bootclass loader (not standard exception), rethrows
+ case Status::EX_NETWORK_MAIN_THREAD: // Java NetworkOnMainThreadException, rethrows
+ case Status::EX_TRANSACTION_FAILED: // Native - see error code
+ case Status::EX_SERVICE_SPECIFIC: // Java ServiceSpecificException,
+ // rethrows in Java with integer error code
+ return UNKNOWN_ERROR;
+ }
+ return UNKNOWN_ERROR;
+}
+
+/**
+ * Return the equivalent Android status_t from a binder status.
+ *
+ * Used to handle errors from an AIDL method declaration
+ *
+ * [oneway] void method(type0 param0, ...)
+ *
+ * or the following (where return_type is not a status_t)
+ *
+ * return_type method(type0 param0, ...)
+ */
+static inline status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
+ return status.isOk() ? OK // check OK,
+ : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
+ // (fromServiceSpecificError)
+ ?: status.transactionError() // a native binder transaction error (fromStatusT)
+ ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
+ // standard Java exception (fromExceptionCode)
+}
+
+/**
+ * Return a binder::Status from native service status.
+ *
+ * This is used for methods not returning an explicit status_t,
+ * where Java callers expect an exception, not an integer return value.
+ */
+static inline ::android::binder::Status binderStatusFromStatusT(
+ status_t status, const char *optionalMessage = nullptr) {
+ const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
+ // From binder::Status instructions:
+ // Prefer a generic exception code when possible, then a service specific
+ // code, and finally a status_t for low level failures or legacy support.
+ // Exception codes and service specific errors map to nicer exceptions for
+ // Java clients.
+
+ using namespace ::android::binder;
+ switch (status) {
+ case OK:
+ return Status::ok();
+ case PERMISSION_DENIED: // throw SecurityException on Java side
+ return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
+ case BAD_VALUE: // throw IllegalArgumentException on Java side
+ return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
+ case INVALID_OPERATION: // throw IllegalStateException on Java side
+ return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
+ }
+
+ // A service specific error will not show on status.transactionError() so
+ // be sure to use statusTFromBinderStatus() for reliable error handling.
+
+ // throw a ServiceSpecificException.
+ return Status::fromServiceSpecificError(status, emptyIfNull);
+}
+
+} // namespace aidl_utils
+
+} // namespace android
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 247af9e..0b89d15 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -18,14 +18,12 @@
#ifndef ANDROID_AUDIO_CLIENT_H
#define ANDROID_AUDIO_CLIENT_H
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-#include <system/audio.h>
+#include <sys/types.h>
#include <utils/String16.h>
namespace android {
-class AudioClient : public Parcelable {
+class AudioClient {
public:
AudioClient() :
clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
@@ -34,22 +32,6 @@
pid_t clientPid;
pid_t clientTid;
String16 packageName;
-
- status_t readFromParcel(const Parcel *parcel) override {
- clientUid = parcel->readInt32();
- clientPid = parcel->readInt32();
- clientTid = parcel->readInt32();
- packageName = parcel->readString16();
- return NO_ERROR;
- }
-
- status_t writeToParcel(Parcel *parcel) const override {
- parcel->writeInt32(clientUid);
- parcel->writeInt32(clientPid);
- parcel->writeInt32(clientTid);
- parcel->writeString16(packageName);
- return NO_ERROR;
- }
};
}; // namespace android
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index cb76252..8371711 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -22,8 +22,6 @@
#include <media/IAudioFlinger.h>
#include <media/IAudioPolicyService.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
#include <media/AudioSystem.h>
#include <system/audio_effect.h>
@@ -31,6 +29,9 @@
#include <utils/Errors.h>
#include <binder/IInterface.h>
+#include "android/media/IEffect.h"
+#include "android/media/BnEffectClient.h"
+
namespace android {
@@ -339,16 +340,21 @@
*
* opPackageName: The package name used for app op checks.
*/
- AudioEffect(const String16& opPackageName);
+ explicit AudioEffect(const String16& opPackageName);
+ /* Terminates the AudioEffect and unregisters it from AudioFlinger.
+ * The effect engine is also destroyed if this AudioEffect was the last one
+ * controlling the engine.
+ */
+ ~AudioEffect();
- /* Constructor.
+ /**
+ * Initialize an uninitialized AudioEffect.
*
* Parameters:
*
* type: type of effect created: can be null if uuid is specified. This corresponds to
* the OpenSL ES interface implemented by this effect.
- * opPackageName: The package name used for app op checks.
* uuid: Uuid of effect created: can be null if type is specified. This uuid corresponds to
* a particular implementation of an effect type.
* priority: requested priority for effect control: the priority level corresponds to the
@@ -356,7 +362,7 @@
* higher priorities, 0 being the normal priority.
* cbf: optional callback function (see effect_callback_t)
* user: pointer to context for use by the callback receiver.
- * sessionID: audio session this effect is associated to.
+ * sessionId: audio session this effect is associated to.
* If equal to AUDIO_SESSION_OUTPUT_MIX, the effect will be global to
* the output mix. Otherwise, the effect will be applied to all players
* (AudioTrack or MediaPLayer) within the same audio session.
@@ -369,46 +375,13 @@
* In this mode, no IEffect interface to AudioFlinger is created and all actions
* besides getters implemented in client AudioEffect object are no ops
* after effect creation.
+ *
+ * Returned status (from utils/Errors.h) can be:
+ * - NO_ERROR or ALREADY_EXISTS: successful initialization
+ * - INVALID_OPERATION: AudioEffect is already initialized
+ * - BAD_VALUE: invalid parameter
+ * - NO_INIT: audio flinger or audio hardware not initialized
*/
-
- AudioEffect(const effect_uuid_t *type,
- const String16& opPackageName,
- const effect_uuid_t *uuid = NULL,
- int32_t priority = 0,
- effect_callback_t cbf = NULL,
- void* user = NULL,
- audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
- audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
- const AudioDeviceTypeAddr& device = {},
- bool probe = false);
-
- /* Constructor.
- * Same as above but with type and uuid specified by character strings
- */
- AudioEffect(const char *typeStr,
- const String16& opPackageName,
- const char *uuidStr = NULL,
- int32_t priority = 0,
- effect_callback_t cbf = NULL,
- void* user = NULL,
- audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
- audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
- const AudioDeviceTypeAddr& device = {},
- bool probe = false);
-
- /* Terminates the AudioEffect and unregisters it from AudioFlinger.
- * The effect engine is also destroyed if this AudioEffect was the last controlling
- * the engine.
- */
- ~AudioEffect();
-
- /* Initialize an uninitialized AudioEffect.
- * Returned status (from utils/Errors.h) can be:
- * - NO_ERROR or ALREADY_EXISTS: successful initialization
- * - INVALID_OPERATION: AudioEffect is already initialized
- * - BAD_VALUE: invalid parameter
- * - NO_INIT: audio flinger or audio hardware not initialized
- * */
status_t set(const effect_uuid_t *type,
const effect_uuid_t *uuid = NULL,
int32_t priority = 0,
@@ -418,6 +391,18 @@
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
const AudioDeviceTypeAddr& device = {},
bool probe = false);
+ /*
+ * Same as above but with type and uuid specified by character strings.
+ */
+ status_t set(const char *typeStr,
+ const char *uuidStr = NULL,
+ int32_t priority = 0,
+ effect_callback_t cbf = NULL,
+ void* user = NULL,
+ audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+ audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false);
/* Result of constructing the AudioEffect. This must be checked
* before using any AudioEffect API.
@@ -547,90 +532,67 @@
static const uint32_t kMaxPreProcessing = 10;
protected:
- bool mEnabled; // enable state
- audio_session_t mSessionId; // audio session ID
- int32_t mPriority; // priority for effect control
- status_t mStatus; // effect status
- bool mProbe; // effect created in probe mode: all commands
+ const String16 mOpPackageName; // The package name used for app op checks.
+ bool mEnabled = false; // enable state
+ audio_session_t mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
+ int32_t mPriority = 0; // priority for effect control
+ status_t mStatus = NO_INIT; // effect status
+ bool mProbe = false; // effect created in probe mode: all commands
// are no ops because mIEffect is NULL
- effect_callback_t mCbf; // callback function for status, control and
+ effect_callback_t mCbf = nullptr; // callback function for status, control and
// parameter changes notifications
- void* mUserData; // client context for callback function
- effect_descriptor_t mDescriptor; // effect descriptor
- int32_t mId; // system wide unique effect engine instance ID
+ void* mUserData = nullptr;// client context for callback function
+ effect_descriptor_t mDescriptor = {}; // effect descriptor
+ int32_t mId = -1; // system wide unique effect engine instance ID
Mutex mLock; // Mutex for mEnabled access
- Mutex mConstructLock; // Mutex for integrality construction
- String16 mOpPackageName; // The package name used for app op checks.
// IEffectClient
virtual void controlStatusChanged(bool controlGranted);
virtual void enableStatusChanged(bool enabled);
- virtual void commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData);
+ virtual void commandExecuted(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ const std::vector<uint8_t>& replyData);
private:
// Implements the IEffectClient interface
class EffectClient :
- public android::BnEffectClient, public android::IBinder::DeathRecipient
+ public media::BnEffectClient, public android::IBinder::DeathRecipient
{
public:
EffectClient(AudioEffect *effect) : mEffect(effect){}
// IEffectClient
- virtual void controlStatusChanged(bool controlGranted) {
+ binder::Status controlStatusChanged(bool controlGranted) override {
sp<AudioEffect> effect = mEffect.promote();
if (effect != 0) {
- {
- // Got the mConstructLock means the construction of AudioEffect
- // has finished, we should release the mConstructLock immediately.
- AutoMutex lock(effect->mConstructLock);
- }
effect->controlStatusChanged(controlGranted);
}
+ return binder::Status::ok();
}
- virtual void enableStatusChanged(bool enabled) {
+ binder::Status enableStatusChanged(bool enabled) override {
sp<AudioEffect> effect = mEffect.promote();
if (effect != 0) {
- {
- // Got the mConstructLock means the construction of AudioEffect
- // has finished, we should release the mConstructLock immediately.
- AutoMutex lock(effect->mConstructLock);
- }
effect->enableStatusChanged(enabled);
}
+ return binder::Status::ok();
}
- virtual void commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData) {
+ binder::Status commandExecuted(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ const std::vector<uint8_t>& replyData) override {
sp<AudioEffect> effect = mEffect.promote();
if (effect != 0) {
- {
- // Got the mConstructLock means the construction of AudioEffect
- // has finished, we should release the mConstructLock immediately.
- AutoMutex lock(effect->mConstructLock);
- }
- effect->commandExecuted(
- cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+ effect->commandExecuted(cmdCode, cmdData, replyData);
}
+ return binder::Status::ok();
}
// IBinder::DeathRecipient
virtual void binderDied(const wp<IBinder>& /*who*/) {
sp<AudioEffect> effect = mEffect.promote();
if (effect != 0) {
- {
- // Got the mConstructLock means the construction of AudioEffect
- // has finished, we should release the mConstructLock immediately.
- AutoMutex lock(effect->mConstructLock);
- }
effect->binderDied();
}
}
@@ -641,10 +603,10 @@
void binderDied();
- sp<IEffect> mIEffect; // IEffect binder interface
+ sp<media::IEffect> mIEffect; // IEffect binder interface
sp<EffectClient> mIEffectClient; // IEffectClient implementation
sp<IMemory> mCblkMemory; // shared memory for deferred parameter setting
- effect_param_cblk_t* mCblk; // control block for deferred parameter setting
+ effect_param_cblk_t* mCblk = nullptr; // control block for deferred parameter setting
pid_t mClientPid = (pid_t)-1;
uid_t mClientUid = (uid_t)-1;
};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 19c2cbd..17ce56e 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,13 +19,14 @@
#include <sys/types.h>
+#include <android/media/BnAudioFlingerClient.h>
+#include <android/media/BnAudioPolicyServiceClient.h>
+#include <media/AidlConversionUtil.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioPolicy.h>
#include <media/AudioProductStrategy.h>
#include <media/AudioVolumeGroup.h>
#include <media/AudioIoDescriptor.h>
-#include <media/IAudioFlingerClient.h>
-#include <media/IAudioPolicyServiceClient.h>
#include <media/MicrophoneInfo.h>
#include <set>
#include <system/audio.h>
@@ -37,6 +38,23 @@
namespace android {
+struct record_client_info {
+ audio_unique_id_t riid;
+ uid_t uid;
+ audio_session_t session;
+ audio_source_t source;
+ audio_port_handle_t port_id;
+ bool silenced;
+};
+
+typedef struct record_client_info record_client_info_t;
+
+// AIDL conversion functions.
+ConversionResult<record_client_info_t>
+aidl2legacy_RecordClientInfo_record_client_info_t(const media::RecordClientInfo& aidl);
+ConversionResult<media::RecordClientInfo>
+legacy2aidl_record_client_info_t_RecordClientInfo(const record_client_info_t& legacy);
+
typedef void (*audio_error_callback)(status_t err);
typedef void (*dynamic_policy_callback)(int event, String8 regId, int val);
typedef void (*record_config_callback)(int event,
@@ -319,9 +337,10 @@
static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags);
- // Check if hw offload is possible for given format, stream type, sample rate,
- // bit rate, duration, video and streaming or offload property is enabled
- static bool isOffloadSupported(const audio_offload_info_t& info);
+ // Indicate if hw offload is possible for given format, stream type, sample rate,
+ // bit rate, duration, video and streaming or offload property is enabled and when possible
+ // if gapless transitions are supported.
+ static audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& info);
// check presence of audio flinger service.
// returns NO_ERROR if binding to service succeeds, DEAD_OBJECT otherwise
@@ -331,11 +350,11 @@
static status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation);
/* Get attributes for a given audio port */
- static status_t getAudioPort(struct audio_port *port);
+ static status_t getAudioPort(struct audio_port_v7 *port);
/* Create an audio patch between several source and sink ports */
static status_t createAudioPatch(const struct audio_patch *patch,
@@ -361,11 +380,11 @@
static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
- static status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+ static status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
static status_t removeUidDeviceAffinities(uid_t uid);
- static status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+ static status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
static status_t removeUserIdDeviceAffinities(int userId);
@@ -425,13 +444,29 @@
*/
static status_t setAudioHalPids(const std::vector<pid_t>& pids);
- static status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device);
+ static status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices);
- static status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
+ static status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
- static status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device);
+ static status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role, AudioDeviceTypeAddrVector &devices);
+
+ static status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+ static status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+ static status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices);
+
+ static status_t clearDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role);
+
+ static status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role, AudioDeviceTypeAddrVector &devices);
static status_t getDeviceForStrategy(product_strategy_t strategy,
AudioDeviceTypeAddr &device);
@@ -515,7 +550,7 @@
private:
- class AudioFlingerClient: public IBinder::DeathRecipient, public BnAudioFlingerClient
+ class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
{
public:
AudioFlingerClient() :
@@ -535,9 +570,9 @@
// indicate a change in the configuration of an output or input: keeps the cached
// values for output/input parameters up-to-date in client process
- virtual void ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc);
-
+ binder::Status ioConfigChanged(
+ media::AudioIoConfigEvent event,
+ const media::AudioIoDescriptor& ioDesc) override;
status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
audio_io_handle_t audioIo,
@@ -563,7 +598,7 @@
};
class AudioPolicyServiceClient: public IBinder::DeathRecipient,
- public BnAudioPolicyServiceClient
+ public media::BnAudioPolicyServiceClient
{
public:
AudioPolicyServiceClient() {
@@ -581,18 +616,20 @@
virtual void binderDied(const wp<IBinder>& who);
// IAudioPolicyServiceClient
- virtual void onAudioPortListUpdate();
- virtual void onAudioPatchListUpdate();
- virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags);
- virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state);
- virtual void onRecordingConfigurationUpdate(int event,
- const record_client_info_t *clientInfo,
- const audio_config_base_t *clientConfig,
- std::vector<effect_descriptor_t> clientEffects,
- const audio_config_base_t *deviceConfig,
- std::vector<effect_descriptor_t> effects,
- audio_patch_handle_t patchHandle,
- audio_source_t source);
+ binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
+ binder::Status onAudioPortListUpdate() override;
+ binder::Status onAudioPatchListUpdate() override;
+ binder::Status onDynamicPolicyMixStateUpdate(const std::string& regId,
+ int32_t state) override;
+ binder::Status onRecordingConfigurationUpdate(
+ int32_t event,
+ const media::RecordClientInfo& clientInfo,
+ const media::AudioConfigBase& clientConfig,
+ const std::vector<media::EffectDescriptor>& clientEffects,
+ const media::AudioConfigBase& deviceConfig,
+ const std::vector<media::EffectDescriptor>& effects,
+ int32_t patchHandle,
+ media::AudioSourceType source) override;
private:
Mutex mLock;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 17af7d4..3728a16 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -17,16 +17,20 @@
#ifndef ANDROID_AUDIOTRACK_H
#define ANDROID_AUDIOTRACK_H
+#include <binder/IMemory.h>
#include <cutils/sched_policy.h>
#include <media/AudioSystem.h>
#include <media/AudioTimestamp.h>
-#include <media/IAudioTrack.h>
#include <media/AudioResamplerPublic.h>
#include <media/MediaMetricsItem.h>
#include <media/Modulo.h>
+#include <media/VolumeShaper.h>
#include <utils/threads.h>
+#include <string>
+
#include "android/media/BnAudioTrackCallback.h"
+#include "android/media/IAudioTrack.h"
#include "android/media/IAudioTrackCallback.h"
namespace android {
@@ -177,6 +181,8 @@
*/
AudioTrack();
+ AudioTrack(const std::string& opPackageName);
+
/* Creates an AudioTrack object and registers it with AudioFlinger.
* Once created, the track needs to be started before it can be used.
* Unspecified values are set to appropriate default values.
@@ -258,7 +264,8 @@
const audio_attributes_t* pAttributes = NULL,
bool doNotReconnect = false,
float maxRequiredSpeed = 1.0f,
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+ const std::string& opPackageName = "");
/* Creates an audio track and registers it with AudioFlinger.
* With this constructor, the track is configured for static buffer mode.
@@ -288,7 +295,8 @@
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
bool doNotReconnect = false,
- float maxRequiredSpeed = 1.0f);
+ float maxRequiredSpeed = 1.0f,
+ const std::string& opPackageName = "");
/* Terminates the AudioTrack and unregisters it from AudioFlinger.
* Also destroys all resources associated with the AudioTrack.
@@ -338,6 +346,27 @@
bool doNotReconnect = false,
float maxRequiredSpeed = 1.0f,
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+ // FIXME(b/169889714): Vendor code depends on the old method signature at link time
+ status_t set(audio_stream_type_t streamType,
+ uint32_t sampleRate,
+ audio_format_t format,
+ uint32_t channelMask,
+ size_t frameCount = 0,
+ audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+ callback_t cbf = NULL,
+ void* user = NULL,
+ int32_t notificationFrames = 0,
+ const sp<IMemory>& sharedBuffer = 0,
+ bool threadCanCallJava = false,
+ audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
+ transfer_type transferType = TRANSFER_DEFAULT,
+ const audio_offload_info_t *offloadInfo = NULL,
+ uid_t uid = AUDIO_UID_INVALID,
+ pid_t pid = -1,
+ const audio_attributes_t* pAttributes = NULL,
+ bool doNotReconnect = false,
+ float maxRequiredSpeed = 1.0f,
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
/* Result of constructing the AudioTrack. This must be checked for successful initialization
* before using any AudioTrack API (except for set()), because using
@@ -1044,7 +1073,7 @@
void updateRoutedDeviceId_l();
// Next 4 fields may be changed if IAudioTrack is re-created, but always != 0
- sp<IAudioTrack> mAudioTrack;
+ sp<media::IAudioTrack> mAudioTrack;
sp<IMemory> mCblkMemory;
audio_track_cblk_t* mCblk; // re-load after mLock.unlock()
audio_io_handle_t mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
@@ -1236,6 +1265,8 @@
sp<media::VolumeHandler> mVolumeHandler;
+ const std::string mOpPackageName;
+
private:
class DeathNotifier : public IBinder::DeathRecipient {
public:
@@ -1274,8 +1305,6 @@
std::string mMetricsId; // GUARDED_BY(mLock), could change in createTrack_l().
std::string mCallerName; // for example "aaudio"
- void logBufferSizeUnderruns();
-
private:
class AudioTrackCallback : public media::BnAudioTrackCallback {
public:
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 612ce7a..9a8014d 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -24,101 +24,52 @@
#include <utils/RefBase.h>
#include <utils/Errors.h>
#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
#include <media/AudioClient.h>
#include <media/DeviceDescriptorBase.h>
-#include <media/IAudioTrack.h>
-#include <media/IAudioFlingerClient.h>
#include <system/audio.h>
#include <system/audio_effect.h>
#include <system/audio_policy.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
#include <utils/String8.h>
#include <media/MicrophoneInfo.h>
+#include <string>
#include <vector>
+#include <android/media/BnAudioFlingerService.h>
+#include <android/media/BpAudioFlingerService.h>
+#include "android/media/CreateEffectRequest.h"
+#include "android/media/CreateEffectResponse.h"
+#include "android/media/CreateRecordRequest.h"
+#include "android/media/CreateRecordResponse.h"
+#include "android/media/CreateTrackRequest.h"
+#include "android/media/CreateTrackResponse.h"
#include "android/media/IAudioRecord.h"
+#include "android/media/IAudioFlingerClient.h"
+#include "android/media/IAudioTrack.h"
#include "android/media/IAudioTrackCallback.h"
+#include "android/media/IEffect.h"
+#include "android/media/IEffectClient.h"
+#include "android/media/OpenInputRequest.h"
+#include "android/media/OpenInputResponse.h"
+#include "android/media/OpenOutputRequest.h"
+#include "android/media/OpenOutputResponse.h"
namespace android {
// ----------------------------------------------------------------------------
-class IAudioFlinger : public IInterface
-{
+class IAudioFlinger : public RefBase {
public:
- DECLARE_META_INTERFACE(AudioFlinger);
+ static constexpr char DEFAULT_SERVICE_NAME[] = "media.audio_flinger";
+
+ virtual ~IAudioFlinger() = default;
/* CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
* when calling createTrack() including arguments that will be updated by AudioFlinger
* and returned in CreateTrackOutput object
*/
- class CreateTrackInput : public Parcelable {
+ class CreateTrackInput {
public:
- status_t readFromParcel(const Parcel *parcel) override {
- /* input arguments*/
- memset(&attr, 0, sizeof(audio_attributes_t));
- if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
- memset(&config, 0, sizeof(audio_config_t));
- if (parcel->read(&config, sizeof(audio_config_t)) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- if (parcel->readInt32() != 0) {
- // TODO: Using unsecurePointer() has some associated security
- // pitfalls (see declaration for details).
- // Either document why it is safe in this case or address
- // the issue (e.g. by copying).
- sharedBuffer = interface_cast<IMemory>(parcel->readStrongBinder());
- if (sharedBuffer == 0 || sharedBuffer->unsecurePointer() == NULL) {
- return BAD_VALUE;
- }
- }
- notificationsPerBuffer = parcel->readInt32();
- speed = parcel->readFloat();
- audioTrackCallback = interface_cast<media::IAudioTrackCallback>(
- parcel->readStrongBinder());
-
- /* input/output arguments*/
- (void)parcel->read(&flags, sizeof(audio_output_flags_t));
- frameCount = parcel->readInt64();
- notificationFrameCount = parcel->readInt64();
- (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->read(&sessionId, sizeof(audio_session_t));
- return NO_ERROR;
- }
-
- status_t writeToParcel(Parcel *parcel) const override {
- /* input arguments*/
- (void)parcel->write(&attr, sizeof(audio_attributes_t));
- (void)parcel->write(&config, sizeof(audio_config_t));
- (void)clientInfo.writeToParcel(parcel);
- if (sharedBuffer != 0) {
- (void)parcel->writeInt32(1);
- (void)parcel->writeStrongBinder(IInterface::asBinder(sharedBuffer));
- } else {
- (void)parcel->writeInt32(0);
- }
- (void)parcel->writeInt32(notificationsPerBuffer);
- (void)parcel->writeFloat(speed);
- (void)parcel->writeStrongBinder(IInterface::asBinder(audioTrackCallback));
-
- /* input/output arguments*/
- (void)parcel->write(&flags, sizeof(audio_output_flags_t));
- (void)parcel->writeInt64(frameCount);
- (void)parcel->writeInt64(notificationFrameCount);
- (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->write(&sessionId, sizeof(audio_session_t));
- return NO_ERROR;
- }
-
/* input */
audio_attributes_t attr;
audio_config_t config;
@@ -127,6 +78,7 @@
uint32_t notificationsPerBuffer;
float speed;
sp<media::IAudioTrackCallback> audioTrackCallback;
+ std::string opPackageName;
/* input/output */
audio_output_flags_t flags;
@@ -134,50 +86,17 @@
size_t notificationFrameCount;
audio_port_handle_t selectedDeviceId;
audio_session_t sessionId;
+
+ ConversionResult<media::CreateTrackRequest> toAidl() const;
+ static ConversionResult<CreateTrackInput> fromAidl(const media::CreateTrackRequest& aidl);
};
/* CreateTrackOutput contains all output arguments returned by AudioFlinger to AudioTrack
* when calling createTrack() including arguments that were passed as I/O for update by
* CreateTrackInput.
*/
- class CreateTrackOutput : public Parcelable {
+ class CreateTrackOutput {
public:
- status_t readFromParcel(const Parcel *parcel) override {
- /* input/output arguments*/
- (void)parcel->read(&flags, sizeof(audio_output_flags_t));
- frameCount = parcel->readInt64();
- notificationFrameCount = parcel->readInt64();
- (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
- /* output arguments*/
- sampleRate = parcel->readUint32();
- afFrameCount = parcel->readInt64();
- afSampleRate = parcel->readInt64();
- afLatencyMs = parcel->readInt32();
- (void)parcel->read(&outputId, sizeof(audio_io_handle_t));
- (void)parcel->read(&portId, sizeof(audio_port_handle_t));
- return NO_ERROR;
- }
-
- status_t writeToParcel(Parcel *parcel) const override {
- /* input/output arguments*/
- (void)parcel->write(&flags, sizeof(audio_output_flags_t));
- (void)parcel->writeInt64(frameCount);
- (void)parcel->writeInt64(notificationFrameCount);
- (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
- /* output arguments*/
- (void)parcel->writeUint32(sampleRate);
- (void)parcel->writeInt64(afFrameCount);
- (void)parcel->writeInt64(afSampleRate);
- (void)parcel->writeInt32(afLatencyMs);
- (void)parcel->write(&outputId, sizeof(audio_io_handle_t));
- (void)parcel->write(&portId, sizeof(audio_port_handle_t));
- return NO_ERROR;
- }
-
/* input/output */
audio_output_flags_t flags;
size_t frameCount;
@@ -192,59 +111,18 @@
uint32_t afLatencyMs;
audio_io_handle_t outputId;
audio_port_handle_t portId;
+ sp<media::IAudioTrack> audioTrack;
+
+ ConversionResult<media::CreateTrackResponse> toAidl() const;
+ static ConversionResult<CreateTrackOutput> fromAidl(const media::CreateTrackResponse& aidl);
};
/* CreateRecordInput contains all input arguments sent by AudioRecord to AudioFlinger
* when calling createRecord() including arguments that will be updated by AudioFlinger
* and returned in CreateRecordOutput object
*/
- class CreateRecordInput : public Parcelable {
+ class CreateRecordInput {
public:
- status_t readFromParcel(const Parcel *parcel) override {
- /* input arguments*/
- memset(&attr, 0, sizeof(audio_attributes_t));
- if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
- memset(&config, 0, sizeof(audio_config_base_t));
- if (parcel->read(&config, sizeof(audio_config_base_t)) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
- return DEAD_OBJECT;
- }
- opPackageName = parcel->readString16();
- if (parcel->read(&riid, sizeof(audio_unique_id_t)) != NO_ERROR) {
- return DEAD_OBJECT;
- }
-
- /* input/output arguments*/
- (void)parcel->read(&flags, sizeof(audio_input_flags_t));
- frameCount = parcel->readInt64();
- notificationFrameCount = parcel->readInt64();
- (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->read(&sessionId, sizeof(audio_session_t));
- return NO_ERROR;
- }
-
- status_t writeToParcel(Parcel *parcel) const override {
- /* input arguments*/
- (void)parcel->write(&attr, sizeof(audio_attributes_t));
- (void)parcel->write(&config, sizeof(audio_config_base_t));
- (void)clientInfo.writeToParcel(parcel);
- (void)parcel->writeString16(opPackageName);
- (void)parcel->write(&riid, sizeof(audio_unique_id_t));
-
- /* input/output arguments*/
- (void)parcel->write(&flags, sizeof(audio_input_flags_t));
- (void)parcel->writeInt64(frameCount);
- (void)parcel->writeInt64(notificationFrameCount);
- (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->write(&sessionId, sizeof(audio_session_t));
- return NO_ERROR;
- }
-
/* input */
audio_attributes_t attr;
audio_config_base_t config;
@@ -258,77 +136,17 @@
size_t notificationFrameCount;
audio_port_handle_t selectedDeviceId;
audio_session_t sessionId;
+
+ ConversionResult<media::CreateRecordRequest> toAidl() const;
+ static ConversionResult<CreateRecordInput> fromAidl(const media::CreateRecordRequest& aidl);
};
/* CreateRecordOutput contains all output arguments returned by AudioFlinger to AudioRecord
* when calling createRecord() including arguments that were passed as I/O for update by
* CreateRecordInput.
*/
- class CreateRecordOutput : public Parcelable {
+ class CreateRecordOutput {
public:
- status_t readFromParcel(const Parcel *parcel) override {
- /* input/output arguments*/
- (void)parcel->read(&flags, sizeof(audio_input_flags_t));
- frameCount = parcel->readInt64();
- notificationFrameCount = parcel->readInt64();
- (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
- /* output arguments*/
- sampleRate = parcel->readUint32();
- (void)parcel->read(&inputId, sizeof(audio_io_handle_t));
- if (parcel->readInt32() != 0) {
- cblk = interface_cast<IMemory>(parcel->readStrongBinder());
- // TODO: Using unsecurePointer() has some associated security
- // pitfalls (see declaration for details).
- // Either document why it is safe in this case or address
- // the issue (e.g. by copying).
- if (cblk == 0 || cblk->unsecurePointer() == NULL) {
- return BAD_VALUE;
- }
- }
- if (parcel->readInt32() != 0) {
- buffers = interface_cast<IMemory>(parcel->readStrongBinder());
- // TODO: Using unsecurePointer() has some associated security
- // pitfalls (see declaration for details).
- // Either document why it is safe in this case or address
- // the issue (e.g. by copying).
- if (buffers == 0 || buffers->unsecurePointer() == NULL) {
- return BAD_VALUE;
- }
- }
- (void)parcel->read(&portId, sizeof(audio_port_handle_t));
- return NO_ERROR;
- }
-
- status_t writeToParcel(Parcel *parcel) const override {
- /* input/output arguments*/
- (void)parcel->write(&flags, sizeof(audio_input_flags_t));
- (void)parcel->writeInt64(frameCount);
- (void)parcel->writeInt64(notificationFrameCount);
- (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
- (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
- /* output arguments*/
- (void)parcel->writeUint32(sampleRate);
- (void)parcel->write(&inputId, sizeof(audio_io_handle_t));
- if (cblk != 0) {
- (void)parcel->writeInt32(1);
- (void)parcel->writeStrongBinder(IInterface::asBinder(cblk));
- } else {
- (void)parcel->writeInt32(0);
- }
- if (buffers != 0) {
- (void)parcel->writeInt32(1);
- (void)parcel->writeStrongBinder(IInterface::asBinder(buffers));
- } else {
- (void)parcel->writeInt32(0);
- }
- (void)parcel->write(&portId, sizeof(audio_port_handle_t));
-
- return NO_ERROR;
- }
-
/* input/output */
audio_input_flags_t flags;
size_t frameCount;
@@ -342,21 +160,26 @@
sp<IMemory> cblk;
sp<IMemory> buffers;
audio_port_handle_t portId;
+ sp<media::IAudioRecord> audioRecord;
+
+ ConversionResult<media::CreateRecordResponse> toAidl() const;
+ static ConversionResult<CreateRecordOutput>
+ fromAidl(const media::CreateRecordResponse& aidl);
};
- // invariant on exit for all APIs that return an sp<>:
- // (return value != 0) == (*status == NO_ERROR)
-
/* create an audio track and registers it with AudioFlinger.
- * return null if the track cannot be created.
+ * The audioTrack field will be null if the track cannot be created and the status will reflect
+ * failure.
*/
- virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
- CreateTrackOutput& output,
- status_t *status) = 0;
+ virtual status_t createTrack(const media::CreateTrackRequest& input,
+ media::CreateTrackResponse& output) = 0;
- virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
- CreateRecordOutput& output,
- status_t *status) = 0;
+ /* create an audio record and registers it with AudioFlinger.
+ * The audioRecord field will be null if the record cannot be created and the status will reflect
+ * failure.
+ */
+ virtual status_t createRecord(const media::CreateRecordRequest& input,
+ media::CreateRecordResponse& output) = 0;
// FIXME Surprisingly, format/latency don't work for input handles
@@ -412,32 +235,24 @@
// Register an object to receive audio input/output change and track notifications.
// For a given calling pid, AudioFlinger disregards any registrations after the first.
// Thus the IAudioFlingerClient must be a singleton per process.
- virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0;
+ virtual void registerClient(const sp<media::IAudioFlingerClient>& client) = 0;
// retrieve the audio recording buffer size in bytes
// FIXME This API assumes a route, and so should be deprecated.
virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask) const = 0;
- virtual status_t openOutput(audio_module_handle_t module,
- audio_io_handle_t *output,
- audio_config_t *config,
- const sp<DeviceDescriptorBase>& device,
- uint32_t *latencyMs,
- audio_output_flags_t flags) = 0;
+ virtual status_t openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* response) = 0;
virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
audio_io_handle_t output2) = 0;
virtual status_t closeOutput(audio_io_handle_t output) = 0;
virtual status_t suspendOutput(audio_io_handle_t output) = 0;
virtual status_t restoreOutput(audio_io_handle_t output) = 0;
- virtual status_t openInput(audio_module_handle_t module,
- audio_io_handle_t *input,
- audio_config_t *config,
- audio_devices_t *device,
- const String8& address,
- audio_source_t source,
- audio_input_flags_t flags) = 0;
+ virtual status_t openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* response) = 0;
+
virtual status_t closeInput(audio_io_handle_t input) = 0;
virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
@@ -463,20 +278,8 @@
uint32_t preferredTypeFlag,
effect_descriptor_t *pDescriptor) const = 0;
- virtual sp<IEffect> createEffect(
- effect_descriptor_t *pDesc,
- const sp<IEffectClient>& client,
- int32_t priority,
- // AudioFlinger doesn't take over handle reference from client
- audio_io_handle_t output,
- audio_session_t sessionId,
- const AudioDeviceTypeAddr& device,
- const String16& callingPackage,
- pid_t pid,
- bool probe,
- status_t *status,
- int *id,
- int *enabled) = 0;
+ virtual status_t createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* response) = 0;
virtual status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
audio_io_handle_t dstOutput) = 0;
@@ -499,12 +302,8 @@
// is obtained from android.app.ActivityManager.MemoryInfo.totalMem.
virtual status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) = 0;
- /* List available audio ports and their attributes */
- virtual status_t listAudioPorts(unsigned int *num_ports,
- struct audio_port *ports) = 0;
-
/* Get attributes for a given audio port */
- virtual status_t getAudioPort(struct audio_port *port) = 0;
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
/* Create an audio patch between several source and sink ports */
virtual status_t createAudioPatch(const struct audio_patch *patch,
@@ -534,22 +333,282 @@
virtual status_t setAudioHalPids(const std::vector<pid_t>& pids) = 0;
};
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlinger : public BnInterface<IAudioFlinger>
-{
+/**
+ * A client-side adapter, wrapping an IAudioFlingerService instance and presenting it as an
+ * IAudioFlinger. Intended to be used by legacy client code that was written against IAudioFlinger,
+ * before IAudioFlingerService was introduced as an AIDL service.
+ * New clients should not use this adapter, but rather IAudioFlingerService directly, via
+ * BpAudioFlingerService.
+ */
+class AudioFlingerClientAdapter : public IAudioFlinger {
public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
+ explicit AudioFlingerClientAdapter(const sp<media::IAudioFlingerService> delegate);
- // Requests media.log to start merging log buffers
- virtual void requestLogMerge() = 0;
+ status_t createTrack(const media::CreateTrackRequest& input,
+ media::CreateTrackResponse& output) override;
+ status_t createRecord(const media::CreateRecordRequest& input,
+ media::CreateRecordResponse& output) override;
+ uint32_t sampleRate(audio_io_handle_t ioHandle) const override;
+ audio_format_t format(audio_io_handle_t output) const override;
+ size_t frameCount(audio_io_handle_t ioHandle) const override;
+ uint32_t latency(audio_io_handle_t output) const override;
+ status_t setMasterVolume(float value) override;
+ status_t setMasterMute(bool muted) override;
+ float masterVolume() const override;
+ bool masterMute() const override;
+ status_t setMasterBalance(float balance) override;
+ status_t getMasterBalance(float* balance) const override;
+ status_t setStreamVolume(audio_stream_type_t stream, float value,
+ audio_io_handle_t output) override;
+ status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
+ float streamVolume(audio_stream_type_t stream,
+ audio_io_handle_t output) const override;
+ bool streamMute(audio_stream_type_t stream) const override;
+ status_t setMode(audio_mode_t mode) override;
+ status_t setMicMute(bool state) override;
+ bool getMicMute() const override;
+ void setRecordSilenced(audio_port_handle_t portId, bool silenced) override;
+ status_t setParameters(audio_io_handle_t ioHandle,
+ const String8& keyValuePairs) override;
+ String8 getParameters(audio_io_handle_t ioHandle, const String8& keys)
+ const override;
+ void registerClient(const sp<media::IAudioFlingerClient>& client) override;
+ size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
+ audio_channel_mask_t channelMask) const override;
+ status_t openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* response) override;
+ audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
+ audio_io_handle_t output2) override;
+ status_t closeOutput(audio_io_handle_t output) override;
+ status_t suspendOutput(audio_io_handle_t output) override;
+ status_t restoreOutput(audio_io_handle_t output) override;
+ status_t openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* response) override;
+ status_t closeInput(audio_io_handle_t input) override;
+ status_t invalidateStream(audio_stream_type_t stream) override;
+ status_t setVoiceVolume(float volume) override;
+ status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
+ audio_io_handle_t output) const override;
+ uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const override;
+ audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) override;
+ void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override;
+ void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) override;
+ status_t queryNumberEffects(uint32_t* numEffects) const override;
+ status_t queryEffect(uint32_t index, effect_descriptor_t* pDescriptor) const override;
+ status_t getEffectDescriptor(const effect_uuid_t* pEffectUUID,
+ const effect_uuid_t* pTypeUUID,
+ uint32_t preferredTypeFlag,
+ effect_descriptor_t* pDescriptor) const override;
+ status_t createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* response) override;
+ status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
+ audio_io_handle_t dstOutput) override;
+ void setEffectSuspended(int effectId,
+ audio_session_t sessionId,
+ bool suspended) override;
+ audio_module_handle_t loadHwModule(const char* name) override;
+ uint32_t getPrimaryOutputSamplingRate() override;
+ size_t getPrimaryOutputFrameCount() override;
+ status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
+ status_t getAudioPort(struct audio_port_v7* port) override;
+ status_t createAudioPatch(const struct audio_patch* patch,
+ audio_patch_handle_t* handle) override;
+ status_t releaseAudioPatch(audio_patch_handle_t handle) override;
+ status_t listAudioPatches(unsigned int* num_patches,
+ struct audio_patch* patches) override;
+ status_t setAudioPortConfig(const struct audio_port_config* config) override;
+ audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) override;
+ status_t systemReady() override;
+ size_t frameCountHAL(audio_io_handle_t ioHandle) const override;
+ status_t getMicrophones(std::vector<media::MicrophoneInfo>* microphones) override;
+ status_t setAudioHalPids(const std::vector<pid_t>& pids) override;
+
+private:
+ const sp<media::IAudioFlingerService> mDelegate;
};
-// ----------------------------------------------------------------------------
+/**
+ * A server-side adapter, wrapping an IAudioFlinger instance and presenting it as an
+ * IAudioFlingerService. Intended to be used by legacy server code that was written against
+ * IAudioFlinger, before IAudioFlingerService was introduced as an AIDL service.
+ * New servers should not use this adapter, but rather implement IAudioFlingerService directly, via
+ * BnAudioFlingerService.
+ */
+class AudioFlingerServerAdapter : public media::BnAudioFlingerService {
+public:
+ using Status = binder::Status;
+
+ /**
+ * Legacy server should implement this interface in order to be wrapped.
+ */
+ class Delegate : public IAudioFlinger {
+ protected:
+ friend class AudioFlingerServerAdapter;
+
+ enum class TransactionCode {
+ CREATE_TRACK = media::BnAudioFlingerService::TRANSACTION_createTrack,
+ CREATE_RECORD = media::BnAudioFlingerService::TRANSACTION_createRecord,
+ SAMPLE_RATE = media::BnAudioFlingerService::TRANSACTION_sampleRate,
+ FORMAT = media::BnAudioFlingerService::TRANSACTION_format,
+ FRAME_COUNT = media::BnAudioFlingerService::TRANSACTION_frameCount,
+ LATENCY = media::BnAudioFlingerService::TRANSACTION_latency,
+ SET_MASTER_VOLUME = media::BnAudioFlingerService::TRANSACTION_setMasterVolume,
+ SET_MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_setMasterMute,
+ MASTER_VOLUME = media::BnAudioFlingerService::TRANSACTION_masterVolume,
+ MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
+ SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
+ SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
+ STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
+ STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
+ SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
+ SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
+ GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
+ SET_RECORD_SILENCED = media::BnAudioFlingerService::TRANSACTION_setRecordSilenced,
+ SET_PARAMETERS = media::BnAudioFlingerService::TRANSACTION_setParameters,
+ GET_PARAMETERS = media::BnAudioFlingerService::TRANSACTION_getParameters,
+ REGISTER_CLIENT = media::BnAudioFlingerService::TRANSACTION_registerClient,
+ GET_INPUTBUFFERSIZE = media::BnAudioFlingerService::TRANSACTION_getInputBufferSize,
+ OPEN_OUTPUT = media::BnAudioFlingerService::TRANSACTION_openOutput,
+ OPEN_DUPLICATE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_openDuplicateOutput,
+ CLOSE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_closeOutput,
+ SUSPEND_OUTPUT = media::BnAudioFlingerService::TRANSACTION_suspendOutput,
+ RESTORE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_restoreOutput,
+ OPEN_INPUT = media::BnAudioFlingerService::TRANSACTION_openInput,
+ CLOSE_INPUT = media::BnAudioFlingerService::TRANSACTION_closeInput,
+ INVALIDATE_STREAM = media::BnAudioFlingerService::TRANSACTION_invalidateStream,
+ SET_VOICE_VOLUME = media::BnAudioFlingerService::TRANSACTION_setVoiceVolume,
+ GET_RENDER_POSITION = media::BnAudioFlingerService::TRANSACTION_getRenderPosition,
+ GET_INPUT_FRAMES_LOST = media::BnAudioFlingerService::TRANSACTION_getInputFramesLost,
+ NEW_AUDIO_UNIQUE_ID = media::BnAudioFlingerService::TRANSACTION_newAudioUniqueId,
+ ACQUIRE_AUDIO_SESSION_ID = media::BnAudioFlingerService::TRANSACTION_acquireAudioSessionId,
+ RELEASE_AUDIO_SESSION_ID = media::BnAudioFlingerService::TRANSACTION_releaseAudioSessionId,
+ QUERY_NUM_EFFECTS = media::BnAudioFlingerService::TRANSACTION_queryNumberEffects,
+ QUERY_EFFECT = media::BnAudioFlingerService::TRANSACTION_queryEffect,
+ GET_EFFECT_DESCRIPTOR = media::BnAudioFlingerService::TRANSACTION_getEffectDescriptor,
+ CREATE_EFFECT = media::BnAudioFlingerService::TRANSACTION_createEffect,
+ MOVE_EFFECTS = media::BnAudioFlingerService::TRANSACTION_moveEffects,
+ LOAD_HW_MODULE = media::BnAudioFlingerService::TRANSACTION_loadHwModule,
+ GET_PRIMARY_OUTPUT_SAMPLING_RATE = media::BnAudioFlingerService::TRANSACTION_getPrimaryOutputSamplingRate,
+ GET_PRIMARY_OUTPUT_FRAME_COUNT = media::BnAudioFlingerService::TRANSACTION_getPrimaryOutputFrameCount,
+ SET_LOW_RAM_DEVICE = media::BnAudioFlingerService::TRANSACTION_setLowRamDevice,
+ GET_AUDIO_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioPort,
+ CREATE_AUDIO_PATCH = media::BnAudioFlingerService::TRANSACTION_createAudioPatch,
+ RELEASE_AUDIO_PATCH = media::BnAudioFlingerService::TRANSACTION_releaseAudioPatch,
+ LIST_AUDIO_PATCHES = media::BnAudioFlingerService::TRANSACTION_listAudioPatches,
+ SET_AUDIO_PORT_CONFIG = media::BnAudioFlingerService::TRANSACTION_setAudioPortConfig,
+ GET_AUDIO_HW_SYNC_FOR_SESSION = media::BnAudioFlingerService::TRANSACTION_getAudioHwSyncForSession,
+ SYSTEM_READY = media::BnAudioFlingerService::TRANSACTION_systemReady,
+ FRAME_COUNT_HAL = media::BnAudioFlingerService::TRANSACTION_frameCountHAL,
+ GET_MICROPHONES = media::BnAudioFlingerService::TRANSACTION_getMicrophones,
+ SET_MASTER_BALANCE = media::BnAudioFlingerService::TRANSACTION_setMasterBalance,
+ GET_MASTER_BALANCE = media::BnAudioFlingerService::TRANSACTION_getMasterBalance,
+ SET_EFFECT_SUSPENDED = media::BnAudioFlingerService::TRANSACTION_setEffectSuspended,
+ SET_AUDIO_HAL_PIDS = media::BnAudioFlingerService::TRANSACTION_setAudioHalPids,
+ };
+
+ /**
+ * An optional hook, called on every transaction, before unparceling the data and
+ * dispatching to the respective method. Useful for bulk operations, such as logging or
+ * permission checks.
+ * If an error status is returned, the transaction will return immediately and will not be
+ * processed.
+ */
+ virtual status_t onPreTransact(TransactionCode code, const Parcel& data, uint32_t flags) {
+ (void) code;
+ (void) data;
+ (void) flags;
+ return OK;
+ };
+
+ /**
+ * An optional hook for implementing diagnostics dumping.
+ */
+ virtual status_t dump(int fd, const Vector<String16>& args) {
+ (void) fd;
+ (void) args;
+ return OK;
+ }
+ };
+
+ explicit AudioFlingerServerAdapter(
+ const sp<AudioFlingerServerAdapter::Delegate>& delegate);
+
+ status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
+ status_t dump(int fd, const Vector<String16>& args) override;
+
+ Status createTrack(const media::CreateTrackRequest& request,
+ media::CreateTrackResponse* _aidl_return) override;
+ Status createRecord(const media::CreateRecordRequest& request,
+ media::CreateRecordResponse* _aidl_return) override;
+ Status sampleRate(int32_t ioHandle, int32_t* _aidl_return) override;
+ Status format(int32_t output, media::audio::common::AudioFormat* _aidl_return) override;
+ Status frameCount(int32_t ioHandle, int64_t* _aidl_return) override;
+ Status latency(int32_t output, int32_t* _aidl_return) override;
+ Status setMasterVolume(float value) override;
+ Status setMasterMute(bool muted) override;
+ Status masterVolume(float* _aidl_return) override;
+ Status masterMute(bool* _aidl_return) override;
+ Status setMasterBalance(float balance) override;
+ Status getMasterBalance(float* _aidl_return) override;
+ Status setStreamVolume(media::AudioStreamType stream, float value, int32_t output) override;
+ Status setStreamMute(media::AudioStreamType stream, bool muted) override;
+ Status
+ streamVolume(media::AudioStreamType stream, int32_t output, float* _aidl_return) override;
+ Status streamMute(media::AudioStreamType stream, bool* _aidl_return) override;
+ Status setMode(media::AudioMode mode) override;
+ Status setMicMute(bool state) override;
+ Status getMicMute(bool* _aidl_return) override;
+ Status setRecordSilenced(int32_t portId, bool silenced) override;
+ Status setParameters(int32_t ioHandle, const std::string& keyValuePairs) override;
+ Status
+ getParameters(int32_t ioHandle, const std::string& keys, std::string* _aidl_return) override;
+ Status registerClient(const sp<media::IAudioFlingerClient>& client) override;
+ Status getInputBufferSize(int32_t sampleRate, media::audio::common::AudioFormat format,
+ int32_t channelMask, int64_t* _aidl_return) override;
+ Status openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* _aidl_return) override;
+ Status openDuplicateOutput(int32_t output1, int32_t output2, int32_t* _aidl_return) override;
+ Status closeOutput(int32_t output) override;
+ Status suspendOutput(int32_t output) override;
+ Status restoreOutput(int32_t output) override;
+ Status openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* _aidl_return) override;
+ Status closeInput(int32_t input) override;
+ Status invalidateStream(media::AudioStreamType stream) override;
+ Status setVoiceVolume(float volume) override;
+ Status getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) override;
+ Status getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) override;
+ Status newAudioUniqueId(media::AudioUniqueIdUse use, int32_t* _aidl_return) override;
+ Status acquireAudioSessionId(int32_t audioSession, int32_t pid, int32_t uid) override;
+ Status releaseAudioSessionId(int32_t audioSession, int32_t pid) override;
+ Status queryNumberEffects(int32_t* _aidl_return) override;
+ Status queryEffect(int32_t index, media::EffectDescriptor* _aidl_return) override;
+ Status getEffectDescriptor(const media::AudioUuid& effectUUID, const media::AudioUuid& typeUUID,
+ int32_t preferredTypeFlag,
+ media::EffectDescriptor* _aidl_return) override;
+ Status createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* _aidl_return) override;
+ Status moveEffects(int32_t session, int32_t srcOutput, int32_t dstOutput) override;
+ Status setEffectSuspended(int32_t effectId, int32_t sessionId, bool suspended) override;
+ Status loadHwModule(const std::string& name, int32_t* _aidl_return) override;
+ Status getPrimaryOutputSamplingRate(int32_t* _aidl_return) override;
+ Status getPrimaryOutputFrameCount(int64_t* _aidl_return) override;
+ Status setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
+ Status getAudioPort(const media::AudioPort& port, media::AudioPort* _aidl_return) override;
+ Status createAudioPatch(const media::AudioPatch& patch, int32_t* _aidl_return) override;
+ Status releaseAudioPatch(int32_t handle) override;
+ Status listAudioPatches(int32_t maxCount,
+ std::vector<media::AudioPatch>* _aidl_return) override;
+ Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+ Status getAudioHwSyncForSession(int32_t sessionId, int32_t* _aidl_return) override;
+ Status systemReady() override;
+ Status frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) override;
+ Status getMicrophones(std::vector<media::MicrophoneInfoData>* _aidl_return) override;
+ Status setAudioHalPids(const std::vector<int32_t>& pids) override;
+
+private:
+ const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
+};
}; // namespace android
diff --git a/media/libaudioclient/include/media/IAudioFlingerClient.h b/media/libaudioclient/include/media/IAudioFlingerClient.h
deleted file mode 100644
index 0080bc9..0000000
--- a/media/libaudioclient/include/media/IAudioFlingerClient.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOFLINGERCLIENT_H
-#define ANDROID_IAUDIOFLINGERCLIENT_H
-
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <utils/KeyedVector.h>
-#include <system/audio.h>
-#include <media/AudioIoDescriptor.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioFlingerClient : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioFlingerClient);
-
- // Notifies a change of audio input/output configuration.
- virtual void ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc) = 0;
-
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlingerClient : public BnInterface<IAudioFlingerClient>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOFLINGERCLIENT_H
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index bb1c07f..3018364 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -20,14 +20,15 @@
#include <stdint.h>
#include <sys/types.h>
#include <unistd.h>
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
+
+#include <android/media/IAudioPolicyServiceClient.h>
#include <binder/IInterface.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioSystem.h>
#include <media/AudioPolicy.h>
-#include <media/IAudioPolicyServiceClient.h>
#include <system/audio_policy.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
#include <vector>
namespace android {
@@ -150,7 +151,7 @@
virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
// Check if offload is possible for given format, stream type, sample rate,
// bit rate, duration, video and streaming or offload property is enabled
- virtual bool isOffloadSupported(const audio_offload_info_t& info) = 0;
+ virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& info) = 0;
// Check if direct playback is possible for given format, sample rate, channel mask and flags.
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
@@ -160,11 +161,11 @@
virtual status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation) = 0;
/* Get attributes for a given audio port */
- virtual status_t getAudioPort(struct audio_port *port) = 0;
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
/* Create an audio patch between several source and sink ports */
virtual status_t createAudioPatch(const struct audio_patch *patch,
@@ -180,7 +181,7 @@
/* Set audio port configuration */
virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
- virtual void registerClient(const sp<IAudioPolicyServiceClient>& client) = 0;
+ virtual void registerClient(const sp<media::IAudioPolicyServiceClient>& client) = 0;
virtual void setAudioPortCallbacksEnabled(bool enabled) = 0;
@@ -196,13 +197,13 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) = 0;
- virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
= 0;
virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
virtual status_t setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices) = 0;
+ const AudioDeviceTypeAddrVector& devices) = 0;
virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
@@ -241,13 +242,35 @@
virtual bool isCallScreenModeSupported() = 0;
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device) = 0;
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role) = 0;
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) = 0;
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices) = 0;
+
+ virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role) = 0;
+
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices) = 0;
// The return code here is only intended to represent transport errors. The
// actual server implementation should always return NO_ERROR.
@@ -266,10 +289,6 @@
const Parcel& data,
Parcel* reply,
uint32_t flags = 0);
-private:
- void sanetizeAudioAttributes(audio_attributes_t* attr);
- status_t sanitizeEffectDescriptor(effect_descriptor_t* desc);
- status_t sanitizeAudioPortConfig(struct audio_port_config* config);
};
// ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/include/media/IAudioPolicyServiceClient.h b/media/libaudioclient/include/media/IAudioPolicyServiceClient.h
deleted file mode 100644
index 47b31ee..0000000
--- a/media/libaudioclient/include/media/IAudioPolicyServiceClient.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOPOLICYSERVICECLIENT_H
-#define ANDROID_IAUDIOPOLICYSERVICECLIENT_H
-
-#include <vector>
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <system/audio.h>
-#include <system/audio_effect.h>
-#include <media/AudioPolicy.h>
-#include <media/AudioVolumeGroup.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-struct record_client_info {
- audio_unique_id_t riid;
- uid_t uid;
- audio_session_t session;
- audio_source_t source;
- audio_port_handle_t port_id;
- bool silenced;
-};
-
-typedef struct record_client_info record_client_info_t;
-
-// ----------------------------------------------------------------------------
-
-class IAudioPolicyServiceClient : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioPolicyServiceClient);
-
- // Notifies a change of volume group
- virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags) = 0;
- // Notifies a change of audio port configuration.
- virtual void onAudioPortListUpdate() = 0;
- // Notifies a change of audio patch configuration.
- virtual void onAudioPatchListUpdate() = 0;
- // Notifies a change in the mixing state of a specific mix in a dynamic audio policy
- virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state) = 0;
- // Notifies a change of audio recording configuration
- virtual void onRecordingConfigurationUpdate(int event,
- const record_client_info_t *clientInfo,
- const audio_config_base_t *clientConfig,
- std::vector<effect_descriptor_t> clientEffects,
- const audio_config_base_t *deviceConfig,
- std::vector<effect_descriptor_t> effects,
- audio_patch_handle_t patchHandle,
- audio_source_t source) = 0;
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioPolicyServiceClient : public BnInterface<IAudioPolicyServiceClient>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOPOLICYSERVICECLIENT_H
diff --git a/media/libaudioclient/include/media/IAudioTrack.h b/media/libaudioclient/include/media/IAudioTrack.h
deleted file mode 100644
index 06e786d..0000000
--- a/media/libaudioclient/include/media/IAudioTrack.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOTRACK_H
-#define ANDROID_IAUDIOTRACK_H
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <binder/IMemory.h>
-#include <utils/String8.h>
-#include <media/AudioTimestamp.h>
-#include <media/VolumeShaper.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioTrack : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioTrack);
-
- /* Get this track's control block */
- virtual sp<IMemory> getCblk() const = 0;
-
- /* After it's created the track is not active. Call start() to
- * make it active.
- */
- virtual status_t start() = 0;
-
- /* Stop a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
- * will continue to be processed, unless/until flush() is called.
- */
- virtual void stop() = 0;
-
- /* Flush a stopped or paused track. All pending/released buffers are discarded.
- * This function has no effect if the track is not stopped or paused.
- */
- virtual void flush() = 0;
-
- /* Pause a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
- * will continue to be processed, unless/until flush() is called.
- */
- virtual void pause() = 0;
-
- /* Attach track auxiliary output to specified effect. Use effectId = 0
- * to detach track from effect.
- */
- virtual status_t attachAuxEffect(int effectId) = 0;
-
- /* Send parameters to the audio hardware */
- virtual status_t setParameters(const String8& keyValuePairs) = 0;
-
- /* Selects the presentation (if available) */
- virtual status_t selectPresentation(int presentationId, int programId) = 0;
-
- /* Return NO_ERROR if timestamp is valid. timestamp is undefined otherwise. */
- virtual status_t getTimestamp(AudioTimestamp& timestamp) = 0;
-
- /* Signal the playback thread for a change in control block */
- virtual void signal() = 0;
-
- /* Sets the volume shaper */
- virtual media::VolumeShaper::Status applyVolumeShaper(
- const sp<media::VolumeShaper::Configuration>& configuration,
- const sp<media::VolumeShaper::Operation>& operation) = 0;
-
- /* gets the volume shaper state */
- virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnAudioTrack : public BnInterface<IAudioTrack>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOTRACK_H
diff --git a/media/libaudioclient/include/media/IEffect.h b/media/libaudioclient/include/media/IEffect.h
deleted file mode 100644
index ff04869..0000000
--- a/media/libaudioclient/include/media/IEffect.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECT_H
-#define ANDROID_IEFFECT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffect: public IInterface
-{
-public:
- DECLARE_META_INTERFACE(Effect);
-
- virtual status_t enable() = 0;
-
- virtual status_t disable() = 0;
-
- virtual status_t command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *pReplySize,
- void *pReplyData) = 0;
-
- virtual void disconnect() = 0;
-
- virtual sp<IMemory> getCblk() const = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffect: public BnInterface<IEffect>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECT_H
diff --git a/media/libaudioclient/include/media/IEffectClient.h b/media/libaudioclient/include/media/IEffectClient.h
deleted file mode 100644
index 2f78c98..0000000
--- a/media/libaudioclient/include/media/IEffectClient.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECTCLIENT_H
-#define ANDROID_IEFFECTCLIENT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffectClient: public IInterface
-{
-public:
- DECLARE_META_INTERFACE(EffectClient);
-
- virtual void controlStatusChanged(bool controlGranted) = 0;
- virtual void enableStatusChanged(bool enabled) = 0;
- virtual void commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffectClient: public BnInterface<IEffectClient>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECTCLIENT_H
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index e7a8abc..4aad9b4 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -19,6 +19,7 @@
#include <audiomanager/AudioManager.h>
#include <audiomanager/IAudioManager.h>
+#include <utils/Mutex.h>
#include "android/media/BnPlayer.h"
@@ -40,8 +41,8 @@
virtual binder::Status setPan(float pan) override;
virtual binder::Status setStartDelayMs(int32_t delayMs) override;
virtual binder::Status applyVolumeShaper(
- const media::VolumeShaper::Configuration& configuration,
- const media::VolumeShaper::Operation& operation) override;
+ const media::VolumeShaperConfiguration& configuration,
+ const media::VolumeShaperOperation& operation) override;
status_t startWithStatus();
status_t pauseWithStatus();
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 5b0689a..a575616 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_TONEGENERATOR_H_
#define ANDROID_TONEGENERATOR_H_
+#include <string>
+
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <utils/Compat.h>
@@ -152,7 +154,8 @@
NUM_SUP_TONES = LAST_SUP_TONE-FIRST_SUP_TONE+1
};
- ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava = false);
+ ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava = false,
+ std::string opPackageName = {});
~ToneGenerator();
bool startTone(tone_type toneType, int durationMs = -1);
@@ -193,6 +196,7 @@
TONE_JAPAN_DIAL, // Dial tone: 400Hz, continuous
TONE_JAPAN_BUSY, // Busy tone: 400Hz, 500ms ON, 500ms OFF...
TONE_JAPAN_RADIO_ACK, // Radio path acknowlegment: 400Hz, 1s ON, 2s OFF...
+ TONE_JAPAN_RINGTONE, // Ring Tone: 400 Hz repeated in a 1 s on, 2 s off pattern.
// GB Supervisory tones
TONE_GB_BUSY, // Busy tone: 400 Hz, 375ms ON, 375ms OFF...
TONE_GB_CONGESTION, // Congestion Tone: 400 Hz, 400ms ON, 350ms OFF, 225ms ON, 525ms OFF...
@@ -218,6 +222,7 @@
TONE_INDIA_CONGESTION, // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
TONE_INDIA_CALL_WAITING, // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
TONE_INDIA_RINGTONE, // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
+ TONE_TW_RINGTONE, // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
NUM_ALTERNATE_TONES
};
@@ -230,6 +235,7 @@
HONGKONG,
IRELAND,
INDIA,
+ TAIWAN,
CEPT,
NUM_REGIONS
};
@@ -341,6 +347,8 @@
};
KeyedVector<uint16_t, WaveGenerator *> mWaveGens; // list of active wave generators.
+
+ std::string mOpPackageName;
};
}
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 66e9b3b..6d26e63 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -33,8 +33,8 @@
//IPlayer implementation
virtual binder::Status applyVolumeShaper(
- const media::VolumeShaper::Configuration& configuration,
- const media::VolumeShaper::Operation& operation);
+ const media::VolumeShaperConfiguration& configuration,
+ const media::VolumeShaperOperation& operation);
//FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
sp<AudioTrack> mAudioTrack;
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 350a780..21d18d3 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -7,6 +7,18 @@
}
cc_test {
+ name: "audio_aidl_status_tests",
+ defaults: ["libaudioclient_tests_defaults"],
+ srcs: ["audio_aidl_status_tests.cpp"],
+ shared_libs: [
+ "libaudioclient_aidl_conversion",
+ "libbinder",
+ "libcutils",
+ "libutils",
+ ],
+}
+
+cc_test {
name: "test_create_audiotrack",
defaults: ["libaudioclient_tests_defaults"],
srcs: ["test_create_audiotrack.cpp",
diff --git a/media/libaudioclient/tests/audio_aidl_status_tests.cpp b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
new file mode 100644
index 0000000..5517091
--- /dev/null
+++ b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <media/AidlConversionUtil.h>
+#include <utils/Errors.h>
+
+using namespace android;
+using namespace android::aidl_utils;
+using android::binder::Status;
+
+// Tests for statusTFromBinderStatus() and binderStatusFromStatusT().
+
+// STATUS_T_SMALL_VALUE_LIMIT is an arbitrary limit where we exhaustively check status_t errors.
+// It is known that this limit doesn't cover UNKNOWN_ERROR ~ INT32_MIN.
+constexpr status_t STATUS_T_SMALL_VALUE_LIMIT = -1000;
+
+// Small status values are preserved on round trip
+TEST(audio_aidl_status_tests, statusRoundTripSmallValues) {
+ for (status_t status = 0; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+ ASSERT_EQ(status, statusTFromBinderStatus(binderStatusFromStatusT(status)));
+ }
+}
+
+// Special status values are preserved on round trip.
+TEST(audio_aidl_status_tests, statusRoundTripSpecialValues) {
+ for (status_t status : {
+ OK,
+ UNKNOWN_ERROR,
+ NO_MEMORY,
+ INVALID_OPERATION,
+ BAD_VALUE,
+ BAD_TYPE,
+ NAME_NOT_FOUND,
+ PERMISSION_DENIED,
+ NO_INIT,
+ ALREADY_EXISTS,
+ DEAD_OBJECT,
+ FAILED_TRANSACTION,
+ BAD_INDEX,
+ NOT_ENOUGH_DATA,
+ WOULD_BLOCK,
+ TIMED_OUT,
+ UNKNOWN_TRANSACTION,
+ FDS_NOT_ALLOWED}) {
+ ASSERT_EQ(status, statusTFromBinderStatus(binderStatusFromStatusT(status)));
+ }
+}
+
+// Binder exceptions show as an error (not fixed at this time); these come fromExceptionCode().
+TEST(audio_aidl_status_tests, binderStatusExceptions) {
+ for (int exceptionCode : {
+ //Status::EX_NONE,
+ Status::EX_SECURITY,
+ Status::EX_BAD_PARCELABLE,
+ Status::EX_ILLEGAL_ARGUMENT,
+ Status::EX_NULL_POINTER,
+ Status::EX_ILLEGAL_STATE,
+ Status::EX_NETWORK_MAIN_THREAD,
+ Status::EX_UNSUPPORTED_OPERATION,
+ //Status::EX_SERVICE_SPECIFIC, -- tested fromServiceSpecificError()
+ Status::EX_PARCELABLE,
+ // This is special and Java specific; see Parcel.java.
+ Status::EX_HAS_REPLY_HEADER,
+ // This is special, and indicates to C++ binder proxies that the
+ // transaction has failed at a low level.
+ //Status::EX_TRANSACTION_FAILED, -- tested fromStatusT().
+ }) {
+ ASSERT_NE(OK, statusTFromBinderStatus(Status::fromExceptionCode(exceptionCode)));
+ }
+}
+
+// Binder transaction errors show exactly in status_t; these come fromStatusT().
+TEST(audio_aidl_status_tests, binderStatusTransactionError) {
+ for (status_t status : {
+ OK, // Note: fromStatusT does check if this is 0, so this is no error.
+ UNKNOWN_ERROR,
+ NO_MEMORY,
+ INVALID_OPERATION,
+ BAD_VALUE,
+ BAD_TYPE,
+ NAME_NOT_FOUND,
+ PERMISSION_DENIED,
+ NO_INIT,
+ ALREADY_EXISTS,
+ DEAD_OBJECT,
+ FAILED_TRANSACTION,
+ BAD_INDEX,
+ NOT_ENOUGH_DATA,
+ WOULD_BLOCK,
+ TIMED_OUT,
+ UNKNOWN_TRANSACTION,
+ FDS_NOT_ALLOWED}) {
+ ASSERT_EQ(status, statusTFromBinderStatus(Status::fromStatusT(status)));
+ }
+}
+
+// Binder service specific errors show in status_t; these come fromServiceSpecificError().
+TEST(audio_aidl_status_tests, binderStatusServiceSpecificError) {
+ // fromServiceSpecificError() still stores exception code if status is 0.
+ for (status_t status = -1; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+ ASSERT_EQ(status, statusTFromBinderStatus(Status::fromServiceSpecificError(status)));
+ }
+}
+
+// Binder status with message.
+TEST(audio_aidl_status_tests, binderStatusMessage) {
+ const String8 message("abcd");
+ for (status_t status = -1; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+ const Status binderStatus = binderStatusFromStatusT(status, message.c_str());
+ ASSERT_EQ(status, statusTFromBinderStatus(binderStatus));
+ ASSERT_EQ(message, binderStatus.exceptionMessage());
+ }
+}
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index e361890..9296d0e 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -1,15 +1,25 @@
cc_library_headers {
name: "libaudiofoundation_headers",
vendor_available: true,
+ min_sdk_version: "29",
+
export_include_dirs: ["include"],
header_libs: [
+ "libaudioclient_aidl_conversion_util",
"libaudio_system_headers",
"libmedia_helper_headers",
],
export_header_lib_headers: [
+ "libaudioclient_aidl_conversion_util",
"libaudio_system_headers",
"libmedia_helper_headers",
],
+ static_libs: [
+ "audioclient-types-aidl-unstable-cpp",
+ ],
+ export_static_lib_headers: [
+ "audioclient-types-aidl-unstable-cpp",
+ ],
host_supported: true,
target: {
darwin: {
@@ -33,6 +43,8 @@
],
shared_libs: [
+ "audioclient-types-aidl-unstable-cpp",
+ "libaudioclient_aidl_conversion",
"libaudioutils",
"libbase",
"libbinder",
@@ -41,6 +53,11 @@
"libutils",
],
+ export_shared_lib_headers: [
+ "audioclient-types-aidl-unstable-cpp",
+ "libaudioclient_aidl_conversion",
+ ],
+
header_libs: [
"libaudiofoundation_headers",
],
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index b44043a..8f1e113 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -16,12 +16,57 @@
#include <media/AudioDeviceTypeAddr.h>
+#include <arpa/inet.h>
+#include <iostream>
+#include <regex>
+#include <set>
+#include <sstream>
+
namespace android {
+namespace {
+
+static const std::string SUPPRESSED = "SUPPRESSED";
+static const std::regex MAC_ADDRESS_REGEX("([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}");
+
+bool isSenstiveAddress(const std::string &address) {
+ if (std::regex_match(address, MAC_ADDRESS_REGEX)) {
+ return true;
+ }
+
+ sockaddr_storage ss4;
+ if (inet_pton(AF_INET, address.c_str(), &ss4) > 0) {
+ return true;
+ }
+
+ sockaddr_storage ss6;
+ if (inet_pton(AF_INET6, address.c_str(), &ss6) > 0) {
+ return true;
+ }
+
+ return false;
+}
+
+} // namespace
+
+AudioDeviceTypeAddr::AudioDeviceTypeAddr(audio_devices_t type, const std::string &address) :
+ mType(type), mAddress(address) {
+ mIsAddressSensitive = isSenstiveAddress(mAddress);
+}
+
const char* AudioDeviceTypeAddr::getAddress() const {
return mAddress.c_str();
}
+const std::string& AudioDeviceTypeAddr::address() const {
+ return mAddress;
+}
+
+void AudioDeviceTypeAddr::setAddress(const std::string& address) {
+ mAddress = address;
+ mIsAddressSensitive = isSenstiveAddress(mAddress);
+}
+
bool AudioDeviceTypeAddr::equals(const AudioDeviceTypeAddr& other) const {
return mType == other.mType && mAddress == other.mAddress;
}
@@ -36,14 +81,34 @@
return false;
}
+bool AudioDeviceTypeAddr::operator==(const AudioDeviceTypeAddr &rhs) const {
+ return equals(rhs);
+}
+
+bool AudioDeviceTypeAddr::operator!=(const AudioDeviceTypeAddr &rhs) const {
+ return !operator==(rhs);
+}
+
void AudioDeviceTypeAddr::reset() {
mType = AUDIO_DEVICE_NONE;
- mAddress = "";
+ setAddress("");
+}
+
+std::string AudioDeviceTypeAddr::toString(bool includeSensitiveInfo) const {
+ std::stringstream sstream;
+ sstream << "type:0x" << std::hex << mType;
+ // IP and MAC address are sensitive information. The sensitive information will be suppressed
+ // is `includeSensitiveInfo` is false.
+ sstream << ",@:"
+ << (!includeSensitiveInfo && mIsAddressSensitive ? SUPPRESSED : mAddress);
+ return sstream.str();
}
status_t AudioDeviceTypeAddr::readFromParcel(const Parcel *parcel) {
status_t status;
- if ((status = parcel->readUint32(&mType)) != NO_ERROR) return status;
+ uint32_t rawDeviceType;
+ if ((status = parcel->readUint32(&rawDeviceType)) != NO_ERROR) return status;
+ mType = static_cast<audio_devices_t>(rawDeviceType);
status = parcel->readUtf8FromUtf16(&mAddress);
return status;
}
@@ -64,4 +129,44 @@
return deviceTypes;
}
-}
\ No newline at end of file
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+ const AudioDeviceTypeAddrVector& devices,
+ const AudioDeviceTypeAddrVector& devicesToExclude) {
+ std::set<AudioDeviceTypeAddr> devicesToExcludeSet(
+ devicesToExclude.begin(), devicesToExclude.end());
+ AudioDeviceTypeAddrVector remainedDevices;
+ for (const auto& device : devices) {
+ if (devicesToExcludeSet.count(device) == 0) {
+ remainedDevices.push_back(device);
+ }
+ }
+ return remainedDevices;
+}
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+ bool includeSensitiveInfo) {
+ std::stringstream stream;
+ for (auto it = deviceTypeAddrs.begin(); it != deviceTypeAddrs.end(); ++it) {
+ if (it != deviceTypeAddrs.begin()) {
+ stream << " ";
+ }
+ stream << it->toString(includeSensitiveInfo);
+ }
+ return stream.str();
+}
+
+ConversionResult<AudioDeviceTypeAddr>
+aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl) {
+ audio_devices_t type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
+ return AudioDeviceTypeAddr(type, aidl.address);
+}
+
+ConversionResult<media::AudioDevice>
+legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy) {
+ media::AudioDevice aidl;
+ aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.mType));
+ aidl.address = legacy.getAddress();
+ return aidl;
+}
+
+} // namespace android
diff --git a/media/libaudiofoundation/AudioGain.cpp b/media/libaudiofoundation/AudioGain.cpp
index 0d28335..1dee938 100644
--- a/media/libaudiofoundation/AudioGain.cpp
+++ b/media/libaudiofoundation/AudioGain.cpp
@@ -129,38 +129,51 @@
mGain.max_ramp_ms == other->mGain.max_ramp_ms;
}
-status_t AudioGain::writeToParcel(android::Parcel *parcel) const
-{
- status_t status = NO_ERROR;
- if ((status = parcel->writeInt32(mIndex)) != NO_ERROR) return status;
- if ((status = parcel->writeBool(mUseInChannelMask)) != NO_ERROR) return status;
- if ((status = parcel->writeBool(mUseForVolume)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.mode)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.channel_mask)) != NO_ERROR) return status;
- if ((status = parcel->writeInt32(mGain.min_value)) != NO_ERROR) return status;
- if ((status = parcel->writeInt32(mGain.max_value)) != NO_ERROR) return status;
- if ((status = parcel->writeInt32(mGain.default_value)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.step_value)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.min_ramp_ms)) != NO_ERROR) return status;
- status = parcel->writeUint32(mGain.max_ramp_ms);
- return status;
+status_t AudioGain::writeToParcel(android::Parcel *parcel) const {
+ media::AudioGain parcelable;
+ return writeToParcelable(&parcelable)
+ ?: parcelable.writeToParcel(parcel);
}
-status_t AudioGain::readFromParcel(const android::Parcel *parcel)
-{
- status_t status = NO_ERROR;
- if ((status = parcel->readInt32(&mIndex)) != NO_ERROR) return status;
- if ((status = parcel->readBool(&mUseInChannelMask)) != NO_ERROR) return status;
- if ((status = parcel->readBool(&mUseForVolume)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
- if ((status = parcel->readInt32(&mGain.min_value)) != NO_ERROR) return status;
- if ((status = parcel->readInt32(&mGain.max_value)) != NO_ERROR) return status;
- if ((status = parcel->readInt32(&mGain.default_value)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.step_value)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.min_ramp_ms)) != NO_ERROR) return status;
- status = parcel->readUint32(&mGain.max_ramp_ms);
- return status;
+status_t AudioGain::writeToParcelable(media::AudioGain* parcelable) const {
+ parcelable->index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mIndex));
+ parcelable->useInChannelMask = mUseInChannelMask;
+ parcelable->useForVolume = mUseForVolume;
+ parcelable->mode = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
+ parcelable->channelMask = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
+ parcelable->minValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_value));
+ parcelable->maxValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_value));
+ parcelable->defaultValue = VALUE_OR_RETURN_STATUS(
+ convertIntegral<int32_t>(mGain.default_value));
+ parcelable->stepValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.step_value));
+ parcelable->minRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_ramp_ms));
+ parcelable->maxRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_ramp_ms));
+ return OK;
+}
+
+status_t AudioGain::readFromParcel(const android::Parcel *parcel) {
+ media::AudioGain parcelable;
+ return parcelable.readFromParcel(parcel)
+ ?: readFromParcelable(parcelable);
+}
+
+status_t AudioGain::readFromParcelable(const media::AudioGain& parcelable) {
+ mIndex = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.index));
+ mUseInChannelMask = parcelable.useInChannelMask;
+ mUseForVolume = parcelable.useForVolume;
+ mGain.mode = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.mode));
+ mGain.channel_mask = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
+ mGain.min_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.minValue));
+ mGain.max_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.maxValue));
+ mGain.default_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.defaultValue));
+ mGain.step_value = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.stepValue));
+ mGain.min_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.minRampMs));
+ mGain.max_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.maxRampMs));
+ return OK;
}
bool AudioGains::equals(const AudioGains &other) const
@@ -196,4 +209,34 @@
return status;
}
+ConversionResult<sp<AudioGain>>
+aidl2legacy_AudioGain(const media::AudioGain& aidl) {
+ sp<AudioGain> legacy = new AudioGain(0, false);
+ status_t status = legacy->readFromParcelable(aidl);
+ if (status != OK) {
+ return base::unexpected(status);
+ }
+ return legacy;
+}
+
+ConversionResult<media::AudioGain>
+legacy2aidl_AudioGain(const sp<AudioGain>& legacy) {
+ media::AudioGain aidl;
+ status_t status = legacy->writeToParcelable(&aidl);
+ if (status != OK) {
+ return base::unexpected(status);
+ }
+ return aidl;
+}
+
+ConversionResult<AudioGains>
+aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl) {
+ return convertContainer<AudioGains>(aidl, aidl2legacy_AudioGain);
+}
+
+ConversionResult<std::vector<media::AudioGain>>
+legacy2aidl_AudioGains(const AudioGains& legacy) {
+ return convertContainer<std::vector<media::AudioGain>>(legacy, legacy2aidl_AudioGain);
+}
+
} // namespace android
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index f988690..20d8632 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -38,6 +38,21 @@
}
}
+void AudioPort::importAudioPort(const audio_port_v7 &port) {
+ for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+ sp<AudioProfile> profile = new AudioProfile(port.audio_profiles[i].format,
+ ChannelMaskSet(port.audio_profiles[i].channel_masks,
+ port.audio_profiles[i].channel_masks +
+ port.audio_profiles->num_channel_masks),
+ SampleRateSet(port.audio_profiles[i].sample_rates,
+ port.audio_profiles[i].sample_rates +
+ port.audio_profiles[i].num_sample_rates));
+ if (!mProfiles.contains(profile)) {
+ addAudioProfile(profile);
+ }
+ }
+}
+
void AudioPort::toAudioPort(struct audio_port *port) const {
// TODO: update this function once audio_port structure reflects the new profile definition.
// For compatibility reason: flatening the AudioProfile into audio_port structure.
@@ -62,21 +77,39 @@
}
}
}
- port->role = mRole;
- port->type = mType;
- strlcpy(port->name, mName.c_str(), AUDIO_PORT_MAX_NAME_LEN);
+ toAudioPortBase(port);
port->num_sample_rates = flatenedRates.size();
port->num_channel_masks = flatenedChannels.size();
port->num_formats = flatenedFormats.size();
std::copy(flatenedRates.begin(), flatenedRates.end(), port->sample_rates);
std::copy(flatenedChannels.begin(), flatenedChannels.end(), port->channel_masks);
std::copy(flatenedFormats.begin(), flatenedFormats.end(), port->formats);
+}
- ALOGV("AudioPort::toAudioPort() num gains %zu", mGains.size());
+void AudioPort::toAudioPort(struct audio_port_v7 *port) const {
+ toAudioPortBase(port);
+ port->num_audio_profiles = 0;
+ for (const auto& profile : mProfiles) {
+ if (profile->isValid()) {
+ const SampleRateSet &sampleRates = profile->getSampleRates();
+ const ChannelMaskSet &channelMasks = profile->getChannels();
- port->num_gains = std::min(mGains.size(), (size_t) AUDIO_PORT_MAX_GAINS);
- for (size_t i = 0; i < port->num_gains; i++) {
- port->gains[i] = mGains[i]->getGain();
+ if (sampleRates.size() > AUDIO_PORT_MAX_SAMPLING_RATES ||
+ channelMasks.size() > AUDIO_PORT_MAX_CHANNEL_MASKS ||
+ port->num_audio_profiles >= AUDIO_PORT_MAX_AUDIO_PROFILES) {
+ ALOGE("%s: bailing out: cannot export profiles to port config", __func__);
+ return;
+ }
+
+ auto& dstProfile = port->audio_profiles[port->num_audio_profiles++];
+ dstProfile.format = profile->getFormat();
+ dstProfile.num_sample_rates = sampleRates.size();
+ std::copy(sampleRates.begin(), sampleRates.end(),
+ std::begin(dstProfile.sample_rates));
+ dstProfile.num_channel_masks = channelMasks.size();
+ std::copy(channelMasks.begin(), channelMasks.end(),
+ std::begin(dstProfile.channel_masks));
+ }
}
}
@@ -117,32 +150,33 @@
status_t AudioPort::writeToParcel(Parcel *parcel) const
{
- status_t status = NO_ERROR;
- if ((status = parcel->writeUtf8AsUtf16(mName)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mType)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mRole)) != NO_ERROR) return status;
- if ((status = parcel->writeParcelable(mProfiles)) != NO_ERROR) return status;
- if ((status = parcel->writeParcelable(mGains)) != NO_ERROR) return status;
- return status;
+ media::AudioPort parcelable;
+ return writeToParcelable(&parcelable)
+ ?: parcelable.writeToParcel(parcel);
}
-status_t AudioPort::readFromParcel(const Parcel *parcel)
-{
- status_t status = NO_ERROR;
- if ((status = parcel->readUtf8FromUtf16(&mName)) != NO_ERROR) return status;
- static_assert(sizeof(mType) == sizeof(uint32_t));
- if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mType))) != NO_ERROR) {
- return status;
- }
- static_assert(sizeof(mRole) == sizeof(uint32_t));
- if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mRole))) != NO_ERROR) {
- return status;
- }
- mProfiles.clear();
- if ((status = parcel->readParcelable(&mProfiles)) != NO_ERROR) return status;
- mGains.clear();
- if ((status = parcel->readParcelable(&mGains)) != NO_ERROR) return status;
- return status;
+status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
+ parcelable->name = mName;
+ parcelable->type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_type_t_AudioPortType(mType));
+ parcelable->role = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_role_t_AudioPortRole(mRole));
+ parcelable->profiles = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioProfileVector(mProfiles));
+ parcelable->gains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
+ return OK;
+}
+
+status_t AudioPort::readFromParcel(const Parcel *parcel) {
+ media::AudioPort parcelable;
+ return parcelable.readFromParcel(parcel)
+ ?: readFromParcelable(parcelable);
+}
+
+status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
+ mName = parcelable.name;
+ mType = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortType_audio_port_type_t(parcelable.type));
+ mRole = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortRole_audio_port_role_t(parcelable.role));
+ mProfiles = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioProfileVector(parcelable.profiles));
+ mGains = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGains(parcelable.gains));
+ return OK;
}
// --- AudioPortConfig class implementation
@@ -243,45 +277,56 @@
mGain.ramp_duration_ms == other->mGain.ramp_duration_ms;
}
-status_t AudioPortConfig::writeToParcel(Parcel *parcel) const
-{
- status_t status = NO_ERROR;
- if ((status = parcel->writeUint32(mSamplingRate)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mFormat)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mChannelMask)) != NO_ERROR) return status;
- if ((status = parcel->writeInt32(mId)) != NO_ERROR) return status;
- // Write mGain to parcel.
- if ((status = parcel->writeInt32(mGain.index)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.mode)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.channel_mask)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mGain.ramp_duration_ms)) != NO_ERROR) return status;
- std::vector<int> values(std::begin(mGain.values), std::end(mGain.values));
- if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
- return status;
+status_t AudioPortConfig::writeToParcel(Parcel *parcel) const {
+ media::AudioPortConfig parcelable;
+ return writeToParcelable(&parcelable)
+ ?: parcelable.writeToParcel(parcel);
}
-status_t AudioPortConfig::readFromParcel(const Parcel *parcel)
-{
- status_t status = NO_ERROR;
- if ((status = parcel->readUint32(&mSamplingRate)) != NO_ERROR) return status;
- static_assert(sizeof(mFormat) == sizeof(uint32_t));
- if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mFormat))) != NO_ERROR) {
- return status;
- }
- if ((status = parcel->readUint32(&mChannelMask)) != NO_ERROR) return status;
- if ((status = parcel->readInt32(&mId)) != NO_ERROR) return status;
- // Read mGain from parcel.
- if ((status = parcel->readInt32(&mGain.index)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mGain.ramp_duration_ms)) != NO_ERROR) return status;
- std::vector<int> values;
- if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
- if (values.size() != std::size(mGain.values)) {
+status_t AudioPortConfig::writeToParcelable(media::AudioPortConfig* parcelable) const {
+ parcelable->sampleRate = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mSamplingRate));
+ parcelable->format = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+ parcelable->channelMask = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_channel_mask_t_int32_t(mChannelMask));
+ parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+ parcelable->gain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.index));
+ parcelable->gain.mode = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
+ parcelable->gain.channelMask = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
+ parcelable->gain.rampDurationMs = VALUE_OR_RETURN_STATUS(
+ convertIntegral<int32_t>(mGain.ramp_duration_ms));
+ parcelable->gain.values = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
+ mGain.values, convertIntegral<int32_t, int>));
+ return OK;
+}
+
+status_t AudioPortConfig::readFromParcel(const Parcel *parcel) {
+ media::AudioPortConfig parcelable;
+ return parcelable.readFromParcel(parcel)
+ ?: readFromParcelable(parcelable);
+}
+
+status_t AudioPortConfig::readFromParcelable(const media::AudioPortConfig& parcelable) {
+ mSamplingRate = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.sampleRate));
+ mFormat = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
+ mChannelMask = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
+ mId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(parcelable.id));
+ mGain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.index));
+ mGain.mode = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.gain.mode));
+ mGain.channel_mask = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_channel_mask_t(parcelable.gain.channelMask));
+ mGain.ramp_duration_ms = VALUE_OR_RETURN_STATUS(
+ convertIntegral<unsigned int>(parcelable.gain.rampDurationMs));
+ if (parcelable.gain.values.size() > std::size(mGain.values)) {
return BAD_VALUE;
}
- std::copy(values.begin(), values.end(), mGain.values);
- return status;
+ for (size_t i = 0; i < parcelable.gain.values.size(); ++i) {
+ mGain.values[i] = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.values[i]));
+ }
+ return OK;
}
} // namespace android
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 91be346..3b47fed 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -130,42 +130,73 @@
mIsDynamicRate == other->isDynamicRate();
}
-status_t AudioProfile::writeToParcel(Parcel *parcel) const
-{
- status_t status = NO_ERROR;
- if ((status = parcel->writeUtf8AsUtf16(mName)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mFormat)) != NO_ERROR) return status;
- std::vector<int> values(mChannelMasks.begin(), mChannelMasks.end());
- if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
- values.clear();
- values.assign(mSamplingRates.begin(), mSamplingRates.end());
- if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
- if ((status = parcel->writeBool(mIsDynamicFormat)) != NO_ERROR) return status;
- if ((status = parcel->writeBool(mIsDynamicChannels)) != NO_ERROR) return status;
- if ((status = parcel->writeBool(mIsDynamicRate)) != NO_ERROR) return status;
- return status;
+AudioProfile& AudioProfile::operator=(const AudioProfile& other) {
+ mName = other.mName;
+ mFormat = other.mFormat;
+ mChannelMasks = other.mChannelMasks;
+ mSamplingRates = other.mSamplingRates;
+ mIsDynamicFormat = other.mIsDynamicFormat;
+ mIsDynamicChannels = other.mIsDynamicChannels;
+ mIsDynamicRate = other.mIsDynamicRate;
+ return *this;
}
-status_t AudioProfile::readFromParcel(const Parcel *parcel)
-{
- status_t status = NO_ERROR;
- if ((status = parcel->readUtf8FromUtf16(&mName)) != NO_ERROR) return status;
- static_assert(sizeof(mFormat) == sizeof(uint32_t));
- if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mFormat))) != NO_ERROR) {
+status_t AudioProfile::writeToParcel(Parcel *parcel) const {
+ media::AudioProfile parcelable = VALUE_OR_RETURN_STATUS(toParcelable());
+ return parcelable.writeToParcel(parcel);
+}
+
+ConversionResult<media::AudioProfile>
+AudioProfile::toParcelable() const {
+ media::AudioProfile parcelable;
+ parcelable.name = mName;
+ parcelable.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+ parcelable.channelMasks = VALUE_OR_RETURN(
+ convertContainer<std::vector<int32_t>>(mChannelMasks,
+ legacy2aidl_audio_channel_mask_t_int32_t));
+ parcelable.samplingRates = VALUE_OR_RETURN(
+ convertContainer<std::vector<int32_t>>(mSamplingRates,
+ convertIntegral<int32_t, uint32_t>));
+ parcelable.isDynamicFormat = mIsDynamicFormat;
+ parcelable.isDynamicChannels = mIsDynamicChannels;
+ parcelable.isDynamicRate = mIsDynamicRate;
+ return parcelable;
+}
+
+status_t AudioProfile::readFromParcel(const Parcel *parcel) {
+ media::AudioProfile parcelable;
+ if (status_t status = parcelable.readFromParcel(parcel); status != OK) {
return status;
}
- std::vector<int> values;
- if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
- mChannelMasks.clear();
- mChannelMasks.insert(values.begin(), values.end());
- values.clear();
- if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
- mSamplingRates.clear();
- mSamplingRates.insert(values.begin(), values.end());
- if ((status = parcel->readBool(&mIsDynamicFormat)) != NO_ERROR) return status;
- if ((status = parcel->readBool(&mIsDynamicChannels)) != NO_ERROR) return status;
- if ((status = parcel->readBool(&mIsDynamicRate)) != NO_ERROR) return status;
- return status;
+ *this = *VALUE_OR_RETURN_STATUS(fromParcelable(parcelable));
+ return OK;
+}
+
+ConversionResult<sp<AudioProfile>>
+AudioProfile::fromParcelable(const media::AudioProfile& parcelable) {
+ sp<AudioProfile> legacy = new AudioProfile();
+ legacy->mName = parcelable.name;
+ legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
+ legacy->mChannelMasks = VALUE_OR_RETURN(
+ convertContainer<ChannelMaskSet>(parcelable.channelMasks,
+ aidl2legacy_int32_t_audio_channel_mask_t));
+ legacy->mSamplingRates = VALUE_OR_RETURN(
+ convertContainer<SampleRateSet>(parcelable.samplingRates,
+ convertIntegral<uint32_t, int32_t>));
+ legacy->mIsDynamicFormat = parcelable.isDynamicFormat;
+ legacy->mIsDynamicChannels = parcelable.isDynamicChannels;
+ legacy->mIsDynamicRate = parcelable.isDynamicRate;
+ return legacy;
+}
+
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile(const media::AudioProfile& aidl) {
+ return AudioProfile::fromParcelable(aidl);
+}
+
+ConversionResult<media::AudioProfile>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy) {
+ return legacy->toParcelable();
}
ssize_t AudioProfileVector::add(const sp<AudioProfile> &profile)
@@ -258,6 +289,16 @@
return false;
}
+bool AudioProfileVector::contains(const sp<AudioProfile>& profile) const
+{
+ for (const auto& audioProfile : *this) {
+ if (audioProfile->equals(profile)) {
+ return true;
+ }
+ }
+ return false;
+}
+
void AudioProfileVector::dump(std::string *dst, int spaces) const
{
dst->append(base::StringPrintf("%*s- Profiles:\n", spaces, ""));
@@ -304,4 +345,14 @@
});
}
+ConversionResult<AudioProfileVector>
+aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl) {
+ return convertContainer<AudioProfileVector>(aidl, aidl2legacy_AudioProfile);
+}
+
+ConversionResult<std::vector<media::AudioProfile>>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy) {
+ return convertContainer<std::vector<media::AudioProfile>>(legacy, legacy2aidl_AudioProfile);
+}
+
} // namespace android
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 3dbe37d..a3e9589 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -19,6 +19,7 @@
#include <android-base/stringprintf.h>
#include <audio_utils/string.h>
+#include <media/AidlConversion.h>
#include <media/DeviceDescriptorBase.h>
#include <media/TypeConverter.h>
@@ -40,11 +41,15 @@
AUDIO_PORT_ROLE_SOURCE),
mDeviceTypeAddr(deviceTypeAddr)
{
- if (mDeviceTypeAddr.mAddress.empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
- mDeviceTypeAddr.mAddress = "0";
+ if (mDeviceTypeAddr.address().empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
+ mDeviceTypeAddr.setAddress("0");
}
}
+void DeviceDescriptorBase::setAddress(const std::string &address) {
+ mDeviceTypeAddr.setAddress(address);
+}
+
void DeviceDescriptorBase::toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig) const
{
@@ -76,13 +81,12 @@
void DeviceDescriptorBase::toAudioPort(struct audio_port *port) const
{
ALOGV("DeviceDescriptorBase::toAudioPort() handle %d type %08x", mId, mDeviceTypeAddr.mType);
- AudioPort::toAudioPort(port);
- toAudioPortConfig(&port->active_config);
- port->id = mId;
- port->ext.device.type = mDeviceTypeAddr.mType;
- port->ext.device.encapsulation_modes = mEncapsulationModes;
- port->ext.device.encapsulation_metadata_types = mEncapsulationMetadataTypes;
- (void)audio_utils_strlcpy_zerofill(port->ext.device.address, mDeviceTypeAddr.getAddress());
+ toAudioPortInternal(port);
+}
+
+void DeviceDescriptorBase::toAudioPort(struct audio_port_v7 *port) const {
+ ALOGV("DeviceDescriptorBase::toAudioPort() v7 handle %d type %08x", mId, mDeviceTypeAddr.mType);
+ toAudioPortInternal(port);
}
status_t DeviceDescriptorBase::setEncapsulationModes(uint32_t encapsulationModes) {
@@ -123,18 +127,16 @@
"%*s- supported encapsulation metadata types: %u",
spaces, "", mEncapsulationMetadataTypes));
- if (mDeviceTypeAddr.mAddress.size() != 0) {
+ if (mDeviceTypeAddr.address().size() != 0) {
dst->append(base::StringPrintf(
"%*s- address: %-32s\n", spaces, "", mDeviceTypeAddr.getAddress()));
}
AudioPort::dump(dst, spaces, verbose);
}
-std::string DeviceDescriptorBase::toString() const
+std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
{
- std::stringstream sstream;
- sstream << "type:0x" << std::hex << type() << ",@:" << mDeviceTypeAddr.mAddress;
- return sstream.str();
+ return mDeviceTypeAddr.toString(includeSensitiveInfo);
}
void DeviceDescriptorBase::log() const
@@ -154,26 +156,53 @@
mDeviceTypeAddr.equals(other->mDeviceTypeAddr);
}
+
status_t DeviceDescriptorBase::writeToParcel(Parcel *parcel) const
{
- status_t status = NO_ERROR;
- if ((status = AudioPort::writeToParcel(parcel)) != NO_ERROR) return status;
- if ((status = AudioPortConfig::writeToParcel(parcel)) != NO_ERROR) return status;
- if ((status = parcel->writeParcelable(mDeviceTypeAddr)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mEncapsulationModes)) != NO_ERROR) return status;
- if ((status = parcel->writeUint32(mEncapsulationMetadataTypes)) != NO_ERROR) return status;
- return status;
+ media::AudioPort parcelable;
+ return writeToParcelable(&parcelable)
+ ?: parcelable.writeToParcel(parcel);
}
-status_t DeviceDescriptorBase::readFromParcel(const Parcel *parcel)
-{
- status_t status = NO_ERROR;
- if ((status = AudioPort::readFromParcel(parcel)) != NO_ERROR) return status;
- if ((status = AudioPortConfig::readFromParcel(parcel)) != NO_ERROR) return status;
- if ((status = parcel->readParcelable(&mDeviceTypeAddr)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mEncapsulationModes)) != NO_ERROR) return status;
- if ((status = parcel->readUint32(&mEncapsulationMetadataTypes)) != NO_ERROR) return status;
- return status;
+status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
+ AudioPort::writeToParcelable(parcelable);
+ AudioPortConfig::writeToParcelable(&parcelable->activeConfig);
+ parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+
+ media::AudioPortDeviceExt ext;
+ ext.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(mDeviceTypeAddr));
+ ext.encapsulationModes = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
+ ext.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
+ UNION_SET(parcelable->ext, device, std::move(ext));
+ return OK;
+}
+
+status_t DeviceDescriptorBase::readFromParcel(const Parcel *parcel) {
+ media::AudioPort parcelable;
+ return parcelable.readFromParcel(parcel)
+ ?: readFromParcelable(parcelable);
+}
+
+status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
+ if (parcelable.type != media::AudioPortType::DEVICE) {
+ return BAD_VALUE;
+ }
+ status_t status = AudioPort::readFromParcelable(parcelable)
+ ?: AudioPortConfig::readFromParcelable(parcelable.activeConfig);
+ if (status != OK) {
+ return status;
+ }
+
+ media::AudioPortDeviceExt ext = VALUE_OR_RETURN_STATUS(UNION_GET(parcelable.ext, device));
+ mDeviceTypeAddr = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioDeviceTypeAddress(ext.device));
+ mEncapsulationModes = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioEncapsulationMode_mask(ext.encapsulationModes));
+ mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioEncapsulationMetadataType_mask(ext.encapsulationMetadataTypes));
+ return OK;
}
std::string toString(const DeviceDescriptorBaseVector& devices)
@@ -197,4 +226,24 @@
return deviceTypeAddrs;
}
+ConversionResult<sp<DeviceDescriptorBase>>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl) {
+ sp<DeviceDescriptorBase> result = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
+ status_t status = result->readFromParcelable(aidl);
+ if (status != OK) {
+ return base::unexpected(status);
+ }
+ return result;
+}
+
+ConversionResult<media::AudioPort>
+legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy) {
+ media::AudioPort aidl;
+ status_t status = legacy->writeToParcelable(&aidl);
+ if (status != OK) {
+ return base::unexpected(status);
+ }
+ return aidl;
+}
+
} // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 72fda49..aa7ca69 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -96,7 +96,7 @@
static inline audio_devices_t deviceTypesToBitMask(const DeviceTypeSet& deviceTypes) {
audio_devices_t types = AUDIO_DEVICE_NONE;
for (auto deviceType : deviceTypes) {
- types |= deviceType;
+ types = static_cast<audio_devices_t>(types | deviceType);
}
return types;
}
@@ -131,4 +131,4 @@
std::string toString(const DeviceTypeSet& deviceTypes);
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index 60ea78e..34da233 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -19,36 +19,53 @@
#include <string>
#include <vector>
+#include <android/media/AudioDevice.h>
#include <binder/Parcelable.h>
#include <binder/Parcel.h>
#include <media/AudioContainers.h>
+#include <media/AidlConversion.h>
#include <system/audio.h>
#include <utils/Errors.h>
namespace android {
-struct AudioDeviceTypeAddr : public Parcelable {
+class AudioDeviceTypeAddr : public Parcelable {
+public:
AudioDeviceTypeAddr() = default;
- AudioDeviceTypeAddr(audio_devices_t type, const std::string& address) :
- mType(type), mAddress(address) {}
+ AudioDeviceTypeAddr(audio_devices_t type, const std::string& address);
const char* getAddress() const;
+ const std::string& address() const;
+
+ void setAddress(const std::string& address);
+
+ bool isAddressSensitive();
+
bool equals(const AudioDeviceTypeAddr& other) const;
AudioDeviceTypeAddr& operator= (const AudioDeviceTypeAddr&) = default;
bool operator<(const AudioDeviceTypeAddr& other) const;
+ bool operator==(const AudioDeviceTypeAddr& rhs) const;
+
+ bool operator!=(const AudioDeviceTypeAddr& rhs) const;
+
void reset();
+ std::string toString(bool includeSensitiveInfo=false) const;
+
status_t readFromParcel(const Parcel *parcel) override;
status_t writeToParcel(Parcel *parcel) const override;
audio_devices_t mType = AUDIO_DEVICE_NONE;
+
+private:
std::string mAddress;
+ bool mIsAddressSensitive;
};
using AudioDeviceTypeAddrVector = std::vector<AudioDeviceTypeAddr>;
@@ -58,4 +75,21 @@
*/
DeviceTypeSet getAudioDeviceTypes(const AudioDeviceTypeAddrVector& deviceTypeAddrs);
-}
+/**
+ * Return a collection of AudioDeviceTypeAddrs that are shown in `devices` but not
+ * in `devicesToExclude`
+ */
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+ const AudioDeviceTypeAddrVector& devices,
+ const AudioDeviceTypeAddrVector& devicesToExclude);
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+ bool includeSensitiveInfo=false);
+
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioDeviceTypeAddr>
+aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl);
+ConversionResult<media::AudioDevice>
+legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy);
+
+} // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioGain.h b/media/libaudiofoundation/include/media/AudioGain.h
index 859f1e7..a06b686 100644
--- a/media/libaudiofoundation/include/media/AudioGain.h
+++ b/media/libaudiofoundation/include/media/AudioGain.h
@@ -16,8 +16,10 @@
#pragma once
+#include <android/media/AudioGain.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
#include <system/audio.h>
@@ -72,6 +74,9 @@
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ status_t writeToParcelable(media::AudioGain* parcelable) const;
+ status_t readFromParcelable(const media::AudioGain& parcelable);
+
private:
int mIndex;
struct audio_gain mGain;
@@ -79,6 +84,12 @@
bool mUseForVolume = false;
};
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<AudioGain>>
+aidl2legacy_AudioGain(const media::AudioGain& aidl);
+ConversionResult<media::AudioGain>
+legacy2aidl_AudioGain(const sp<AudioGain>& legacy);
+
class AudioGains : public std::vector<sp<AudioGain> >, public Parcelable
{
public:
@@ -104,4 +115,10 @@
status_t readFromParcel(const Parcel* parcel) override;
};
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioGains>
+aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl);
+ConversionResult<std::vector<media::AudioGain>>
+legacy2aidl_AudioGains(const AudioGains& legacy);
+
} // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 3c013cb..633e4e3 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -17,7 +17,10 @@
#pragma once
#include <string>
+#include <type_traits>
+#include <android/media/AudioPort.h>
+#include <android/media/AudioPortConfig.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
#include <media/AudioGain.h>
@@ -48,6 +51,8 @@
virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
+
virtual void addAudioProfile(const sp<AudioProfile> &profile) {
mProfiles.add(profile);
}
@@ -64,6 +69,8 @@
virtual void importAudioPort(const sp<AudioPort>& port, bool force = false);
+ virtual void importAudioPort(const audio_port_v7& port);
+
status_t checkGain(const struct audio_gain_config *gainConfig, int index) const {
if (index < 0 || (size_t)index >= mGains.size()) {
return BAD_VALUE;
@@ -86,12 +93,27 @@
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ status_t writeToParcelable(media::AudioPort* parcelable) const;
+ status_t readFromParcelable(const media::AudioPort& parcelable);
+
AudioGains mGains; // gain controllers
protected:
std::string mName;
audio_port_type_t mType;
audio_port_role_t mRole;
AudioProfileVector mProfiles; // AudioProfiles supported by this port (format, Rates, Channels)
+private:
+ template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+ || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+ void toAudioPortBase(T* port) const {
+ port->role = mRole;
+ port->type = mType;
+ strlcpy(port->name, mName.c_str(), AUDIO_PORT_MAX_NAME_LEN);
+ port->num_gains = std::min(mGains.size(), (size_t) AUDIO_PORT_MAX_GAINS);
+ for (size_t i = 0; i < port->num_gains; i++) {
+ port->gains[i] = mGains[i]->getGain();
+ }
+ }
};
@@ -119,6 +141,8 @@
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ status_t writeToParcelable(media::AudioPortConfig* parcelable) const;
+ status_t readFromParcelable(const media::AudioPortConfig& parcelable);
protected:
unsigned int mSamplingRate = 0u;
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 730138a..57592bc 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -19,8 +19,10 @@
#include <string>
#include <vector>
+#include <android/media/AudioProfile.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
#include <media/AudioContainers.h>
#include <system/audio.h>
#include <utils/RefBase.h>
@@ -73,6 +75,9 @@
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ ConversionResult<media::AudioProfile> toParcelable() const;
+ static ConversionResult<sp<AudioProfile>> fromParcelable(const media::AudioProfile& parcelable);
+
private:
std::string mName;
audio_format_t mFormat; // The format for an audio profile should only be set when initialized.
@@ -82,8 +87,17 @@
bool mIsDynamicFormat = false;
bool mIsDynamicChannels = false;
bool mIsDynamicRate = false;
+
+ AudioProfile() = default;
+ AudioProfile& operator=(const AudioProfile& other);
};
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile(const media::AudioProfile& aidl);
+ConversionResult<media::AudioProfile>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy);
+
class AudioProfileVector : public std::vector<sp<AudioProfile>>, public Parcelable
{
public:
@@ -105,6 +119,8 @@
bool hasDynamicProfile() const;
bool hasDynamicRateFor(audio_format_t format) const;
+ bool contains(const sp<AudioProfile>& profile) const;
+
virtual void dump(std::string *dst, int spaces) const;
bool equals(const AudioProfileVector& other) const;
@@ -115,4 +131,11 @@
bool operator == (const AudioProfile &left, const AudioProfile &right);
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioProfileVector>
+aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl);
+ConversionResult<std::vector<media::AudioProfile>>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy);
+
+
} // namespace android
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index af04721..140ce36 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -18,6 +18,7 @@
#include <vector>
+#include <android/media/AudioPort.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
#include <media/AudioContainers.h>
@@ -41,8 +42,8 @@
virtual ~DeviceDescriptorBase() {}
audio_devices_t type() const { return mDeviceTypeAddr.mType; }
- std::string address() const { return mDeviceTypeAddr.mAddress; }
- void setAddress(const std::string &address) { mDeviceTypeAddr.mAddress = address; }
+ const std::string& address() const { return mDeviceTypeAddr.address(); }
+ void setAddress(const std::string &address);
const AudioDeviceTypeAddr& getDeviceTypeAddr() const { return mDeviceTypeAddr; }
// AudioPortConfig
@@ -54,6 +55,7 @@
// AudioPort
virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
status_t setEncapsulationModes(uint32_t encapsulationModes);
status_t setEncapsulationMetadataTypes(uint32_t encapsulationMetadataTypes);
@@ -61,17 +63,39 @@
void dump(std::string *dst, int spaces, int index,
const char* extraInfo = nullptr, bool verbose = true) const;
void log() const;
- std::string toString() const;
+
+ /**
+ * Return a string to describe the DeviceDescriptor.
+ *
+ * @param includeSensitiveInfo sensitive information will be added when it is true.
+ * @return a string that can be used to describe the DeviceDescriptor.
+ */
+ std::string toString(bool includeSensitiveInfo = false) const;
bool equals(const sp<DeviceDescriptorBase>& other) const;
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ status_t writeToParcelable(media::AudioPort* parcelable) const;
+ status_t readFromParcelable(const media::AudioPort& parcelable);
+
protected:
AudioDeviceTypeAddr mDeviceTypeAddr;
uint32_t mEncapsulationModes = 0;
uint32_t mEncapsulationMetadataTypes = 0;
+private:
+ template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+ || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+ void toAudioPortInternal(T* port) const {
+ AudioPort::toAudioPort(port);
+ toAudioPortConfig(&port->active_config);
+ port->id = mId;
+ port->ext.device.type = mDeviceTypeAddr.mType;
+ port->ext.device.encapsulation_modes = mEncapsulationModes;
+ port->ext.device.encapsulation_metadata_types = mEncapsulationMetadataTypes;
+ (void)audio_utils_strlcpy_zerofill(port->ext.device.address, mDeviceTypeAddr.getAddress());
+ }
};
using DeviceDescriptorBaseVector = std::vector<sp<DeviceDescriptorBase>>;
@@ -87,4 +111,10 @@
*/
AudioDeviceTypeAddrVector deviceTypeAddrsFromDescriptors(const DeviceDescriptorBaseVector& devices);
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<DeviceDescriptorBase>>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl);
+ConversionResult<media::AudioPort>
+legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy);
+
} // namespace android
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 1709d1e..482f40e 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -18,6 +18,7 @@
"libaudiohal@4.0",
"libaudiohal@5.0",
"libaudiohal@6.0",
+// "libaudiohal@7.0",
],
shared_libs: [
@@ -62,8 +63,6 @@
export_include_dirs: ["include"],
// This is needed because the stream interface includes media/MicrophoneInfo.h
- // which is not in any library but has a dependency on headers from libbinder.
- header_libs: ["libbinder_headers"],
-
- export_header_lib_headers: ["libbinder_headers"],
+ header_libs: ["av-headers"],
+ export_header_lib_headers: ["av-headers"],
}
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 5985ef0..7228b22 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -31,6 +31,7 @@
/** Supported HAL versions, in order of preference.
*/
const char* sAudioHALVersions[] = {
+ "7.0",
"6.0",
"5.0",
"4.0",
diff --git a/media/libaudiohal/OWNERS b/media/libaudiohal/OWNERS
index 1456ab6..71b17e6 100644
--- a/media/libaudiohal/OWNERS
+++ b/media/libaudiohal/OWNERS
@@ -1,2 +1 @@
-krocard@google.com
mnaganov@google.com
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 967fba1..fe47881 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -26,6 +26,7 @@
"android.hardware.audio.common-util",
"android.hidl.allocator@1.0",
"android.hidl.memory@1.0",
+ "av-types-aidl-unstable-cpp",
"libaudiofoundation",
"libaudiohal_deathhandler",
"libaudioutils",
@@ -116,3 +117,20 @@
]
}
+cc_library_shared {
+ enabled: false,
+ name: "libaudiohal@7.0",
+ defaults: ["libaudiohal_default"],
+ shared_libs: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio.effect@7.0",
+ "android.hardware.audio@7.0",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ]
+}
+
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 7d0d83d..0108816 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -48,6 +48,9 @@
namespace {
+using ::android::hardware::audio::common::CPP_VERSION::AudioPort;
+using ::android::hardware::audio::common::CPP_VERSION::AudioPortConfig;
+
status_t deviceAddressFromHal(
audio_devices_t device, const char* halAddress, DeviceAddress* address) {
address->device = AudioDevice(device);
@@ -212,7 +215,7 @@
const struct audio_config *config, size_t *size) {
if (mDevice == 0) return NO_INIT;
AudioConfig hidlConfig;
- HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+ HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
Result retval;
Return<void> ret = mDevice->getInputBufferSize(
hidlConfig,
@@ -237,7 +240,7 @@
status_t status = deviceAddressFromHal(deviceType, address, &hidlDevice);
if (status != OK) return status;
AudioConfig hidlConfig;
- HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+ HidlUtils::audioConfigFromHal(*config, false /*isInput*/, &hidlConfig);
Result retval = Result::NOT_INITIALIZED;
Return<void> ret = mDevice->openOutputStream(
handle,
@@ -272,7 +275,7 @@
status_t status = deviceAddressFromHal(devices, address, &hidlDevice);
if (status != OK) return status;
AudioConfig hidlConfig;
- HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+ HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
Result retval = Result::NOT_INITIALIZED;
#if MAJOR_VERSION == 2
auto sinkMetadata = AudioSource(source);
@@ -388,6 +391,33 @@
return processReturn("getAudioPort", ret, retval);
}
+status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+ if (mDevice == 0) return NO_INIT;
+ status_t status = NO_ERROR;
+#if MAJOR_VERSION >= 7
+ AudioPort hidlPort;
+ HidlUtils::audioPortFromHal(*port, &hidlPort);
+ Result retval;
+ Return<void> ret = mDevice->getAudioPort(
+ hidlPort,
+ [&](Result r, const AudioPort& p) {
+ retval = r;
+ if (retval == Result::OK) {
+ HidlUtils::audioPortToHal(p, port);
+ }
+ });
+ status = processReturn("getAudioPort", ret, retval);
+#else
+ struct audio_port audioPort = {};
+ audio_populate_audio_port(port, &audioPort);
+ status = getAudioPort(&audioPort);
+ if (status == NO_ERROR) {
+ audio_populate_audio_port_v7(&audioPort, port);
+ }
+#endif
+ return status;
+}
+
status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
if (mDevice == 0) return NO_INIT;
AudioPortConfig hidlConfig;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index d342d4a..abd4ad5 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -107,6 +107,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port);
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index 8021d92..aa9e477 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -180,6 +180,16 @@
return mDev->get_audio_port(mDev, port);
}
+status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
+ struct audio_port audioPort = {};
+ audio_populate_audio_port(port, &audioPort);
+ status_t status = getAudioPort(&audioPort);
+ if (status == NO_ERROR) {
+ audio_populate_audio_port_v7(&audioPort, port);
+ }
+ return status;
+}
+
status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
return mDev->set_audio_port_config(mDev, config);
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index d85e2a7..195204b 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -100,6 +100,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port);
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index caf575c..506feb8 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -25,9 +25,9 @@
#include "EffectBufferHalHidl.h"
#include "EffectHalHidl.h"
-#include "HidlUtils.h"
+#include "UuidUtils.h"
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
using ::android::hardware::audio::common::utils::EnumBitfield;
using ::android::hardware::hidl_vec;
using ::android::hardware::MQDescriptorSync;
@@ -58,8 +58,8 @@
// static
void EffectHalHidl::effectDescriptorToHal(
const EffectDescriptor& descriptor, effect_descriptor_t* halDescriptor) {
- HidlUtils::uuidToHal(descriptor.type, &halDescriptor->type);
- HidlUtils::uuidToHal(descriptor.uuid, &halDescriptor->uuid);
+ UuidUtils::uuidToHal(descriptor.type, &halDescriptor->type);
+ UuidUtils::uuidToHal(descriptor.uuid, &halDescriptor->uuid);
halDescriptor->flags = static_cast<uint32_t>(descriptor.flags);
halDescriptor->cpuLoad = descriptor.cpuLoad;
halDescriptor->memoryUsage = descriptor.memoryUsage;
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 9192a31..35ac332 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -23,9 +23,9 @@
#include "EffectBufferHalHidl.h"
#include "EffectHalHidl.h"
#include "EffectsFactoryHalHidl.h"
-#include "HidlUtils.h"
+#include "UuidUtils.h"
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
using ::android::hardware::Return;
namespace android {
@@ -37,7 +37,7 @@
EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
: ConversionHelperHidl("EffectsFactory") {
- ALOG_ASSERT(effectsFactory != nullptr, "Provided IDevicesFactory service is NULL");
+ ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
mEffectsFactory = effectsFactory;
}
@@ -85,7 +85,7 @@
// TODO: check for nullptr
if (mEffectsFactory == 0) return NO_INIT;
Uuid hidlUuid;
- HidlUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
+ UuidUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
Result retval = Result::NOT_INITIALIZED;
Return<void> ret = mEffectsFactory->getDescriptor(hidlUuid,
[&](Result r, const EffectDescriptor& result) {
@@ -107,7 +107,7 @@
int32_t deviceId __unused, sp<EffectHalInterface> *effect) {
if (mEffectsFactory == 0) return NO_INIT;
Uuid hidlUuid;
- HidlUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
+ UuidUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
Result retval = Result::NOT_INITIALIZED;
Return<void> ret;
#if MAJOR_VERSION >= 6
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index dece1bb..5fa85e7 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -54,6 +54,8 @@
virtual status_t dumpEffects(int fd);
+ virtual float getHalVersion() { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+
status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
status_t mirrorBuffer(void* external, size_t size,
sp<EffectBufferHalInterface>* buffer) override;
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index 5fd3912..f6a554b 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -19,6 +19,7 @@
#include <audio_utils/clock.h>
#include <audio_utils/PowerLog.h>
+#include <cutils/bitops.h>
#include <cutils/properties.h>
#include <system/audio.h>
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 1e04b21..29ef011 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -106,6 +106,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port) = 0;
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index 3a76f9f..9fb56ae 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -46,6 +46,8 @@
virtual status_t dumpEffects(int fd) = 0;
+ virtual float getHalVersion() = 0;
+
static sp<EffectsFactoryHalInterface> create();
virtual status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 1a31420..d85e2e9 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -79,10 +79,14 @@
&& mixerChannelMask == (track->mMixerChannelMask | track->mMixerHapticChannelMask)) {
return false; // no need to change
}
- const audio_channel_mask_t hapticChannelMask = trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
- trackChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
- const audio_channel_mask_t mixerHapticChannelMask = mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
- mixerChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+ const audio_channel_mask_t hapticChannelMask =
+ static_cast<audio_channel_mask_t>(trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+ trackChannelMask = static_cast<audio_channel_mask_t>(
+ trackChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+ const audio_channel_mask_t mixerHapticChannelMask = static_cast<audio_channel_mask_t>(
+ mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+ mixerChannelMask = static_cast<audio_channel_mask_t>(
+ mixerChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
// always recompute for both channel masks even if only one has changed.
const uint32_t trackChannelCount = audio_channel_count_from_out_mask(trackChannelMask);
const uint32_t mixerChannelCount = audio_channel_count_from_out_mask(mixerChannelMask);
@@ -362,7 +366,8 @@
const audio_channel_mask_t trackChannelMask =
static_cast<audio_channel_mask_t>(valueInt);
if (setChannelMasks(name, trackChannelMask,
- (track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
+ static_cast<audio_channel_mask_t>(
+ track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
ALOGV("setParameter(TRACK, CHANNEL_MASK, %x)", trackChannelMask);
invalidate();
}
@@ -407,7 +412,8 @@
case MIXER_CHANNEL_MASK: {
const audio_channel_mask_t mixerChannelMask =
static_cast<audio_channel_mask_t>(valueInt);
- if (setChannelMasks(name, track->channelMask | track->mHapticChannelMask,
+ if (setChannelMasks(name, static_cast<audio_channel_mask_t>(
+ track->channelMask | track->mHapticChannelMask),
mixerChannelMask)) {
ALOGV("setParameter(TRACK, MIXER_CHANNEL_MASK, %#x)", mixerChannelMask);
invalidate();
@@ -423,7 +429,7 @@
}
} break;
case HAPTIC_INTENSITY: {
- const haptic_intensity_t hapticIntensity = static_cast<haptic_intensity_t>(valueInt);
+ const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
if (track->mHapticIntensity != hapticIntensity) {
track->mHapticIntensity = hapticIntensity;
}
@@ -533,9 +539,10 @@
Track* t = static_cast<Track*>(track);
audio_channel_mask_t channelMask = t->channelMask;
- t->mHapticChannelMask = channelMask & AUDIO_CHANNEL_HAPTIC_ALL;
+ t->mHapticChannelMask = static_cast<audio_channel_mask_t>(
+ channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
t->mHapticChannelCount = audio_channel_count_from_out_mask(t->mHapticChannelMask);
- channelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+ channelMask = static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
t->channelCount = audio_channel_count_from_out_mask(channelMask);
ALOGV_IF(audio_channel_mask_get_bits(channelMask) != AUDIO_CHANNEL_OUT_STEREO,
"Non-stereo channel mask: %d\n", channelMask);
@@ -545,7 +552,7 @@
t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
// haptic
t->mHapticPlaybackEnabled = false;
- t->mHapticIntensity = HAPTIC_SCALE_NONE;
+ t->mHapticIntensity = os::HapticScale::NONE;
t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
t->mMixerHapticChannelCount = 0;
t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
@@ -590,19 +597,12 @@
const std::shared_ptr<Track> &t = getTrack(name);
if (t->mHapticPlaybackEnabled) {
size_t sampleCount = mFrameCount * t->mMixerHapticChannelCount;
- float gamma = t->getHapticScaleGamma();
- float maxAmplitudeRatio = t->getHapticMaxAmplitudeRatio();
uint8_t* buffer = (uint8_t*)pair.first + mFrameCount * audio_bytes_per_frame(
t->mMixerChannelCount, t->mMixerFormat);
switch (t->mMixerFormat) {
// Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
case AUDIO_FORMAT_PCM_FLOAT: {
- float* fout = (float*) buffer;
- for (size_t i = 0; i < sampleCount; i++) {
- float mul = fout[i] >= 0 ? 1.0 : -1.0;
- fout[i] = powf(fabsf(fout[i] / HAPTIC_MAX_AMPLITUDE_FLOAT), gamma)
- * maxAmplitudeRatio * HAPTIC_MAX_AMPLITUDE_FLOAT * mul;
- }
+ os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity);
} break;
default:
LOG_ALWAYS_FATAL("bad mMixerFormat: %#x", t->mMixerFormat);
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 64f91fe..a54e22f 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -1500,7 +1500,7 @@
ALOGVV("track__Resample\n");
mResampler->setSampleRate(sampleRate);
const bool ramp = needsRamp();
- if (MIXTYPE == MIXTYPE_MONOEXPAND || MIXTYPE == MIXTYPE_STEREOEXPAND
+ if (MIXTYPE == MIXTYPE_MONOEXPAND || MIXTYPE == MIXTYPE_STEREOEXPAND // custom volume handling
|| ramp || aux != NULL) {
// if ramp: resample with unity gain to temp buffer and scale/mix in 2nd step.
// if aux != NULL: resample with unity gain to temp buffer then apply send level.
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index 2748182..8d374c9 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -234,16 +234,20 @@
static_assert(NCHAN > 0 && NCHAN <= 8);
static_assert(MIXTYPE == MIXTYPE_MULTI_STEREOVOL
|| MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
- || MIXTYPE == MIXTYPE_STEREOEXPAND);
+ || MIXTYPE == MIXTYPE_STEREOEXPAND
+ || MIXTYPE == MIXTYPE_MONOEXPAND);
auto proc = [](auto& a, const auto& b) {
- if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL) {
+ if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
+ || MIXTYPE == MIXTYPE_STEREOEXPAND
+ || MIXTYPE == MIXTYPE_MONOEXPAND) {
a += b;
} else {
a = b;
}
};
auto inp = [&in]() -> const TI& {
- if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) {
+ if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND
+ || MIXTYPE == MIXTYPE_MONOEXPAND) {
return *in;
} else {
return *in++;
@@ -311,6 +315,8 @@
* TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
* Input channel count is 1.
* vol: represents volume array.
+ * This uses stereo balanced volume vol[0] and vol[1].
+ * Before R, this was a full volume array but was called only for channels <= 2.
*
* This accumulates into the out pointer.
*
@@ -355,17 +361,13 @@
do {
TA auxaccum = 0;
if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
vol[i] += volinc[i];
}
- } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
- for (int i = 0; i < NCHAN; ++i) {
- *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
- vol[i] += volinc[i];
- }
- in++;
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
vol[i] += volinc[i];
@@ -382,11 +384,13 @@
vol[0] += volinc[0];
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
|| MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+ || MIXTYPE == MIXTYPE_MONOEXPAND
|| MIXTYPE == MIXTYPE_STEREOEXPAND) {
stereoVolumeHelper<MIXTYPE, NCHAN>(
out, in, vol, [&auxaccum] (auto &a, const auto &b) {
return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
});
+ if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
vol[0] += volinc[0];
vol[1] += volinc[1];
@@ -400,17 +404,13 @@
} else {
do {
if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
vol[i] += volinc[i];
}
- } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
- for (int i = 0; i < NCHAN; ++i) {
- *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
- vol[i] += volinc[i];
- }
- in++;
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
vol[i] += volinc[i];
@@ -427,10 +427,12 @@
vol[0] += volinc[0];
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
|| MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+ || MIXTYPE == MIXTYPE_MONOEXPAND
|| MIXTYPE == MIXTYPE_STEREOEXPAND) {
stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
return MixMul<TO, TI, TV>(a, b);
});
+ if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
vol[0] += volinc[0];
vol[1] += volinc[1];
@@ -453,15 +455,12 @@
do {
TA auxaccum = 0;
if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
}
- } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
- for (int i = 0; i < NCHAN; ++i) {
- *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
- }
- in++;
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
}
@@ -475,11 +474,13 @@
}
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
|| MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+ || MIXTYPE == MIXTYPE_MONOEXPAND
|| MIXTYPE == MIXTYPE_STEREOEXPAND) {
stereoVolumeHelper<MIXTYPE, NCHAN>(
out, in, vol, [&auxaccum] (auto &a, const auto &b) {
return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
});
+ if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
} else /* constexpr */ {
static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
@@ -489,16 +490,14 @@
} while (--frameCount);
} else {
do {
+ // ALOGD("Mixtype:%d NCHAN:%d", MIXTYPE, NCHAN);
if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
}
- } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
- for (int i = 0; i < NCHAN; ++i) {
- *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
- }
- in++;
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+ static_assert(NCHAN <= 2);
for (int i = 0; i < NCHAN; ++i) {
*out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
}
@@ -512,10 +511,12 @@
}
} else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
|| MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+ || MIXTYPE == MIXTYPE_MONOEXPAND
|| MIXTYPE == MIXTYPE_STEREOEXPAND) {
stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
return MixMul<TO, TI, TV>(a, b);
});
+ if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
} else /* constexpr */ {
static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
diff --git a/media/libaudioprocessing/AudioResamplerDyn.cpp b/media/libaudioprocessing/AudioResamplerDyn.cpp
index ec56b00..1aacfd1 100644
--- a/media/libaudioprocessing/AudioResamplerDyn.cpp
+++ b/media/libaudioprocessing/AudioResamplerDyn.cpp
@@ -25,7 +25,6 @@
#include <cutils/compiler.h>
#include <cutils/properties.h>
-#include <utils/Debug.h>
#include <utils/Log.h>
#include <audio_utils/primitives.h>
@@ -636,7 +635,7 @@
const uint32_t phaseWrapLimit = c.mL << c.mShift;
size_t inFrameCount = (phaseIncrement * (uint64_t)outFrameCount + phaseFraction)
/ phaseWrapLimit;
- // sanity check that inFrameCount is in signed 32 bit integer range.
+ // validate that inFrameCount is in signed 32 bit integer range.
ALOG_ASSERT(0 <= inFrameCount && inFrameCount < (1U << 31));
//ALOGV("inFrameCount:%d outFrameCount:%d"
@@ -646,7 +645,7 @@
// NOTE: be very careful when modifying the code here. register
// pressure is very high and a small change might cause the compiler
// to generate far less efficient code.
- // Always sanity check the result with objdump or test-resample.
+ // Always validate the result with objdump or test-resample.
// the following logic is a bit convoluted to keep the main processing loop
// as tight as possible with register allocation.
diff --git a/media/libaudioprocessing/AudioResamplerFirProcess.h b/media/libaudioprocessing/AudioResamplerFirProcess.h
index 9b70a1c..1fcffcc 100644
--- a/media/libaudioprocessing/AudioResamplerFirProcess.h
+++ b/media/libaudioprocessing/AudioResamplerFirProcess.h
@@ -381,7 +381,7 @@
// NOTE: be very careful when modifying the code here. register
// pressure is very high and a small change might cause the compiler
// to generate far less efficient code.
- // Always sanity check the result with objdump or test-resample.
+ // Always validate the result with objdump or test-resample.
if (LOCKED) {
// locked polyphase (no interpolation)
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index 5a03a0d..f2c386d 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -404,7 +404,7 @@
// NOTE: be very careful when modifying the code here. register
// pressure is very high and a small change might cause the compiler
// to generate far less efficient code.
- // Always sanity check the result with objdump or test-resample.
+ // Always validate the result with objdump or test-resample.
// compute the index of the coefficient on the positive side and
// negative side
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 3f7cd48..70eafe3 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -22,10 +22,10 @@
#include <stdint.h>
#include <sys/types.h>
-#include <android/os/IExternalVibratorService.h>
#include <media/AudioMixerBase.h>
#include <media/BufferProviders.h>
#include <utils/threads.h>
+#include <vibrator/ExternalVibrationUtils.h>
// FIXME This is actually unity gain, which might not be max in future, expressed in U.12
#define MAX_GAIN_INT AudioMixerBase::UNITY_GAIN_INT
@@ -55,32 +55,6 @@
// parameter 'value' is a pointer to the new playback rate.
};
- typedef enum { // Haptic intensity, should keep consistent with VibratorService
- HAPTIC_SCALE_MUTE = os::IExternalVibratorService::SCALE_MUTE,
- HAPTIC_SCALE_VERY_LOW = os::IExternalVibratorService::SCALE_VERY_LOW,
- HAPTIC_SCALE_LOW = os::IExternalVibratorService::SCALE_LOW,
- HAPTIC_SCALE_NONE = os::IExternalVibratorService::SCALE_NONE,
- HAPTIC_SCALE_HIGH = os::IExternalVibratorService::SCALE_HIGH,
- HAPTIC_SCALE_VERY_HIGH = os::IExternalVibratorService::SCALE_VERY_HIGH,
- } haptic_intensity_t;
- static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2.0f / 3.0f;
- static constexpr float HAPTIC_SCALE_LOW_RATIO = 3.0f / 4.0f;
- static const constexpr float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
-
- static inline bool isValidHapticIntensity(haptic_intensity_t hapticIntensity) {
- switch (hapticIntensity) {
- case HAPTIC_SCALE_MUTE:
- case HAPTIC_SCALE_VERY_LOW:
- case HAPTIC_SCALE_LOW:
- case HAPTIC_SCALE_NONE:
- case HAPTIC_SCALE_HIGH:
- case HAPTIC_SCALE_VERY_HIGH:
- return true;
- default:
- return false;
- }
- }
-
AudioMixer(size_t frameCount, uint32_t sampleRate)
: AudioMixerBase(frameCount, sampleRate) {
pthread_once(&sOnceControl, &sInitRoutine);
@@ -170,7 +144,7 @@
// Haptic
bool mHapticPlaybackEnabled;
- haptic_intensity_t mHapticIntensity;
+ os::HapticScale mHapticIntensity;
audio_channel_mask_t mHapticChannelMask;
uint32_t mHapticChannelCount;
audio_channel_mask_t mMixerHapticChannelMask;
@@ -180,38 +154,6 @@
uint32_t mAdjustNonDestructiveInChannelCount;
uint32_t mAdjustNonDestructiveOutChannelCount;
bool mKeepContractedChannels;
-
- float getHapticScaleGamma() const {
- // Need to keep consistent with the value in VibratorService.
- switch (mHapticIntensity) {
- case HAPTIC_SCALE_VERY_LOW:
- return 2.0f;
- case HAPTIC_SCALE_LOW:
- return 1.5f;
- case HAPTIC_SCALE_HIGH:
- return 0.5f;
- case HAPTIC_SCALE_VERY_HIGH:
- return 0.25f;
- default:
- return 1.0f;
- }
- }
-
- float getHapticMaxAmplitudeRatio() const {
- // Need to keep consistent with the value in VibratorService.
- switch (mHapticIntensity) {
- case HAPTIC_SCALE_VERY_LOW:
- return HAPTIC_SCALE_VERY_LOW_RATIO;
- case HAPTIC_SCALE_LOW:
- return HAPTIC_SCALE_LOW_RATIO;
- case HAPTIC_SCALE_NONE:
- case HAPTIC_SCALE_HIGH:
- case HAPTIC_SCALE_VERY_HIGH:
- return 1.0f;
- default:
- return 0.0f;
- }
- }
};
inline std::shared_ptr<Track> getTrack(int name) {
diff --git a/media/libaudioprocessing/include/media/AudioResamplerPublic.h b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
index 1b39067..200a4c8 100644
--- a/media/libaudioprocessing/include/media/AudioResamplerPublic.h
+++ b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
@@ -59,7 +59,7 @@
static inline bool isAudioPlaybackRateValid(const AudioPlaybackRate &playbackRate) {
if (playbackRate.mFallbackMode == AUDIO_TIMESTRETCH_FALLBACK_FAIL &&
- (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_SPEECH ||
+ (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_VOICE ||
playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_DEFAULT)) {
//test sonic specific constraints
return playbackRate.mSpeed >= TIMESTRETCH_SONIC_SPEED_MIN &&
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 1df47b7..2a0dec4 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -8,3 +8,14 @@
"libsndfile",
],
}
+
+cc_fuzz {
+ name: "libaudioprocessing_record_buffer_converter_fuzzer",
+ srcs: [
+ "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
+}
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
new file mode 100644
index 0000000..5165925
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+#define ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+
+#include <media/AudioBufferProvider.h>
+#include <system/audio.h>
+
+namespace android {
+
+class Provider : public AudioBufferProvider {
+ const void* mAddr; // base address
+ const size_t mNumFrames; // total frames
+ const size_t mFrameSize; // size of each frame in bytes
+ size_t mNextFrame; // index of next frame to provide
+ size_t mUnrel; // number of frames not yet released
+ public:
+ Provider(const void* addr, size_t frames, size_t frameSize)
+ : mAddr(addr),
+ mNumFrames(frames),
+ mFrameSize(frameSize),
+ mNextFrame(0),
+ mUnrel(0) {}
+ status_t getNextBuffer(Buffer* buffer) override {
+ if (buffer->frameCount > mNumFrames - mNextFrame) {
+ buffer->frameCount = mNumFrames - mNextFrame;
+ }
+ mUnrel = buffer->frameCount;
+ if (buffer->frameCount > 0) {
+ buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
+ return NO_ERROR;
+ } else {
+ buffer->raw = nullptr;
+ return NOT_ENOUGH_DATA;
+ }
+ }
+ void releaseBuffer(Buffer* buffer) override {
+ if (buffer->frameCount > mUnrel) {
+ mNextFrame += mUnrel;
+ mUnrel = 0;
+ } else {
+ mNextFrame += buffer->frameCount;
+ mUnrel -= buffer->frameCount;
+ }
+ buffer->frameCount = 0;
+ buffer->raw = nullptr;
+ }
+ void reset() { mNextFrame = 0; }
+};
+
+} // namespace android
+
+#endif // ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
new file mode 100644
index 0000000..017598c
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "libaudioprocessing_fuzz_utils.h"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <media/AudioResampler.h>
+#include <media/RecordBufferConverter.h>
+#include <stddef.h>
+#include <stdint.h>
+
+using namespace android;
+
+// Upper bound on frames handled per conversion; keeps per-iteration buffers
+// small so each fuzz iteration stays fast.
+constexpr int MAX_FRAMES = 1024;
+
+// "Main" PCM format nibble with no sub-format bits set.
+#define AUDIO_FORMAT_PCM_MAIN 0
+
+// Copied and simplified from audio-hal-enums.h?l=571
+// Candidate audio_format_t values -- both valid formats and intentionally
+// bogus ones (e.g. 0xFFFFFFFFu) -- used to exercise format handling.
+constexpr uint32_t FUZZ_AUDIO_FORMATS[] = {
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_16_BIT,
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_BIT,
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_32_BIT,
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_24_BIT,
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_FLOAT,
+ AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED,
+ 0x01000000u,
+ 0x02000000u,
+ 0x03000000u,
+ 0x04000000u,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_MAIN,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LC,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SSR,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LTP,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V1,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ERLC,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LD,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V2,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ELD,
+ AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_XHE,
+ 0x05000000u,
+ 0x06000000u,
+ 0x07000000u,
+ 0x08000000u,
+ 0x09000000u,
+ 0x0A000000u,
+ AUDIO_FORMAT_E_AC3 | AUDIO_FORMAT_E_AC3_SUB_JOC,
+ 0x0B000000u,
+ 0x0C000000u,
+ 0x0D000000u,
+ 0x0E000000u,
+ 0x10000000u,
+ 0x11000000u,
+ 0x12000000u,
+ 0x13000000u,
+ 0x14000000u,
+ 0x15000000u,
+ 0x16000000u,
+ 0x17000000u,
+ 0x18000000u,
+ 0x19000000u,
+ 0x1A000000u,
+ 0x1B000000u,
+ 0x1C000000u,
+ 0x1D000000u,
+ 0x1E000000u,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_MAIN,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LC,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SSR,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LTP,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V1,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ERLC,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LD,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V2,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ELD,
+ AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_XHE,
+ 0x1F000000u,
+ 0x20000000u,
+ 0x21000000u,
+ 0x22000000u,
+ 0x23000000u,
+ 0x24000000u,
+ AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_1_0,
+ AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_0,
+ AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_1,
+ 0x25000000u,
+ AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_LC,
+ AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V1,
+ AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V2,
+ 0x26000000u,
+ 0x27000000u,
+ 0x28000000u,
+ 0x29000000u,
+ 0x2A000000u,
+ 0x2B000000u,
+ 0xFFFFFFFFu,
+ AUDIO_FORMAT_PCM_MAIN,
+ AUDIO_FORMAT_PCM,
+};
+constexpr size_t NUM_AUDIO_FORMATS = std::size(FUZZ_AUDIO_FORMATS);
+
+// Fuzz entry point for RecordBufferConverter: picks random (possibly invalid)
+// channel masks, formats and sample rates, then repeatedly runs convert()
+// over fuzzer-supplied input data.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
+ fdp.ConsumeIntegral<int>(); // consumed and discarded (presumably reserved)
+
+ const audio_channel_mask_t srcChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+ const audio_format_t srcFormat =
+ (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+ const uint32_t srcSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+ const audio_channel_mask_t dstChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+ const audio_format_t dstFormat =
+ (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+ const uint32_t dstSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+
+ // Certain formats will result in LOG_ALWAYS_FATAL errors that aren't interesting crashes
+ // for fuzzing. Don't use those ones.
+ const uint32_t dstChannelCount = audio_channel_count_from_in_mask(dstChannelMask);
+ constexpr android::AudioResampler::src_quality quality =
+ android::AudioResampler::DEFAULT_QUALITY;
+ const int maxChannels =
+ quality < android::AudioResampler::DYN_LOW_QUALITY ? 2 : 8;
+ if (dstChannelCount < 1 || dstChannelCount > maxChannels) {
+ return 0;
+ }
+
+ const uint32_t srcChannelCount = audio_channel_count_from_in_mask(srcChannelMask);
+ if (srcChannelCount < 1 || srcChannelCount > maxChannels) {
+ return 0;
+ }
+
+ RecordBufferConverter converter(srcChannelMask, srcFormat, srcSampleRate,
+ dstChannelMask, dstFormat, dstSampleRate);
+ if (converter.initCheck() != NO_ERROR) {
+ return 0;
+ }
+
+ const uint32_t srcFrameSize = srcChannelCount * audio_bytes_per_sample(srcFormat);
+ const int srcNumFrames = fdp.ConsumeIntegralInRange<int>(0, MAX_FRAMES);
+ constexpr size_t metadataSize = 2 + 3 * sizeof(int) + 2 * sizeof(float);
+ std::vector<uint8_t> inputData = fdp.ConsumeBytes<uint8_t>(
+ metadataSize + (srcFrameSize * srcNumFrames));
+ // ConsumeBytes may return fewer bytes than requested when the corpus input
+ // is short; clamp the frame count so the Provider never hands out pointers
+ // past the end of inputData.
+ size_t providedFrames = srcNumFrames;
+ if (srcFrameSize > 0 && inputData.size() / srcFrameSize < providedFrames) {
+ providedFrames = inputData.size() / srcFrameSize;
+ }
+ Provider provider(inputData.data(), providedFrames, srcFrameSize);
+
+ const uint32_t dstFrameSize = dstChannelCount * audio_bytes_per_sample(dstFormat);
+ const size_t frames = fdp.ConsumeIntegralInRange<size_t>(0, MAX_FRAMES + 1);
+ // Heap buffer instead of a VLA: dstFrameSize * frames may be zero (a
+ // zero-length VLA is undefined behavior) or large enough to threaten the
+ // stack; vector value-initialization also replaces the manual memset.
+ std::vector<int8_t> dst(dstFrameSize * frames, 0);
+
+ // Add a small number of loops to see if repeated calls to convert cause
+ // any change in behavior.
+ const int numLoops = fdp.ConsumeIntegralInRange<int>(1, 3);
+ for (int loop = 0; loop < numLoops; ++loop) {
+ switch (fdp.ConsumeIntegralInRange<int>(0, 1)) {
+ case 0:
+ converter.reset();
+ FALLTHROUGH_INTENDED;
+ case 1:
+ converter.convert(dst.data(), &provider, frames);
+ break;
+ }
+ }
+
+ return 0;
+}
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
index 938c610..65c9a3c 100644
--- a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
@@ -34,6 +34,8 @@
#include <unistd.h>
#include <utils/Vector.h>
+#include "libaudioprocessing_fuzz_utils.h"
+
#include <memory>
using namespace android;
@@ -53,46 +55,6 @@
AudioResampler::DYN_HIGH_QUALITY,
};
-class Provider : public AudioBufferProvider {
- const void* mAddr; // base address
- const size_t mNumFrames; // total frames
- const size_t mFrameSize; // size of each frame in bytes
- size_t mNextFrame; // index of next frame to provide
- size_t mUnrel; // number of frames not yet released
- public:
- Provider(const void* addr, size_t frames, size_t frameSize)
- : mAddr(addr),
- mNumFrames(frames),
- mFrameSize(frameSize),
- mNextFrame(0),
- mUnrel(0) {}
- status_t getNextBuffer(Buffer* buffer) override {
- if (buffer->frameCount > mNumFrames - mNextFrame) {
- buffer->frameCount = mNumFrames - mNextFrame;
- }
- mUnrel = buffer->frameCount;
- if (buffer->frameCount > 0) {
- buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
- return NO_ERROR;
- } else {
- buffer->raw = nullptr;
- return NOT_ENOUGH_DATA;
- }
- }
- virtual void releaseBuffer(Buffer* buffer) {
- if (buffer->frameCount > mUnrel) {
- mNextFrame += mUnrel;
- mUnrel = 0;
- } else {
- mNextFrame += buffer->frameCount;
- mUnrel -= buffer->frameCount;
- }
- buffer->frameCount = 0;
- buffer->raw = nullptr;
- }
- void reset() { mNextFrame = 0; }
-};
-
audio_format_t chooseFormat(AudioResampler::src_quality quality,
uint8_t input_byte) {
switch (quality) {
diff --git a/media/libaudioprocessing/tests/mixerops_benchmark.cpp b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
index 86f5429..7a4c5c7 100644
--- a/media/libaudioprocessing/tests/mixerops_benchmark.cpp
+++ b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
@@ -74,28 +74,32 @@
}
}
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 2);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 2);
+// MULTI mode and MULTI_SAVEONLY mode are not used by AudioMixer for channels > 2,
+// which is ensured by a static_assert (won't compile for those configurations).
+// So we benchmark MIXTYPE_MULTI_MONOVOL and MIXTYPE_MULTI_SAVEONLY_MONOVOL compared
+// with MIXTYPE_MULTI_STEREOVOL and MIXTYPE_MULTI_SAVEONLY_STEREOVOL.
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 2);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 2);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 2);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 2);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 4);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 4);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 4);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 4);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 5);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 5);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 5);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 5);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 8);
BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_STEREOVOL, 8);
BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
diff --git a/media/libaudioprocessing/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
index bc9d2a6..1bbb863 100644
--- a/media/libaudioprocessing/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -241,7 +241,8 @@
// set up the tracks.
for (size_t i = 0; i < providers.size(); ++i) {
//printf("track %d out of %d\n", i, providers.size());
- uint32_t channelMask = audio_channel_out_mask_from_count(providers[i].getNumChannels());
+ audio_channel_mask_t channelMask =
+ audio_channel_out_mask_from_count(providers[i].getNumChannels());
const int name = i;
const status_t status = mixer->create(
name, channelMask, formats[i], AUDIO_SESSION_OUTPUT_MIX);
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index 7f9ae81..b7832ea 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,4 +1,3 @@
hunga@google.com
-krocard@google.com
mnaganov@google.com
rago@google.com
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 26eaaf8..1696233 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -138,7 +138,7 @@
template <>
bool stringToStreamType(const char *streamName, audio_devices_t* type) {
- return deviceFromString(streamName, *type);
+ return DeviceConverter::fromString(streamName, *type);
}
/** Parse a library xml note and push the result in libraries or return false on failure. */
diff --git a/media/libeffects/data/audio_effects.xml b/media/libeffects/data/audio_effects.xml
index 2e5f529..93a2181 100644
--- a/media/libeffects/data/audio_effects.xml
+++ b/media/libeffects/data/audio_effects.xml
@@ -21,6 +21,7 @@
<library name="downmix" path="libdownmix.so"/>
<library name="loudness_enhancer" path="libldnhncr.so"/>
<library name="dynamics_processing" path="libdynproc.so"/>
+ <library name="haptic_generator" path="libhapticgenerator.so"/>
</libraries>
<!-- list of effects to load.
@@ -58,6 +59,7 @@
<effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
<effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
<effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
+ <effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
</effects>
<!-- Audio pre processor configurations.
diff --git a/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh b/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
index d0faebe..8aadfbf 100755
--- a/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
@@ -39,8 +39,7 @@
echo "testing Downmix"
adb shell mkdir $testdir
-adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw \
-$testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
adb push $OUT/testcases/downmixtest/arm64/downmixtest $testdir
#run the downmix test application for test.
diff --git a/media/libeffects/factory/EffectsConfigLoader.c b/media/libeffects/factory/EffectsConfigLoader.c
index fcef36f..e23530e 100644
--- a/media/libeffects/factory/EffectsConfigLoader.c
+++ b/media/libeffects/factory/EffectsConfigLoader.c
@@ -394,7 +394,7 @@
}
sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object;
effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object);
- // Since we return a dummy descriptor for the proxy during
+ // Since we return a stub descriptor for the proxy during
// get_descriptor call,we replace it with the correspoding
// sw effect descriptor, but with Proxy UUID
// check for Sw desc
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 505be7c..30a9007 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -283,7 +283,7 @@
}
listPush(effectLoadResult.effectDesc.get(), subEffectList);
- // Since we return a dummy descriptor for the proxy during
+ // Since we return a stub descriptor for the proxy during
// get_descriptor call, we replace it with the corresponding
// sw effect descriptor, but keep the Proxy UUID
*effectLoadResult.effectDesc = *swEffectLoadResult.effectDesc;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
new file mode 100644
index 0000000..f947339
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// HapticGenerator library
+cc_library_shared {
+ name: "libhapticgenerator",
+
+ vendor: true,
+
+ srcs: [
+ "EffectHapticGenerator.cpp",
+ "Processors.cpp",
+ ],
+
+ cflags: [
+ "-O2", // Turning on the optimization in order to reduce effect processing time.
+ // The latency is around 1/5 less than without the optimization.
+ "-Wall",
+ "-Werror",
+ "-ffast-math", // This is needed for the non-zero coefficients optimization for
+ // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+ // with/without `-ffast-math` for more context.
+ "-fvisibility=hidden",
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "libbinder",
+ "liblog",
+ "libutils",
+ "libvibrator",
+ ],
+
+ relative_install_path: "soundfx",
+
+ header_libs: [
+ "libaudioeffects",
+ ],
+}
+
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..9b93659
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -0,0 +1,519 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "EffectHapticGenerator.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include <errno.h>
+#include <inttypes.h>
+
+#include <audio_effects/effect_hapticgenerator.h>
+#include <audio_utils/format.h>
+#include <system/audio.h>
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+ .tag = AUDIO_EFFECT_LIBRARY_TAG,
+ .version = EFFECT_LIBRARY_API_VERSION,
+ .name = "HapticGenerator Library",
+ .implementor = "The Android Open Source Project",
+ .create_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Create,
+ .release_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Release,
+ .get_descriptor = android::audio_effect::haptic_generator::HapticGeneratorLib_GetDescriptor,
+};
+
+namespace android::audio_effect::haptic_generator {
+
+// effect_handle_t interface implementation for haptic generator effect
+const struct effect_interface_s gHapticGeneratorInterface = {
+ HapticGenerator_Process,
+ HapticGenerator_Command,
+ HapticGenerator_GetDescriptor,
+ nullptr /* no process_reverse function, no reference stream needed */
+};
+
+//-----------------------------------------------------------------------------
+// Effect Descriptor
+//-----------------------------------------------------------------------------
+
+// UUIDs for effect types have been generated from http://www.itu.int/ITU-T/asn1/uuid.html
+// Haptic Generator
+static const effect_descriptor_t gHgDescriptor = {
+ FX_IID_HAPTICGENERATOR_, // type
+ {0x97c4acd1, 0x8b82, 0x4f2f, 0x832e, {0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}}, // uuid
+ EFFECT_CONTROL_API_VERSION,
+ EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+ 0, // FIXME what value should be reported? // cpu load
+ 0, // FIXME what value should be reported? // memory usage
+ "Haptic Generator",
+ "The Android Open Source Project"
+};
+
+//-----------------------------------------------------------------------------
+// Internal functions
+//-----------------------------------------------------------------------------
+
+namespace {
+
+// Initializes a freshly created context: installs the effect interface and
+// fills in the default configuration (stereo float PCM input, accumulated
+// stereo float PCM output) with all haptic parameters cleared. Returns 0.
+int HapticGenerator_Init(struct HapticGeneratorContext *context) {
+ context->itfe = &gHapticGeneratorInterface;
+
+ // Sampling rate 0 is a placeholder; the real rate is expected to arrive
+ // via a later EFFECT_CMD_SET_CONFIG (see HapticGenerator_Configure).
+ context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+ context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ context->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+ context->config.inputCfg.samplingRate = 0;
+ context->config.inputCfg.bufferProvider.getBuffer = nullptr;
+ context->config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+ context->config.inputCfg.bufferProvider.cookie = nullptr;
+ context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+ context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+ context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ context->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+ context->config.outputCfg.samplingRate = 0;
+ context->config.outputCfg.bufferProvider.getBuffer = nullptr;
+ context->config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+ context->config.outputCfg.bufferProvider.cookie = nullptr;
+ context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+ // No haptic channels configured yet; haptic output stays muted until the
+ // framework sets per-track intensities (HG_PARAM_HAPTIC_INTENSITY).
+ memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
+ context->param.hapticChannelCount = 0;
+ context->param.audioChannelCount = 0;
+ context->param.maxHapticIntensity = os::HapticScale::MUTE;
+
+ context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+ return 0;
+}
+
+// Appends `filter` as the next stage of the processing chain, and records it
+// so the filter stays reachable (e.g. for clear() in HapticGenerator_Reset)
+// outside of the chain's lambdas.
+void addBiquadFilter(
+ std::vector<std::function<void(float *, const float *, size_t)>> &processingChain,
+ struct HapticGeneratorProcessorsRecord &processorsRecord,
+ std::shared_ptr<HapticBiquadFilter> filter) {
+ // The process chain captures the shared pointer of the filter in lambda.
+ // The process record will keep a shared pointer to the filter so that it is possible to access
+ // the filter outside of the process chain.
+ processorsRecord.filters.push_back(filter);
+ processingChain.push_back([filter](float *out, const float *in, size_t frameCount) {
+ filter->process(out, in, frameCount);
+ });
+}
+
+/**
+ * \brief build haptic generator processing chain.
+ *
+ * Each stage is a callable taking (out, in, frameCount); stages are executed
+ * in order by HapticGenerator_runProcessingChain. Shared pointers to the
+ * stateful processors are cached in processorsRecord so they can be cleared
+ * on reset.
+ *
+ * \param processingChain ordered list of processing stages to append to
+ * \param processorsRecord a structure to cache all the shared pointers for processors
+ * \param sampleRate the audio sampling rate. Use a float here as it may be used to create filters
+ * \param channelCount haptic channel count
+ */
+void HapticGenerator_buildProcessingChain(
+ std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+ struct HapticGeneratorProcessorsRecord& processorsRecord,
+ float sampleRate, size_t channelCount) {
+ // Band-limit the input: 2nd-order high-pass at 100 Hz, then a 2nd-order
+ // low-pass at 3000 Hz.
+ float highPassCornerFrequency = 100.0f;
+ auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, hpf);
+ float lowPassCornerFrequency = 3000.0f;
+ auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, lpf);
+
+ auto ramp = std::make_shared<Ramp>(channelCount);
+ // The process chain captures the shared pointer of the ramp in lambda. It will be the only
+ // reference to the ramp.
+ // The process record will keep a weak pointer to the ramp so that it is possible to access
+ // the ramp outside of the process chain.
+ processorsRecord.ramps.push_back(ramp);
+ processingChain.push_back([ramp](float *out, const float *in, size_t frameCount) {
+ ramp->process(out, in, frameCount);
+ });
+
+ // Further band-limiting after the ramp: 60 Hz high-pass, 700 Hz low-pass.
+ highPassCornerFrequency = 60.0f;
+ hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, hpf);
+ lowPassCornerFrequency = 700.0f;
+ lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, lpf);
+
+ lowPassCornerFrequency = 5.0f;
+ float normalizationPower = -0.3f;
+ // The process chain captures the shared pointer of the slow envelope in lambda. It will
+ // be the only reference to the slow envelope.
+ // The process record will keep a weak pointer to the slow envelope so that it is possible
+ // to access the slow envelope outside of the process chain.
+ auto slowEnv = std::make_shared<SlowEnvelope>(
+ lowPassCornerFrequency, sampleRate, normalizationPower, channelCount);
+ processorsRecord.slowEnvs.push_back(slowEnv);
+ processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+ slowEnv->process(out, in, frameCount);
+ });
+
+ // Smoothing stages: two low-pass filters at 400 Hz and 500 Hz.
+ lowPassCornerFrequency = 400.0f;
+ lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, lpf);
+ lowPassCornerFrequency = 500.0f;
+ lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, lpf);
+
+ // All-pass stages shape the phase response without changing magnitude.
+ auto apf = createAPF2(400.0f, 200.0f, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, apf);
+ apf = createAPF2(100.0f, 50.0f, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, apf);
+ float allPassCornerFrequency = 25.0f;
+ apf = createAPF(allPassCornerFrequency, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, apf);
+
+ // Final resonance shaping around 150 Hz: a band-pass followed by a
+ // band-stop with differing zero/pole Q factors.
+ float resonantFrequency = 150.0f;
+ float bandpassQ = 1.0f;
+ auto bpf = createBPF(resonantFrequency, bandpassQ, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, bpf);
+
+ float zeroQ = 8.0f;
+ float poleQ = 4.0f;
+ auto bsf = createBSF(resonantFrequency, zeroQ, poleQ, sampleRate, channelCount);
+ addBiquadFilter(processingChain, processorsRecord, bsf);
+}
+
+// Validates and applies a new effect configuration. Input and output must
+// share sampling rate, float-PCM format, channel mask and frame count;
+// otherwise -EINVAL is returned. When the config actually changes, the
+// processing chain is rebuilt for the new sampling rate and haptic channel
+// count. Returns 0 on success.
+int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
+ if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
+ config->inputCfg.format != config->outputCfg.format ||
+ config->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT ||
+ config->inputCfg.channels != config->outputCfg.channels ||
+ config->inputCfg.buffer.frameCount != config->outputCfg.buffer.frameCount) {
+ return -EINVAL;
+ }
+ if (&context->config != config) {
+ // Drop the previous chain and processor records before rebuilding.
+ context->processingChain.clear();
+ context->processorsRecord.filters.clear();
+ context->processorsRecord.ramps.clear();
+ context->processorsRecord.slowEnvs.clear();
+ memcpy(&context->config, config, sizeof(effect_config_t));
+ // Audio channels are the non-haptic bits of the channel mask; haptic
+ // channels are the AUDIO_CHANNEL_HAPTIC_ALL bits.
+ context->param.audioChannelCount = audio_channel_count_from_out_mask(
+ ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
+ context->param.hapticChannelCount = audio_channel_count_from_out_mask(
+ ((audio_channel_mask_t) config->outputCfg.channels) & AUDIO_CHANNEL_HAPTIC_ALL);
+ ALOG_ASSERT(context->param.hapticChannelCount <= 2,
+ "haptic channel count(%zu) is too large",
+ context->param.hapticChannelCount);
+ context->audioDataBytesPerFrame = audio_bytes_per_frame(
+ context->param.audioChannelCount, (audio_format_t) config->inputCfg.format);
+ for (size_t i = 0; i < context->param.hapticChannelCount; ++i) {
+ // By default, use the first audio channel to generate haptic channels.
+ context->param.hapticChannelSource[i] = 0;
+ }
+
+ HapticGenerator_buildProcessingChain(context->processingChain,
+ context->processorsRecord,
+ config->inputCfg.samplingRate,
+ context->param.hapticChannelCount);
+ }
+ return 0;
+}
+
+// Clears the internal state of the stateful processors (biquad filters and
+// slow envelopes) so processing restarts from silence. Returns 0.
+// NOTE(review): ramps in processorsRecord are not cleared here -- presumably
+// Ramp carries no persistent state; confirm against its implementation.
+int HapticGenerator_Reset(struct HapticGeneratorContext *context) {
+ for (auto& filter : context->processorsRecord.filters) {
+ filter->clear();
+ }
+ for (auto& slowEnv : context->processorsRecord.slowEnvs) {
+ slowEnv->clear();
+ }
+ return 0;
+}
+
+// Sets an effect parameter. Only HG_PARAM_HAPTIC_INTENSITY is supported:
+// `value` is a pair of ints {track id, intensity}. A MUTE intensity removes
+// the entry for that id; any other intensity creates or updates the entry.
+// maxHapticIntensity is then recomputed over all remaining entries.
+// Returns 0 on success, -EINVAL for a bad payload or unknown parameter.
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
+ int32_t param,
+ uint32_t size,
+ void *value) {
+ switch (param) {
+ case HG_PARAM_HAPTIC_INTENSITY: {
+ if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+ return -EINVAL;
+ }
+ int id = *(int *) value;
+ os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+ if (hapticIntensity == os::HapticScale::MUTE) {
+ context->param.id2Intensity.erase(id);
+ } else {
+ // Use operator[] rather than emplace(): emplace() is a no-op when
+ // the id is already present, so updating an existing track's
+ // intensity would silently fail.
+ context->param.id2Intensity[id] = hapticIntensity;
+ }
+ // Recompute the cached maximum, seeded with the value just applied
+ // (MUTE in the erase case, so the loop alone decides the max).
+ context->param.maxHapticIntensity = hapticIntensity;
+ for (const auto&[id, intensity] : context->param.id2Intensity) {
+ context->param.maxHapticIntensity = std::max(
+ context->param.maxHapticIntensity, intensity);
+ }
+ break;
+ }
+
+ default:
+ ALOGW("Unknown param: %d", param);
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+/**
+ * \brief run the processing chain to generate haptic data from audio data
+ *
+ * \param processingChain the processing chain for generating haptic data
+ * \param buf1 a buffer contains raw audio data
+ * \param buf2 a buffer that is large enough to keep all the data
+ * \param frameCount frame count of the data
+ * \return a pointer to the output buffer
+ */
+float* HapticGenerator_runProcessingChain(
+ const std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+ float* buf1, float* buf2, size_t frameCount) {
+ // Ping-pong between the two buffers; after the final stage, `in` points at
+ // the buffer holding the most recent output.
+ float *in = buf1;
+ float *out = buf2;
+ // Iterate by const reference: copying a std::function on every stage would
+ // needlessly allocate in the audio processing path.
+ for (const auto& processingFunc : processingChain) {
+ processingFunc(out, in, frameCount);
+ std::swap(in, out);
+ }
+ return in;
+}
+
+} // namespace (anonymous)
+
+//-----------------------------------------------------------------------------
+// Effect API Implementation
+//-----------------------------------------------------------------------------
+
+/*--- Effect Library Interface Implementation ---*/
+
+// Library entry point: creates a HapticGenerator effect instance. Only the
+// UUID from gHgDescriptor is accepted. On success stores the new context in
+// *handle and returns 0; returns -EINVAL on null arguments or UUID mismatch.
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+ int32_t sessionId __unused,
+ int32_t ioId __unused,
+ effect_handle_t *handle) {
+ if (handle == nullptr || uuid == nullptr) {
+ return -EINVAL;
+ }
+
+ if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) != 0) {
+ return -EINVAL;
+ }
+
+ // Ownership passes to the caller's handle; freed in HapticGeneratorLib_Release.
+ HapticGeneratorContext *context = new HapticGeneratorContext;
+ HapticGenerator_Init(context);
+
+ *handle = (effect_handle_t) context;
+ ALOGV("%s context is %p", __func__, context);
+ return 0;
+}
+
+// Destroys an instance created by HapticGeneratorLib_Create. Deleting a null
+// handle is a harmless no-op; always returns 0.
+int32_t HapticGeneratorLib_Release(effect_handle_t handle) {
+ HapticGeneratorContext *context = (HapticGeneratorContext *) handle;
+ delete context;
+ return 0;
+}
+
+// Copies the effect descriptor for the requested UUID into *descriptor.
+// Returns 0 when the UUID matches gHgDescriptor, -EINVAL otherwise.
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+ effect_descriptor_t *descriptor) {
+
+ if (descriptor == nullptr || uuid == nullptr) {
+ ALOGE("%s() called with NULL pointer", __func__);
+ return -EINVAL;
+ }
+
+ if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) == 0) {
+ *descriptor = gHgDescriptor;
+ return 0;
+ }
+
+ return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+// Processing entry point. Passes the audio samples through to the output
+// buffer (copy or accumulate, per the configured access mode), then runs the
+// processing chain over the configured haptic source channels and appends the
+// generated, intensity-scaled haptic samples after the audio data in the
+// *input* buffer (see the note above the final memcpy_by_audio_format).
+int32_t HapticGenerator_Process(effect_handle_t self,
+ audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+ HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+ if (inBuffer == nullptr || inBuffer->raw == nullptr
+ || outBuffer == nullptr || outBuffer->raw == nullptr) {
+ return 0;
+ }
+
+ // The audio data must not be modified but just written to
+ // output buffer according to the access mode.
+ size_t audioBytes = context->audioDataBytesPerFrame * inBuffer->frameCount;
+ size_t audioSampleCount = inBuffer->frameCount * context->param.audioChannelCount;
+ if (inBuffer->raw != outBuffer->raw) {
+ if (context->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ for (size_t i = 0; i < audioSampleCount; ++i) {
+ outBuffer->f32[i] += inBuffer->f32[i];
+ }
+ } else {
+ memcpy(outBuffer->raw, inBuffer->raw, audioBytes);
+ }
+ }
+
+ if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+ ALOGE("State(%d) is not HAPTICGENERATOR_STATE_ACTIVE when calling %s",
+ context->state, __func__);
+ return -ENODATA;
+ }
+
+ if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+ // Haptic channels are muted, no need to generate haptic data.
+ return 0;
+ }
+
+ // Resize buffer if the haptic sample count is greater than buffer size.
+ size_t hapticSampleCount = inBuffer->frameCount * context->param.hapticChannelCount;
+ if (hapticSampleCount > context->inputBuffer.size()) {
+ // The context->inputBuffer and context->outputBuffer must have the same size,
+ // which must be at least the haptic sample count.
+ context->inputBuffer.resize(hapticSampleCount);
+ context->outputBuffer.resize(hapticSampleCount);
+ }
+
+ // Construct input buffer according to haptic channel source: each haptic
+ // channel is fed from the audio channel selected by hapticChannelSource.
+ for (size_t i = 0; i < inBuffer->frameCount; ++i) {
+ for (size_t j = 0; j < context->param.hapticChannelCount; ++j) {
+ context->inputBuffer[i * context->param.hapticChannelCount + j] =
+ inBuffer->f32[i * context->param.audioChannelCount
+ + context->param.hapticChannelSource[j]];
+ }
+ }
+
+ // Generate the haptic waveform and scale it by the strongest requested
+ // per-track intensity.
+ float* hapticOutBuffer = HapticGenerator_runProcessingChain(
+ context->processingChain, context->inputBuffer.data(),
+ context->outputBuffer.data(), inBuffer->frameCount);
+ os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity);
+
+ // For haptic data, the haptic playback thread will copy the data from effect input buffer,
+ // which contains haptic data at the end of the buffer, directly to sink buffer.
+ // In that case, copy haptic data to input buffer instead of output buffer.
+ // Note: this may not work with rpc/binder calls
+ memcpy_by_audio_format(static_cast<char*>(inBuffer->raw) + audioBytes,
+ static_cast<audio_format_t>(context->config.outputCfg.format),
+ hapticOutBuffer,
+ AUDIO_FORMAT_PCM_FLOAT,
+ hapticSampleCount);
+
+ return 0;
+}
+
+int32_t HapticGenerator_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+        void *cmdData, uint32_t *replySize, void *replyData) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    ALOGV("HapticGenerator_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+        case EFFECT_CMD_INIT:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Init(context);
+            break;
+
+        case EFFECT_CMD_SET_CONFIG:
+            if (cmdData == nullptr || cmdSize != sizeof(effect_config_t)
+                    || replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Configure(
+                    context, (effect_config_t *) cmdData);
+            break;
+
+        case EFFECT_CMD_RESET:
+            HapticGenerator_Reset(context);
+            break;
+
+        case EFFECT_CMD_GET_PARAM:
+            ALOGV("HapticGenerator_Command EFFECT_CMD_GET_PARAM cmdData %p,"
+                    "*replySize %u, replyData: %p",  // guard replySize: caller may pass nullptr
+                    cmdData, replySize ? *replySize : 0, replyData);
+            break;
+
+        case EFFECT_CMD_SET_PARAM: {
+            ALOGV("HapticGenerator_Command EFFECT_CMD_SET_PARAM cmdSize %d cmdData %p, "
+                    "*replySize %u, replyData %p", cmdSize, cmdData,
+                    replySize ? *replySize : 0, replyData);
+            if (cmdData == nullptr || (cmdSize < (int) (sizeof(effect_param_t) + sizeof(int32_t)))
+                    || replyData == nullptr || replySize == nullptr ||
+                    *replySize != (int) sizeof(int32_t)) {
+                return -EINVAL;
+            }
+            effect_param_t *cmd = (effect_param_t *) cmdData;
+            *(int *) replyData = HapticGenerator_SetParameter(
+                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+        }
+        break;
+
+        case EFFECT_CMD_ENABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_INITIALIZED) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_ACTIVE;
+            ALOGV("EFFECT_CMD_ENABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_DISABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+            ALOGV("EFFECT_CMD_DISABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_SET_VOLUME:
+        case EFFECT_CMD_SET_DEVICE:
+        case EFFECT_CMD_SET_AUDIO_MODE:
+            break;
+
+        default:
+            ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
+            return -EINVAL;
+    }
+
+    return 0;
+}
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self, effect_descriptor_t *descriptor) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || descriptor == nullptr ||  // also reject a null output pointer
+            context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(descriptor, &gHgDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
new file mode 100644
index 0000000..57b4338
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTHAPTICGENERATOR_H_
+#define ANDROID_EFFECTHAPTICGENERATOR_H_
+
+#include <functional>
+#include <vector>
+#include <map>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include "Processors.h"
+
+namespace android::audio_effect::haptic_generator {
+
+//-----------------------------------------------------------------------------
+// Definition
+//-----------------------------------------------------------------------------
+
+enum hapticgenerator_state_t {
+ HAPTICGENERATOR_STATE_UNINITIALIZED,
+ HAPTICGENERATOR_STATE_INITIALIZED,
+ HAPTICGENERATOR_STATE_ACTIVE,
+};
+
+// parameters for each haptic generator
+struct HapticGeneratorParam {
+ uint32_t hapticChannelSource[2]; // The audio channels used to generate haptic channels.
+ // The first channel will be used to generate HAPTIC_A,
+ // The second channel will be used to generate HAPTIC_B
+ // The value will be offset of audio channel
+ uint32_t audioChannelCount;
+ uint32_t hapticChannelCount;
+
+ // A map from track id to haptic intensity.
+ std::map<int, os::HapticScale> id2Intensity;
+ os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+ std::vector<std::shared_ptr<HapticBiquadFilter>> filters;
+ std::vector<std::shared_ptr<Ramp>> ramps;
+ std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+};
+
+// A structure to keep all the context for HapticGenerator.
+struct HapticGeneratorContext {
+ const struct effect_interface_s *itfe;
+ effect_config_t config;
+ hapticgenerator_state_t state;
+ struct HapticGeneratorParam param;
+ size_t audioDataBytesPerFrame;
+
+ // A cache for all shared pointers of the HapticGenerator
+ struct HapticGeneratorProcessorsRecord processorsRecord;
+
+ // Using a vector of functions to record the processing chain for haptic-generating algorithm.
+ // The three parameters of the processing functions are pointer to output buffer, pointer to
+ // input buffer and frame count.
+ std::vector<std::function<void(float*, const float*, size_t)>> processingChain;
+
+ // inputBuffer is where to keep input buffer for the generating algorithm. It will be
+ // constructed according to HapticGeneratorParam.hapticChannelSource.
+ std::vector<float> inputBuffer;
+
+ // outputBuffer is a buffer having the same length as inputBuffer. It can be used as
+ // intermediate buffer in the generating algorithm.
+ std::vector<float> outputBuffer;
+};
+
+//-----------------------------------------------------------------------------
+// Effect API
+//-----------------------------------------------------------------------------
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+ int32_t sessionId,
+ int32_t ioId,
+ effect_handle_t *handle);
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle);
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+ effect_descriptor_t *descriptor);
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+ audio_buffer_t *inBuffer,
+ audio_buffer_t *outBuffer);
+
+int32_t HapticGenerator_Command(effect_handle_t self,
+ uint32_t cmdCode,
+ uint32_t cmdSize,
+ void *cmdData,
+ uint32_t *replySize,
+ void *replyData);
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self,
+ effect_descriptor_t *descriptor);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // ANDROID_EFFECTHAPTICGENERATOR_H_
diff --git a/media/libstagefright/codecs/amrwb/MODULE_LICENSE_APACHE2 b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrwb/MODULE_LICENSE_APACHE2
rename to media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
new file mode 100644
index 0000000..3157b35
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG_Processors"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <assert.h>
+
+#include <cmath>
+
+#include "Processors.h"
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#ifndef USE_NEON
+#define USE_NEON (true)
+#endif
+#else
+#define USE_NEON (false)
+#endif
+#if USE_NEON
+#include <arm_neon.h>
+#endif
+
+namespace android::audio_effect::haptic_generator {
+
+float getRealPoleZ(float cornerFrequency, float sampleRate) {
+ // This will be a pole of a first order filter.
+ float realPoleS = -2 * M_PI * cornerFrequency;
+ return exp(realPoleS / sampleRate); // zero-pole matching
+}
+
+std::pair<float, float> getComplexPoleZ(float ringingFrequency, float q, float sampleRate) {
+ // This is the pole for 1/(s^2 + s/q + 1) in normalized frequency. The other pole is
+ // the complex conjugate of this.
+ float poleImagS = 2 * M_PI * ringingFrequency;
+ float poleRealS = -poleImagS / (2 * q);
+ float poleRadius = exp(poleRealS / sampleRate);
+ float poleImagZ = poleRadius * sin(poleImagS / sampleRate);
+ float poleRealZ = poleRadius * cos(poleImagS / sampleRate);
+ return {poleRealZ, poleImagZ};
+}
+
+// Implementation of Ramp
+
+Ramp::Ramp(size_t channelCount) : mChannelCount(channelCount) {}
+
+void Ramp::process(float *out, const float *in, size_t frameCount) {
+ size_t i = 0;
+#if USE_NEON
+ size_t sampleCount = frameCount * mChannelCount;
+ float32x2_t allZero = vdup_n_f32(0.0f);
+ while (i + 1 < sampleCount) {
+ vst1_f32(out, vmax_f32(vld1_f32(in), allZero));
+ in += 2;
+ out += 2;
+ i += 2;
+ }
+#endif // USE_NEON
+ for (; i < frameCount * mChannelCount; ++i) {
+ *out = *in >= 0.0f ? *in : 0.0f;
+ out++;
+ in++;
+ }
+}
+
+// Implementation of SlowEnvelope
+
+SlowEnvelope::SlowEnvelope(
+ float cornerFrequency,
+ float sampleRate,
+ float normalizationPower,
+ size_t channelCount)
+ : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
+ mNormalizationPower(normalizationPower),
+ mChannelCount(channelCount),
+ mEnv(0.25 * (sampleRate / (2 * M_PI * cornerFrequency))) {}
+
+void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
+ size_t sampleCount = frameCount * mChannelCount;
+ if (sampleCount > mLpfInBuffer.size()) {
+ mLpfInBuffer.resize(sampleCount, mEnv);
+ mLpfOutBuffer.resize(sampleCount);
+ }
+ mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
+ for (size_t i = 0; i < sampleCount; ++i) {
+ *out = *in * pow(mLpfOutBuffer[i], mNormalizationPower);
+ out++;
+ in++;
+ }
+}
+
+void SlowEnvelope::clear() {
+ mLpf->clear();
+}
+
+// Implementation of helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+ const BiquadFilterCoefficients &coefs2) {
+ assert(coefs1[2] == 0.0f);
+ assert(coefs2[2] == 0.0f);
+ assert(coefs1[4] == 0.0f);
+ assert(coefs2[4] == 0.0f);
+ return {coefs1[0] * coefs2[0],
+ coefs1[0] * coefs2[1] + coefs1[1] * coefs2[0],
+ coefs1[1] * coefs2[1],
+ coefs1[3] + coefs2[3],
+ coefs1[3] * coefs2[3]};
+}
+
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate) {
+ BiquadFilterCoefficients coefficient;
+ float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+ // This is a zero at nyquist
+ coefficient[0] = 0.5f * (1 - realPoleZ);
+ coefficient[1] = coefficient[0];
+ coefficient[2] = 0.0f;
+ coefficient[3] = -realPoleZ; // This is traditional 1/(s+1) filter
+ coefficient[4] = 0.0f;
+ return coefficient;
+}
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+ return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+ return std::make_shared<HapticBiquadFilter>(
+ channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient;
+ // Note: this is valid only when corner frequency is less than nyquist / 2.
+ float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+
+ // Note: this is a zero at DC
+ coefficient[0] = 0.5f * (1 + realPoleZ);
+ coefficient[1] = -coefficient[0];
+ coefficient[2] = 0.0f;
+ coefficient[3] = -realPoleZ;
+ coefficient[4] = 0.0f;
+ return std::make_shared<HapticBiquadFilter>(
+ channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+BiquadFilterCoefficients apfCoefs(const float cornerFrequency, const float sampleRate) {
+ BiquadFilterCoefficients coefficient;
+ float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+ float zeroZ = 1.0f / realPoleZ;
+ coefficient[0] = (1.0f - realPoleZ) / (1.0f - zeroZ);
+ coefficient[1] = -coefficient[0] * zeroZ;
+ coefficient[2] = 0.0f;
+ coefficient[3] = -realPoleZ;
+ coefficient[4] = 0.0f;
+ return coefficient;
+}
+
+std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient = apfCoefs(cornerFrequency, sampleRate);
+ return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
+ const float cornerFrequency2,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefs1 = apfCoefs(cornerFrequency1, sampleRate);
+ BiquadFilterCoefficients coefs2 = apfCoefs(cornerFrequency2, sampleRate);
+ return std::make_shared<HapticBiquadFilter>(
+ channelCount, cascadeFirstOrderFilters(coefs1, coefs2));
+}
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+ const float q,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient;
+ const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+ // Note: this is not a standard cookbook BPF, but a low pass filter with zero at DC
+ coefficient[0] = 1.0f;
+ coefficient[1] = -1.0f;
+ coefficient[2] = 0.0f;
+ coefficient[3] = -2 * real;
+ coefficient[4] = real * real + img * img;
+ return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+ const float zq,
+ const float pq,
+ const float sampleRate,
+ const size_t channelCount) {
+ BiquadFilterCoefficients coefficient;
+ const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+ float zeroCoeff1 = -2 * zeroReal;
+ float zeroCoeff2 = zeroReal* zeroReal + zeroImg * zeroImg;
+ const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+ float poleCoeff1 = -2 * poleReal;
+ float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+ const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+ coefficient[0] = 1.0f * norm;
+ coefficient[1] = zeroCoeff1 * norm;
+ coefficient[2] = zeroCoeff2 * norm;
+ coefficient[3] = poleCoeff1;
+ coefficient[4] = poleCoeff2;
+ return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
new file mode 100644
index 0000000..5cf0557
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+#define _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include <audio_utils/BiquadFilter.h>
+
+using HapticBiquadFilter = android::audio_utils::BiquadFilter<float>;
+using BiquadFilterCoefficients = std::array<float, android::audio_utils::kBiquadNumCoefs>;
+
+namespace android::audio_effect::haptic_generator {
+
+// A class providing a process function that makes input data non-negative.
+class Ramp {
+public:
+ explicit Ramp(size_t channelCount);
+
+ void process(float *out, const float *in, size_t frameCount);
+
+private:
+ const size_t mChannelCount;
+};
+
+
+class SlowEnvelope {
+public:
+    SlowEnvelope(float cornerFrequency, float sampleRate,
+                 float normalizationPower, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void clear();
+
+private:
+    const std::shared_ptr<HapticBiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    std::vector<float> mLpfOutBuffer;
+    const float mNormalizationPower;
+    const size_t mChannelCount;  // integral count; was float, used in size arithmetic
+    const float mEnv;
+};
+
+// Helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+ const BiquadFilterCoefficients &coefs2);
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount);
+
+// Create two cascaded LPF with same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount);
+
+// Create two cascaded HPF with same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createAPF(const float cornerFrequency,
+ const float sampleRate,
+ const size_t channelCount);
+
+// Create two cascaded APF with two different corner frequency.
+std::shared_ptr<HapticBiquadFilter> createAPF2(const float cornerFrequency1,
+ const float cornerFrequency2,
+ const float sampleRate,
+ const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+ const float q,
+ const float sampleRate,
+ const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+ const float zq,
+ const float pq,
+ const float sampleRate,
+ const size_t channelCount);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
diff --git a/media/libeffects/lvm/.clang-format b/media/libeffects/lvm/.clang-format
new file mode 100644
index 0000000..6f4b13e
--- /dev/null
+++ b/media/libeffects/lvm/.clang-format
@@ -0,0 +1,15 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
+# Following are specific to libeffects/lvm
+SortIncludes: false
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
new file mode 100644
index 0000000..420e172
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -0,0 +1,16 @@
+cc_benchmark {
+ name: "lvm_benchmark",
+ vendor: true,
+ srcs: ["lvm_benchmark.cpp"],
+ static_libs: [
+ "libbundlewrapper",
+ "libmusicbundle",
+ ],
+ shared_libs: [
+ "libaudioutils",
+ "liblog",
+ ],
+ header_libs: [
+ "libhardware_headers",
+ ],
+}
diff --git a/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
new file mode 100644
index 0000000..ee9da3f
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <log/log.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <system/audio.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+constexpr effect_uuid_t kEffectUuids[] = {
+ // NXP SW BassBoost
+ {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW Virtualizer
+ {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW Equalizer
+ {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW Volume
+ {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+constexpr size_t kFrameCount = 2048;
+
+constexpr audio_channel_mask_t kChMasks[] = {
+ AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_2POINT1,
+ AUDIO_CHANNEL_OUT_QUAD, AUDIO_CHANNEL_OUT_PENTA, AUDIO_CHANNEL_OUT_5POINT1,
+ AUDIO_CHANNEL_OUT_6POINT1, AUDIO_CHANNEL_OUT_7POINT1,
+};
+
+constexpr size_t kNumChMasks = std::size(kChMasks);
+constexpr int kSampleRate = 44100;
+// TODO(b/131240940) Remove once effects are updated to produce mono output
+constexpr size_t kMinOutputChannelCount = 2;
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the number of channels.
+ * The second parameter indicates the effect.
+ * 0: Bass Boost, 1: Virtualizer, 2: Equalizer, 3: Volume
+ * -----------------------------------------------------
+ * Benchmark Time CPU Iterations
+ * -----------------------------------------------------
+ * BM_LVM/2/0 131279 ns 130855 ns 5195
+ * BM_LVM/2/1 184814 ns 184219 ns 3799
+ * BM_LVM/2/2 91935 ns 91649 ns 7647
+ * BM_LVM/2/3 26707 ns 26623 ns 26281
+ * BM_LVM/3/0 172130 ns 171562 ns 4085
+ * BM_LVM/3/1 192443 ns 191923 ns 3644
+ * BM_LVM/3/2 127444 ns 127107 ns 5483
+ * BM_LVM/3/3 26811 ns 26730 ns 26163
+ * BM_LVM/4/0 223688 ns 223076 ns 3133
+ * BM_LVM/4/1 204961 ns 204408 ns 3425
+ * BM_LVM/4/2 169162 ns 168708 ns 4143
+ * BM_LVM/4/3 37330 ns 37225 ns 18795
+ * BM_LVM/5/0 272628 ns 271668 ns 2568
+ * BM_LVM/5/1 218487 ns 217883 ns 3212
+ * BM_LVM/5/2 211049 ns 210479 ns 3324
+ * BM_LVM/5/3 46962 ns 46835 ns 15051
+ * BM_LVM/6/0 318881 ns 317734 ns 2216
+ * BM_LVM/6/1 231899 ns 231244 ns 3028
+ * BM_LVM/6/2 252655 ns 251963 ns 2771
+ * BM_LVM/6/3 54944 ns 54794 ns 12799
+ * BM_LVM/7/0 366622 ns 365262 ns 1916
+ * BM_LVM/7/1 245076 ns 244388 ns 2866
+ * BM_LVM/7/2 295105 ns 294304 ns 2379
+ * BM_LVM/7/3 63595 ns 63420 ns 11070
+ * BM_LVM/8/0 410957 ns 409387 ns 1706
+ * BM_LVM/8/1 257824 ns 257098 ns 2723
+ * BM_LVM/8/2 342546 ns 341530 ns 2059
+ * BM_LVM/8/3 72896 ns 72700 ns 9685
+ *******************************************************************/
+
+static void BM_LVM(benchmark::State& state) {
+ const size_t chMask = kChMasks[state.range(0) - 1];
+ const effect_uuid_t uuid = kEffectUuids[state.range(1)];
+ const size_t channelCount = audio_channel_count_from_out_mask(chMask);
+
+ // Initialize input buffer with deterministic pseudo-random values
+ std::minstd_rand gen(chMask);
+ std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+ std::vector<float> input(kFrameCount * channelCount);
+ for (auto& in : input) {
+ in = dis(gen);
+ }
+
+ effect_handle_t effectHandle = nullptr;
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&uuid, 1, 1, &effectHandle);
+ status != 0) {
+ ALOGE("create_effect returned an error = %d\n", status);
+ return;
+ }
+
+ effect_config_t config{};
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+ config.inputCfg.channels = config.outputCfg.channels = chMask;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ if (int status = (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+ &config, &replySize, &reply);
+ status != 0) {
+ ALOGE("command returned an error = %d\n", status);
+ return;
+ }
+
+ if (int status =
+ (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+ status != 0) {
+ ALOGE("Command enable call returned error %d\n", reply);
+ return;
+ }
+
+ // Run the test
+ for (auto _ : state) {
+ std::vector<float> output(kFrameCount * std::max(channelCount, kMinOutputChannelCount));
+
+ benchmark::DoNotOptimize(input.data());
+ benchmark::DoNotOptimize(output.data());
+
+ audio_buffer_t inBuffer = {.frameCount = kFrameCount, .f32 = input.data()};
+ audio_buffer_t outBuffer = {.frameCount = kFrameCount, .f32 = output.data()};
+ (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+ benchmark::ClobberMemory();
+ }
+
+ state.SetComplexityN(state.range(0));
+
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+ ALOGE("release_effect returned an error = %d\n", status);
+ return;
+ }
+}
+
+static void LVMArgs(benchmark::internal::Benchmark* b) {
+    // TODO(b/131240940) Test single channel once effects are updated to process mono data
+    for (size_t i = 2; i <= kNumChMasks; i++) {  // size_t matches kNumChMasks, avoids -Wsign-compare
+        for (size_t j = 0; j < kNumEffectUuids; ++j) {
+            b->Args({static_cast<int64_t>(i), static_cast<int64_t>(j)});
+        }
+    }
+}
+
+BENCHMARK(BM_LVM)->Apply(LVMArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 1f2a5e1..dbe0d62 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -30,7 +30,6 @@
"Bundle/src/LVM_Control.cpp",
"SpectrumAnalyzer/src/LVPSA_Control.cpp",
"SpectrumAnalyzer/src/LVPSA_Init.cpp",
- "SpectrumAnalyzer/src/LVPSA_Memory.cpp",
"SpectrumAnalyzer/src/LVPSA_Process.cpp",
"SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
"SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
@@ -132,13 +131,15 @@
shared_libs: [
"liblog",
],
+ static_libs: [
+ "libaudioutils",
+ ],
header_libs: [
- "libhardware_headers"
+ "libhardware_headers",
],
cppflags: [
+ "-DBIQUAD_OPT",
"-fvisibility=hidden",
- "-DSUPPORT_MC",
-
"-Wall",
"-Werror",
],
diff --git a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
index 948d79c..e60ad07 100644
--- a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
+++ b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
@@ -69,15 +69,12 @@
/* */
/****************************************************************************************/
-/* Memory table*/
-#define LVDBE_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
/* Bass Enhancement effect level */
-#define LVDBE_EFFECT_03DB 3 /* Effect defines for backwards compatibility */
-#define LVDBE_EFFECT_06DB 6
-#define LVDBE_EFFECT_09DB 9
-#define LVDBE_EFFECT_12DB 12
-#define LVDBE_EFFECT_15DB 15
+#define LVDBE_EFFECT_03DB 3 /* Effect defines for backwards compatibility */
+#define LVDBE_EFFECT_06DB 6
+#define LVDBE_EFFECT_09DB 9
+#define LVDBE_EFFECT_12DB 12
+#define LVDBE_EFFECT_15DB 15
/****************************************************************************************/
/* */
@@ -86,52 +83,31 @@
/****************************************************************************************/
/* Instance handle */
-typedef void *LVDBE_Handle_t;
+typedef void* LVDBE_Handle_t;
/* Operating modes */
-typedef enum
-{
- LVDBE_OFF = 0,
- LVDBE_ON = 1,
- LVDBE_MODE_MAX = LVM_MAXINT_32
-} LVDBE_Mode_en;
+typedef enum { LVDBE_OFF = 0, LVDBE_ON = 1, LVDBE_MODE_MAX = LVM_MAXINT_32 } LVDBE_Mode_en;
/* High pass filter */
-typedef enum
-{
+typedef enum {
LVDBE_HPF_OFF = 0,
- LVDBE_HPF_ON = 1,
+ LVDBE_HPF_ON = 1,
LVDBE_HPF_MAX = LVM_MAXINT_32
} LVDBE_FilterSelect_en;
/* Volume control */
-typedef enum
-{
+typedef enum {
LVDBE_VOLUME_OFF = 0,
- LVDBE_VOLUME_ON = 1,
+ LVDBE_VOLUME_ON = 1,
LVDBE_VOLUME_MAX = LVM_MAXINT_32
} LVDBE_Volume_en;
-/* Memory Types */
-typedef enum
-{
- LVDBE_PERSISTENT = 0,
- LVDBE_PERSISTENT_DATA = 1,
- LVDBE_PERSISTENT_COEF = 2,
- LVDBE_SCRATCH = 3,
- LVDBE_MEMORY_MAX = LVM_MAXINT_32
-
-} LVDBE_MemoryTypes_en;
-
/* Function return status */
-typedef enum
-{
- LVDBE_SUCCESS = 0, /* Successful return from a routine */
- LVDBE_ALIGNMENTERROR = 1, /* Memory alignment error */
- LVDBE_NULLADDRESS = 2, /* NULL allocation address */
- LVDBE_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
- LVDBE_SIZEERROR = 4, /* Incorrect structure size */
- LVDBE_STATUS_MAX = LVM_MAXINT_32
+typedef enum {
+ LVDBE_SUCCESS = 0, /* Successful return from a routine */
+ LVDBE_NULLADDRESS = 1, /* NULL allocation address */
+ LVDBE_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+ LVDBE_STATUS_MAX = LVM_MAXINT_32
} LVDBE_ReturnStatus_en;
/****************************************************************************************/
@@ -158,40 +134,38 @@
/*
* Bass Enhancement centre frequency
*/
-#define LVDBE_CAP_CENTRE_55Hz 1
-#define LVDBE_CAP_CENTRE_66Hz 2
-#define LVDBE_CAP_CENTRE_78Hz 4
-#define LVDBE_CAP_CENTRE_90Hz 8
+#define LVDBE_CAP_CENTRE_55Hz 1
+#define LVDBE_CAP_CENTRE_66Hz 2
+#define LVDBE_CAP_CENTRE_78Hz 4
+#define LVDBE_CAP_CENTRE_90Hz 8
-typedef enum
-{
+typedef enum {
LVDBE_CENTRE_55HZ = 0,
LVDBE_CENTRE_66HZ = 1,
LVDBE_CENTRE_78HZ = 2,
LVDBE_CENTRE_90HZ = 3,
- LVDBE_CENTRE_MAX = LVM_MAXINT_32
+ LVDBE_CENTRE_MAX = LVM_MAXINT_32
} LVDBE_CentreFreq_en;
/*
* Supported sample rates in samples per second
*/
-#define LVDBE_CAP_FS_8000 1
-#define LVDBE_CAP_FS_11025 2
-#define LVDBE_CAP_FS_12000 4
-#define LVDBE_CAP_FS_16000 8
-#define LVDBE_CAP_FS_22050 16
-#define LVDBE_CAP_FS_24000 32
-#define LVDBE_CAP_FS_32000 64
-#define LVDBE_CAP_FS_44100 128
-#define LVDBE_CAP_FS_48000 256
-#define LVDBE_CAP_FS_88200 512
-#define LVDBE_CAP_FS_96000 1024
-#define LVDBE_CAP_FS_176400 2048
-#define LVDBE_CAP_FS_192000 4096
+#define LVDBE_CAP_FS_8000 1
+#define LVDBE_CAP_FS_11025 2
+#define LVDBE_CAP_FS_12000 4
+#define LVDBE_CAP_FS_16000 8
+#define LVDBE_CAP_FS_22050 16
+#define LVDBE_CAP_FS_24000 32
+#define LVDBE_CAP_FS_32000 64
+#define LVDBE_CAP_FS_44100 128
+#define LVDBE_CAP_FS_48000 256
+#define LVDBE_CAP_FS_88200 512
+#define LVDBE_CAP_FS_96000 1024
+#define LVDBE_CAP_FS_176400 2048
+#define LVDBE_CAP_FS_192000 4096
-typedef enum
-{
- LVDBE_FS_8000 = 0,
+typedef enum {
+ LVDBE_FS_8000 = 0,
LVDBE_FS_11025 = 1,
LVDBE_FS_12000 = 2,
LVDBE_FS_16000 = 3,
@@ -204,7 +178,7 @@
LVDBE_FS_96000 = 10,
LVDBE_FS_176400 = 11,
LVDBE_FS_192000 = 12,
- LVDBE_FS_MAX = LVM_MAXINT_32
+ LVDBE_FS_MAX = LVM_MAXINT_32
} LVDBE_Fs_en;
/****************************************************************************************/
@@ -213,44 +187,25 @@
/* */
/****************************************************************************************/
-/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVM_UINT16 Alignment; /* Region alignment in bytes */
- LVDBE_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
-} LVDBE_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVDBE_MemoryRegion_t Region[LVDBE_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVDBE_MemTab_t;
-
/* Parameter structure */
-typedef struct
-{
- LVDBE_Mode_en OperatingMode;
- LVDBE_Fs_en SampleRate;
- LVM_INT16 EffectLevel;
- LVDBE_CentreFreq_en CentreFrequency;
- LVDBE_FilterSelect_en HPFSelect;
- LVDBE_Volume_en VolumeControl;
- LVM_INT16 VolumedB;
- LVM_INT16 HeadroomdB;
-#ifdef SUPPORT_MC
- LVM_INT16 NrChannels;
-#endif
+typedef struct {
+ LVDBE_Mode_en OperatingMode;
+ LVDBE_Fs_en SampleRate;
+ LVM_INT16 EffectLevel;
+ LVDBE_CentreFreq_en CentreFrequency;
+ LVDBE_FilterSelect_en HPFSelect;
+ LVDBE_Volume_en VolumeControl;
+ LVM_INT16 VolumedB;
+ LVM_INT16 HeadroomdB;
+ LVM_INT16 NrChannels;
} LVDBE_Params_t;
/* Capability structure */
-typedef struct
-{
- LVM_UINT16 SampleRate; /* Sampling rate capabilities */
- LVM_UINT16 CentreFrequency; /* Centre frequency capabilities */
- LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
+typedef struct {
+ LVM_UINT16 SampleRate; /* Sampling rate capabilities */
+ LVM_UINT16 CentreFrequency; /* Centre frequency capabilities */
+ LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
} LVDBE_Capabilities_t;
/****************************************************************************************/
@@ -261,75 +216,39 @@
/****************************************************************************************/
/* */
-/* FUNCTION: LVDBE_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the default capabilites */
-/* */
-/* RETURNS: */
-/* LVDBE_SUCCESS Succeeded */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVDBE_Process function */
-/* */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t hInstance,
- LVDBE_MemTab_t *pMemoryTable,
- LVDBE_Capabilities_t *pCapabilities);
-
-/****************************************************************************************/
-/* */
/* FUNCTION: LVDBE_Init */
/* */
/* DESCRIPTION: */
/* Create and initialisation function for the Bass Enhancement module */
/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* LVDBE_Memory before calling this function. */
-/* */
/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
+/* phInstance Pointer to instance handle */
/* pCapabilities Pointer to the initialisation capabilities */
+/* pScratch Pointer to the bundle scratch buffer */
/* */
/* RETURNS: */
-/* LVDBE_SUCCESS Initialisation succeeded */
-/* LVDBE_ALIGNMENTERROR Instance or scratch memory on incorrect alignment */
-/* LVDBE_NULLADDRESS One or more memory has a NULL pointer */
+/* LVDBE_SUCCESS Initialisation succeeded */
+/* LVDBE_NULLADDRESS One or more memory has a NULL pointer - malloc failure */
/* */
/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVDBE_Process function */
+/* 1. This function must not be interrupted by the LVDBE_Process function */
/* */
/****************************************************************************************/
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+ void* pScratch);
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t *phInstance,
- LVDBE_MemTab_t *pMemoryTable,
- LVDBE_Capabilities_t *pCapabilities);
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVDBE_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created during LVDBE_Init including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance);
/****************************************************************************************/
/* */
@@ -351,8 +270,7 @@
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance,
- LVDBE_Params_t *pParams);
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -374,8 +292,8 @@
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
- LVDBE_Capabilities_t *pCapabilities);
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+ LVDBE_Capabilities_t* pCapabilities);
/****************************************************************************************/
/* */
@@ -414,8 +332,7 @@
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance,
- LVDBE_Params_t *pParams);
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -437,9 +354,7 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples);
+LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
#endif /* __LVDBE_H__ */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
index b364dae..30e1692 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
@@ -24,7 +24,7 @@
/* */
/************************************************************************************/
-#define LVDBE_SCALESHIFT 10 /* As a power of 2 */
+#define LVDBE_SCALESHIFT 10 /* As a power of 2 */
/************************************************************************************/
/* */
@@ -32,289 +32,289 @@
/* */
/************************************************************************************/
- /* Coefficients for centre frequency 55Hz */
-#define HPF_Fs8000_Fc55_A0 0.958849f
-#define HPF_Fs8000_Fc55_A1 (-1.917698f)
-#define HPF_Fs8000_Fc55_A2 0.958849f
-#define HPF_Fs8000_Fc55_B1 (-1.939001f)
-#define HPF_Fs8000_Fc55_B2 0.940807f
-#define HPF_Fs11025_Fc55_A0 0.966909f
-#define HPF_Fs11025_Fc55_A1 (-1.933818f)
-#define HPF_Fs11025_Fc55_A2 0.966909f
-#define HPF_Fs11025_Fc55_B1 (-1.955732f)
-#define HPF_Fs11025_Fc55_B2 0.956690f
-#define HPF_Fs12000_Fc55_A0 0.968650f
-#define HPF_Fs12000_Fc55_A1 (-1.937300f)
-#define HPF_Fs12000_Fc55_A2 0.968650f
-#define HPF_Fs12000_Fc55_B1 (-1.959327f)
-#define HPF_Fs12000_Fc55_B2 0.960138f
-#define HPF_Fs16000_Fc55_A0 0.973588f
-#define HPF_Fs16000_Fc55_A1 (-1.947176f)
-#define HPF_Fs16000_Fc55_A2 0.973588f
-#define HPF_Fs16000_Fc55_B1 (-1.969494f)
-#define HPF_Fs16000_Fc55_B2 0.969952f
-#define HPF_Fs22050_Fc55_A0 0.977671f
-#define HPF_Fs22050_Fc55_A1 (-1.955343f)
-#define HPF_Fs22050_Fc55_A2 0.977671f
-#define HPF_Fs22050_Fc55_B1 (-1.977863f)
-#define HPF_Fs22050_Fc55_B2 0.978105f
-#define HPF_Fs24000_Fc55_A0 0.978551f
-#define HPF_Fs24000_Fc55_A1 (-1.957102f)
-#define HPF_Fs24000_Fc55_A2 0.978551f
-#define HPF_Fs24000_Fc55_B1 (-1.979662f)
-#define HPF_Fs24000_Fc55_B2 0.979866f
-#define HPF_Fs32000_Fc55_A0 0.981042f
-#define HPF_Fs32000_Fc55_A1 (-1.962084f)
-#define HPF_Fs32000_Fc55_A2 0.981042f
-#define HPF_Fs32000_Fc55_B1 (-1.984746f)
-#define HPF_Fs32000_Fc55_B2 0.984861f
-#define HPF_Fs44100_Fc55_A0 0.983097f
-#define HPF_Fs44100_Fc55_A1 (-1.966194f)
-#define HPF_Fs44100_Fc55_A2 0.983097f
-#define HPF_Fs44100_Fc55_B1 (-1.988931f)
-#define HPF_Fs44100_Fc55_B2 0.988992f
-#define HPF_Fs48000_Fc55_A0 0.983539f
-#define HPF_Fs48000_Fc55_A1 (-1.967079f)
-#define HPF_Fs48000_Fc55_A2 0.983539f
-#define HPF_Fs48000_Fc55_B1 (-1.989831f)
-#define HPF_Fs48000_Fc55_B2 0.989882f
+/* Coefficients for centre frequency 55Hz */
+#define HPF_Fs8000_Fc55_A0 0.958849f
+#define HPF_Fs8000_Fc55_A1 (-1.917698f)
+#define HPF_Fs8000_Fc55_A2 0.958849f
+#define HPF_Fs8000_Fc55_B1 (-1.939001f)
+#define HPF_Fs8000_Fc55_B2 0.940807f
+#define HPF_Fs11025_Fc55_A0 0.966909f
+#define HPF_Fs11025_Fc55_A1 (-1.933818f)
+#define HPF_Fs11025_Fc55_A2 0.966909f
+#define HPF_Fs11025_Fc55_B1 (-1.955732f)
+#define HPF_Fs11025_Fc55_B2 0.956690f
+#define HPF_Fs12000_Fc55_A0 0.968650f
+#define HPF_Fs12000_Fc55_A1 (-1.937300f)
+#define HPF_Fs12000_Fc55_A2 0.968650f
+#define HPF_Fs12000_Fc55_B1 (-1.959327f)
+#define HPF_Fs12000_Fc55_B2 0.960138f
+#define HPF_Fs16000_Fc55_A0 0.973588f
+#define HPF_Fs16000_Fc55_A1 (-1.947176f)
+#define HPF_Fs16000_Fc55_A2 0.973588f
+#define HPF_Fs16000_Fc55_B1 (-1.969494f)
+#define HPF_Fs16000_Fc55_B2 0.969952f
+#define HPF_Fs22050_Fc55_A0 0.977671f
+#define HPF_Fs22050_Fc55_A1 (-1.955343f)
+#define HPF_Fs22050_Fc55_A2 0.977671f
+#define HPF_Fs22050_Fc55_B1 (-1.977863f)
+#define HPF_Fs22050_Fc55_B2 0.978105f
+#define HPF_Fs24000_Fc55_A0 0.978551f
+#define HPF_Fs24000_Fc55_A1 (-1.957102f)
+#define HPF_Fs24000_Fc55_A2 0.978551f
+#define HPF_Fs24000_Fc55_B1 (-1.979662f)
+#define HPF_Fs24000_Fc55_B2 0.979866f
+#define HPF_Fs32000_Fc55_A0 0.981042f
+#define HPF_Fs32000_Fc55_A1 (-1.962084f)
+#define HPF_Fs32000_Fc55_A2 0.981042f
+#define HPF_Fs32000_Fc55_B1 (-1.984746f)
+#define HPF_Fs32000_Fc55_B2 0.984861f
+#define HPF_Fs44100_Fc55_A0 0.983097f
+#define HPF_Fs44100_Fc55_A1 (-1.966194f)
+#define HPF_Fs44100_Fc55_A2 0.983097f
+#define HPF_Fs44100_Fc55_B1 (-1.988931f)
+#define HPF_Fs44100_Fc55_B2 0.988992f
+#define HPF_Fs48000_Fc55_A0 0.983539f
+#define HPF_Fs48000_Fc55_A1 (-1.967079f)
+#define HPF_Fs48000_Fc55_A2 0.983539f
+#define HPF_Fs48000_Fc55_B1 (-1.989831f)
+#define HPF_Fs48000_Fc55_B2 0.989882f
-#define HPF_Fs88200_Fc55_A0 0.985818f
-#define HPF_Fs88200_Fc55_A1 (-1.971636f)
-#define HPF_Fs88200_Fc55_A2 0.985818f
-#define HPF_Fs88200_Fc55_B1 (-1.994466f)
-#define HPF_Fs88200_Fc55_B2 0.994481f
+#define HPF_Fs88200_Fc55_A0 0.985818f
+#define HPF_Fs88200_Fc55_A1 (-1.971636f)
+#define HPF_Fs88200_Fc55_A2 0.985818f
+#define HPF_Fs88200_Fc55_B1 (-1.994466f)
+#define HPF_Fs88200_Fc55_B2 0.994481f
-#define HPF_Fs96000_Fc55_A0 0.986040f
-#define HPF_Fs96000_Fc55_A1 (-1.972080f)
-#define HPF_Fs96000_Fc55_A2 0.986040f
-#define HPF_Fs96000_Fc55_B1 (-1.994915f)
-#define HPF_Fs96000_Fc55_B2 0.994928f
+#define HPF_Fs96000_Fc55_A0 0.986040f
+#define HPF_Fs96000_Fc55_A1 (-1.972080f)
+#define HPF_Fs96000_Fc55_A2 0.986040f
+#define HPF_Fs96000_Fc55_B1 (-1.994915f)
+#define HPF_Fs96000_Fc55_B2 0.994928f
-#define HPF_Fs176400_Fc55_A0 0.987183f
-#define HPF_Fs176400_Fc55_A1 (-1.974366f)
-#define HPF_Fs176400_Fc55_A2 0.987183f
-#define HPF_Fs176400_Fc55_B1 (-1.997233f)
-#define HPF_Fs176400_Fc55_B2 0.997237f
+#define HPF_Fs176400_Fc55_A0 0.987183f
+#define HPF_Fs176400_Fc55_A1 (-1.974366f)
+#define HPF_Fs176400_Fc55_A2 0.987183f
+#define HPF_Fs176400_Fc55_B1 (-1.997233f)
+#define HPF_Fs176400_Fc55_B2 0.997237f
-#define HPF_Fs192000_Fc55_A0 0.987294f
-#define HPF_Fs192000_Fc55_A1 (-1.974588f)
-#define HPF_Fs192000_Fc55_A2 0.987294f
-#define HPF_Fs192000_Fc55_B1 (-1.997458f)
-#define HPF_Fs192000_Fc55_B2 0.997461f
+#define HPF_Fs192000_Fc55_A0 0.987294f
+#define HPF_Fs192000_Fc55_A1 (-1.974588f)
+#define HPF_Fs192000_Fc55_A2 0.987294f
+#define HPF_Fs192000_Fc55_B1 (-1.997458f)
+#define HPF_Fs192000_Fc55_B2 0.997461f
- /* Coefficients for centre frequency 66Hz */
-#define HPF_Fs8000_Fc66_A0 0.953016f
-#define HPF_Fs8000_Fc66_A1 (-1.906032f)
-#define HPF_Fs8000_Fc66_A2 0.953016f
-#define HPF_Fs8000_Fc66_B1 (-1.926810f)
-#define HPF_Fs8000_Fc66_B2 0.929396f
-#define HPF_Fs11025_Fc66_A0 0.962638f
-#define HPF_Fs11025_Fc66_A1 (-1.925275f)
-#define HPF_Fs11025_Fc66_A2 0.962638f
-#define HPF_Fs11025_Fc66_B1 (-1.946881f)
-#define HPF_Fs11025_Fc66_B2 0.948256f
-#define HPF_Fs12000_Fc66_A0 0.964718f
-#define HPF_Fs12000_Fc66_A1 (-1.929435f)
-#define HPF_Fs12000_Fc66_A2 0.964718f
-#define HPF_Fs12000_Fc66_B1 (-1.951196f)
-#define HPF_Fs12000_Fc66_B2 0.952359f
-#define HPF_Fs16000_Fc66_A0 0.970622f
-#define HPF_Fs16000_Fc66_A1 (-1.941244f)
-#define HPF_Fs16000_Fc66_A2 0.970622f
-#define HPF_Fs16000_Fc66_B1 (-1.963394f)
-#define HPF_Fs16000_Fc66_B2 0.964052f
-#define HPF_Fs22050_Fc66_A0 0.975509f
-#define HPF_Fs22050_Fc66_A1 (-1.951019f)
-#define HPF_Fs22050_Fc66_A2 0.975509f
-#define HPF_Fs22050_Fc66_B1 (-1.973436f)
-#define HPF_Fs22050_Fc66_B2 0.973784f
-#define HPF_Fs24000_Fc66_A0 0.976563f
-#define HPF_Fs24000_Fc66_A1 (-1.953125f)
-#define HPF_Fs24000_Fc66_A2 0.976563f
-#define HPF_Fs24000_Fc66_B1 (-1.975594f)
-#define HPF_Fs24000_Fc66_B2 0.975889f
-#define HPF_Fs32000_Fc66_A0 0.979547f
-#define HPF_Fs32000_Fc66_A1 (-1.959093f)
-#define HPF_Fs32000_Fc66_A2 0.979547f
-#define HPF_Fs32000_Fc66_B1 (-1.981695f)
-#define HPF_Fs32000_Fc66_B2 0.981861f
-#define HPF_Fs44100_Fc66_A0 0.982010f
-#define HPF_Fs44100_Fc66_A1 (-1.964019f)
-#define HPF_Fs44100_Fc66_A2 0.982010f
-#define HPF_Fs44100_Fc66_B1 (-1.986718f)
-#define HPF_Fs44100_Fc66_B2 0.986805f
-#define HPF_Fs48000_Fc66_A0 0.982540f
-#define HPF_Fs48000_Fc66_A1 (-1.965079f)
-#define HPF_Fs48000_Fc66_A2 0.982540f
-#define HPF_Fs48000_Fc66_B1 (-1.987797f)
-#define HPF_Fs48000_Fc66_B2 0.987871f
+/* Coefficients for centre frequency 66Hz */
+#define HPF_Fs8000_Fc66_A0 0.953016f
+#define HPF_Fs8000_Fc66_A1 (-1.906032f)
+#define HPF_Fs8000_Fc66_A2 0.953016f
+#define HPF_Fs8000_Fc66_B1 (-1.926810f)
+#define HPF_Fs8000_Fc66_B2 0.929396f
+#define HPF_Fs11025_Fc66_A0 0.962638f
+#define HPF_Fs11025_Fc66_A1 (-1.925275f)
+#define HPF_Fs11025_Fc66_A2 0.962638f
+#define HPF_Fs11025_Fc66_B1 (-1.946881f)
+#define HPF_Fs11025_Fc66_B2 0.948256f
+#define HPF_Fs12000_Fc66_A0 0.964718f
+#define HPF_Fs12000_Fc66_A1 (-1.929435f)
+#define HPF_Fs12000_Fc66_A2 0.964718f
+#define HPF_Fs12000_Fc66_B1 (-1.951196f)
+#define HPF_Fs12000_Fc66_B2 0.952359f
+#define HPF_Fs16000_Fc66_A0 0.970622f
+#define HPF_Fs16000_Fc66_A1 (-1.941244f)
+#define HPF_Fs16000_Fc66_A2 0.970622f
+#define HPF_Fs16000_Fc66_B1 (-1.963394f)
+#define HPF_Fs16000_Fc66_B2 0.964052f
+#define HPF_Fs22050_Fc66_A0 0.975509f
+#define HPF_Fs22050_Fc66_A1 (-1.951019f)
+#define HPF_Fs22050_Fc66_A2 0.975509f
+#define HPF_Fs22050_Fc66_B1 (-1.973436f)
+#define HPF_Fs22050_Fc66_B2 0.973784f
+#define HPF_Fs24000_Fc66_A0 0.976563f
+#define HPF_Fs24000_Fc66_A1 (-1.953125f)
+#define HPF_Fs24000_Fc66_A2 0.976563f
+#define HPF_Fs24000_Fc66_B1 (-1.975594f)
+#define HPF_Fs24000_Fc66_B2 0.975889f
+#define HPF_Fs32000_Fc66_A0 0.979547f
+#define HPF_Fs32000_Fc66_A1 (-1.959093f)
+#define HPF_Fs32000_Fc66_A2 0.979547f
+#define HPF_Fs32000_Fc66_B1 (-1.981695f)
+#define HPF_Fs32000_Fc66_B2 0.981861f
+#define HPF_Fs44100_Fc66_A0 0.982010f
+#define HPF_Fs44100_Fc66_A1 (-1.964019f)
+#define HPF_Fs44100_Fc66_A2 0.982010f
+#define HPF_Fs44100_Fc66_B1 (-1.986718f)
+#define HPF_Fs44100_Fc66_B2 0.986805f
+#define HPF_Fs48000_Fc66_A0 0.982540f
+#define HPF_Fs48000_Fc66_A1 (-1.965079f)
+#define HPF_Fs48000_Fc66_A2 0.982540f
+#define HPF_Fs48000_Fc66_B1 (-1.987797f)
+#define HPF_Fs48000_Fc66_B2 0.987871f
-#define HPF_Fs88200_Fc66_A0 0.985273f
-#define HPF_Fs88200_Fc66_A1 (-1.970546f)
-#define HPF_Fs88200_Fc66_A2 0.985273f
-#define HPF_Fs88200_Fc66_B1 (-1.993359f)
-#define HPF_Fs88200_Fc66_B2 0.993381f
+#define HPF_Fs88200_Fc66_A0 0.985273f
+#define HPF_Fs88200_Fc66_A1 (-1.970546f)
+#define HPF_Fs88200_Fc66_A2 0.985273f
+#define HPF_Fs88200_Fc66_B1 (-1.993359f)
+#define HPF_Fs88200_Fc66_B2 0.993381f
-#define HPF_Fs96000_Fc66_A0 0.985539f
-#define HPF_Fs96000_Fc66_A1 (-1.971077f)
-#define HPF_Fs96000_Fc66_A2 0.985539f
-#define HPF_Fs96000_Fc66_B1 (-1.993898f)
-#define HPF_Fs96000_Fc66_B2 0.993917f
+#define HPF_Fs96000_Fc66_A0 0.985539f
+#define HPF_Fs96000_Fc66_A1 (-1.971077f)
+#define HPF_Fs96000_Fc66_A2 0.985539f
+#define HPF_Fs96000_Fc66_B1 (-1.993898f)
+#define HPF_Fs96000_Fc66_B2 0.993917f
-#define HPF_Fs176400_Fc66_A0 0.986910f
-#define HPF_Fs176400_Fc66_A1 (-1.973820f)
-#define HPF_Fs176400_Fc66_A2 0.986910f
-#define HPF_Fs176400_Fc66_B1 (-1.996679f)
-#define HPF_Fs176400_Fc66_B2 0.996685f
+#define HPF_Fs176400_Fc66_A0 0.986910f
+#define HPF_Fs176400_Fc66_A1 (-1.973820f)
+#define HPF_Fs176400_Fc66_A2 0.986910f
+#define HPF_Fs176400_Fc66_B1 (-1.996679f)
+#define HPF_Fs176400_Fc66_B2 0.996685f
-#define HPF_Fs192000_Fc66_A0 0.987043f
-#define HPF_Fs192000_Fc66_A1 (-1.974086f)
-#define HPF_Fs192000_Fc66_A2 0.987043f
-#define HPF_Fs192000_Fc66_B1 (-1.996949f)
-#define HPF_Fs192000_Fc66_B2 0.996954f
+#define HPF_Fs192000_Fc66_A0 0.987043f
+#define HPF_Fs192000_Fc66_A1 (-1.974086f)
+#define HPF_Fs192000_Fc66_A2 0.987043f
+#define HPF_Fs192000_Fc66_B1 (-1.996949f)
+#define HPF_Fs192000_Fc66_B2 0.996954f
/* Coefficients for centre frequency 78Hz */
-#define HPF_Fs8000_Fc78_A0 0.946693f
-#define HPF_Fs8000_Fc78_A1 (-1.893387f)
-#define HPF_Fs8000_Fc78_A2 0.946693f
-#define HPF_Fs8000_Fc78_B1 (-1.913517f)
-#define HPF_Fs8000_Fc78_B2 0.917105f
-#define HPF_Fs11025_Fc78_A0 0.957999f
-#define HPF_Fs11025_Fc78_A1 (-1.915998f)
-#define HPF_Fs11025_Fc78_A2 0.957999f
-#define HPF_Fs11025_Fc78_B1 (-1.937229f)
-#define HPF_Fs11025_Fc78_B2 0.939140f
-#define HPF_Fs12000_Fc78_A0 0.960446f
-#define HPF_Fs12000_Fc78_A1 (-1.920892f)
-#define HPF_Fs12000_Fc78_A2 0.960446f
-#define HPF_Fs12000_Fc78_B1 (-1.942326f)
-#define HPF_Fs12000_Fc78_B2 0.943944f
-#define HPF_Fs16000_Fc78_A0 0.967397f
-#define HPF_Fs16000_Fc78_A1 (-1.934794f)
-#define HPF_Fs16000_Fc78_A2 0.967397f
-#define HPF_Fs16000_Fc78_B1 (-1.956740f)
-#define HPF_Fs16000_Fc78_B2 0.957656f
-#define HPF_Fs22050_Fc78_A0 0.973156f
-#define HPF_Fs22050_Fc78_A1 (-1.946313f)
-#define HPF_Fs22050_Fc78_A2 0.973156f
-#define HPF_Fs22050_Fc78_B1 (-1.968607f)
-#define HPF_Fs22050_Fc78_B2 0.969092f
-#define HPF_Fs24000_Fc78_A0 0.974398f
-#define HPF_Fs24000_Fc78_A1 (-1.948797f)
-#define HPF_Fs24000_Fc78_A2 0.974398f
-#define HPF_Fs24000_Fc78_B1 (-1.971157f)
-#define HPF_Fs24000_Fc78_B2 0.971568f
-#define HPF_Fs32000_Fc78_A0 0.977918f
-#define HPF_Fs32000_Fc78_A1 (-1.955836f)
-#define HPF_Fs32000_Fc78_A2 0.977918f
-#define HPF_Fs32000_Fc78_B1 (-1.978367f)
-#define HPF_Fs32000_Fc78_B2 0.978599f
-#define HPF_Fs44100_Fc78_A0 0.980824f
-#define HPF_Fs44100_Fc78_A1 (-1.961649f)
-#define HPF_Fs44100_Fc78_A2 0.980824f
-#define HPF_Fs44100_Fc78_B1 (-1.984303f)
-#define HPF_Fs44100_Fc78_B2 0.984425f
-#define HPF_Fs48000_Fc78_A0 0.981450f
-#define HPF_Fs48000_Fc78_A1 (-1.962900f)
-#define HPF_Fs48000_Fc78_A2 0.981450f
-#define HPF_Fs48000_Fc78_B1 (-1.985578f)
-#define HPF_Fs48000_Fc78_B2 0.985681f
+#define HPF_Fs8000_Fc78_A0 0.946693f
+#define HPF_Fs8000_Fc78_A1 (-1.893387f)
+#define HPF_Fs8000_Fc78_A2 0.946693f
+#define HPF_Fs8000_Fc78_B1 (-1.913517f)
+#define HPF_Fs8000_Fc78_B2 0.917105f
+#define HPF_Fs11025_Fc78_A0 0.957999f
+#define HPF_Fs11025_Fc78_A1 (-1.915998f)
+#define HPF_Fs11025_Fc78_A2 0.957999f
+#define HPF_Fs11025_Fc78_B1 (-1.937229f)
+#define HPF_Fs11025_Fc78_B2 0.939140f
+#define HPF_Fs12000_Fc78_A0 0.960446f
+#define HPF_Fs12000_Fc78_A1 (-1.920892f)
+#define HPF_Fs12000_Fc78_A2 0.960446f
+#define HPF_Fs12000_Fc78_B1 (-1.942326f)
+#define HPF_Fs12000_Fc78_B2 0.943944f
+#define HPF_Fs16000_Fc78_A0 0.967397f
+#define HPF_Fs16000_Fc78_A1 (-1.934794f)
+#define HPF_Fs16000_Fc78_A2 0.967397f
+#define HPF_Fs16000_Fc78_B1 (-1.956740f)
+#define HPF_Fs16000_Fc78_B2 0.957656f
+#define HPF_Fs22050_Fc78_A0 0.973156f
+#define HPF_Fs22050_Fc78_A1 (-1.946313f)
+#define HPF_Fs22050_Fc78_A2 0.973156f
+#define HPF_Fs22050_Fc78_B1 (-1.968607f)
+#define HPF_Fs22050_Fc78_B2 0.969092f
+#define HPF_Fs24000_Fc78_A0 0.974398f
+#define HPF_Fs24000_Fc78_A1 (-1.948797f)
+#define HPF_Fs24000_Fc78_A2 0.974398f
+#define HPF_Fs24000_Fc78_B1 (-1.971157f)
+#define HPF_Fs24000_Fc78_B2 0.971568f
+#define HPF_Fs32000_Fc78_A0 0.977918f
+#define HPF_Fs32000_Fc78_A1 (-1.955836f)
+#define HPF_Fs32000_Fc78_A2 0.977918f
+#define HPF_Fs32000_Fc78_B1 (-1.978367f)
+#define HPF_Fs32000_Fc78_B2 0.978599f
+#define HPF_Fs44100_Fc78_A0 0.980824f
+#define HPF_Fs44100_Fc78_A1 (-1.961649f)
+#define HPF_Fs44100_Fc78_A2 0.980824f
+#define HPF_Fs44100_Fc78_B1 (-1.984303f)
+#define HPF_Fs44100_Fc78_B2 0.984425f
+#define HPF_Fs48000_Fc78_A0 0.981450f
+#define HPF_Fs48000_Fc78_A1 (-1.962900f)
+#define HPF_Fs48000_Fc78_A2 0.981450f
+#define HPF_Fs48000_Fc78_B1 (-1.985578f)
+#define HPF_Fs48000_Fc78_B2 0.985681f
-#define HPF_Fs88200_Fc78_A0 0.984678f
-#define HPF_Fs88200_Fc78_A1 (-1.969356f)
-#define HPF_Fs88200_Fc78_A2 0.984678f
-#define HPF_Fs88200_Fc78_B1 (-1.992151f)
-#define HPF_Fs88200_Fc78_B2 0.992182f
+#define HPF_Fs88200_Fc78_A0 0.984678f
+#define HPF_Fs88200_Fc78_A1 (-1.969356f)
+#define HPF_Fs88200_Fc78_A2 0.984678f
+#define HPF_Fs88200_Fc78_B1 (-1.992151f)
+#define HPF_Fs88200_Fc78_B2 0.992182f
-#define HPF_Fs96000_Fc78_A0 0.984992f
-#define HPF_Fs96000_Fc78_A1 (-1.969984f)
-#define HPF_Fs96000_Fc78_A2 0.984992f
-#define HPF_Fs96000_Fc78_B1 (-1.992789f)
-#define HPF_Fs96000_Fc78_B2 0.992815f
+#define HPF_Fs96000_Fc78_A0 0.984992f
+#define HPF_Fs96000_Fc78_A1 (-1.969984f)
+#define HPF_Fs96000_Fc78_A2 0.984992f
+#define HPF_Fs96000_Fc78_B1 (-1.992789f)
+#define HPF_Fs96000_Fc78_B2 0.992815f
-#define HPF_Fs176400_Fc78_A0 0.986612f
-#define HPF_Fs176400_Fc78_A1 (-1.973224f)
-#define HPF_Fs176400_Fc78_A2 0.986612f
-#define HPF_Fs176400_Fc78_B1 (-1.996076f)
-#define HPF_Fs176400_Fc78_B2 0.996083f
+#define HPF_Fs176400_Fc78_A0 0.986612f
+#define HPF_Fs176400_Fc78_A1 (-1.973224f)
+#define HPF_Fs176400_Fc78_A2 0.986612f
+#define HPF_Fs176400_Fc78_B1 (-1.996076f)
+#define HPF_Fs176400_Fc78_B2 0.996083f
-#define HPF_Fs192000_Fc78_A0 0.986769f
-#define HPF_Fs192000_Fc78_A1 (-1.973539f)
-#define HPF_Fs192000_Fc78_A2 0.986769f
-#define HPF_Fs192000_Fc78_B1 (-1.996394f)
-#define HPF_Fs192000_Fc78_B2 0.996401f
+#define HPF_Fs192000_Fc78_A0 0.986769f
+#define HPF_Fs192000_Fc78_A1 (-1.973539f)
+#define HPF_Fs192000_Fc78_A2 0.986769f
+#define HPF_Fs192000_Fc78_B1 (-1.996394f)
+#define HPF_Fs192000_Fc78_B2 0.996401f
/* Coefficients for centre frequency 90Hz */
-#define HPF_Fs8000_Fc90_A0 0.940412f
-#define HPF_Fs8000_Fc90_A1 (-1.880825f)
-#define HPF_Fs8000_Fc90_A2 0.940412f
-#define HPF_Fs8000_Fc90_B1 (-1.900231f)
-#define HPF_Fs8000_Fc90_B2 0.904977f
-#define HPF_Fs11025_Fc90_A0 0.953383f
-#define HPF_Fs11025_Fc90_A1 (-1.906766f)
-#define HPF_Fs11025_Fc90_A2 0.953383f
-#define HPF_Fs11025_Fc90_B1 (-1.927579f)
-#define HPF_Fs11025_Fc90_B2 0.930111f
-#define HPF_Fs12000_Fc90_A0 0.956193f
-#define HPF_Fs12000_Fc90_A1 (-1.912387f)
-#define HPF_Fs12000_Fc90_A2 0.956193f
-#define HPF_Fs12000_Fc90_B1 (-1.933459f)
-#define HPF_Fs12000_Fc90_B2 0.935603f
-#define HPF_Fs16000_Fc90_A0 0.964183f
-#define HPF_Fs16000_Fc90_A1 (-1.928365f)
-#define HPF_Fs16000_Fc90_A2 0.964183f
-#define HPF_Fs16000_Fc90_B1 (-1.950087f)
-#define HPF_Fs16000_Fc90_B2 0.951303f
-#define HPF_Fs22050_Fc90_A0 0.970809f
-#define HPF_Fs22050_Fc90_A1 (-1.941618f)
-#define HPF_Fs22050_Fc90_A2 0.970809f
-#define HPF_Fs22050_Fc90_B1 (-1.963778f)
-#define HPF_Fs22050_Fc90_B2 0.964423f
-#define HPF_Fs24000_Fc90_A0 0.972239f
-#define HPF_Fs24000_Fc90_A1 (-1.944477f)
-#define HPF_Fs24000_Fc90_A2 0.972239f
-#define HPF_Fs24000_Fc90_B1 (-1.966721f)
-#define HPF_Fs24000_Fc90_B2 0.967266f
-#define HPF_Fs32000_Fc90_A0 0.976292f
-#define HPF_Fs32000_Fc90_A1 (-1.952584f)
-#define HPF_Fs32000_Fc90_A2 0.976292f
-#define HPF_Fs32000_Fc90_B1 (-1.975040f)
-#define HPF_Fs32000_Fc90_B2 0.975347f
-#define HPF_Fs44100_Fc90_A0 0.979641f
-#define HPF_Fs44100_Fc90_A1 (-1.959282f)
-#define HPF_Fs44100_Fc90_A2 0.979641f
-#define HPF_Fs44100_Fc90_B1 (-1.981888f)
-#define HPF_Fs44100_Fc90_B2 0.982050f
-#define HPF_Fs48000_Fc90_A0 0.980362f
-#define HPF_Fs48000_Fc90_A1 (-1.960724f)
-#define HPF_Fs48000_Fc90_A2 0.980362f
-#define HPF_Fs48000_Fc90_B1 (-1.983359f)
-#define HPF_Fs48000_Fc90_B2 0.983497f
+#define HPF_Fs8000_Fc90_A0 0.940412f
+#define HPF_Fs8000_Fc90_A1 (-1.880825f)
+#define HPF_Fs8000_Fc90_A2 0.940412f
+#define HPF_Fs8000_Fc90_B1 (-1.900231f)
+#define HPF_Fs8000_Fc90_B2 0.904977f
+#define HPF_Fs11025_Fc90_A0 0.953383f
+#define HPF_Fs11025_Fc90_A1 (-1.906766f)
+#define HPF_Fs11025_Fc90_A2 0.953383f
+#define HPF_Fs11025_Fc90_B1 (-1.927579f)
+#define HPF_Fs11025_Fc90_B2 0.930111f
+#define HPF_Fs12000_Fc90_A0 0.956193f
+#define HPF_Fs12000_Fc90_A1 (-1.912387f)
+#define HPF_Fs12000_Fc90_A2 0.956193f
+#define HPF_Fs12000_Fc90_B1 (-1.933459f)
+#define HPF_Fs12000_Fc90_B2 0.935603f
+#define HPF_Fs16000_Fc90_A0 0.964183f
+#define HPF_Fs16000_Fc90_A1 (-1.928365f)
+#define HPF_Fs16000_Fc90_A2 0.964183f
+#define HPF_Fs16000_Fc90_B1 (-1.950087f)
+#define HPF_Fs16000_Fc90_B2 0.951303f
+#define HPF_Fs22050_Fc90_A0 0.970809f
+#define HPF_Fs22050_Fc90_A1 (-1.941618f)
+#define HPF_Fs22050_Fc90_A2 0.970809f
+#define HPF_Fs22050_Fc90_B1 (-1.963778f)
+#define HPF_Fs22050_Fc90_B2 0.964423f
+#define HPF_Fs24000_Fc90_A0 0.972239f
+#define HPF_Fs24000_Fc90_A1 (-1.944477f)
+#define HPF_Fs24000_Fc90_A2 0.972239f
+#define HPF_Fs24000_Fc90_B1 (-1.966721f)
+#define HPF_Fs24000_Fc90_B2 0.967266f
+#define HPF_Fs32000_Fc90_A0 0.976292f
+#define HPF_Fs32000_Fc90_A1 (-1.952584f)
+#define HPF_Fs32000_Fc90_A2 0.976292f
+#define HPF_Fs32000_Fc90_B1 (-1.975040f)
+#define HPF_Fs32000_Fc90_B2 0.975347f
+#define HPF_Fs44100_Fc90_A0 0.979641f
+#define HPF_Fs44100_Fc90_A1 (-1.959282f)
+#define HPF_Fs44100_Fc90_A2 0.979641f
+#define HPF_Fs44100_Fc90_B1 (-1.981888f)
+#define HPF_Fs44100_Fc90_B2 0.982050f
+#define HPF_Fs48000_Fc90_A0 0.980362f
+#define HPF_Fs48000_Fc90_A1 (-1.960724f)
+#define HPF_Fs48000_Fc90_A2 0.980362f
+#define HPF_Fs48000_Fc90_B1 (-1.983359f)
+#define HPF_Fs48000_Fc90_B2 0.983497f
-#define HPF_Fs88200_Fc90_A0 0.984084f
-#define HPF_Fs88200_Fc90_A1 (-1.968168f)
-#define HPF_Fs88200_Fc90_A2 0.984084f
-#define HPF_Fs88200_Fc90_B1 (-1.990944f)
-#define HPF_Fs88200_Fc90_B2 0.990985f
+#define HPF_Fs88200_Fc90_A0 0.984084f
+#define HPF_Fs88200_Fc90_A1 (-1.968168f)
+#define HPF_Fs88200_Fc90_A2 0.984084f
+#define HPF_Fs88200_Fc90_B1 (-1.990944f)
+#define HPF_Fs88200_Fc90_B2 0.990985f
-#define HPF_Fs96000_Fc90_A0 0.984446f
-#define HPF_Fs96000_Fc90_A1 (-1.968892f)
-#define HPF_Fs96000_Fc90_A2 0.984446f
-#define HPF_Fs96000_Fc90_B1 (-1.991680f)
-#define HPF_Fs96000_Fc90_B2 0.991714f
+#define HPF_Fs96000_Fc90_A0 0.984446f
+#define HPF_Fs96000_Fc90_A1 (-1.968892f)
+#define HPF_Fs96000_Fc90_A2 0.984446f
+#define HPF_Fs96000_Fc90_B1 (-1.991680f)
+#define HPF_Fs96000_Fc90_B2 0.991714f
-#define HPF_Fs176400_Fc90_A0 0.986314f
-#define HPF_Fs176400_Fc90_A1 (-1.972629f)
-#define HPF_Fs176400_Fc90_A2 0.986314f
-#define HPF_Fs176400_Fc90_B1 (-1.995472f)
-#define HPF_Fs176400_Fc90_B2 0.995482f
+#define HPF_Fs176400_Fc90_A0 0.986314f
+#define HPF_Fs176400_Fc90_A1 (-1.972629f)
+#define HPF_Fs176400_Fc90_A2 0.986314f
+#define HPF_Fs176400_Fc90_B1 (-1.995472f)
+#define HPF_Fs176400_Fc90_B2 0.995482f
-#define HPF_Fs192000_Fc90_A0 0.986496f
-#define HPF_Fs192000_Fc90_A1 (-1.972992f)
-#define HPF_Fs192000_Fc90_A2 0.986496f
-#define HPF_Fs192000_Fc90_B1 (-1.995840f)
-#define HPF_Fs192000_Fc90_B2 0.995848f
+#define HPF_Fs192000_Fc90_A0 0.986496f
+#define HPF_Fs192000_Fc90_A1 (-1.972992f)
+#define HPF_Fs192000_Fc90_A2 0.986496f
+#define HPF_Fs192000_Fc90_B1 (-1.995840f)
+#define HPF_Fs192000_Fc90_B2 0.995848f
/************************************************************************************/
/* */
@@ -323,288 +323,288 @@
/************************************************************************************/
/* Coefficients for centre frequency 55Hz */
-#define BPF_Fs8000_Fc55_A0 0.009197f
-#define BPF_Fs8000_Fc55_A1 0.000000f
-#define BPF_Fs8000_Fc55_A2 (-0.009197f)
-#define BPF_Fs8000_Fc55_B1 (-1.979545f)
-#define BPF_Fs8000_Fc55_B2 0.981393f
-#define BPF_Fs11025_Fc55_A0 0.006691f
-#define BPF_Fs11025_Fc55_A1 0.000000f
-#define BPF_Fs11025_Fc55_A2 (-0.006691f)
-#define BPF_Fs11025_Fc55_B1 (-1.985488f)
-#define BPF_Fs11025_Fc55_B2 0.986464f
-#define BPF_Fs12000_Fc55_A0 0.006150f
-#define BPF_Fs12000_Fc55_A1 0.000000f
-#define BPF_Fs12000_Fc55_A2 (-0.006150f)
-#define BPF_Fs12000_Fc55_B1 (-1.986733f)
-#define BPF_Fs12000_Fc55_B2 0.987557f
-#define BPF_Fs16000_Fc55_A0 0.004620f
-#define BPF_Fs16000_Fc55_A1 0.000000f
-#define BPF_Fs16000_Fc55_A2 (-0.004620f)
-#define BPF_Fs16000_Fc55_B1 (-1.990189f)
-#define BPF_Fs16000_Fc55_B2 0.990653f
-#define BPF_Fs22050_Fc55_A0 0.003357f
-#define BPF_Fs22050_Fc55_A1 0.000000f
-#define BPF_Fs22050_Fc55_A2 (-0.003357f)
-#define BPF_Fs22050_Fc55_B1 (-1.992964f)
-#define BPF_Fs22050_Fc55_B2 0.993209f
-#define BPF_Fs24000_Fc55_A0 0.003085f
-#define BPF_Fs24000_Fc55_A1 0.000000f
-#define BPF_Fs24000_Fc55_A2 (-0.003085f)
-#define BPF_Fs24000_Fc55_B1 (-1.993552f)
-#define BPF_Fs24000_Fc55_B2 0.993759f
-#define BPF_Fs32000_Fc55_A0 0.002315f
-#define BPF_Fs32000_Fc55_A1 0.000000f
-#define BPF_Fs32000_Fc55_A2 (-0.002315f)
-#define BPF_Fs32000_Fc55_B1 (-1.995199f)
-#define BPF_Fs32000_Fc55_B2 0.995316f
-#define BPF_Fs44100_Fc55_A0 0.001681f
-#define BPF_Fs44100_Fc55_A1 0.000000f
-#define BPF_Fs44100_Fc55_A2 (-0.001681f)
-#define BPF_Fs44100_Fc55_B1 (-1.996537f)
-#define BPF_Fs44100_Fc55_B2 0.996599f
-#define BPF_Fs48000_Fc55_A0 0.001545f
-#define BPF_Fs48000_Fc55_A1 0.000000f
-#define BPF_Fs48000_Fc55_A2 (-0.001545f)
-#define BPF_Fs48000_Fc55_B1 (-1.996823f)
-#define BPF_Fs48000_Fc55_B2 0.996875f
+#define BPF_Fs8000_Fc55_A0 0.009197f
+#define BPF_Fs8000_Fc55_A1 0.000000f
+#define BPF_Fs8000_Fc55_A2 (-0.009197f)
+#define BPF_Fs8000_Fc55_B1 (-1.979545f)
+#define BPF_Fs8000_Fc55_B2 0.981393f
+#define BPF_Fs11025_Fc55_A0 0.006691f
+#define BPF_Fs11025_Fc55_A1 0.000000f
+#define BPF_Fs11025_Fc55_A2 (-0.006691f)
+#define BPF_Fs11025_Fc55_B1 (-1.985488f)
+#define BPF_Fs11025_Fc55_B2 0.986464f
+#define BPF_Fs12000_Fc55_A0 0.006150f
+#define BPF_Fs12000_Fc55_A1 0.000000f
+#define BPF_Fs12000_Fc55_A2 (-0.006150f)
+#define BPF_Fs12000_Fc55_B1 (-1.986733f)
+#define BPF_Fs12000_Fc55_B2 0.987557f
+#define BPF_Fs16000_Fc55_A0 0.004620f
+#define BPF_Fs16000_Fc55_A1 0.000000f
+#define BPF_Fs16000_Fc55_A2 (-0.004620f)
+#define BPF_Fs16000_Fc55_B1 (-1.990189f)
+#define BPF_Fs16000_Fc55_B2 0.990653f
+#define BPF_Fs22050_Fc55_A0 0.003357f
+#define BPF_Fs22050_Fc55_A1 0.000000f
+#define BPF_Fs22050_Fc55_A2 (-0.003357f)
+#define BPF_Fs22050_Fc55_B1 (-1.992964f)
+#define BPF_Fs22050_Fc55_B2 0.993209f
+#define BPF_Fs24000_Fc55_A0 0.003085f
+#define BPF_Fs24000_Fc55_A1 0.000000f
+#define BPF_Fs24000_Fc55_A2 (-0.003085f)
+#define BPF_Fs24000_Fc55_B1 (-1.993552f)
+#define BPF_Fs24000_Fc55_B2 0.993759f
+#define BPF_Fs32000_Fc55_A0 0.002315f
+#define BPF_Fs32000_Fc55_A1 0.000000f
+#define BPF_Fs32000_Fc55_A2 (-0.002315f)
+#define BPF_Fs32000_Fc55_B1 (-1.995199f)
+#define BPF_Fs32000_Fc55_B2 0.995316f
+#define BPF_Fs44100_Fc55_A0 0.001681f
+#define BPF_Fs44100_Fc55_A1 0.000000f
+#define BPF_Fs44100_Fc55_A2 (-0.001681f)
+#define BPF_Fs44100_Fc55_B1 (-1.996537f)
+#define BPF_Fs44100_Fc55_B2 0.996599f
+#define BPF_Fs48000_Fc55_A0 0.001545f
+#define BPF_Fs48000_Fc55_A1 0.000000f
+#define BPF_Fs48000_Fc55_A2 (-0.001545f)
+#define BPF_Fs48000_Fc55_B1 (-1.996823f)
+#define BPF_Fs48000_Fc55_B2 0.996875f
-#define BPF_Fs88200_Fc55_A0 0.000831f
-#define BPF_Fs88200_Fc55_A1 0.000000f
-#define BPF_Fs88200_Fc55_A2 (-0.000831f)
-#define BPF_Fs88200_Fc55_B1 (-1.998321f)
-#define BPF_Fs88200_Fc55_B2 0.998338f
+#define BPF_Fs88200_Fc55_A0 0.000831f
+#define BPF_Fs88200_Fc55_A1 0.000000f
+#define BPF_Fs88200_Fc55_A2 (-0.000831f)
+#define BPF_Fs88200_Fc55_B1 (-1.998321f)
+#define BPF_Fs88200_Fc55_B2 0.998338f
-#define BPF_Fs96000_Fc55_A0 0.000762f
-#define BPF_Fs96000_Fc55_A1 0.000000f
-#define BPF_Fs96000_Fc55_A2 (-0.000762f)
-#define BPF_Fs96000_Fc55_B1 (-1.998461f)
-#define BPF_Fs96000_Fc55_B2 0.998477f
+#define BPF_Fs96000_Fc55_A0 0.000762f
+#define BPF_Fs96000_Fc55_A1 0.000000f
+#define BPF_Fs96000_Fc55_A2 (-0.000762f)
+#define BPF_Fs96000_Fc55_B1 (-1.998461f)
+#define BPF_Fs96000_Fc55_B2 0.998477f
-#define BPF_Fs176400_Fc55_A0 0.000416f
-#define BPF_Fs176400_Fc55_A1 0.000000f
-#define BPF_Fs176400_Fc55_A2 (-0.000416f)
-#define BPF_Fs176400_Fc55_B1 (-1.999164f)
-#define BPF_Fs176400_Fc55_B2 0.999169f
+#define BPF_Fs176400_Fc55_A0 0.000416f
+#define BPF_Fs176400_Fc55_A1 0.000000f
+#define BPF_Fs176400_Fc55_A2 (-0.000416f)
+#define BPF_Fs176400_Fc55_B1 (-1.999164f)
+#define BPF_Fs176400_Fc55_B2 0.999169f
-#define BPF_Fs192000_Fc55_A0 0.000381f
-#define BPF_Fs192000_Fc55_A1 0.000000f
-#define BPF_Fs192000_Fc55_A2 (-0.000381f)
-#define BPF_Fs192000_Fc55_B1 (-1.999234f)
-#define BPF_Fs192000_Fc55_B2 0.999238f
+#define BPF_Fs192000_Fc55_A0 0.000381f
+#define BPF_Fs192000_Fc55_A1 0.000000f
+#define BPF_Fs192000_Fc55_A2 (-0.000381f)
+#define BPF_Fs192000_Fc55_B1 (-1.999234f)
+#define BPF_Fs192000_Fc55_B2 0.999238f
/* Coefficients for centre frequency 66Hz */
-#define BPF_Fs8000_Fc66_A0 0.012648f
-#define BPF_Fs8000_Fc66_A1 0.000000f
-#define BPF_Fs8000_Fc66_A2 (-0.012648f)
-#define BPF_Fs8000_Fc66_B1 (-1.971760f)
-#define BPF_Fs8000_Fc66_B2 0.974412f
-#define BPF_Fs11025_Fc66_A0 0.009209f
-#define BPF_Fs11025_Fc66_A1 0.000000f
-#define BPF_Fs11025_Fc66_A2 (-0.009209f)
-#define BPF_Fs11025_Fc66_B1 (-1.979966f)
-#define BPF_Fs11025_Fc66_B2 0.981368f
-#define BPF_Fs12000_Fc66_A0 0.008468f
-#define BPF_Fs12000_Fc66_A1 0.000000f
-#define BPF_Fs12000_Fc66_A2 (-0.008468f)
-#define BPF_Fs12000_Fc66_B1 (-1.981685f)
-#define BPF_Fs12000_Fc66_B2 0.982869f
-#define BPF_Fs16000_Fc66_A0 0.006364f
-#define BPF_Fs16000_Fc66_A1 0.000000f
-#define BPF_Fs16000_Fc66_A2 (-0.006364f)
-#define BPF_Fs16000_Fc66_B1 (-1.986457f)
-#define BPF_Fs16000_Fc66_B2 0.987124f
-#define BPF_Fs22050_Fc66_A0 0.004626f
-#define BPF_Fs22050_Fc66_A1 0.000000f
-#define BPF_Fs22050_Fc66_A2 (-0.004626f)
-#define BPF_Fs22050_Fc66_B1 (-1.990288f)
-#define BPF_Fs22050_Fc66_B2 0.990641f
-#define BPF_Fs24000_Fc66_A0 0.004252f
-#define BPF_Fs24000_Fc66_A1 0.000000f
-#define BPF_Fs24000_Fc66_A2 (-0.004252f)
-#define BPF_Fs24000_Fc66_B1 (-1.991100f)
-#define BPF_Fs24000_Fc66_B2 0.991398f
-#define BPF_Fs32000_Fc66_A0 0.003192f
-#define BPF_Fs32000_Fc66_A1 0.000000f
-#define BPF_Fs32000_Fc66_A2 (-0.003192f)
-#define BPF_Fs32000_Fc66_B1 (-1.993374f)
-#define BPF_Fs32000_Fc66_B2 0.993541f
-#define BPF_Fs44100_Fc66_A0 0.002318f
-#define BPF_Fs44100_Fc66_A1 0.000000f
-#define BPF_Fs44100_Fc66_A2 (-0.002318f)
-#define BPF_Fs44100_Fc66_B1 (-1.995221f)
-#define BPF_Fs44100_Fc66_B2 0.995309f
-#define BPF_Fs48000_Fc66_A0 0.002131f
-#define BPF_Fs48000_Fc66_A1 0.000000f
-#define BPF_Fs48000_Fc66_A2 (-0.002131f)
-#define BPF_Fs48000_Fc66_B1 (-1.995615f)
-#define BPF_Fs48000_Fc66_B2 0.995690f
+#define BPF_Fs8000_Fc66_A0 0.012648f
+#define BPF_Fs8000_Fc66_A1 0.000000f
+#define BPF_Fs8000_Fc66_A2 (-0.012648f)
+#define BPF_Fs8000_Fc66_B1 (-1.971760f)
+#define BPF_Fs8000_Fc66_B2 0.974412f
+#define BPF_Fs11025_Fc66_A0 0.009209f
+#define BPF_Fs11025_Fc66_A1 0.000000f
+#define BPF_Fs11025_Fc66_A2 (-0.009209f)
+#define BPF_Fs11025_Fc66_B1 (-1.979966f)
+#define BPF_Fs11025_Fc66_B2 0.981368f
+#define BPF_Fs12000_Fc66_A0 0.008468f
+#define BPF_Fs12000_Fc66_A1 0.000000f
+#define BPF_Fs12000_Fc66_A2 (-0.008468f)
+#define BPF_Fs12000_Fc66_B1 (-1.981685f)
+#define BPF_Fs12000_Fc66_B2 0.982869f
+#define BPF_Fs16000_Fc66_A0 0.006364f
+#define BPF_Fs16000_Fc66_A1 0.000000f
+#define BPF_Fs16000_Fc66_A2 (-0.006364f)
+#define BPF_Fs16000_Fc66_B1 (-1.986457f)
+#define BPF_Fs16000_Fc66_B2 0.987124f
+#define BPF_Fs22050_Fc66_A0 0.004626f
+#define BPF_Fs22050_Fc66_A1 0.000000f
+#define BPF_Fs22050_Fc66_A2 (-0.004626f)
+#define BPF_Fs22050_Fc66_B1 (-1.990288f)
+#define BPF_Fs22050_Fc66_B2 0.990641f
+#define BPF_Fs24000_Fc66_A0 0.004252f
+#define BPF_Fs24000_Fc66_A1 0.000000f
+#define BPF_Fs24000_Fc66_A2 (-0.004252f)
+#define BPF_Fs24000_Fc66_B1 (-1.991100f)
+#define BPF_Fs24000_Fc66_B2 0.991398f
+#define BPF_Fs32000_Fc66_A0 0.003192f
+#define BPF_Fs32000_Fc66_A1 0.000000f
+#define BPF_Fs32000_Fc66_A2 (-0.003192f)
+#define BPF_Fs32000_Fc66_B1 (-1.993374f)
+#define BPF_Fs32000_Fc66_B2 0.993541f
+#define BPF_Fs44100_Fc66_A0 0.002318f
+#define BPF_Fs44100_Fc66_A1 0.000000f
+#define BPF_Fs44100_Fc66_A2 (-0.002318f)
+#define BPF_Fs44100_Fc66_B1 (-1.995221f)
+#define BPF_Fs44100_Fc66_B2 0.995309f
+#define BPF_Fs48000_Fc66_A0 0.002131f
+#define BPF_Fs48000_Fc66_A1 0.000000f
+#define BPF_Fs48000_Fc66_A2 (-0.002131f)
+#define BPF_Fs48000_Fc66_B1 (-1.995615f)
+#define BPF_Fs48000_Fc66_B2 0.995690f
-#define BPF_Fs88200_Fc66_A0 0.001146f
-#define BPF_Fs88200_Fc66_A1 0.000000f
-#define BPF_Fs88200_Fc66_A2 (-0.001146f)
-#define BPF_Fs88200_Fc66_B1 (-1.997684f)
-#define BPF_Fs88200_Fc66_B2 0.997708f
+#define BPF_Fs88200_Fc66_A0 0.001146f
+#define BPF_Fs88200_Fc66_A1 0.000000f
+#define BPF_Fs88200_Fc66_A2 (-0.001146f)
+#define BPF_Fs88200_Fc66_B1 (-1.997684f)
+#define BPF_Fs88200_Fc66_B2 0.997708f
-#define BPF_Fs96000_Fc66_A0 0.001055f
-#define BPF_Fs96000_Fc66_A1 0.000000f
-#define BPF_Fs96000_Fc66_A2 (-0.001055f)
-#define BPF_Fs96000_Fc66_B1 (-1.997868f)
-#define BPF_Fs96000_Fc66_B2 0.997891f
+#define BPF_Fs96000_Fc66_A0 0.001055f
+#define BPF_Fs96000_Fc66_A1 0.000000f
+#define BPF_Fs96000_Fc66_A2 (-0.001055f)
+#define BPF_Fs96000_Fc66_B1 (-1.997868f)
+#define BPF_Fs96000_Fc66_B2 0.997891f
-#define BPF_Fs176400_Fc66_A0 0.000573f
-#define BPF_Fs176400_Fc66_A1 0.000000f
-#define BPF_Fs176400_Fc66_A2 (-0.000573f)
-#define BPF_Fs176400_Fc66_B1 (-1.998847f)
-#define BPF_Fs176400_Fc66_B2 0.998853f
+#define BPF_Fs176400_Fc66_A0 0.000573f
+#define BPF_Fs176400_Fc66_A1 0.000000f
+#define BPF_Fs176400_Fc66_A2 (-0.000573f)
+#define BPF_Fs176400_Fc66_B1 (-1.998847f)
+#define BPF_Fs176400_Fc66_B2 0.998853f
-#define BPF_Fs192000_Fc66_A0 0.000528f
-#define BPF_Fs192000_Fc66_A1 0.000000f
-#define BPF_Fs192000_Fc66_A2 (-0.000528f)
-#define BPF_Fs192000_Fc66_B1 (-1.998939f)
-#define BPF_Fs192000_Fc66_B2 0.998945f
+#define BPF_Fs192000_Fc66_A0 0.000528f
+#define BPF_Fs192000_Fc66_A1 0.000000f
+#define BPF_Fs192000_Fc66_A2 (-0.000528f)
+#define BPF_Fs192000_Fc66_B1 (-1.998939f)
+#define BPF_Fs192000_Fc66_B2 0.998945f
/* Coefficients for centre frequency 78Hz */
-#define BPF_Fs8000_Fc78_A0 0.018572f
-#define BPF_Fs8000_Fc78_A1 0.000000f
-#define BPF_Fs8000_Fc78_A2 (-0.018572f)
-#define BPF_Fs8000_Fc78_B1 (-1.958745f)
-#define BPF_Fs8000_Fc78_B2 0.962427f
-#define BPF_Fs11025_Fc78_A0 0.013545f
-#define BPF_Fs11025_Fc78_A1 0.000000f
-#define BPF_Fs11025_Fc78_A2 (-0.013545f)
-#define BPF_Fs11025_Fc78_B1 (-1.970647f)
-#define BPF_Fs11025_Fc78_B2 0.972596f
-#define BPF_Fs12000_Fc78_A0 0.012458f
-#define BPF_Fs12000_Fc78_A1 0.000000f
-#define BPF_Fs12000_Fc78_A2 (-0.012458f)
-#define BPF_Fs12000_Fc78_B1 (-1.973148f)
-#define BPF_Fs12000_Fc78_B2 0.974795f
-#define BPF_Fs16000_Fc78_A0 0.009373f
-#define BPF_Fs16000_Fc78_A1 0.000000f
-#define BPF_Fs16000_Fc78_A2 (-0.009373f)
-#define BPF_Fs16000_Fc78_B1 (-1.980108f)
-#define BPF_Fs16000_Fc78_B2 0.981037f
-#define BPF_Fs22050_Fc78_A0 0.006819f
-#define BPF_Fs22050_Fc78_A1 0.000000f
-#define BPF_Fs22050_Fc78_A2 (-0.006819f)
-#define BPF_Fs22050_Fc78_B1 (-1.985714f)
-#define BPF_Fs22050_Fc78_B2 0.986204f
-#define BPF_Fs24000_Fc78_A0 0.006268f
-#define BPF_Fs24000_Fc78_A1 0.000000f
-#define BPF_Fs24000_Fc78_A2 (-0.006268f)
-#define BPF_Fs24000_Fc78_B1 (-1.986904f)
-#define BPF_Fs24000_Fc78_B2 0.987318f
-#define BPF_Fs32000_Fc78_A0 0.004709f
-#define BPF_Fs32000_Fc78_A1 0.000000f
-#define BPF_Fs32000_Fc78_A2 (-0.004709f)
-#define BPF_Fs32000_Fc78_B1 (-1.990240f)
-#define BPF_Fs32000_Fc78_B2 0.990473f
-#define BPF_Fs44100_Fc78_A0 0.003421f
-#define BPF_Fs44100_Fc78_A1 0.000000f
-#define BPF_Fs44100_Fc78_A2 (-0.003421f)
-#define BPF_Fs44100_Fc78_B1 (-1.992955f)
-#define BPF_Fs44100_Fc78_B2 0.993078f
-#define BPF_Fs48000_Fc78_A0 0.003144f
-#define BPF_Fs48000_Fc78_A1 0.000000f
-#define BPF_Fs48000_Fc78_A2 (-0.003144f)
-#define BPF_Fs48000_Fc78_B1 (-1.993535f)
-#define BPF_Fs48000_Fc78_B2 0.993639f
+#define BPF_Fs8000_Fc78_A0 0.018572f
+#define BPF_Fs8000_Fc78_A1 0.000000f
+#define BPF_Fs8000_Fc78_A2 (-0.018572f)
+#define BPF_Fs8000_Fc78_B1 (-1.958745f)
+#define BPF_Fs8000_Fc78_B2 0.962427f
+#define BPF_Fs11025_Fc78_A0 0.013545f
+#define BPF_Fs11025_Fc78_A1 0.000000f
+#define BPF_Fs11025_Fc78_A2 (-0.013545f)
+#define BPF_Fs11025_Fc78_B1 (-1.970647f)
+#define BPF_Fs11025_Fc78_B2 0.972596f
+#define BPF_Fs12000_Fc78_A0 0.012458f
+#define BPF_Fs12000_Fc78_A1 0.000000f
+#define BPF_Fs12000_Fc78_A2 (-0.012458f)
+#define BPF_Fs12000_Fc78_B1 (-1.973148f)
+#define BPF_Fs12000_Fc78_B2 0.974795f
+#define BPF_Fs16000_Fc78_A0 0.009373f
+#define BPF_Fs16000_Fc78_A1 0.000000f
+#define BPF_Fs16000_Fc78_A2 (-0.009373f)
+#define BPF_Fs16000_Fc78_B1 (-1.980108f)
+#define BPF_Fs16000_Fc78_B2 0.981037f
+#define BPF_Fs22050_Fc78_A0 0.006819f
+#define BPF_Fs22050_Fc78_A1 0.000000f
+#define BPF_Fs22050_Fc78_A2 (-0.006819f)
+#define BPF_Fs22050_Fc78_B1 (-1.985714f)
+#define BPF_Fs22050_Fc78_B2 0.986204f
+#define BPF_Fs24000_Fc78_A0 0.006268f
+#define BPF_Fs24000_Fc78_A1 0.000000f
+#define BPF_Fs24000_Fc78_A2 (-0.006268f)
+#define BPF_Fs24000_Fc78_B1 (-1.986904f)
+#define BPF_Fs24000_Fc78_B2 0.987318f
+#define BPF_Fs32000_Fc78_A0 0.004709f
+#define BPF_Fs32000_Fc78_A1 0.000000f
+#define BPF_Fs32000_Fc78_A2 (-0.004709f)
+#define BPF_Fs32000_Fc78_B1 (-1.990240f)
+#define BPF_Fs32000_Fc78_B2 0.990473f
+#define BPF_Fs44100_Fc78_A0 0.003421f
+#define BPF_Fs44100_Fc78_A1 0.000000f
+#define BPF_Fs44100_Fc78_A2 (-0.003421f)
+#define BPF_Fs44100_Fc78_B1 (-1.992955f)
+#define BPF_Fs44100_Fc78_B2 0.993078f
+#define BPF_Fs48000_Fc78_A0 0.003144f
+#define BPF_Fs48000_Fc78_A1 0.000000f
+#define BPF_Fs48000_Fc78_A2 (-0.003144f)
+#define BPF_Fs48000_Fc78_B1 (-1.993535f)
+#define BPF_Fs48000_Fc78_B2 0.993639f
-#define BPF_Fs88200_Fc78_A0 0.001693f
-#define BPF_Fs88200_Fc78_A1 0.000000f
-#define BPF_Fs88200_Fc78_A2 (-0.001693f)
-#define BPF_Fs88200_Fc78_B1 (-1.996582f)
-#define BPF_Fs88200_Fc78_B2 0.996615f
+#define BPF_Fs88200_Fc78_A0 0.001693f
+#define BPF_Fs88200_Fc78_A1 0.000000f
+#define BPF_Fs88200_Fc78_A2 (-0.001693f)
+#define BPF_Fs88200_Fc78_B1 (-1.996582f)
+#define BPF_Fs88200_Fc78_B2 0.996615f
-#define BPF_Fs96000_Fc78_A0 0.001555f
-#define BPF_Fs96000_Fc78_A1 0.000000f
-#define BPF_Fs96000_Fc78_A2 (-0.0015555f)
-#define BPF_Fs96000_Fc78_B1 (-1.996860f)
-#define BPF_Fs96000_Fc78_B2 0.996891f
+#define BPF_Fs96000_Fc78_A0 0.001555f
+#define BPF_Fs96000_Fc78_A1 0.000000f
+#define BPF_Fs96000_Fc78_A2 (-0.0015555f)
+#define BPF_Fs96000_Fc78_B1 (-1.996860f)
+#define BPF_Fs96000_Fc78_B2 0.996891f
-#define BPF_Fs176400_Fc78_A0 0.000847f
-#define BPF_Fs176400_Fc78_A1 0.000000f
-#define BPF_Fs176400_Fc78_A2 (-0.000847f)
-#define BPF_Fs176400_Fc78_B1 (-1.998298f)
-#define BPF_Fs176400_Fc78_B2 0.998306f
+#define BPF_Fs176400_Fc78_A0 0.000847f
+#define BPF_Fs176400_Fc78_A1 0.000000f
+#define BPF_Fs176400_Fc78_A2 (-0.000847f)
+#define BPF_Fs176400_Fc78_B1 (-1.998298f)
+#define BPF_Fs176400_Fc78_B2 0.998306f
-#define BPF_Fs192000_Fc78_A0 0.000778f
-#define BPF_Fs192000_Fc78_A1 0.000000f
-#define BPF_Fs192000_Fc78_A2 (-0.000778f)
-#define BPF_Fs192000_Fc78_B1 (-1.998437f)
-#define BPF_Fs192000_Fc78_B2 0.998444f
+#define BPF_Fs192000_Fc78_A0 0.000778f
+#define BPF_Fs192000_Fc78_A1 0.000000f
+#define BPF_Fs192000_Fc78_A2 (-0.000778f)
+#define BPF_Fs192000_Fc78_B1 (-1.998437f)
+#define BPF_Fs192000_Fc78_B2 0.998444f
/* Coefficients for centre frequency 90Hz */
-#define BPF_Fs8000_Fc90_A0 0.022760f
-#define BPF_Fs8000_Fc90_A1 0.000000f
-#define BPF_Fs8000_Fc90_A2 (-0.022760f)
-#define BPF_Fs8000_Fc90_B1 (-1.949073f)
-#define BPF_Fs8000_Fc90_B2 0.953953f
-#define BPF_Fs11025_Fc90_A0 0.016619f
-#define BPF_Fs11025_Fc90_A1 0.000000f
-#define BPF_Fs11025_Fc90_A2 (-0.016619f)
-#define BPF_Fs11025_Fc90_B1 (-1.963791f)
-#define BPF_Fs11025_Fc90_B2 0.966377f
-#define BPF_Fs12000_Fc90_A0 0.015289f
-#define BPF_Fs12000_Fc90_A1 0.000000f
-#define BPF_Fs12000_Fc90_A2 (-0.015289f)
-#define BPF_Fs12000_Fc90_B1 (-1.966882f)
-#define BPF_Fs12000_Fc90_B2 0.969067f
-#define BPF_Fs16000_Fc90_A0 0.011511f
-#define BPF_Fs16000_Fc90_A1 0.000000f
-#define BPF_Fs16000_Fc90_A2 (-0.011511f)
-#define BPF_Fs16000_Fc90_B1 (-1.975477f)
-#define BPF_Fs16000_Fc90_B2 0.976711f
-#define BPF_Fs22050_Fc90_A0 0.008379f
-#define BPF_Fs22050_Fc90_A1 0.000000f
-#define BPF_Fs22050_Fc90_A2 (-0.008379f)
-#define BPF_Fs22050_Fc90_B1 (-1.982395f)
-#define BPF_Fs22050_Fc90_B2 0.983047f
-#define BPF_Fs24000_Fc90_A0 0.007704f
-#define BPF_Fs24000_Fc90_A1 0.000000f
-#define BPF_Fs24000_Fc90_A2 (-0.007704f)
-#define BPF_Fs24000_Fc90_B1 (-1.983863f)
-#define BPF_Fs24000_Fc90_B2 0.984414f
-#define BPF_Fs32000_Fc90_A0 0.005789f
-#define BPF_Fs32000_Fc90_A1 0.000000f
-#define BPF_Fs32000_Fc90_A2 (-0.005789f)
-#define BPF_Fs32000_Fc90_B1 (-1.987977f)
-#define BPF_Fs32000_Fc90_B2 0.988288f
-#define BPF_Fs44100_Fc90_A0 0.004207f
-#define BPF_Fs44100_Fc90_A1 0.000000f
-#define BPF_Fs44100_Fc90_A2 (-0.004207f)
-#define BPF_Fs44100_Fc90_B1 (-1.991324f)
-#define BPF_Fs44100_Fc90_B2 0.991488f
-#define BPF_Fs48000_Fc90_A0 0.003867f
-#define BPF_Fs48000_Fc90_A1 0.000000f
-#define BPF_Fs48000_Fc90_A2 (-0.003867f)
-#define BPF_Fs48000_Fc90_B1 (-1.992038f)
-#define BPF_Fs48000_Fc90_B2 0.992177f
+#define BPF_Fs8000_Fc90_A0 0.022760f
+#define BPF_Fs8000_Fc90_A1 0.000000f
+#define BPF_Fs8000_Fc90_A2 (-0.022760f)
+#define BPF_Fs8000_Fc90_B1 (-1.949073f)
+#define BPF_Fs8000_Fc90_B2 0.953953f
+#define BPF_Fs11025_Fc90_A0 0.016619f
+#define BPF_Fs11025_Fc90_A1 0.000000f
+#define BPF_Fs11025_Fc90_A2 (-0.016619f)
+#define BPF_Fs11025_Fc90_B1 (-1.963791f)
+#define BPF_Fs11025_Fc90_B2 0.966377f
+#define BPF_Fs12000_Fc90_A0 0.015289f
+#define BPF_Fs12000_Fc90_A1 0.000000f
+#define BPF_Fs12000_Fc90_A2 (-0.015289f)
+#define BPF_Fs12000_Fc90_B1 (-1.966882f)
+#define BPF_Fs12000_Fc90_B2 0.969067f
+#define BPF_Fs16000_Fc90_A0 0.011511f
+#define BPF_Fs16000_Fc90_A1 0.000000f
+#define BPF_Fs16000_Fc90_A2 (-0.011511f)
+#define BPF_Fs16000_Fc90_B1 (-1.975477f)
+#define BPF_Fs16000_Fc90_B2 0.976711f
+#define BPF_Fs22050_Fc90_A0 0.008379f
+#define BPF_Fs22050_Fc90_A1 0.000000f
+#define BPF_Fs22050_Fc90_A2 (-0.008379f)
+#define BPF_Fs22050_Fc90_B1 (-1.982395f)
+#define BPF_Fs22050_Fc90_B2 0.983047f
+#define BPF_Fs24000_Fc90_A0 0.007704f
+#define BPF_Fs24000_Fc90_A1 0.000000f
+#define BPF_Fs24000_Fc90_A2 (-0.007704f)
+#define BPF_Fs24000_Fc90_B1 (-1.983863f)
+#define BPF_Fs24000_Fc90_B2 0.984414f
+#define BPF_Fs32000_Fc90_A0 0.005789f
+#define BPF_Fs32000_Fc90_A1 0.000000f
+#define BPF_Fs32000_Fc90_A2 (-0.005789f)
+#define BPF_Fs32000_Fc90_B1 (-1.987977f)
+#define BPF_Fs32000_Fc90_B2 0.988288f
+#define BPF_Fs44100_Fc90_A0 0.004207f
+#define BPF_Fs44100_Fc90_A1 0.000000f
+#define BPF_Fs44100_Fc90_A2 (-0.004207f)
+#define BPF_Fs44100_Fc90_B1 (-1.991324f)
+#define BPF_Fs44100_Fc90_B2 0.991488f
+#define BPF_Fs48000_Fc90_A0 0.003867f
+#define BPF_Fs48000_Fc90_A1 0.000000f
+#define BPF_Fs48000_Fc90_A2 (-0.003867f)
+#define BPF_Fs48000_Fc90_B1 (-1.992038f)
+#define BPF_Fs48000_Fc90_B2 0.992177f
-#define BPF_Fs88200_Fc90_A0 0.002083f
-#define BPF_Fs88200_Fc90_A1 0.000000f
-#define BPF_Fs88200_Fc90_A2 (-0.002083f)
-#define BPF_Fs88200_Fc90_B1 (-1.995791f)
-#define BPF_Fs88200_Fc90_B2 0.995835f
+#define BPF_Fs88200_Fc90_A0 0.002083f
+#define BPF_Fs88200_Fc90_A1 0.000000f
+#define BPF_Fs88200_Fc90_A2 (-0.002083f)
+#define BPF_Fs88200_Fc90_B1 (-1.995791f)
+#define BPF_Fs88200_Fc90_B2 0.995835f
-#define BPF_Fs96000_Fc90_A0 0.001913f
-#define BPF_Fs96000_Fc90_A1 0.000000f
-#define BPF_Fs96000_Fc90_A2 (-0.001913f)
-#define BPF_Fs96000_Fc90_B1 (-1.996134f)
-#define BPF_Fs96000_Fc90_B2 0.996174f
+#define BPF_Fs96000_Fc90_A0 0.001913f
+#define BPF_Fs96000_Fc90_A1 0.000000f
+#define BPF_Fs96000_Fc90_A2 (-0.001913f)
+#define BPF_Fs96000_Fc90_B1 (-1.996134f)
+#define BPF_Fs96000_Fc90_B2 0.996174f
-#define BPF_Fs176400_Fc90_A0 0.001042f
-#define BPF_Fs176400_Fc90_A1 0.000000f
-#define BPF_Fs176400_Fc90_A2 (-0.001042f)
-#define BPF_Fs176400_Fc90_B1 (-1.997904f)
-#define BPF_Fs176400_Fc90_B2 0.997915f
+#define BPF_Fs176400_Fc90_A0 0.001042f
+#define BPF_Fs176400_Fc90_A1 0.000000f
+#define BPF_Fs176400_Fc90_A2 (-0.001042f)
+#define BPF_Fs176400_Fc90_B1 (-1.997904f)
+#define BPF_Fs176400_Fc90_B2 0.997915f
-#define BPF_Fs192000_Fc90_A0 0.000958f
-#define BPF_Fs192000_Fc90_A1 0.000000f
-#define BPF_Fs192000_Fc90_A2 (-0.000958f)
-#define BPF_Fs192000_Fc90_B1 (-1.998075f)
-#define BPF_Fs192000_Fc90_B2 0.998085f
+#define BPF_Fs192000_Fc90_A0 0.000958f
+#define BPF_Fs192000_Fc90_A1 0.000000f
+#define BPF_Fs192000_Fc90_A2 (-0.000958f)
+#define BPF_Fs192000_Fc90_B1 (-1.998075f)
+#define BPF_Fs192000_Fc90_B2 0.998085f
/************************************************************************************/
/* */
@@ -613,74 +613,74 @@
/************************************************************************************/
/* AGC Time constants */
-#define AGC_ATTACK_Fs8000 0.841395f
-#define AGC_ATTACK_Fs11025 0.882223f
-#define AGC_ATTACK_Fs12000 0.891251f
-#define AGC_ATTACK_Fs16000 0.917276f
-#define AGC_ATTACK_Fs22050 0.939267f
-#define AGC_ATTACK_Fs24000 0.944061f
-#define AGC_ATTACK_Fs32000 0.957745f
-#define AGC_ATTACK_Fs44100 0.969158f
-#define AGC_ATTACK_Fs48000 0.971628f
+#define AGC_ATTACK_Fs8000 0.841395f
+#define AGC_ATTACK_Fs11025 0.882223f
+#define AGC_ATTACK_Fs12000 0.891251f
+#define AGC_ATTACK_Fs16000 0.917276f
+#define AGC_ATTACK_Fs22050 0.939267f
+#define AGC_ATTACK_Fs24000 0.944061f
+#define AGC_ATTACK_Fs32000 0.957745f
+#define AGC_ATTACK_Fs44100 0.969158f
+#define AGC_ATTACK_Fs48000 0.971628f
-#define AGC_ATTACK_Fs88200 0.984458f
-#define AGC_ATTACK_Fs96000 0.985712f
-#define AGC_ATTACK_Fs176400 0.992199f
-#define AGC_ATTACK_Fs192000 0.992830f
+#define AGC_ATTACK_Fs88200 0.984458f
+#define AGC_ATTACK_Fs96000 0.985712f
+#define AGC_ATTACK_Fs176400 0.992199f
+#define AGC_ATTACK_Fs192000 0.992830f
-#define DECAY_SHIFT 10
+#define DECAY_SHIFT 10
-#define AGC_DECAY_Fs8000 0.000042f
-#define AGC_DECAY_Fs11025 0.000030f
-#define AGC_DECAY_Fs12000 0.000028f
-#define AGC_DECAY_Fs16000 0.000021f
-#define AGC_DECAY_Fs22050 0.000015f
-#define AGC_DECAY_Fs24000 0.000014f
-#define AGC_DECAY_Fs32000 0.000010f
-#define AGC_DECAY_Fs44100 0.000008f
-#define AGC_DECAY_Fs48000 0.000007f
+#define AGC_DECAY_Fs8000 0.000042f
+#define AGC_DECAY_Fs11025 0.000030f
+#define AGC_DECAY_Fs12000 0.000028f
+#define AGC_DECAY_Fs16000 0.000021f
+#define AGC_DECAY_Fs22050 0.000015f
+#define AGC_DECAY_Fs24000 0.000014f
+#define AGC_DECAY_Fs32000 0.000010f
+#define AGC_DECAY_Fs44100 0.000008f
+#define AGC_DECAY_Fs48000 0.000007f
-#define AGC_DECAY_Fs88200 0.0000038f
-#define AGC_DECAY_FS96000 0.0000035f
-#define AGC_DECAY_Fs176400 0.00000188f
-#define AGC_DECAY_FS192000 0.00000175f
+#define AGC_DECAY_Fs88200 0.0000038f
+#define AGC_DECAY_FS96000 0.0000035f
+#define AGC_DECAY_Fs176400 0.00000188f
+#define AGC_DECAY_FS192000 0.00000175f
/* AGC Gain settings */
-#define AGC_GAIN_SCALE 31 /* As a power of 2 */
-#define AGC_GAIN_SHIFT 4 /* As a power of 2 */
-#define AGC_TARGETLEVEL 0.988553f
-#define AGC_HPFGAIN_0dB 0.412538f
-#define AGC_GAIN_0dB 0.000000f
-#define AGC_HPFGAIN_1dB 0.584893f
-#define AGC_GAIN_1dB 0.122018f
-#define AGC_HPFGAIN_2dB 0.778279f
-#define AGC_GAIN_2dB 0.258925f
-#define AGC_HPFGAIN_3dB 0.995262f
-#define AGC_GAIN_3dB 0.412538f
-#define AGC_HPFGAIN_4dB 1.238721f
-#define AGC_GAIN_4dB 0.584893f
-#define AGC_HPFGAIN_5dB 1.511886f
-#define AGC_GAIN_5dB 0.778279f
-#define AGC_HPFGAIN_6dB 1.818383f
-#define AGC_GAIN_6dB 0.995262f
-#define AGC_HPFGAIN_7dB 2.162278f
-#define AGC_GAIN_7dB 1.238721f
-#define AGC_HPFGAIN_8dB 2.548134f
-#define AGC_GAIN_8dB 1.511886f
-#define AGC_HPFGAIN_9dB 2.981072f
-#define AGC_GAIN_9dB 1.818383f
-#define AGC_HPFGAIN_10dB 3.466836f
-#define AGC_GAIN_10dB 2.162278f
-#define AGC_HPFGAIN_11dB 4.011872f
-#define AGC_GAIN_11dB 2.548134f
-#define AGC_HPFGAIN_12dB 4.623413f
-#define AGC_GAIN_12dB 2.981072f
-#define AGC_HPFGAIN_13dB 5.309573f
-#define AGC_GAIN_13dB 3.466836f
-#define AGC_HPFGAIN_14dB 6.079458f
-#define AGC_GAIN_14dB 4.011872f
-#define AGC_HPFGAIN_15dB 6.943282f
-#define AGC_GAIN_15dB 4.623413f
+#define AGC_GAIN_SCALE 31 /* As a power of 2 */
+#define AGC_GAIN_SHIFT 4 /* As a power of 2 */
+#define AGC_TARGETLEVEL 0.988553f
+#define AGC_HPFGAIN_0dB 0.412538f
+#define AGC_GAIN_0dB 0.000000f
+#define AGC_HPFGAIN_1dB 0.584893f
+#define AGC_GAIN_1dB 0.122018f
+#define AGC_HPFGAIN_2dB 0.778279f
+#define AGC_GAIN_2dB 0.258925f
+#define AGC_HPFGAIN_3dB 0.995262f
+#define AGC_GAIN_3dB 0.412538f
+#define AGC_HPFGAIN_4dB 1.238721f
+#define AGC_GAIN_4dB 0.584893f
+#define AGC_HPFGAIN_5dB 1.511886f
+#define AGC_GAIN_5dB 0.778279f
+#define AGC_HPFGAIN_6dB 1.818383f
+#define AGC_GAIN_6dB 0.995262f
+#define AGC_HPFGAIN_7dB 2.162278f
+#define AGC_GAIN_7dB 1.238721f
+#define AGC_HPFGAIN_8dB 2.548134f
+#define AGC_GAIN_8dB 1.511886f
+#define AGC_HPFGAIN_9dB 2.981072f
+#define AGC_GAIN_9dB 1.818383f
+#define AGC_HPFGAIN_10dB 3.466836f
+#define AGC_GAIN_10dB 2.162278f
+#define AGC_HPFGAIN_11dB 4.011872f
+#define AGC_GAIN_11dB 2.548134f
+#define AGC_HPFGAIN_12dB 4.623413f
+#define AGC_GAIN_12dB 2.981072f
+#define AGC_HPFGAIN_13dB 5.309573f
+#define AGC_GAIN_13dB 3.466836f
+#define AGC_HPFGAIN_14dB 6.079458f
+#define AGC_GAIN_14dB 4.011872f
+#define AGC_HPFGAIN_15dB 6.943282f
+#define AGC_GAIN_15dB 4.623413f
/************************************************************************************/
/* */
@@ -689,38 +689,38 @@
/************************************************************************************/
/* Volume control gain */
-#define VOLUME_MAX 0 /* In dBs */
-#define VOLUME_SHIFT 0 /* In dBs */
+#define VOLUME_MAX 0 /* In dBs */
+#define VOLUME_SHIFT 0 /* In dBs */
/* Volume control time constants */
-#define VOL_TC_SHIFT 21 /* As a power of 2 */
-#define VOL_TC_Fs8000 0.024690f
-#define VOL_TC_Fs11025 0.017977f
-#define VOL_TC_Fs12000 0.016529f
-#define VOL_TC_Fs16000 0.012422f
-#define VOL_TC_Fs22050 0.009029f
-#define VOL_TC_Fs24000 0.008299f
-#define VOL_TC_Fs32000 0.006231f
-#define VOL_TC_Fs44100 0.004525f
-#define VOL_TC_Fs48000 0.004158f
-#define VOL_TC_Fs88200 0.002263f
-#define VOL_TC_Fs96000 0.002079f
-#define VOL_TC_Fs176400 0.001131f
-#define VOL_TC_Fs192000 0.001039f
-#define MIX_TC_Fs8000 29365 /* Floating point value 0.896151 */
-#define MIX_TC_Fs11025 30230 /* Floating point value 0.922548 */
-#define MIX_TC_Fs12000 30422 /* Floating point value 0.928415 */
-#define MIX_TC_Fs16000 30978 /* Floating point value 0.945387 */
-#define MIX_TC_Fs22050 31451 /* Floating point value 0.959804 */
-#define MIX_TC_Fs24000 31554 /* Floating point value 0.962956 */
-#define MIX_TC_Fs32000 31850 /* Floating point value 0.971973 */
-#define MIX_TC_Fs44100 32097 /* Floating point value 0.979515 */
-#define MIX_TC_Fs48000 32150 /* Floating point value 0.981150 */
+#define VOL_TC_SHIFT 21 /* As a power of 2 */
+#define VOL_TC_Fs8000 0.024690f
+#define VOL_TC_Fs11025 0.017977f
+#define VOL_TC_Fs12000 0.016529f
+#define VOL_TC_Fs16000 0.012422f
+#define VOL_TC_Fs22050 0.009029f
+#define VOL_TC_Fs24000 0.008299f
+#define VOL_TC_Fs32000 0.006231f
+#define VOL_TC_Fs44100 0.004525f
+#define VOL_TC_Fs48000 0.004158f
+#define VOL_TC_Fs88200 0.002263f
+#define VOL_TC_Fs96000 0.002079f
+#define VOL_TC_Fs176400 0.001131f
+#define VOL_TC_Fs192000 0.001039f
+#define MIX_TC_Fs8000 29365 /* Floating point value 0.896151 */
+#define MIX_TC_Fs11025 30230 /* Floating point value 0.922548 */
+#define MIX_TC_Fs12000 30422 /* Floating point value 0.928415 */
+#define MIX_TC_Fs16000 30978 /* Floating point value 0.945387 */
+#define MIX_TC_Fs22050 31451 /* Floating point value 0.959804 */
+#define MIX_TC_Fs24000 31554 /* Floating point value 0.962956 */
+#define MIX_TC_Fs32000 31850 /* Floating point value 0.971973 */
+#define MIX_TC_Fs44100 32097 /* Floating point value 0.979515 */
+#define MIX_TC_Fs48000 32150 /* Floating point value 0.981150 */
/* Floating point value 0.989704 */
-#define MIX_TC_Fs88200 32430
-#define MIX_TC_Fs96000 32456 /* Floating point value 0.990530 */
+#define MIX_TC_Fs88200 32430
+#define MIX_TC_Fs96000 32456 /* Floating point value 0.990530 */
/* Floating point value 0.994838 */
-#define MIX_TC_Fs176400 32598
-#define MIX_TC_Fs192000 32611 /* Floating point value 0.992524 */
+#define MIX_TC_Fs176400 32598
+#define MIX_TC_Fs192000 32611 /* Floating point value 0.992524 */
#endif
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
index 53feae8..1f0b459 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
@@ -21,6 +21,9 @@
/* */
/****************************************************************************************/
+#ifdef BIQUAD_OPT
+#include <audio_utils/BiquadFilter.h>
+#endif
#include "LVDBE.h"
#include "LVDBE_Private.h"
#include "VectorArithmetic.h"
@@ -47,15 +50,12 @@
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance,
- LVDBE_Params_t *pParams)
-{
-
- LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+ LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
*pParams = pInstance->Params;
- return(LVDBE_SUCCESS);
+ return (LVDBE_SUCCESS);
}
/************************************************************************************/
@@ -77,15 +77,13 @@
/* */
/************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
- LVDBE_Capabilities_t *pCapabilities)
-{
-
- LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+ LVDBE_Capabilities_t* pCapabilities) {
+ LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
*pCapabilities = pInstance->Capabilities;
- return(LVDBE_SUCCESS);
+ return (LVDBE_SUCCESS);
}
/************************************************************************************/
@@ -101,35 +99,41 @@
/* */
/************************************************************************************/
-void LVDBE_SetFilters(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams)
-{
-
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
/*
* Calculate the table offsets
*/
- LVM_UINT16 Offset = (LVM_UINT16)((LVM_UINT16)pParams->SampleRate + \
- (LVM_UINT16)(pParams->CentreFrequency * (1+LVDBE_FS_192000)));
+ LVM_UINT16 Offset =
+ (LVM_UINT16)((LVM_UINT16)pParams->SampleRate +
+ (LVM_UINT16)(pParams->CentreFrequency * (1 + LVDBE_FS_192000)));
/*
* Setup the high pass filter
*/
- LoadConst_Float(0, /* Clear the history, value 0 */
- (LVM_FLOAT *)&pInstance->pData->HPFTaps, /* Destination */
+#ifdef BIQUAD_OPT
+ std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+ LVDBE_HPF_Table[Offset].A0, LVDBE_HPF_Table[Offset].A1, LVDBE_HPF_Table[Offset].A2,
+ -(LVDBE_HPF_Table[Offset].B1), -(LVDBE_HPF_Table[Offset].B2)};
+ pInstance->pBqInstance
+ ->setCoefficients<std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(coefs);
+#else
+ LoadConst_Float(0, /* Clear the history, value 0 */
+ (LVM_FLOAT*)&pInstance->pData->HPFTaps, /* Destination */
sizeof(pInstance->pData->HPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
- BQ_2I_D32F32Cll_TRC_WRA_01_Init(&pInstance->pCoef->HPFInstance, /* Initialise the filter */
+ BQ_2I_D32F32Cll_TRC_WRA_01_Init(&pInstance->pCoef->HPFInstance, /* Initialise the filter */
&pInstance->pData->HPFTaps,
- (BQ_FLOAT_Coefs_t *)&LVDBE_HPF_Table[Offset]);
+ (BQ_FLOAT_Coefs_t*)&LVDBE_HPF_Table[Offset]);
+#endif
/*
* Setup the band pass filter
*/
- LoadConst_Float(0, /* Clear the history, value 0 */
- (LVM_FLOAT *)&pInstance->pData->BPFTaps, /* Destination */
- sizeof(pInstance->pData->BPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
- BP_1I_D32F32Cll_TRC_WRA_02_Init(&pInstance->pCoef->BPFInstance, /* Initialise the filter */
+ LoadConst_Float(0, /* Clear the history, value 0 */
+ (LVM_FLOAT*)&pInstance->pData->BPFTaps, /* Destination */
+ sizeof(pInstance->pData->BPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
+ BP_1I_D32F32Cll_TRC_WRA_02_Init(&pInstance->pCoef->BPFInstance, /* Initialise the filter */
&pInstance->pData->BPFTaps,
- (BP_FLOAT_Coefs_t *)&LVDBE_BPF_Table[Offset]);
+ (BP_FLOAT_Coefs_t*)&LVDBE_BPF_Table[Offset]);
}
/************************************************************************************/
@@ -145,29 +149,26 @@
/* */
/************************************************************************************/
-void LVDBE_SetAGC(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams)
-{
-
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
/*
* Get the attack and decay time constants
*/
- pInstance->pData->AGCInstance.AGC_Attack = LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate]; /* Attack multiplier */
- pInstance->pData->AGCInstance.AGC_Decay = LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate]; /* Decay multipler */
+ pInstance->pData->AGCInstance.AGC_Attack =
+ LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate]; /* Attack multiplier */
+ pInstance->pData->AGCInstance.AGC_Decay =
+ LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate]; /* Decay multiplier */
/*
* Get the boost gain
*/
- if (pParams->HPFSelect == LVDBE_HPF_ON)
- {
- pInstance->pData->AGCInstance.AGC_MaxGain = LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter on */
- }
- else
- {
- pInstance->pData->AGCInstance.AGC_MaxGain = LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter off */
+ if (pParams->HPFSelect == LVDBE_HPF_ON) {
+ pInstance->pData->AGCInstance.AGC_MaxGain =
+ LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter on */
+ } else {
+ pInstance->pData->AGCInstance.AGC_MaxGain =
+ LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter off */
}
pInstance->pData->AGCInstance.AGC_Target = AGC_TARGETLEVEL;
-
}
/************************************************************************************/
@@ -193,29 +194,22 @@
/* */
/************************************************************************************/
-void LVDBE_SetVolume(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams)
-{
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
+ LVM_UINT16 dBShifts; /* 6dB shifts */
+ LVM_UINT16 dBOffset; /* Table offset */
+ LVM_INT16 Volume = 0; /* Required volume in dBs */
- LVM_UINT16 dBShifts; /* 6dB shifts */
- LVM_UINT16 dBOffset; /* Table offset */
- LVM_INT16 Volume = 0; /* Required volume in dBs */
-
- LVM_FLOAT dBShifts_fac;
+ LVM_FLOAT dBShifts_fac;
/*
* Apply the volume if enabled
*/
- if (pParams->VolumeControl == LVDBE_VOLUME_ON)
- {
+ if (pParams->VolumeControl == LVDBE_VOLUME_ON) {
/*
* Limit the gain to the maximum allowed
*/
- if (pParams->VolumedB > VOLUME_MAX)
- {
+ if (pParams->VolumedB > VOLUME_MAX) {
Volume = VOLUME_MAX;
- }
- else
- {
+ } else {
Volume = pParams->VolumedB;
}
}
@@ -223,8 +217,8 @@
/*
* Calculate the required gain and shifts
*/
- dBOffset = (LVM_UINT16)(6 + Volume % 6); /* Get the dBs 0-5 */
- dBShifts = (LVM_UINT16)(Volume / -6); /* Get the 6dB shifts */
+ dBOffset = (LVM_UINT16)(6 + Volume % 6); /* Get the dBs 0-5 */
+ dBShifts = (LVM_UINT16)(Volume / -6); /* Get the 6dB shifts */
dBShifts_fac = (LVM_FLOAT)(1 << dBShifts);
/*
@@ -232,27 +226,23 @@
*/
pInstance->pData->AGCInstance.Target = (LVDBE_VolumeTable[dBOffset]);
pInstance->pData->AGCInstance.Target = pInstance->pData->AGCInstance.Target / dBShifts_fac;
- pInstance->pData->AGCInstance.VolumeTC = LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate]; /* Volume update time constant */
+ pInstance->pData->AGCInstance.VolumeTC =
+ LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate]; /* Volume update time constant */
/*
* When DBE is disabled use the bypass volume control
*/
- if(dBShifts > 0)
- {
+ if (dBShifts > 0) {
LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
LVDBE_VolumeTable[dBOffset] / dBShifts_fac);
- }
- else
- {
+ } else {
LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
LVDBE_VolumeTable[dBOffset]);
}
pInstance->pData->BypassVolume.MixerStream[0].CallbackSet = 1;
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->pData->BypassVolume.MixerStream[0],
- LVDBE_MIXER_TC,
- (LVM_Fs_en)pInstance->Params.SampleRate,
- 2);
+ LVDBE_MIXER_TC, (LVM_Fs_en)pInstance->Params.SampleRate, 2);
}
/****************************************************************************************/
@@ -292,21 +282,26 @@
/* */
/****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance,
- LVDBE_Params_t *pParams)
-{
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+ LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
+ LVMixer3_2St_FLOAT_st* pBypassMixer_Instance = &pInstance->pData->BypassMixer;
- LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
- LVMixer3_2St_FLOAT_st *pBypassMixer_Instance = &pInstance->pData->BypassMixer;
+#ifdef BIQUAD_OPT
+ /*
+ * Create biquad instance
+ */
+ pInstance->pBqInstance.reset(
+ new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
+ pInstance->pBqInstance->clear();
+#endif
/*
* Update the filters
*/
if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
- (pInstance->Params.CentreFrequency != pParams->CentreFrequency))
- {
- LVDBE_SetFilters(pInstance, /* Instance pointer */
- pParams); /* New parameters */
+ (pInstance->Params.CentreFrequency != pParams->CentreFrequency)) {
+ LVDBE_SetFilters(pInstance, /* Instance pointer */
+ pParams); /* New parameters */
}
/*
@@ -314,16 +309,14 @@
*/
if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
(pInstance->Params.EffectLevel != pParams->EffectLevel) ||
- (pInstance->Params.HPFSelect != pParams->HPFSelect))
- {
- LVDBE_SetAGC(pInstance, /* Instance pointer */
- pParams); /* New parameters */
- LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
- LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
+ (pInstance->Params.HPFSelect != pParams->HPFSelect)) {
+ LVDBE_SetAGC(pInstance, /* Instance pointer */
+ pParams); /* New parameters */
+ LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pParams->SampleRate, 2);
- LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
- LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
-
+ LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pParams->SampleRate, 2);
}
/*
@@ -332,19 +325,16 @@
if ((pInstance->Params.VolumedB != pParams->VolumedB) ||
(pInstance->Params.SampleRate != pParams->SampleRate) ||
(pInstance->Params.HeadroomdB != pParams->HeadroomdB) ||
- (pInstance->Params.VolumeControl != pParams->VolumeControl))
- {
- LVDBE_SetVolume(pInstance, /* Instance pointer */
- pParams); /* New parameters */
+ (pInstance->Params.VolumeControl != pParams->VolumeControl)) {
+ LVDBE_SetVolume(pInstance, /* Instance pointer */
+ pParams); /* New parameters */
}
- if (pInstance->Params.OperatingMode==LVDBE_ON && pParams->OperatingMode==LVDBE_OFF)
- {
+ if (pInstance->Params.OperatingMode == LVDBE_ON && pParams->OperatingMode == LVDBE_OFF) {
LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 0);
LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 1.0f);
}
- if (pInstance->Params.OperatingMode==LVDBE_OFF && pParams->OperatingMode==LVDBE_ON)
- {
+ if (pInstance->Params.OperatingMode == LVDBE_OFF && pParams->OperatingMode == LVDBE_ON) {
LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 1.0f);
LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 0);
}
@@ -354,5 +344,5 @@
*/
pInstance->Params = *pParams;
- return(LVDBE_SUCCESS);
+ return (LVDBE_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index ad77696..611b762 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -20,210 +20,101 @@
/* Includes */
/* */
/****************************************************************************************/
+#include <stdlib.h>
#include "LVDBE.h"
#include "LVDBE_Private.h"
/****************************************************************************************/
/* */
-/* FUNCTION: LVDBE_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the instance capabilities */
-/* */
-/* RETURNS: */
-/* LVDBE_SUCCESS Succeeded */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVDBE_Process function */
-/* */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t hInstance,
- LVDBE_MemTab_t *pMemoryTable,
- LVDBE_Capabilities_t *pCapabilities)
-{
-
- LVM_UINT32 ScratchSize;
- LVDBE_Instance_t *pInstance = (LVDBE_Instance_t *)hInstance;
-
- /*
- * Fill in the memory table
- */
- if (hInstance == LVM_NULL)
- {
- /*
- * Instance memory
- */
- pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Size = sizeof(LVDBE_Instance_t);
- pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Alignment = LVDBE_INSTANCE_ALIGN;
- pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Type = LVDBE_PERSISTENT;
- pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
- /*
- * Data memory
- */
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size = sizeof(LVDBE_Data_FLOAT_t);
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Alignment = LVDBE_PERSISTENT_DATA_ALIGN;
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Type = LVDBE_PERSISTENT_DATA;
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
- /*
- * Coef memory
- */
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size = sizeof(LVDBE_Coef_FLOAT_t);
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Alignment = LVDBE_PERSISTENT_COEF_ALIGN;
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Type = LVDBE_PERSISTENT_COEF;
- pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
- /*
- * Scratch memory
- */
- ScratchSize = (LVM_UINT32)(LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT) * \
- pCapabilities->MaxBlockSize);
- pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Size = ScratchSize;
- pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Alignment = LVDBE_SCRATCH_ALIGN;
- pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Type = LVDBE_SCRATCH;
- pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
- }
- else
- {
- /* Read back memory allocation table */
- *pMemoryTable = pInstance->MemoryTable;
- }
-
- return(LVDBE_SUCCESS);
-}
-
-/****************************************************************************************/
-/* */
/* FUNCTION: LVDBE_Init */
/* */
/* DESCRIPTION: */
-/* Create and initialisation function for the Dynamic Bass Enhancement module */
-/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* DBE_Memory before calling this function. */
+/* Creation and initialisation function for the Bass Enhancement module */
/* */
/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
-/* pCapabilities Pointer to the instance capabilities */
+/* phInstance Pointer to instance handle */
+/* pCapabilities Pointer to the initialisation capabilities */
+/* pScratch Pointer to the bundle scratch buffer */
/* */
/* RETURNS: */
/* LVDBE_SUCCESS Initialisation succeeded */
-/* LVDBE_ALIGNMENTERROR Instance or scratch memory on incorrect alignment */
-/* LVDBE_NULLADDRESS Instance or scratch memory has a NULL pointer */
+/* LVDBE_NULLADDRESS One or more memory regions has a NULL pointer - malloc failure */
/* */
/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVDBE_Process function */
+/* 1. This function must not be interrupted by the LVDBE_Process function */
/* */
/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t *phInstance,
- LVDBE_MemTab_t *pMemoryTable,
- LVDBE_Capabilities_t *pCapabilities)
-{
-
- LVDBE_Instance_t *pInstance;
- LVMixer3_1St_FLOAT_st *pMixer_Instance;
- LVMixer3_2St_FLOAT_st *pBypassMixer_Instance;
- LVM_FLOAT MixGain;
- LVM_INT16 i;
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+ void* pScratch) {
+ LVDBE_Instance_t* pInstance;
+ LVMixer3_1St_FLOAT_st* pMixer_Instance;
+ LVMixer3_2St_FLOAT_st* pBypassMixer_Instance;
+ LVM_FLOAT MixGain;
/*
- * Set the instance handle if not already initialised
+ * Create the instance handle if not already initialised
*/
- if (*phInstance == LVM_NULL)
- {
- *phInstance = (LVDBE_Handle_t)pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress;
+ if (*phInstance == LVM_NULL) {
+ *phInstance = calloc(1, sizeof(*pInstance));
}
- pInstance =(LVDBE_Instance_t *)*phInstance;
-
- /*
- * Check the memory table for NULL pointers and incorrectly aligned data
- */
- for (i=0; i<LVDBE_NR_MEMORY_REGIONS; i++)
- {
- if (pMemoryTable->Region[i].Size!=0)
- {
- if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
- {
- return(LVDBE_NULLADDRESS);
- }
- if (((uintptr_t)pMemoryTable->Region[i].pBaseAddress % pMemoryTable->Region[i].Alignment)!=0){
- return(LVDBE_ALIGNMENTERROR);
- }
- }
+ if (*phInstance == LVM_NULL) {
+ return LVDBE_NULLADDRESS;
}
+ pInstance = (LVDBE_Instance_t*)*phInstance;
/*
* Save the memory table in the instance structure
*/
pInstance->Capabilities = *pCapabilities;
- /*
- * Save the memory table in the instance structure
- */
- pInstance->MemoryTable = *pMemoryTable;
+ pInstance->pScratch = pScratch;
/*
* Set the default instance parameters
*/
- pInstance->Params.CentreFrequency = LVDBE_CENTRE_55HZ;
- pInstance->Params.EffectLevel = 0;
- pInstance->Params.HeadroomdB = 0;
- pInstance->Params.HPFSelect = LVDBE_HPF_OFF;
- pInstance->Params.OperatingMode = LVDBE_OFF;
- pInstance->Params.SampleRate = LVDBE_FS_8000;
- pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
- pInstance->Params.VolumedB = 0;
+ pInstance->Params.CentreFrequency = LVDBE_CENTRE_55HZ;
+ pInstance->Params.EffectLevel = 0;
+ pInstance->Params.HeadroomdB = 0;
+ pInstance->Params.HPFSelect = LVDBE_HPF_OFF;
+ pInstance->Params.OperatingMode = LVDBE_OFF;
+ pInstance->Params.SampleRate = LVDBE_FS_8000;
+ pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
+ pInstance->Params.VolumedB = 0;
/*
- * Set pointer to data and coef memory
+ * Create pointer to data and coef memory
*/
- pInstance->pData =
- (LVDBE_Data_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress;
- pInstance->pCoef =
- (LVDBE_Coef_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress;
+ pInstance->pData = (LVDBE_Data_FLOAT_t*)calloc(1, sizeof(*(pInstance->pData)));
+ if (pInstance->pData == NULL) {
+ return LVDBE_NULLADDRESS;
+ }
+ pInstance->pCoef = (LVDBE_Coef_FLOAT_t*)calloc(1, sizeof(*(pInstance->pCoef)));
+ if (pInstance->pCoef == NULL) {
+ return LVDBE_NULLADDRESS;
+ }
+
+#ifdef BIQUAD_OPT
+ /*
+ * Create biquad instance
+ */
+ pInstance->pBqInstance.reset(
+ new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+#endif
/*
* Initialise the filters
*/
- LVDBE_SetFilters(pInstance, /* Set the filter taps and coefficients */
+ LVDBE_SetFilters(pInstance, /* Set the filter taps and coefficients */
&pInstance->Params);
/*
* Initialise the AGC
*/
- LVDBE_SetAGC(pInstance, /* Set the AGC gain */
+ LVDBE_SetAGC(pInstance, /* Set the AGC gain */
&pInstance->Params);
pInstance->pData->AGCInstance.AGC_Gain = pInstance->pData->AGCInstance.AGC_MaxGain;
- /* Default to the bass boost setting */
+ /* Default to the bass boost setting */
// initialize the mixer with some fixes values since otherwise LVDBE_SetVolume ends up
// reading uninitialized data
@@ -233,11 +124,11 @@
/*
* Initialise the volume
*/
- LVDBE_SetVolume(pInstance, /* Set the Volume */
+ LVDBE_SetVolume(pInstance, /* Set the Volume */
&pInstance->Params);
pInstance->pData->AGCInstance.Volume = pInstance->pData->AGCInstance.Target;
- /* Initialise as the target */
+ /* Initialise as the target */
MixGain = LVC_Mixer_GetTarget(&pMixer_Instance->MixerStream[0]);
LVC_Mixer_Init(&pMixer_Instance->MixerStream[0], MixGain, MixGain);
@@ -259,11 +150,11 @@
pBypassMixer_Instance->MixerStream[0].CallbackParam = 0;
pBypassMixer_Instance->MixerStream[0].pCallbackHandle = LVM_NULL;
pBypassMixer_Instance->MixerStream[0].pCallBack = LVM_NULL;
- pBypassMixer_Instance->MixerStream[0].CallbackSet=0;
+ pBypassMixer_Instance->MixerStream[0].CallbackSet = 0;
- LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0],0,0);
- LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
- LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate,2);
+ LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0], 0, 0);
+ LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pInstance->Params.SampleRate, 2);
/*
* Setup the mixer gain for the unprocessed path
@@ -271,10 +162,38 @@
pBypassMixer_Instance->MixerStream[1].CallbackParam = 0;
pBypassMixer_Instance->MixerStream[1].pCallbackHandle = LVM_NULL;
pBypassMixer_Instance->MixerStream[1].pCallBack = LVM_NULL;
- pBypassMixer_Instance->MixerStream[1].CallbackSet=0;
+ pBypassMixer_Instance->MixerStream[1].CallbackSet = 0;
LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[1], 1.0, 1.0);
- LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
- LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate, 2);
+ LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pInstance->Params.SampleRate, 2);
- return(LVDBE_SUCCESS);
+ return (LVDBE_SUCCESS);
+}
+
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVDBE_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created during LVDBE_Init including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance) {
+ LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)*phInstance;
+ if (pInstance == LVM_NULL) {
+ return;
+ }
+ if (pInstance->pData != LVM_NULL) {
+ free(pInstance->pData);
+ pInstance->pData = LVM_NULL;
+ }
+ if (pInstance->pCoef != LVM_NULL) {
+ free(pInstance->pCoef);
+ pInstance->pCoef = LVM_NULL;
+ }
+ free(pInstance);
+ *phInstance = LVM_NULL;
}
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
index f3faaed..fa85638 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
@@ -33,7 +33,10 @@
/* */
/****************************************************************************************/
-#include "LVDBE.h" /* Calling or Application layer definitions */
+#ifdef BIQUAD_OPT
+#include <audio_utils/BiquadFilter.h>
+#endif
+#include "LVDBE.h" /* Calling or Application layer definitions */
#include "BIQUAD.h"
#include "LVC_Mixer.h"
#include "AGC.h"
@@ -45,28 +48,10 @@
/****************************************************************************************/
/* General */
-#define LVDBE_INVALID 0xFFFF /* Invalid init parameter */
+#define LVDBE_INVALID 0xFFFF /* Invalid init parameter */
-/* Memory */
-#define LVDBE_MEMREGION_INSTANCE 0 /* Offset to the instance memory region */
-#define LVDBE_MEMREGION_PERSISTENT_DATA 1 /* Offset to persistent data memory region */
-#define LVDBE_MEMREGION_PERSISTENT_COEF 2 /* Offset to persistent coefficient region */
-#define LVDBE_MEMREGION_SCRATCH 3 /* Offset to data scratch memory region */
-
-#define LVDBE_INSTANCE_ALIGN 4 /* 32-bit alignment for structures */
-#define LVDBE_PERSISTENT_DATA_ALIGN 4 /* 32-bit alignment for data */
-#define LVDBE_PERSISTENT_COEF_ALIGN 4 /* 32-bit alignment for coef */
-#define LVDBE_SCRATCH_ALIGN 4 /* 32-bit alignment for long data */
-
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVDBE_SCRATCHBUFFERS_INPLACE (LVM_MAX_CHANNELS * 3)
-#else
-#define LVDBE_SCRATCHBUFFERS_INPLACE 6 /* Number of buffers required for inplace processing */
-#endif
-
-#define LVDBE_MIXER_TC 5 /* Mixer time */
-#define LVDBE_BYPASS_MIXER_TC 100 /* Bypass mixer time */
+#define LVDBE_MIXER_TC 5 /* Mixer time */
+#define LVDBE_BYPASS_MIXER_TC 100 /* Bypass mixer time */
/****************************************************************************************/
/* */
@@ -76,37 +61,42 @@
/* Data structure */
/* Data structure */
-typedef struct
-{
+typedef struct {
/* AGC parameters */
- AGC_MIX_VOL_2St1Mon_FLOAT_t AGCInstance; /* AGC instance parameters */
+ AGC_MIX_VOL_2St1Mon_FLOAT_t AGCInstance; /* AGC instance parameters */
/* Process variables */
- Biquad_2I_Order2_FLOAT_Taps_t HPFTaps; /* High pass filter taps */
- Biquad_1I_Order2_FLOAT_Taps_t BPFTaps; /* Band pass filter taps */
- LVMixer3_1St_FLOAT_st BypassVolume; /* Bypass volume scaler */
- LVMixer3_2St_FLOAT_st BypassMixer; /* Bypass Mixer for Click Removal */
+#ifndef BIQUAD_OPT
+ Biquad_2I_Order2_FLOAT_Taps_t HPFTaps; /* High pass filter taps */
+#endif
+ Biquad_1I_Order2_FLOAT_Taps_t BPFTaps; /* Band pass filter taps */
+ LVMixer3_1St_FLOAT_st BypassVolume; /* Bypass volume scaler */
+ LVMixer3_2St_FLOAT_st BypassMixer; /* Bypass Mixer for Click Removal */
} LVDBE_Data_FLOAT_t;
/* Coefs structure */
-typedef struct
-{
+typedef struct {
/* Process variables */
- Biquad_FLOAT_Instance_t HPFInstance; /* High pass filter instance */
- Biquad_FLOAT_Instance_t BPFInstance; /* Band pass filter instance */
+#ifndef BIQUAD_OPT
+ Biquad_FLOAT_Instance_t HPFInstance; /* High pass filter instance */
+#endif
+ Biquad_FLOAT_Instance_t BPFInstance; /* Band pass filter instance */
} LVDBE_Coef_FLOAT_t;
/* Instance structure */
-typedef struct
-{
+typedef struct {
/* Public parameters */
- LVDBE_MemTab_t MemoryTable; /* Instance memory allocation table */
- LVDBE_Params_t Params; /* Instance parameters */
- LVDBE_Capabilities_t Capabilities; /* Instance capabilities */
+ LVDBE_Params_t Params; /* Instance parameters */
+ LVDBE_Capabilities_t Capabilities; /* Instance capabilities */
/* Data and coefficient pointers */
- LVDBE_Data_FLOAT_t *pData; /* Instance data */
- LVDBE_Coef_FLOAT_t *pCoef; /* Instance coefficients */
+ LVDBE_Data_FLOAT_t* pData; /* Instance data */
+ LVDBE_Coef_FLOAT_t* pCoef; /* Instance coefficients */
+ void* pScratch; /* scratch pointer */
+#ifdef BIQUAD_OPT
+ std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+ pBqInstance; /* Biquad filter instance */
+#endif
} LVDBE_Instance_t;
/****************************************************************************************/
@@ -115,13 +105,10 @@
/* */
/****************************************************************************************/
-void LVDBE_SetAGC(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams);
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
-void LVDBE_SetVolume(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams);
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
-void LVDBE_SetFilters(LVDBE_Instance_t *pInstance,
- LVDBE_Params_t *pParams);
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
-#endif /* __LVDBE_PRIVATE_H__ */
+#endif /* __LVDBE_PRIVATE_H__ */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
index b4a71c7..bd04a02 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
@@ -20,13 +20,16 @@
/* Includes */
/* */
/****************************************************************************************/
+#ifdef BIQUAD_OPT
+#include <audio_utils/BiquadFilter.h>
+#endif
-#include <string.h> // memset
+#include <string.h> // memset
#include "LVDBE.h"
#include "LVDBE_Private.h"
#include "VectorArithmetic.h"
#include "AGC.h"
-#include "LVDBE_Coeffs.h" /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
#include <log/log.h>
/********************************************************************************************/
@@ -73,179 +76,126 @@
/* overall end to end gain is odB. */
/* */
/********************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- const LVM_UINT16 NrFrames) // updated to use samples = frames * channels.
+LVDBE_ReturnStatus_en LVDBE_Process(
+ LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+ const LVM_UINT16 NrFrames) // updated to use samples = frames * channels.
{
- LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
+ LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
- /*Extract number of Channels info*/
-#ifdef SUPPORT_MC
- // Mono passed in as stereo
- const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
- ? 2 : pInstance->Params.NrChannels;
-#else
- const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
- const LVM_INT32 NrSamples = NrChannels * NrFrames;
+ /*Extract number of Channels info*/
+ // Mono passed in as stereo
+ const LVM_INT32 NrChannels =
+ pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
+ const LVM_INT32 NrSamples = NrChannels * NrFrames;
- /* Space to store DBE path computation */
- LVM_FLOAT * const pScratch =
- (LVM_FLOAT *)pInstance->MemoryTable.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress;
-
- /*
- * Scratch for Mono path starts at offset of
- * NrSamples float values from pScratch.
- */
- LVM_FLOAT * const pMono = pScratch + NrSamples;
-
- /*
- * TRICKY: pMono is used and discarded by the DBE path.
- * so it is available for use for the pScratchVol
- * path which is computed afterwards.
- *
- * Space to store Volume Control path computation.
- * This is identical to pMono (see TRICKY comment).
- */
- LVM_FLOAT * const pScratchVol = pMono;
-
- /*
- * Check the number of frames is not too large
- */
- if (NrFrames > pInstance->Capabilities.MaxBlockSize)
- {
- return LVDBE_TOOMANYSAMPLES;
- }
-
- /*
- * Check if the algorithm is enabled
- */
- /* DBE path is processed when DBE is ON or during On/Off transitions */
- if ((pInstance->Params.OperatingMode == LVDBE_ON)||
- (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0])
- !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0])))
- {
- // make copy of input data
- Copy_Float(pInData,
- pScratch,
- (LVM_INT16)NrSamples);
+ /* Space to store DBE path computation */
+ LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pScratch;
/*
- * Apply the high pass filter if selected
+ * Scratch for Mono path starts at offset of
+ * NrSamples float values from pScratch.
*/
- if (pInstance->Params.HPFSelect == LVDBE_HPF_ON)
- {
-#ifdef SUPPORT_MC
- BQ_MC_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance, /* Filter instance */
- pScratch, /* Source */
- pScratch, /* Destination */
- (LVM_INT16)NrFrames,
- (LVM_INT16)NrChannels);
-#else
- BQ_2I_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance,/* Filter instance */
- pScratch, /* Source */
- pScratch, /* Destination */
- (LVM_INT16)NrFrames);
-#endif
+ LVM_FLOAT* const pMono = pScratch + NrSamples;
+
+ /*
+ * TRICKY: pMono is used and discarded by the DBE path.
+ * so it is available for use for the pScratchVol
+ * path which is computed afterwards.
+ *
+ * Space to store Volume Control path computation.
+ * This is identical to pMono (see TRICKY comment).
+ */
+ LVM_FLOAT* const pScratchVol = pMono;
+
+ /*
+ * Check the number of frames is not too large
+ */
+ if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
+ return LVDBE_TOOMANYSAMPLES;
}
/*
- * Create the mono stream
+ * Check if the algorithm is enabled
*/
-#ifdef SUPPORT_MC
- FromMcToMono_Float(pScratch, /* Source */
- pMono, /* Mono destination */
- (LVM_INT16)NrFrames, /* Number of frames */
- (LVM_INT16)NrChannels);
+ /* DBE path is processed when DBE is ON or during On/Off transitions */
+ if ((pInstance->Params.OperatingMode == LVDBE_ON) ||
+ (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0]) !=
+ LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0]))) {
+ // make copy of input data
+ Copy_Float(pInData, pScratch, (LVM_INT16)NrSamples);
+
+ /*
+ * Apply the high pass filter if selected
+ */
+ if (pInstance->Params.HPFSelect == LVDBE_HPF_ON) {
+#ifdef BIQUAD_OPT
+ pInstance->pBqInstance->process(pScratch, pScratch, NrFrames);
#else
- From2iToMono_Float(pScratch, /* Stereo source */
- pMono, /* Mono destination */
- (LVM_INT16)NrFrames);
+ BQ_MC_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance, /* Filter instance */
+ pScratch, /* Source */
+ pScratch, /* Destination */
+ (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
#endif
+ }
- /*
- * Apply the band pass filter
- */
- BP_1I_D32F32C30_TRC_WRA_02(&pInstance->pCoef->BPFInstance, /* Filter instance */
- pMono, /* Source */
- pMono, /* Destination */
- (LVM_INT16)NrFrames);
+ /*
+ * Create the mono stream
+ */
+ FromMcToMono_Float(pScratch, /* Source */
+ pMono, /* Mono destination */
+ (LVM_INT16)NrFrames, /* Number of frames */
+ (LVM_INT16)NrChannels);
- /*
- * Apply the AGC and mix
- */
-#ifdef SUPPORT_MC
- AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer */
- pScratch, /* Source */
- pMono, /* Mono band pass source */
- pScratch, /* Destination */
- NrFrames, /* Number of frames */
- NrChannels); /* Number of channels */
-#else
- AGC_MIX_VOL_2St1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer */
- pScratch, /* Stereo source */
- pMono, /* Mono band pass source */
- pScratch, /* Stereo destination */
- NrFrames);
-#endif
+ /*
+ * Apply the band pass filter
+ */
+ BP_1I_D32F32C30_TRC_WRA_02(&pInstance->pCoef->BPFInstance, /* Filter instance */
+ pMono, /* Source */
+ pMono, /* Destination */
+ (LVM_INT16)NrFrames);
- for (LVM_INT32 ii = 0; ii < NrSamples; ++ii) {
- //TODO: replace with existing clamping function
- if (pScratch[ii] < -1.0) {
- pScratch[ii] = -1.0;
- } else if (pScratch[ii] > 1.0) {
- pScratch[ii] = 1.0;
- }
+ /*
+ * Apply the AGC and mix
+ */
+ AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer */
+ pScratch, /* Source */
+ pMono, /* Mono band pass source */
+ pScratch, /* Destination */
+ NrFrames, /* Number of frames */
+ NrChannels); /* Number of channels */
+
+ for (LVM_INT32 ii = 0; ii < NrSamples; ++ii) {
+ // TODO: replace with existing clamping function
+ if (pScratch[ii] < -1.0) {
+ pScratch[ii] = -1.0;
+ } else if (pScratch[ii] > 1.0) {
+ pScratch[ii] = 1.0;
+ }
+ }
+ } else {
+ // clear DBE processed path
+ memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
}
- } else {
- // clear DBE processed path
- memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
- }
- /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
- if ((pInstance->Params.OperatingMode == LVDBE_OFF)||
- (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1])
- !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1])))
- {
+ /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
+ if ((pInstance->Params.OperatingMode == LVDBE_OFF) ||
+ (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1]) !=
+ LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1]))) {
+ /*
+ * The algorithm is disabled but volume management is required to compensate for
+ * headroom and volume (if enabled)
+ */
+ LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume, pInData, pScratchVol,
+ (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+ } else {
+ // clear bypass volume path
+ memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
+ }
/*
- * The algorithm is disabled but volume management is required to compensate for
- * headroom and volume (if enabled)
+ * Mix DBE processed path and bypass volume path
*/
-#ifdef SUPPORT_MC
- LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume,
- pInData,
- pScratchVol,
- (LVM_INT16)NrFrames,
- (LVM_INT16)NrChannels);
-#else
- LVC_MixSoft_1St_D16C31_SAT(&pInstance->pData->BypassVolume,
- pInData,
- pScratchVol,
- (LVM_INT16)NrSamples); /* Left and right, really # samples */
-#endif
- } else {
- // clear bypass volume path
- memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
- }
-
- /*
- * Mix DBE processed path and bypass volume path
- */
-#ifdef SUPPORT_MC
- LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer,
- pScratch,
- pScratchVol,
- pOutData,
- (LVM_INT16)NrFrames,
- (LVM_INT16)NrChannels);
-#else
- LVC_MixSoft_2St_D16C31_SAT(&pInstance->pData->BypassMixer,
- pScratch,
- pScratchVol,
- pOutData,
- (LVM_INT16)NrSamples);
-#endif
- return LVDBE_SUCCESS;
+ LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer, pScratch, pScratchVol, pOutData,
+ (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+ return LVDBE_SUCCESS;
}
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
index 728575c..1b95812 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
@@ -22,7 +22,7 @@
/************************************************************************************/
#include "LVDBE.h"
-#include "LVDBE_Coeffs.h" /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
#include "LVDBE_Tables.h"
#include "BIQUAD.h"
@@ -36,275 +36,119 @@
* High Pass Filter Coefficient table
*/
const BQ_FLOAT_Coefs_t LVDBE_HPF_Table[] = {
- /* Coefficients for 55Hz centre frequency */
- {HPF_Fs8000_Fc55_A2, /* 8kS/s coefficients */
- HPF_Fs8000_Fc55_A1,
- HPF_Fs8000_Fc55_A0,
- -HPF_Fs8000_Fc55_B2,
- -HPF_Fs8000_Fc55_B1},
- {HPF_Fs11025_Fc55_A2, /* 11kS/s coefficients */
- HPF_Fs11025_Fc55_A1,
- HPF_Fs11025_Fc55_A0,
- -HPF_Fs11025_Fc55_B2,
- -HPF_Fs11025_Fc55_B1},
- {HPF_Fs12000_Fc55_A2, /* 12kS/s coefficients */
- HPF_Fs12000_Fc55_A1,
- HPF_Fs12000_Fc55_A0,
- -HPF_Fs12000_Fc55_B2,
- -HPF_Fs12000_Fc55_B1},
- {HPF_Fs16000_Fc55_A2, /* 16kS/s coefficients */
- HPF_Fs16000_Fc55_A1,
- HPF_Fs16000_Fc55_A0,
- -HPF_Fs16000_Fc55_B2,
- -HPF_Fs16000_Fc55_B1},
- {HPF_Fs22050_Fc55_A2, /* 22kS/s coefficients */
- HPF_Fs22050_Fc55_A1,
- HPF_Fs22050_Fc55_A0,
- -HPF_Fs22050_Fc55_B2,
- -HPF_Fs22050_Fc55_B1},
- {HPF_Fs24000_Fc55_A2, /* 24kS/s coefficients */
- HPF_Fs24000_Fc55_A1,
- HPF_Fs24000_Fc55_A0,
- -HPF_Fs24000_Fc55_B2,
- -HPF_Fs24000_Fc55_B1},
- {HPF_Fs32000_Fc55_A2, /* 32kS/s coefficients */
- HPF_Fs32000_Fc55_A1,
- HPF_Fs32000_Fc55_A0,
- -HPF_Fs32000_Fc55_B2,
- -HPF_Fs32000_Fc55_B1},
- {HPF_Fs44100_Fc55_A2, /* 44kS/s coefficients */
- HPF_Fs44100_Fc55_A1,
- HPF_Fs44100_Fc55_A0,
- -HPF_Fs44100_Fc55_B2,
- -HPF_Fs44100_Fc55_B1},
- {HPF_Fs48000_Fc55_A2, /* 48kS/s coefficients */
- HPF_Fs48000_Fc55_A1,
- HPF_Fs48000_Fc55_A0,
- -HPF_Fs48000_Fc55_B2,
- -HPF_Fs48000_Fc55_B1},
- {HPF_Fs88200_Fc55_A2, /* 88kS/s coefficients */
- HPF_Fs88200_Fc55_A1,
- HPF_Fs88200_Fc55_A0,
- -HPF_Fs88200_Fc55_B2,
- -HPF_Fs88200_Fc55_B1},
- {HPF_Fs96000_Fc55_A2, /* 96kS/s coefficients */
- HPF_Fs96000_Fc55_A1,
- HPF_Fs96000_Fc55_A0,
- -HPF_Fs96000_Fc55_B2,
- -HPF_Fs96000_Fc55_B1},
- {HPF_Fs176400_Fc55_A2, /* 176kS/s coefficients */
- HPF_Fs176400_Fc55_A1,
- HPF_Fs176400_Fc55_A0,
- -HPF_Fs176400_Fc55_B2,
- -HPF_Fs176400_Fc55_B1},
- {HPF_Fs192000_Fc55_A2, /* 192kS/s coefficients */
- HPF_Fs192000_Fc55_A1,
- HPF_Fs192000_Fc55_A0,
- -HPF_Fs192000_Fc55_B2,
- -HPF_Fs192000_Fc55_B1},
+ /* Coefficients for 55Hz centre frequency */
+ {HPF_Fs8000_Fc55_A2, /* 8kS/s coefficients */
+ HPF_Fs8000_Fc55_A1, HPF_Fs8000_Fc55_A0, -HPF_Fs8000_Fc55_B2, -HPF_Fs8000_Fc55_B1},
+ {HPF_Fs11025_Fc55_A2, /* 11kS/s coefficients */
+ HPF_Fs11025_Fc55_A1, HPF_Fs11025_Fc55_A0, -HPF_Fs11025_Fc55_B2, -HPF_Fs11025_Fc55_B1},
+ {HPF_Fs12000_Fc55_A2, /* 12kS/s coefficients */
+ HPF_Fs12000_Fc55_A1, HPF_Fs12000_Fc55_A0, -HPF_Fs12000_Fc55_B2, -HPF_Fs12000_Fc55_B1},
+ {HPF_Fs16000_Fc55_A2, /* 16kS/s coefficients */
+ HPF_Fs16000_Fc55_A1, HPF_Fs16000_Fc55_A0, -HPF_Fs16000_Fc55_B2, -HPF_Fs16000_Fc55_B1},
+ {HPF_Fs22050_Fc55_A2, /* 22kS/s coefficients */
+ HPF_Fs22050_Fc55_A1, HPF_Fs22050_Fc55_A0, -HPF_Fs22050_Fc55_B2, -HPF_Fs22050_Fc55_B1},
+ {HPF_Fs24000_Fc55_A2, /* 24kS/s coefficients */
+ HPF_Fs24000_Fc55_A1, HPF_Fs24000_Fc55_A0, -HPF_Fs24000_Fc55_B2, -HPF_Fs24000_Fc55_B1},
+ {HPF_Fs32000_Fc55_A2, /* 32kS/s coefficients */
+ HPF_Fs32000_Fc55_A1, HPF_Fs32000_Fc55_A0, -HPF_Fs32000_Fc55_B2, -HPF_Fs32000_Fc55_B1},
+ {HPF_Fs44100_Fc55_A2, /* 44kS/s coefficients */
+ HPF_Fs44100_Fc55_A1, HPF_Fs44100_Fc55_A0, -HPF_Fs44100_Fc55_B2, -HPF_Fs44100_Fc55_B1},
+ {HPF_Fs48000_Fc55_A2, /* 48kS/s coefficients */
+ HPF_Fs48000_Fc55_A1, HPF_Fs48000_Fc55_A0, -HPF_Fs48000_Fc55_B2, -HPF_Fs48000_Fc55_B1},
+ {HPF_Fs88200_Fc55_A2, /* 88kS/s coefficients */
+ HPF_Fs88200_Fc55_A1, HPF_Fs88200_Fc55_A0, -HPF_Fs88200_Fc55_B2, -HPF_Fs88200_Fc55_B1},
+ {HPF_Fs96000_Fc55_A2, /* 96kS/s coefficients */
+ HPF_Fs96000_Fc55_A1, HPF_Fs96000_Fc55_A0, -HPF_Fs96000_Fc55_B2, -HPF_Fs96000_Fc55_B1},
+ {HPF_Fs176400_Fc55_A2, /* 176kS/s coefficients */
+ HPF_Fs176400_Fc55_A1, HPF_Fs176400_Fc55_A0, -HPF_Fs176400_Fc55_B2, -HPF_Fs176400_Fc55_B1},
+ {HPF_Fs192000_Fc55_A2, /* 192kS/s coefficients */
+ HPF_Fs192000_Fc55_A1, HPF_Fs192000_Fc55_A0, -HPF_Fs192000_Fc55_B2, -HPF_Fs192000_Fc55_B1},
- /* Coefficients for 66Hz centre frequency */
- {HPF_Fs8000_Fc66_A2, /* 8kS/s coefficients */
- HPF_Fs8000_Fc66_A1,
- HPF_Fs8000_Fc66_A0,
- -HPF_Fs8000_Fc66_B2,
- -HPF_Fs8000_Fc66_B1},
- {HPF_Fs11025_Fc66_A2, /* 11kS/s coefficients */
- HPF_Fs11025_Fc66_A1,
- HPF_Fs11025_Fc66_A0,
- -HPF_Fs11025_Fc66_B2,
- -HPF_Fs11025_Fc66_B1},
- {HPF_Fs12000_Fc66_A2, /* 12kS/s coefficients */
- HPF_Fs12000_Fc66_A1,
- HPF_Fs12000_Fc66_A0,
- -HPF_Fs12000_Fc66_B2,
- -HPF_Fs12000_Fc66_B1},
- {HPF_Fs16000_Fc66_A2, /* 16kS/s coefficients */
- HPF_Fs16000_Fc66_A1,
- HPF_Fs16000_Fc66_A0,
- -HPF_Fs16000_Fc66_B2,
- -HPF_Fs16000_Fc66_B1},
- {HPF_Fs22050_Fc66_A2, /* 22kS/s coefficients */
- HPF_Fs22050_Fc66_A1,
- HPF_Fs22050_Fc66_A0,
- -HPF_Fs22050_Fc66_B2,
- -HPF_Fs22050_Fc66_B1},
- {HPF_Fs24000_Fc66_A2, /* 24kS/s coefficients */
- HPF_Fs24000_Fc66_A1,
- HPF_Fs24000_Fc66_A0,
- -HPF_Fs24000_Fc66_B2,
- -HPF_Fs24000_Fc66_B1},
- {HPF_Fs32000_Fc66_A2, /* 32kS/s coefficients */
- HPF_Fs32000_Fc66_A1,
- HPF_Fs32000_Fc66_A0,
- -HPF_Fs32000_Fc66_B2,
- -HPF_Fs32000_Fc66_B1},
- {HPF_Fs44100_Fc66_A2, /* 44kS/s coefficients */
- HPF_Fs44100_Fc66_A1,
- HPF_Fs44100_Fc66_A0,
- -HPF_Fs44100_Fc66_B2,
- -HPF_Fs44100_Fc66_B1},
- {HPF_Fs48000_Fc66_A2, /* 48kS/s coefficients */
- HPF_Fs48000_Fc66_A1,
- HPF_Fs48000_Fc66_A0,
- -HPF_Fs48000_Fc66_B2,
- -HPF_Fs48000_Fc66_B1},
- {HPF_Fs88200_Fc66_A2, /* 88kS/s coefficients */
- HPF_Fs88200_Fc66_A1,
- HPF_Fs88200_Fc66_A0,
- -HPF_Fs88200_Fc66_B2,
- -HPF_Fs88200_Fc66_B1},
- {HPF_Fs96000_Fc66_A2, /* 96kS/s coefficients */
- HPF_Fs96000_Fc66_A1,
- HPF_Fs96000_Fc66_A0,
- -HPF_Fs96000_Fc66_B2,
- -HPF_Fs96000_Fc66_B1},
- {HPF_Fs176400_Fc66_A2, /* 176kS/s coefficients */
- HPF_Fs176400_Fc66_A1,
- HPF_Fs176400_Fc66_A0,
- -HPF_Fs176400_Fc66_B2,
- -HPF_Fs176400_Fc66_B1},
- {HPF_Fs192000_Fc66_A2, /* 192kS/s coefficients */
- HPF_Fs192000_Fc66_A1,
- HPF_Fs192000_Fc66_A0,
- -HPF_Fs192000_Fc66_B2,
- -HPF_Fs192000_Fc66_B1},
+ /* Coefficients for 66Hz centre frequency */
+ {HPF_Fs8000_Fc66_A2, /* 8kS/s coefficients */
+ HPF_Fs8000_Fc66_A1, HPF_Fs8000_Fc66_A0, -HPF_Fs8000_Fc66_B2, -HPF_Fs8000_Fc66_B1},
+ {HPF_Fs11025_Fc66_A2, /* 11kS/s coefficients */
+ HPF_Fs11025_Fc66_A1, HPF_Fs11025_Fc66_A0, -HPF_Fs11025_Fc66_B2, -HPF_Fs11025_Fc66_B1},
+ {HPF_Fs12000_Fc66_A2, /* 12kS/s coefficients */
+ HPF_Fs12000_Fc66_A1, HPF_Fs12000_Fc66_A0, -HPF_Fs12000_Fc66_B2, -HPF_Fs12000_Fc66_B1},
+ {HPF_Fs16000_Fc66_A2, /* 16kS/s coefficients */
+ HPF_Fs16000_Fc66_A1, HPF_Fs16000_Fc66_A0, -HPF_Fs16000_Fc66_B2, -HPF_Fs16000_Fc66_B1},
+ {HPF_Fs22050_Fc66_A2, /* 22kS/s coefficients */
+ HPF_Fs22050_Fc66_A1, HPF_Fs22050_Fc66_A0, -HPF_Fs22050_Fc66_B2, -HPF_Fs22050_Fc66_B1},
+ {HPF_Fs24000_Fc66_A2, /* 24kS/s coefficients */
+ HPF_Fs24000_Fc66_A1, HPF_Fs24000_Fc66_A0, -HPF_Fs24000_Fc66_B2, -HPF_Fs24000_Fc66_B1},
+ {HPF_Fs32000_Fc66_A2, /* 32kS/s coefficients */
+ HPF_Fs32000_Fc66_A1, HPF_Fs32000_Fc66_A0, -HPF_Fs32000_Fc66_B2, -HPF_Fs32000_Fc66_B1},
+ {HPF_Fs44100_Fc66_A2, /* 44kS/s coefficients */
+ HPF_Fs44100_Fc66_A1, HPF_Fs44100_Fc66_A0, -HPF_Fs44100_Fc66_B2, -HPF_Fs44100_Fc66_B1},
+ {HPF_Fs48000_Fc66_A2, /* 48kS/s coefficients */
+ HPF_Fs48000_Fc66_A1, HPF_Fs48000_Fc66_A0, -HPF_Fs48000_Fc66_B2, -HPF_Fs48000_Fc66_B1},
+ {HPF_Fs88200_Fc66_A2, /* 88kS/s coefficients */
+ HPF_Fs88200_Fc66_A1, HPF_Fs88200_Fc66_A0, -HPF_Fs88200_Fc66_B2, -HPF_Fs88200_Fc66_B1},
+ {HPF_Fs96000_Fc66_A2, /* 96kS/s coefficients */
+ HPF_Fs96000_Fc66_A1, HPF_Fs96000_Fc66_A0, -HPF_Fs96000_Fc66_B2, -HPF_Fs96000_Fc66_B1},
+ {HPF_Fs176400_Fc66_A2, /* 176kS/s coefficients */
+ HPF_Fs176400_Fc66_A1, HPF_Fs176400_Fc66_A0, -HPF_Fs176400_Fc66_B2, -HPF_Fs176400_Fc66_B1},
+ {HPF_Fs192000_Fc66_A2, /* 192kS/s coefficients */
+ HPF_Fs192000_Fc66_A1, HPF_Fs192000_Fc66_A0, -HPF_Fs192000_Fc66_B2, -HPF_Fs192000_Fc66_B1},
- /* Coefficients for 78Hz centre frequency */
- {HPF_Fs8000_Fc78_A2, /* 8kS/s coefficients */
- HPF_Fs8000_Fc78_A1,
- HPF_Fs8000_Fc78_A0,
- -HPF_Fs8000_Fc78_B2,
- -HPF_Fs8000_Fc78_B1},
- {HPF_Fs11025_Fc78_A2, /* 11kS/s coefficients */
- HPF_Fs11025_Fc78_A1,
- HPF_Fs11025_Fc78_A0,
- -HPF_Fs11025_Fc78_B2,
- -HPF_Fs11025_Fc78_B1},
- {HPF_Fs12000_Fc78_A2, /* 12kS/s coefficients */
- HPF_Fs12000_Fc78_A1,
- HPF_Fs12000_Fc78_A0,
- -HPF_Fs12000_Fc78_B2,
- -HPF_Fs12000_Fc78_B1},
- {HPF_Fs16000_Fc78_A2, /* 16kS/s coefficients */
- HPF_Fs16000_Fc78_A1,
- HPF_Fs16000_Fc78_A0,
- -HPF_Fs16000_Fc78_B2,
- -HPF_Fs16000_Fc78_B1},
- {HPF_Fs22050_Fc78_A2, /* 22kS/s coefficients */
- HPF_Fs22050_Fc78_A1,
- HPF_Fs22050_Fc78_A0,
- -HPF_Fs22050_Fc78_B2,
- -HPF_Fs22050_Fc78_B1},
- {HPF_Fs24000_Fc78_A2, /* 24kS/s coefficients */
- HPF_Fs24000_Fc78_A1,
- HPF_Fs24000_Fc78_A0,
- -HPF_Fs24000_Fc78_B2,
- -HPF_Fs24000_Fc78_B1},
- {HPF_Fs32000_Fc78_A2, /* 32kS/s coefficients */
- HPF_Fs32000_Fc78_A1,
- HPF_Fs32000_Fc78_A0,
- -HPF_Fs32000_Fc78_B2,
- -HPF_Fs32000_Fc78_B1},
- {HPF_Fs44100_Fc78_A2, /* 44kS/s coefficients */
- HPF_Fs44100_Fc78_A1,
- HPF_Fs44100_Fc78_A0,
- -HPF_Fs44100_Fc78_B2,
- -HPF_Fs44100_Fc78_B1},
- {HPF_Fs48000_Fc78_A2, /* 48kS/s coefficients */
- HPF_Fs48000_Fc78_A1,
- HPF_Fs48000_Fc78_A0,
- -HPF_Fs48000_Fc78_B2,
- -HPF_Fs48000_Fc78_B1},
- {HPF_Fs88200_Fc78_A2, /* 88kS/s coefficients */
- HPF_Fs88200_Fc78_A1,
- HPF_Fs88200_Fc78_A0,
- -HPF_Fs88200_Fc78_B2,
- -HPF_Fs88200_Fc78_B1},
- {HPF_Fs96000_Fc78_A2, /* 96kS/s coefficients */
- HPF_Fs96000_Fc78_A1,
- HPF_Fs96000_Fc78_A0,
- -HPF_Fs96000_Fc78_B2,
- -HPF_Fs96000_Fc78_B1},
- {HPF_Fs176400_Fc78_A2, /* 176kS/s coefficients */
- HPF_Fs176400_Fc78_A1,
- HPF_Fs176400_Fc78_A0,
- -HPF_Fs176400_Fc78_B2,
- -HPF_Fs176400_Fc78_B1},
- {HPF_Fs192000_Fc78_A2, /* 192kS/s coefficients */
- HPF_Fs192000_Fc78_A1,
- HPF_Fs192000_Fc78_A0,
- -HPF_Fs192000_Fc78_B2,
- -HPF_Fs192000_Fc78_B1},
+ /* Coefficients for 78Hz centre frequency */
+ {HPF_Fs8000_Fc78_A2, /* 8kS/s coefficients */
+ HPF_Fs8000_Fc78_A1, HPF_Fs8000_Fc78_A0, -HPF_Fs8000_Fc78_B2, -HPF_Fs8000_Fc78_B1},
+ {HPF_Fs11025_Fc78_A2, /* 11kS/s coefficients */
+ HPF_Fs11025_Fc78_A1, HPF_Fs11025_Fc78_A0, -HPF_Fs11025_Fc78_B2, -HPF_Fs11025_Fc78_B1},
+ {HPF_Fs12000_Fc78_A2, /* 12kS/s coefficients */
+ HPF_Fs12000_Fc78_A1, HPF_Fs12000_Fc78_A0, -HPF_Fs12000_Fc78_B2, -HPF_Fs12000_Fc78_B1},
+ {HPF_Fs16000_Fc78_A2, /* 16kS/s coefficients */
+ HPF_Fs16000_Fc78_A1, HPF_Fs16000_Fc78_A0, -HPF_Fs16000_Fc78_B2, -HPF_Fs16000_Fc78_B1},
+ {HPF_Fs22050_Fc78_A2, /* 22kS/s coefficients */
+ HPF_Fs22050_Fc78_A1, HPF_Fs22050_Fc78_A0, -HPF_Fs22050_Fc78_B2, -HPF_Fs22050_Fc78_B1},
+ {HPF_Fs24000_Fc78_A2, /* 24kS/s coefficients */
+ HPF_Fs24000_Fc78_A1, HPF_Fs24000_Fc78_A0, -HPF_Fs24000_Fc78_B2, -HPF_Fs24000_Fc78_B1},
+ {HPF_Fs32000_Fc78_A2, /* 32kS/s coefficients */
+ HPF_Fs32000_Fc78_A1, HPF_Fs32000_Fc78_A0, -HPF_Fs32000_Fc78_B2, -HPF_Fs32000_Fc78_B1},
+ {HPF_Fs44100_Fc78_A2, /* 44kS/s coefficients */
+ HPF_Fs44100_Fc78_A1, HPF_Fs44100_Fc78_A0, -HPF_Fs44100_Fc78_B2, -HPF_Fs44100_Fc78_B1},
+ {HPF_Fs48000_Fc78_A2, /* 48kS/s coefficients */
+ HPF_Fs48000_Fc78_A1, HPF_Fs48000_Fc78_A0, -HPF_Fs48000_Fc78_B2, -HPF_Fs48000_Fc78_B1},
+ {HPF_Fs88200_Fc78_A2, /* 88kS/s coefficients */
+ HPF_Fs88200_Fc78_A1, HPF_Fs88200_Fc78_A0, -HPF_Fs88200_Fc78_B2, -HPF_Fs88200_Fc78_B1},
+ {HPF_Fs96000_Fc78_A2, /* 96kS/s coefficients */
+ HPF_Fs96000_Fc78_A1, HPF_Fs96000_Fc78_A0, -HPF_Fs96000_Fc78_B2, -HPF_Fs96000_Fc78_B1},
+ {HPF_Fs176400_Fc78_A2, /* 176kS/s coefficients */
+ HPF_Fs176400_Fc78_A1, HPF_Fs176400_Fc78_A0, -HPF_Fs176400_Fc78_B2, -HPF_Fs176400_Fc78_B1},
+ {HPF_Fs192000_Fc78_A2, /* 192kS/s coefficients */
+ HPF_Fs192000_Fc78_A1, HPF_Fs192000_Fc78_A0, -HPF_Fs192000_Fc78_B2, -HPF_Fs192000_Fc78_B1},
- /* Coefficients for 90Hz centre frequency */
- {HPF_Fs8000_Fc90_A2, /* 8kS/s coefficients */
- HPF_Fs8000_Fc90_A1,
- HPF_Fs8000_Fc90_A0,
- -HPF_Fs8000_Fc90_B2,
- -HPF_Fs8000_Fc90_B1},
- {HPF_Fs11025_Fc90_A2, /* 11kS/s coefficients */
- HPF_Fs11025_Fc90_A1,
- HPF_Fs11025_Fc90_A0,
- -HPF_Fs11025_Fc90_B2,
- -HPF_Fs11025_Fc90_B1},
- {HPF_Fs12000_Fc90_A2, /* 12kS/s coefficients */
- HPF_Fs12000_Fc90_A1,
- HPF_Fs12000_Fc90_A0,
- -HPF_Fs12000_Fc90_B2,
- -HPF_Fs12000_Fc90_B1},
- {HPF_Fs16000_Fc90_A2, /* 16kS/s coefficients */
- HPF_Fs16000_Fc90_A1,
- HPF_Fs16000_Fc90_A0,
- -HPF_Fs16000_Fc90_B2,
- -HPF_Fs16000_Fc90_B1},
- {HPF_Fs22050_Fc90_A2, /* 22kS/s coefficients */
- HPF_Fs22050_Fc90_A1,
- HPF_Fs22050_Fc90_A0,
- -HPF_Fs22050_Fc90_B2,
- -HPF_Fs22050_Fc90_B1},
- {HPF_Fs24000_Fc90_A2, /* 24kS/s coefficients */
- HPF_Fs24000_Fc90_A1,
- HPF_Fs24000_Fc90_A0,
- -HPF_Fs24000_Fc90_B2,
- -HPF_Fs24000_Fc90_B1},
- {HPF_Fs32000_Fc90_A2, /* 32kS/s coefficients */
- HPF_Fs32000_Fc90_A1,
- HPF_Fs32000_Fc90_A0,
- -HPF_Fs32000_Fc90_B2,
- -HPF_Fs32000_Fc90_B1},
- {HPF_Fs44100_Fc90_A2, /* 44kS/s coefficients */
- HPF_Fs44100_Fc90_A1,
- HPF_Fs44100_Fc90_A0,
- -HPF_Fs44100_Fc90_B2,
- -HPF_Fs44100_Fc90_B1},
- {HPF_Fs48000_Fc90_A2, /* 48kS/s coefficients */
- HPF_Fs48000_Fc90_A1,
- HPF_Fs48000_Fc90_A0,
- -HPF_Fs48000_Fc90_B2,
- -HPF_Fs48000_Fc90_B1}
+ /* Coefficients for 90Hz centre frequency */
+ {HPF_Fs8000_Fc90_A2, /* 8kS/s coefficients */
+ HPF_Fs8000_Fc90_A1, HPF_Fs8000_Fc90_A0, -HPF_Fs8000_Fc90_B2, -HPF_Fs8000_Fc90_B1},
+ {HPF_Fs11025_Fc90_A2, /* 11kS/s coefficients */
+ HPF_Fs11025_Fc90_A1, HPF_Fs11025_Fc90_A0, -HPF_Fs11025_Fc90_B2, -HPF_Fs11025_Fc90_B1},
+ {HPF_Fs12000_Fc90_A2, /* 12kS/s coefficients */
+ HPF_Fs12000_Fc90_A1, HPF_Fs12000_Fc90_A0, -HPF_Fs12000_Fc90_B2, -HPF_Fs12000_Fc90_B1},
+ {HPF_Fs16000_Fc90_A2, /* 16kS/s coefficients */
+ HPF_Fs16000_Fc90_A1, HPF_Fs16000_Fc90_A0, -HPF_Fs16000_Fc90_B2, -HPF_Fs16000_Fc90_B1},
+ {HPF_Fs22050_Fc90_A2, /* 22kS/s coefficients */
+ HPF_Fs22050_Fc90_A1, HPF_Fs22050_Fc90_A0, -HPF_Fs22050_Fc90_B2, -HPF_Fs22050_Fc90_B1},
+ {HPF_Fs24000_Fc90_A2, /* 24kS/s coefficients */
+ HPF_Fs24000_Fc90_A1, HPF_Fs24000_Fc90_A0, -HPF_Fs24000_Fc90_B2, -HPF_Fs24000_Fc90_B1},
+ {HPF_Fs32000_Fc90_A2, /* 32kS/s coefficients */
+ HPF_Fs32000_Fc90_A1, HPF_Fs32000_Fc90_A0, -HPF_Fs32000_Fc90_B2, -HPF_Fs32000_Fc90_B1},
+ {HPF_Fs44100_Fc90_A2, /* 44kS/s coefficients */
+ HPF_Fs44100_Fc90_A1, HPF_Fs44100_Fc90_A0, -HPF_Fs44100_Fc90_B2, -HPF_Fs44100_Fc90_B1},
+ {HPF_Fs48000_Fc90_A2, /* 48kS/s coefficients */
+ HPF_Fs48000_Fc90_A1, HPF_Fs48000_Fc90_A0, -HPF_Fs48000_Fc90_B2, -HPF_Fs48000_Fc90_B1}
- ,
- {HPF_Fs88200_Fc90_A2, /* 88kS/s coefficients */
- HPF_Fs88200_Fc90_A1,
- HPF_Fs88200_Fc90_A0,
- -HPF_Fs88200_Fc90_B2,
- -HPF_Fs88200_Fc90_B1},
- {HPF_Fs96000_Fc90_A2, /* 96kS/s coefficients */
- HPF_Fs96000_Fc90_A1,
- HPF_Fs96000_Fc90_A0,
- -HPF_Fs96000_Fc90_B2,
- -HPF_Fs96000_Fc90_B1},
- {HPF_Fs176400_Fc90_A2, /* 176kS/s coefficients */
- HPF_Fs176400_Fc90_A1,
- HPF_Fs176400_Fc90_A0,
- -HPF_Fs176400_Fc90_B2,
- -HPF_Fs176400_Fc90_B1},
- {HPF_Fs192000_Fc90_A2, /* 192kS/s coefficients */
- HPF_Fs192000_Fc90_A1,
- HPF_Fs192000_Fc90_A0,
- -HPF_Fs192000_Fc90_B2,
- -HPF_Fs192000_Fc90_B1}
+ ,
+ {HPF_Fs88200_Fc90_A2, /* 88kS/s coefficients */
+ HPF_Fs88200_Fc90_A1, HPF_Fs88200_Fc90_A0, -HPF_Fs88200_Fc90_B2, -HPF_Fs88200_Fc90_B1},
+ {HPF_Fs96000_Fc90_A2, /* 96kS/s coefficients */
+ HPF_Fs96000_Fc90_A1, HPF_Fs96000_Fc90_A0, -HPF_Fs96000_Fc90_B2, -HPF_Fs96000_Fc90_B1},
+ {HPF_Fs176400_Fc90_A2, /* 176kS/s coefficients */
+ HPF_Fs176400_Fc90_A1, HPF_Fs176400_Fc90_A0, -HPF_Fs176400_Fc90_B2, -HPF_Fs176400_Fc90_B1},
+ {HPF_Fs192000_Fc90_A2, /* 192kS/s coefficients */
+ HPF_Fs192000_Fc90_A1, HPF_Fs192000_Fc90_A0, -HPF_Fs192000_Fc90_B2, -HPF_Fs192000_Fc90_B1}
};
@@ -312,170 +156,117 @@
* Band Pass Filter coefficient table
*/
const BP_FLOAT_Coefs_t LVDBE_BPF_Table[] = {
- /* Coefficients for 55Hz centre frequency */
- {BPF_Fs8000_Fc55_A0, /* 8kS/s coefficients */
- -BPF_Fs8000_Fc55_B2,
- -BPF_Fs8000_Fc55_B1},
- {BPF_Fs11025_Fc55_A0, /* 11kS/s coefficients */
- -BPF_Fs11025_Fc55_B2,
- -BPF_Fs11025_Fc55_B1},
- {BPF_Fs12000_Fc55_A0, /* 12kS/s coefficients */
- -BPF_Fs12000_Fc55_B2,
- -BPF_Fs12000_Fc55_B1},
- {BPF_Fs16000_Fc55_A0, /* 16kS/s coefficients */
- -BPF_Fs16000_Fc55_B2,
- -BPF_Fs16000_Fc55_B1},
- {BPF_Fs22050_Fc55_A0, /* 22kS/s coefficients */
- -BPF_Fs22050_Fc55_B2,
- -BPF_Fs22050_Fc55_B1},
- {BPF_Fs24000_Fc55_A0, /* 24kS/s coefficients */
- -BPF_Fs24000_Fc55_B2,
- -BPF_Fs24000_Fc55_B1},
- {BPF_Fs32000_Fc55_A0, /* 32kS/s coefficients */
- -BPF_Fs32000_Fc55_B2,
- -BPF_Fs32000_Fc55_B1},
- {BPF_Fs44100_Fc55_A0, /* 44kS/s coefficients */
- -BPF_Fs44100_Fc55_B2,
- -BPF_Fs44100_Fc55_B1},
- {BPF_Fs48000_Fc55_A0, /* 48kS/s coefficients */
- -BPF_Fs48000_Fc55_B2,
- -BPF_Fs48000_Fc55_B1},
- {BPF_Fs88200_Fc55_A0, /* 88kS/s coefficients */
- -BPF_Fs88200_Fc55_B2,
- -BPF_Fs88200_Fc55_B1},
- {BPF_Fs96000_Fc55_A0, /* 96kS/s coefficients */
- -BPF_Fs96000_Fc55_B2,
- -BPF_Fs96000_Fc55_B1},
- {BPF_Fs176400_Fc55_A0, /* 176kS/s coefficients */
- -BPF_Fs176400_Fc55_B2,
- -BPF_Fs176400_Fc55_B1},
- {BPF_Fs192000_Fc55_A0, /* 192kS/s coefficients */
- -BPF_Fs192000_Fc55_B2,
- -BPF_Fs192000_Fc55_B1},
+ /* Coefficients for 55Hz centre frequency */
+ {BPF_Fs8000_Fc55_A0, /* 8kS/s coefficients */
+ -BPF_Fs8000_Fc55_B2, -BPF_Fs8000_Fc55_B1},
+ {BPF_Fs11025_Fc55_A0, /* 11kS/s coefficients */
+ -BPF_Fs11025_Fc55_B2, -BPF_Fs11025_Fc55_B1},
+ {BPF_Fs12000_Fc55_A0, /* 12kS/s coefficients */
+ -BPF_Fs12000_Fc55_B2, -BPF_Fs12000_Fc55_B1},
+ {BPF_Fs16000_Fc55_A0, /* 16kS/s coefficients */
+ -BPF_Fs16000_Fc55_B2, -BPF_Fs16000_Fc55_B1},
+ {BPF_Fs22050_Fc55_A0, /* 22kS/s coefficients */
+ -BPF_Fs22050_Fc55_B2, -BPF_Fs22050_Fc55_B1},
+ {BPF_Fs24000_Fc55_A0, /* 24kS/s coefficients */
+ -BPF_Fs24000_Fc55_B2, -BPF_Fs24000_Fc55_B1},
+ {BPF_Fs32000_Fc55_A0, /* 32kS/s coefficients */
+ -BPF_Fs32000_Fc55_B2, -BPF_Fs32000_Fc55_B1},
+ {BPF_Fs44100_Fc55_A0, /* 44kS/s coefficients */
+ -BPF_Fs44100_Fc55_B2, -BPF_Fs44100_Fc55_B1},
+ {BPF_Fs48000_Fc55_A0, /* 48kS/s coefficients */
+ -BPF_Fs48000_Fc55_B2, -BPF_Fs48000_Fc55_B1},
+ {BPF_Fs88200_Fc55_A0, /* 88kS/s coefficients */
+ -BPF_Fs88200_Fc55_B2, -BPF_Fs88200_Fc55_B1},
+ {BPF_Fs96000_Fc55_A0, /* 96kS/s coefficients */
+ -BPF_Fs96000_Fc55_B2, -BPF_Fs96000_Fc55_B1},
+ {BPF_Fs176400_Fc55_A0, /* 176kS/s coefficients */
+ -BPF_Fs176400_Fc55_B2, -BPF_Fs176400_Fc55_B1},
+ {BPF_Fs192000_Fc55_A0, /* 192kS/s coefficients */
+ -BPF_Fs192000_Fc55_B2, -BPF_Fs192000_Fc55_B1},
- /* Coefficients for 66Hz centre frequency */
- {BPF_Fs8000_Fc66_A0, /* 8kS/s coefficients */
- -BPF_Fs8000_Fc66_B2,
- -BPF_Fs8000_Fc66_B1},
- {BPF_Fs11025_Fc66_A0, /* 11kS/s coefficients */
- -BPF_Fs11025_Fc66_B2,
- -BPF_Fs11025_Fc66_B1},
- {BPF_Fs12000_Fc66_A0, /* 12kS/s coefficients */
- -BPF_Fs12000_Fc66_B2,
- -BPF_Fs12000_Fc66_B1},
- {BPF_Fs16000_Fc66_A0, /* 16kS/s coefficients */
- -BPF_Fs16000_Fc66_B2,
- -BPF_Fs16000_Fc66_B1},
- {BPF_Fs22050_Fc66_A0, /* 22kS/s coefficients */
- -BPF_Fs22050_Fc66_B2,
- -BPF_Fs22050_Fc66_B1},
- {BPF_Fs24000_Fc66_A0, /* 24kS/s coefficients */
- -BPF_Fs24000_Fc66_B2,
- -BPF_Fs24000_Fc66_B1},
- {BPF_Fs32000_Fc66_A0, /* 32kS/s coefficients */
- -BPF_Fs32000_Fc66_B2,
- -BPF_Fs32000_Fc66_B1},
- {BPF_Fs44100_Fc66_A0, /* 44kS/s coefficients */
- -BPF_Fs44100_Fc66_B2,
- -BPF_Fs44100_Fc66_B1},
- {BPF_Fs48000_Fc66_A0, /* 48kS/s coefficients */
- -BPF_Fs48000_Fc66_B2,
- -BPF_Fs48000_Fc66_B1},
- {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
- -BPF_Fs88200_Fc66_B2,
- -BPF_Fs88200_Fc66_B1},
- {BPF_Fs96000_Fc66_A0, /* 96kS/s coefficients */
- -BPF_Fs96000_Fc66_B2,
- -BPF_Fs96000_Fc66_B1},
- {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
- -BPF_Fs176400_Fc66_B2,
- -BPF_Fs176400_Fc66_B1},
- {BPF_Fs192000_Fc66_A0, /* 192kS/s coefficients */
- -BPF_Fs192000_Fc66_B2,
- -BPF_Fs192000_Fc66_B1},
+ /* Coefficients for 66Hz centre frequency */
+ {BPF_Fs8000_Fc66_A0, /* 8kS/s coefficients */
+ -BPF_Fs8000_Fc66_B2, -BPF_Fs8000_Fc66_B1},
+ {BPF_Fs11025_Fc66_A0, /* 11kS/s coefficients */
+ -BPF_Fs11025_Fc66_B2, -BPF_Fs11025_Fc66_B1},
+ {BPF_Fs12000_Fc66_A0, /* 12kS/s coefficients */
+ -BPF_Fs12000_Fc66_B2, -BPF_Fs12000_Fc66_B1},
+ {BPF_Fs16000_Fc66_A0, /* 16kS/s coefficients */
+ -BPF_Fs16000_Fc66_B2, -BPF_Fs16000_Fc66_B1},
+ {BPF_Fs22050_Fc66_A0, /* 22kS/s coefficients */
+ -BPF_Fs22050_Fc66_B2, -BPF_Fs22050_Fc66_B1},
+ {BPF_Fs24000_Fc66_A0, /* 24kS/s coefficients */
+ -BPF_Fs24000_Fc66_B2, -BPF_Fs24000_Fc66_B1},
+ {BPF_Fs32000_Fc66_A0, /* 32kS/s coefficients */
+ -BPF_Fs32000_Fc66_B2, -BPF_Fs32000_Fc66_B1},
+ {BPF_Fs44100_Fc66_A0, /* 44kS/s coefficients */
+ -BPF_Fs44100_Fc66_B2, -BPF_Fs44100_Fc66_B1},
+ {BPF_Fs48000_Fc66_A0, /* 48kS/s coefficients */
+ -BPF_Fs48000_Fc66_B2, -BPF_Fs48000_Fc66_B1},
+ {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+ -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1},
+ {BPF_Fs96000_Fc66_A0, /* 96kS/s coefficients */
+ -BPF_Fs96000_Fc66_B2, -BPF_Fs96000_Fc66_B1},
+ {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+ -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1},
+ {BPF_Fs192000_Fc66_A0, /* 192kS/s coefficients */
+ -BPF_Fs192000_Fc66_B2, -BPF_Fs192000_Fc66_B1},
- /* Coefficients for 78Hz centre frequency */
- {BPF_Fs8000_Fc78_A0, /* 8kS/s coefficients */
- -BPF_Fs8000_Fc78_B2,
- -BPF_Fs8000_Fc78_B1},
- {BPF_Fs11025_Fc78_A0, /* 11kS/s coefficients */
- -BPF_Fs11025_Fc78_B2,
- -BPF_Fs11025_Fc78_B1},
- {BPF_Fs12000_Fc78_A0, /* 12kS/s coefficients */
- -BPF_Fs12000_Fc78_B2,
- -BPF_Fs12000_Fc78_B1},
- {BPF_Fs16000_Fc78_A0, /* 16kS/s coefficients */
- -BPF_Fs16000_Fc78_B2,
- -BPF_Fs16000_Fc78_B1},
- {BPF_Fs22050_Fc78_A0, /* 22kS/s coefficients */
- -BPF_Fs22050_Fc78_B2,
- -BPF_Fs22050_Fc78_B1},
- {BPF_Fs24000_Fc78_A0, /* 24kS/s coefficients */
- -BPF_Fs24000_Fc78_B2,
- -BPF_Fs24000_Fc78_B1},
- {BPF_Fs32000_Fc78_A0, /* 32kS/s coefficients */
- -BPF_Fs32000_Fc78_B2,
- -BPF_Fs32000_Fc78_B1},
- {BPF_Fs44100_Fc78_A0, /* 44kS/s coefficients */
- -BPF_Fs44100_Fc78_B2,
- -BPF_Fs44100_Fc78_B1},
- {BPF_Fs48000_Fc78_A0, /* 48kS/s coefficients */
- -BPF_Fs48000_Fc78_B2,
- -BPF_Fs48000_Fc78_B1},
- {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
- -BPF_Fs88200_Fc66_B2,
- -BPF_Fs88200_Fc66_B1},
- {BPF_Fs96000_Fc78_A0, /* 96kS/s coefficients */
- -BPF_Fs96000_Fc78_B2,
- -BPF_Fs96000_Fc78_B1},
- {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
- -BPF_Fs176400_Fc66_B2,
- -BPF_Fs176400_Fc66_B1},
- {BPF_Fs192000_Fc78_A0, /* 192kS/s coefficients */
- -BPF_Fs192000_Fc78_B2,
- -BPF_Fs192000_Fc78_B1},
+ /* Coefficients for 78Hz centre frequency */
+ {BPF_Fs8000_Fc78_A0, /* 8kS/s coefficients */
+ -BPF_Fs8000_Fc78_B2, -BPF_Fs8000_Fc78_B1},
+ {BPF_Fs11025_Fc78_A0, /* 11kS/s coefficients */
+ -BPF_Fs11025_Fc78_B2, -BPF_Fs11025_Fc78_B1},
+ {BPF_Fs12000_Fc78_A0, /* 12kS/s coefficients */
+ -BPF_Fs12000_Fc78_B2, -BPF_Fs12000_Fc78_B1},
+ {BPF_Fs16000_Fc78_A0, /* 16kS/s coefficients */
+ -BPF_Fs16000_Fc78_B2, -BPF_Fs16000_Fc78_B1},
+ {BPF_Fs22050_Fc78_A0, /* 22kS/s coefficients */
+ -BPF_Fs22050_Fc78_B2, -BPF_Fs22050_Fc78_B1},
+ {BPF_Fs24000_Fc78_A0, /* 24kS/s coefficients */
+ -BPF_Fs24000_Fc78_B2, -BPF_Fs24000_Fc78_B1},
+ {BPF_Fs32000_Fc78_A0, /* 32kS/s coefficients */
+ -BPF_Fs32000_Fc78_B2, -BPF_Fs32000_Fc78_B1},
+ {BPF_Fs44100_Fc78_A0, /* 44kS/s coefficients */
+ -BPF_Fs44100_Fc78_B2, -BPF_Fs44100_Fc78_B1},
+ {BPF_Fs48000_Fc78_A0, /* 48kS/s coefficients */
+ -BPF_Fs48000_Fc78_B2, -BPF_Fs48000_Fc78_B1},
+ {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+ -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1}, /* NOTE(review): Fc66 constants in the 78Hz table — pre-existing in the old code; confirm intentional (Fc78 variants may be undefined for 88.2kHz) */
+ {BPF_Fs96000_Fc78_A0, /* 96kS/s coefficients */
+ -BPF_Fs96000_Fc78_B2, -BPF_Fs96000_Fc78_B1},
+ {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+ -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1}, /* NOTE(review): Fc66 constants in the 78Hz table — pre-existing in the old code; confirm intentional (Fc78 variants may be undefined for 176.4kHz) */
+ {BPF_Fs192000_Fc78_A0, /* 192kS/s coefficients */
+ -BPF_Fs192000_Fc78_B2, -BPF_Fs192000_Fc78_B1},
- /* Coefficients for 90Hz centre frequency */
- {BPF_Fs8000_Fc90_A0, /* 8kS/s coefficients */
- -BPF_Fs8000_Fc90_B2,
- -BPF_Fs8000_Fc90_B1},
- {BPF_Fs11025_Fc90_A0, /* 11kS/s coefficients */
- -BPF_Fs11025_Fc90_B2,
- -BPF_Fs11025_Fc90_B1},
- {BPF_Fs12000_Fc90_A0, /* 12kS/s coefficients */
- -BPF_Fs12000_Fc90_B2,
- -BPF_Fs12000_Fc90_B1},
- {BPF_Fs16000_Fc90_A0, /* 16kS/s coefficients */
- -BPF_Fs16000_Fc90_B2,
- -BPF_Fs16000_Fc90_B1},
- {BPF_Fs22050_Fc90_A0, /* 22kS/s coefficients */
- -BPF_Fs22050_Fc90_B2,
- -BPF_Fs22050_Fc90_B1},
- {BPF_Fs24000_Fc90_A0, /* 24kS/s coefficients */
- -BPF_Fs24000_Fc90_B2,
- -BPF_Fs24000_Fc90_B1},
- {BPF_Fs32000_Fc90_A0, /* 32kS/s coefficients */
- -BPF_Fs32000_Fc90_B2,
- -BPF_Fs32000_Fc90_B1},
- {BPF_Fs44100_Fc90_A0, /* 44kS/s coefficients */
- -BPF_Fs44100_Fc90_B2,
- -BPF_Fs44100_Fc90_B1},
- {BPF_Fs48000_Fc90_A0, /* 48kS/s coefficients */
- -BPF_Fs48000_Fc90_B2,
- -BPF_Fs48000_Fc90_B1}
- ,
- {BPF_Fs88200_Fc90_A0, /* 88kS/s coefficients */
- -BPF_Fs88200_Fc90_B2,
- -BPF_Fs88200_Fc90_B1},
- {BPF_Fs96000_Fc90_A0, /* 96kS/s coefficients */
- -BPF_Fs96000_Fc90_B2,
- -BPF_Fs96000_Fc90_B1},
- {BPF_Fs176400_Fc90_A0, /* 176kS/s coefficients */
- -BPF_Fs176400_Fc90_B2,
- -BPF_Fs176400_Fc90_B1},
- {BPF_Fs192000_Fc90_A0, /* 192kS/s coefficients */
- -BPF_Fs192000_Fc90_B2,
- -BPF_Fs192000_Fc90_B1}
+ /* Coefficients for 90Hz centre frequency */
+ {BPF_Fs8000_Fc90_A0, /* 8kS/s coefficients */
+ -BPF_Fs8000_Fc90_B2, -BPF_Fs8000_Fc90_B1},
+ {BPF_Fs11025_Fc90_A0, /* 11kS/s coefficients */
+ -BPF_Fs11025_Fc90_B2, -BPF_Fs11025_Fc90_B1},
+ {BPF_Fs12000_Fc90_A0, /* 12kS/s coefficients */
+ -BPF_Fs12000_Fc90_B2, -BPF_Fs12000_Fc90_B1},
+ {BPF_Fs16000_Fc90_A0, /* 16kS/s coefficients */
+ -BPF_Fs16000_Fc90_B2, -BPF_Fs16000_Fc90_B1},
+ {BPF_Fs22050_Fc90_A0, /* 22kS/s coefficients */
+ -BPF_Fs22050_Fc90_B2, -BPF_Fs22050_Fc90_B1},
+ {BPF_Fs24000_Fc90_A0, /* 24kS/s coefficients */
+ -BPF_Fs24000_Fc90_B2, -BPF_Fs24000_Fc90_B1},
+ {BPF_Fs32000_Fc90_A0, /* 32kS/s coefficients */
+ -BPF_Fs32000_Fc90_B2, -BPF_Fs32000_Fc90_B1},
+ {BPF_Fs44100_Fc90_A0, /* 44kS/s coefficients */
+ -BPF_Fs44100_Fc90_B2, -BPF_Fs44100_Fc90_B1},
+ {BPF_Fs48000_Fc90_A0, /* 48kS/s coefficients */
+ -BPF_Fs48000_Fc90_B2, -BPF_Fs48000_Fc90_B1},
+ {BPF_Fs88200_Fc90_A0, /* 88kS/s coefficients */
+ -BPF_Fs88200_Fc90_B2, -BPF_Fs88200_Fc90_B1},
+ {BPF_Fs96000_Fc90_A0, /* 96kS/s coefficients */
+ -BPF_Fs96000_Fc90_B2, -BPF_Fs96000_Fc90_B1},
+ {BPF_Fs176400_Fc90_A0, /* 176kS/s coefficients */
+ -BPF_Fs176400_Fc90_B2, -BPF_Fs176400_Fc90_B1},
+ {BPF_Fs192000_Fc90_A0, /* 192kS/s coefficients */
+ -BPF_Fs192000_Fc90_B2, -BPF_Fs192000_Fc90_B1}
};
@@ -487,77 +278,34 @@
/* Attack time (signal too large) */
const LVM_FLOAT LVDBE_AGC_ATTACK_Table[] = {
- AGC_ATTACK_Fs8000,
- AGC_ATTACK_Fs11025,
- AGC_ATTACK_Fs12000,
- AGC_ATTACK_Fs16000,
- AGC_ATTACK_Fs22050,
- AGC_ATTACK_Fs24000,
- AGC_ATTACK_Fs32000,
- AGC_ATTACK_Fs44100,
- AGC_ATTACK_Fs48000
- ,AGC_ATTACK_Fs88200
- ,AGC_ATTACK_Fs96000
- ,AGC_ATTACK_Fs176400
- ,AGC_ATTACK_Fs192000
+ AGC_ATTACK_Fs8000, AGC_ATTACK_Fs11025, AGC_ATTACK_Fs12000, AGC_ATTACK_Fs16000,
+ AGC_ATTACK_Fs22050, AGC_ATTACK_Fs24000, AGC_ATTACK_Fs32000, AGC_ATTACK_Fs44100,
+ AGC_ATTACK_Fs48000, AGC_ATTACK_Fs88200, AGC_ATTACK_Fs96000, AGC_ATTACK_Fs176400,
+ AGC_ATTACK_Fs192000
};
/* Decay time (signal too small) */
-const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {
- AGC_DECAY_Fs8000,
- AGC_DECAY_Fs11025,
- AGC_DECAY_Fs12000,
- AGC_DECAY_Fs16000,
- AGC_DECAY_Fs22050,
- AGC_DECAY_Fs24000,
- AGC_DECAY_Fs32000,
- AGC_DECAY_Fs44100,
- AGC_DECAY_Fs48000
- ,AGC_DECAY_Fs88200
- ,AGC_DECAY_FS96000
- ,AGC_DECAY_Fs176400
- ,AGC_DECAY_FS192000
+const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {AGC_DECAY_Fs8000, AGC_DECAY_Fs11025, AGC_DECAY_Fs12000,
+ AGC_DECAY_Fs16000, AGC_DECAY_Fs22050, AGC_DECAY_Fs24000,
+ AGC_DECAY_Fs32000, AGC_DECAY_Fs44100, AGC_DECAY_Fs48000,
+ AGC_DECAY_Fs88200, AGC_DECAY_FS96000, AGC_DECAY_Fs176400,
+ AGC_DECAY_FS192000
};
/* Gain for use without the high pass filter */
const LVM_FLOAT LVDBE_AGC_GAIN_Table[] = {
- AGC_GAIN_0dB,
- AGC_GAIN_1dB,
- AGC_GAIN_2dB,
- AGC_GAIN_3dB,
- AGC_GAIN_4dB,
- AGC_GAIN_5dB,
- AGC_GAIN_6dB,
- AGC_GAIN_7dB,
- AGC_GAIN_8dB,
- AGC_GAIN_9dB,
- AGC_GAIN_10dB,
- AGC_GAIN_11dB,
- AGC_GAIN_12dB,
- AGC_GAIN_13dB,
- AGC_GAIN_14dB,
- AGC_GAIN_15dB};
+ AGC_GAIN_0dB, AGC_GAIN_1dB, AGC_GAIN_2dB, AGC_GAIN_3dB, AGC_GAIN_4dB, AGC_GAIN_5dB,
+ AGC_GAIN_6dB, AGC_GAIN_7dB, AGC_GAIN_8dB, AGC_GAIN_9dB, AGC_GAIN_10dB, AGC_GAIN_11dB,
+ AGC_GAIN_12dB, AGC_GAIN_13dB, AGC_GAIN_14dB, AGC_GAIN_15dB};
/* Gain for use with the high pass filter */
const LVM_FLOAT LVDBE_AGC_HPFGAIN_Table[] = {
- AGC_HPFGAIN_0dB,
- AGC_HPFGAIN_1dB,
- AGC_HPFGAIN_2dB,
- AGC_HPFGAIN_3dB,
- AGC_HPFGAIN_4dB,
- AGC_HPFGAIN_5dB,
- AGC_HPFGAIN_6dB,
- AGC_HPFGAIN_7dB,
- AGC_HPFGAIN_8dB,
- AGC_HPFGAIN_9dB,
- AGC_HPFGAIN_10dB,
- AGC_HPFGAIN_11dB,
- AGC_HPFGAIN_12dB,
- AGC_HPFGAIN_13dB,
- AGC_HPFGAIN_14dB,
- AGC_HPFGAIN_15dB};
+ AGC_HPFGAIN_0dB, AGC_HPFGAIN_1dB, AGC_HPFGAIN_2dB, AGC_HPFGAIN_3dB,
+ AGC_HPFGAIN_4dB, AGC_HPFGAIN_5dB, AGC_HPFGAIN_6dB, AGC_HPFGAIN_7dB,
+ AGC_HPFGAIN_8dB, AGC_HPFGAIN_9dB, AGC_HPFGAIN_10dB, AGC_HPFGAIN_11dB,
+ AGC_HPFGAIN_12dB, AGC_HPFGAIN_13dB, AGC_HPFGAIN_14dB, AGC_HPFGAIN_15dB};
/************************************************************************************/
/* */
@@ -566,45 +314,23 @@
/************************************************************************************/
/* dB to linear conversion table */
-const LVM_FLOAT LVDBE_VolumeTable[] = {
- 0.500000f, /* -6dB */
- 0.562341f, /* -5dB */
- 0.630957f, /* -4dB */
- 0.707946f, /* -3dB */
- 0.794328f, /* -2dB */
- 0.891251f, /* -1dB */
- 1.000000f}; /* 0dB */
+const LVM_FLOAT LVDBE_VolumeTable[] = {0.500000f, /* -6dB */
+ 0.562341f, /* -5dB */
+ 0.630957f, /* -4dB */
+ 0.707946f, /* -3dB */
+ 0.794328f, /* -2dB */
+ 0.891251f, /* -1dB */
+ 1.000000f}; /* 0dB */
const LVM_FLOAT LVDBE_VolumeTCTable[] = {
- VOL_TC_Fs8000,
- VOL_TC_Fs11025,
- VOL_TC_Fs12000,
- VOL_TC_Fs16000,
- VOL_TC_Fs22050,
- VOL_TC_Fs24000,
- VOL_TC_Fs32000,
- VOL_TC_Fs44100,
- VOL_TC_Fs48000
- ,VOL_TC_Fs88200
- ,VOL_TC_Fs96000
- ,VOL_TC_Fs176400
- ,VOL_TC_Fs192000
-};
+ VOL_TC_Fs8000, VOL_TC_Fs11025, VOL_TC_Fs12000, VOL_TC_Fs16000, VOL_TC_Fs22050,
+ VOL_TC_Fs24000, VOL_TC_Fs32000, VOL_TC_Fs44100, VOL_TC_Fs48000, VOL_TC_Fs88200,
+ VOL_TC_Fs96000, VOL_TC_Fs176400, VOL_TC_Fs192000};
const LVM_INT16 LVDBE_MixerTCTable[] = {
- MIX_TC_Fs8000,
- MIX_TC_Fs11025,
- MIX_TC_Fs12000,
- MIX_TC_Fs16000,
- MIX_TC_Fs22050,
- MIX_TC_Fs24000,
- MIX_TC_Fs32000,
- MIX_TC_Fs44100,
- MIX_TC_Fs48000
- ,MIX_TC_Fs88200
- ,MIX_TC_Fs96000
- ,MIX_TC_Fs176400
- ,MIX_TC_Fs192000
+ MIX_TC_Fs8000, MIX_TC_Fs11025, MIX_TC_Fs12000, MIX_TC_Fs16000, MIX_TC_Fs22050,
+ MIX_TC_Fs24000, MIX_TC_Fs32000, MIX_TC_Fs44100, MIX_TC_Fs48000, MIX_TC_Fs88200,
+ MIX_TC_Fs96000, MIX_TC_Fs176400, MIX_TC_Fs192000
};
diff --git a/media/libeffects/lvm/lib/Bundle/lib/LVM.h b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
index e4e8450..c90c5cc 100644
--- a/media/libeffects/lvm/lib/Bundle/lib/LVM.h
+++ b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
@@ -67,31 +67,28 @@
/* */
/****************************************************************************************/
-/* Memory table*/
-#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
/* Concert Sound effect level presets */
-#define LVM_CS_EFFECT_NONE 0 /* 0% effect, minimum value */
-#define LVM_CS_EFFECT_LOW 16384 /* 50% effect */
-#define LVM_CS_EFFECT_MED 24576 /* 75% effect */
-#define LVM_CS_EFFECT_HIGH 32767 /* 100% effect, maximum value */
+#define LVM_CS_EFFECT_NONE 0 /* 0% effect, minimum value */
+#define LVM_CS_EFFECT_LOW 16384 /* 50% effect */
+#define LVM_CS_EFFECT_MED 24576 /* 75% effect */
+#define LVM_CS_EFFECT_HIGH 32767 /* 100% effect, maximum value */
/* Treble enhancement */
-#define LVM_TE_LOW_MIPS 32767
+#define LVM_TE_LOW_MIPS 32767
/* Bass enhancement effect level presets */
-#define LVM_BE_0DB 0 /* 0dB boost, no effect */
-#define LVM_BE_3DB 3 /* +3dB boost */
-#define LVM_BE_6DB 6 /* +6dB boost */
-#define LVM_BE_9DB 9 /* +9dB boost */
-#define LVM_BE_12DB 12 /* +12dB boost */
-#define LVM_BE_15DB 15 /* +15dB boost */
+#define LVM_BE_0DB 0 /* 0dB boost, no effect */
+#define LVM_BE_3DB 3 /* +3dB boost */
+#define LVM_BE_6DB 6 /* +6dB boost */
+#define LVM_BE_9DB 9 /* +9dB boost */
+#define LVM_BE_12DB 12 /* +12dB boost */
+#define LVM_BE_15DB 15 /* +15dB boost */
/* N-Band Equalizer */
-#define LVM_EQ_NBANDS 5 /* Number of bands for equalizer */
+#define LVM_EQ_NBANDS 5 /* Number of bands for equalizer */
/* Headroom management */
-#define LVM_HEADROOM_MAX_NBANDS 5
+#define LVM_HEADROOM_MAX_NBANDS 5
/****************************************************************************************/
/* */
@@ -100,123 +97,89 @@
/****************************************************************************************/
/* Instance handle */
-typedef void *LVM_Handle_t;
+typedef void* LVM_Handle_t;
/* Status return values */
-typedef enum
-{
- LVM_SUCCESS = 0, /* Successful return from a routine */
- LVM_ALIGNMENTERROR = 1, /* Memory alignment error */
- LVM_NULLADDRESS = 2, /* NULL allocation address */
- LVM_OUTOFRANGE = 3, /* Out of range control parameter */
- LVM_INVALIDNUMSAMPLES = 4, /* Invalid number of samples */
- LVM_WRONGAUDIOTIME = 5, /* Wrong time value for audio time*/
- LVM_ALGORITHMDISABLED = 6, /* Algorithm is disabled*/
- LVM_ALGORITHMPSA = 7, /* Algorithm PSA returns an error */
+typedef enum {
+ LVM_SUCCESS = 0, /* Successful return from a routine */
+ LVM_ALIGNMENTERROR = 1, /* Memory alignment error */
+ LVM_NULLADDRESS = 2, /* NULL allocation address */
+ LVM_OUTOFRANGE = 3, /* Out of range control parameter */
+ LVM_INVALIDNUMSAMPLES = 4, /* Invalid number of samples */
+ LVM_WRONGAUDIOTIME = 5, /* Wrong time value for audio time*/
+ LVM_ALGORITHMDISABLED = 6, /* Algorithm is disabled*/
+ LVM_ALGORITHMPSA = 7, /* Algorithm PSA returns an error */
LVM_RETURNSTATUS_DUMMY = LVM_MAXENUM
} LVM_ReturnStatus_en;
/* Buffer Management mode */
-typedef enum
-{
- LVM_MANAGED_BUFFERS = 0,
+typedef enum {
+ LVM_MANAGED_BUFFERS = 0,
LVM_UNMANAGED_BUFFERS = 1,
- LVM_BUFFERS_DUMMY = LVM_MAXENUM
+ LVM_BUFFERS_DUMMY = LVM_MAXENUM
} LVM_BufferMode_en;
/* Output device type */
-typedef enum
-{
- LVM_HEADPHONES = 0,
- LVM_EX_HEADPHONES = 1,
- LVM_SPEAKERTYPE_MAX = LVM_MAXENUM
+typedef enum {
+ LVM_HEADPHONES = 0,
+ LVM_EX_HEADPHONES = 1,
+ LVM_SPEAKERTYPE_MAX = LVM_MAXENUM
} LVM_OutputDeviceType_en;
/* Virtualizer mode selection*/
-typedef enum
-{
- LVM_CONCERTSOUND = 0,
- LVM_VIRTUALIZERTYPE_DUMMY = LVM_MAXENUM
+typedef enum {
+ LVM_CONCERTSOUND = 0,
+ LVM_VIRTUALIZERTYPE_DUMMY = LVM_MAXENUM
} LVM_VirtualizerType_en;
/* N-Band Equaliser operating mode */
-typedef enum
-{
- LVM_EQNB_OFF = 0,
- LVM_EQNB_ON = 1,
- LVM_EQNB_DUMMY = LVM_MAXENUM
-} LVM_EQNB_Mode_en;
+typedef enum { LVM_EQNB_OFF = 0, LVM_EQNB_ON = 1, LVM_EQNB_DUMMY = LVM_MAXENUM } LVM_EQNB_Mode_en;
/* Bass Enhancement operating mode */
-typedef enum
-{
- LVM_BE_OFF = 0,
- LVM_BE_ON = 1,
- LVM_BE_DUMMY = LVM_MAXENUM
-} LVM_BE_Mode_en;
+typedef enum { LVM_BE_OFF = 0, LVM_BE_ON = 1, LVM_BE_DUMMY = LVM_MAXENUM } LVM_BE_Mode_en;
/* Bass Enhancement centre frequency selection control */
-typedef enum
-{
- LVM_BE_CENTRE_55Hz = 0,
- LVM_BE_CENTRE_66Hz = 1,
- LVM_BE_CENTRE_78Hz = 2,
- LVM_BE_CENTRE_90Hz = 3,
+typedef enum {
+ LVM_BE_CENTRE_55Hz = 0,
+ LVM_BE_CENTRE_66Hz = 1,
+ LVM_BE_CENTRE_78Hz = 2,
+ LVM_BE_CENTRE_90Hz = 3,
LVM_BE_CENTRE_DUMMY = LVM_MAXENUM
} LVM_BE_CentreFreq_en;
/* Bass Enhancement HPF selection control */
-typedef enum
-{
- LVM_BE_HPF_OFF = 0,
- LVM_BE_HPF_ON = 1,
+typedef enum {
+ LVM_BE_HPF_OFF = 0,
+ LVM_BE_HPF_ON = 1,
LVM_BE_HPF_DUMMY = LVM_MAXENUM
} LVM_BE_FilterSelect_en;
/* Volume Control operating mode */
-typedef enum
-{
- LVM_VC_OFF = 0,
- LVM_VC_ON = 1,
- LVM_VC_DUMMY = LVM_MAXENUM
-} LVM_VC_Mode_en;
+typedef enum { LVM_VC_OFF = 0, LVM_VC_ON = 1, LVM_VC_DUMMY = LVM_MAXENUM } LVM_VC_Mode_en;
/* Treble Enhancement operating mode */
-typedef enum
-{
- LVM_TE_OFF = 0,
- LVM_TE_ON = 1,
- LVM_TE_DUMMY = LVM_MAXENUM
-} LVM_TE_Mode_en;
+typedef enum { LVM_TE_OFF = 0, LVM_TE_ON = 1, LVM_TE_DUMMY = LVM_MAXENUM } LVM_TE_Mode_en;
/* Headroom management operating mode */
-typedef enum
-{
- LVM_HEADROOM_OFF = 0,
- LVM_HEADROOM_ON = 1,
+typedef enum {
+ LVM_HEADROOM_OFF = 0,
+ LVM_HEADROOM_ON = 1,
LVM_Headroom_DUMMY = LVM_MAXENUM
} LVM_Headroom_Mode_en;
-typedef enum
-{
- LVM_PSA_SPEED_SLOW, /* Peak decaying at slow speed */
- LVM_PSA_SPEED_MEDIUM, /* Peak decaying at medium speed */
- LVM_PSA_SPEED_FAST, /* Peak decaying at fast speed */
+typedef enum {
+ LVM_PSA_SPEED_SLOW, /* Peak decaying at slow speed */
+ LVM_PSA_SPEED_MEDIUM, /* Peak decaying at medium speed */
+ LVM_PSA_SPEED_FAST, /* Peak decaying at fast speed */
LVM_PSA_SPEED_DUMMY = LVM_MAXENUM
} LVM_PSA_DecaySpeed_en;
-typedef enum
-{
- LVM_PSA_OFF = 0,
- LVM_PSA_ON = 1,
- LVM_PSA_DUMMY = LVM_MAXENUM
-} LVM_PSA_Mode_en;
+typedef enum { LVM_PSA_OFF = 0, LVM_PSA_ON = 1, LVM_PSA_DUMMY = LVM_MAXENUM } LVM_PSA_Mode_en;
/* Version information */
-typedef struct
-{
- LVM_CHAR *pVersionNumber; /* Pointer to the version number in the format X.YY.ZZ */
- LVM_CHAR *pPlatform; /* Pointer to the library platform type */
+typedef struct {
+ LVM_CHAR* pVersionNumber; /* Pointer to the version number in the format X.YY.ZZ */
+ LVM_CHAR* pPlatform; /* Pointer to the library platform type */
} LVM_VersionInfo_st;
/****************************************************************************************/
@@ -225,93 +188,80 @@
/* */
/****************************************************************************************/
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVM_MemTab_t;
-
/* N-Band equaliser band definition */
-typedef struct
-{
- LVM_INT16 Gain; /* Band gain in dB */
- LVM_UINT16 Frequency; /* Band centre frequency in Hz */
- LVM_UINT16 QFactor; /* Band quality factor (x100) */
+typedef struct {
+ LVM_INT16 Gain; /* Band gain in dB */
+ LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+ LVM_UINT16 QFactor; /* Band quality factor (x100) */
} LVM_EQNB_BandDef_t;
/* Headroom band definition */
-typedef struct
-{
- LVM_UINT16 Limit_Low; /* Low frequency limit of the band in Hertz */
- LVM_UINT16 Limit_High; /* High frequency limit of the band in Hertz */
- LVM_INT16 Headroom_Offset; /* Headroom = biggest band gain - Headroom_Offset */
+typedef struct {
+ LVM_UINT16 Limit_Low; /* Low frequency limit of the band in Hertz */
+ LVM_UINT16 Limit_High; /* High frequency limit of the band in Hertz */
+ LVM_INT16 Headroom_Offset; /* Headroom = biggest band gain - Headroom_Offset */
} LVM_HeadroomBandDef_t;
/* Control Parameter structure */
-typedef struct
-{
+typedef struct {
/* General parameters */
- LVM_Mode_en OperatingMode; /* Bundle operating mode On/Bypass */
- LVM_Fs_en SampleRate; /* Sample rate */
- LVM_Format_en SourceFormat; /* Input data format */
- LVM_OutputDeviceType_en SpeakerType; /* Output device type */
+ LVM_Mode_en OperatingMode; /* Bundle operating mode On/Bypass */
+ LVM_Fs_en SampleRate; /* Sample rate */
+ LVM_Format_en SourceFormat; /* Input data format */
+ LVM_OutputDeviceType_en SpeakerType; /* Output device type */
/* Concert Sound Virtualizer parameters*/
- LVM_Mode_en VirtualizerOperatingMode; /* Virtualizer operating mode On/Off */
- LVM_VirtualizerType_en VirtualizerType; /* Virtualizer type: ConcertSound */
- LVM_UINT16 VirtualizerReverbLevel; /* Virtualizer reverb level in % */
- LVM_INT16 CS_EffectLevel; /* Concert Sound effect level */
+ LVM_Mode_en VirtualizerOperatingMode; /* Virtualizer operating mode On/Off */
+ LVM_VirtualizerType_en VirtualizerType; /* Virtualizer type: ConcertSound */
+ LVM_UINT16 VirtualizerReverbLevel; /* Virtualizer reverb level in % */
+ LVM_INT16 CS_EffectLevel; /* Concert Sound effect level */
/* N-Band Equaliser parameters */
- LVM_EQNB_Mode_en EQNB_OperatingMode; /* N-Band Equaliser operating mode */
- LVM_UINT16 EQNB_NBands; /* Number of bands */
- LVM_EQNB_BandDef_t *pEQNB_BandDefinition; /* Pointer to equaliser definitions */
+ LVM_EQNB_Mode_en EQNB_OperatingMode; /* N-Band Equaliser operating mode */
+ LVM_UINT16 EQNB_NBands; /* Number of bands */
+ LVM_EQNB_BandDef_t* pEQNB_BandDefinition; /* Pointer to equaliser definitions */
/* Bass Enhancement parameters */
- LVM_BE_Mode_en BE_OperatingMode; /* Bass Enhancement operating mode */
- LVM_INT16 BE_EffectLevel; /* Bass Enhancement effect level */
- LVM_BE_CentreFreq_en BE_CentreFreq; /* Bass Enhancement centre frequency */
- LVM_BE_FilterSelect_en BE_HPF; /* Bass Enhancement high pass filter selector */
+ LVM_BE_Mode_en BE_OperatingMode; /* Bass Enhancement operating mode */
+ LVM_INT16 BE_EffectLevel; /* Bass Enhancement effect level */
+ LVM_BE_CentreFreq_en BE_CentreFreq; /* Bass Enhancement centre frequency */
+ LVM_BE_FilterSelect_en BE_HPF; /* Bass Enhancement high pass filter selector */
/* Volume Control parameters */
- LVM_INT16 VC_EffectLevel; /* Volume Control setting in dBs */
- LVM_INT16 VC_Balance; /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
- Right channel while +ve value reduces Left channel*/
+ LVM_INT16 VC_EffectLevel; /* Volume Control setting in dBs */
+ LVM_INT16 VC_Balance; /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
+ Right channel while +ve value reduces Left channel*/
/* Treble Enhancement parameters */
- LVM_TE_Mode_en TE_OperatingMode; /* Treble Enhancement On/Off */
- LVM_INT16 TE_EffectLevel; /* Treble Enhancement gain dBs */
+ LVM_TE_Mode_en TE_OperatingMode; /* Treble Enhancement On/Off */
+ LVM_INT16 TE_EffectLevel; /* Treble Enhancement gain dBs */
/* Spectrum Analyzer parameters Control */
- LVM_PSA_Mode_en PSA_Enable;
- LVM_PSA_DecaySpeed_en PSA_PeakDecayRate; /* Peak value decay rate*/
-#ifdef SUPPORT_MC
- LVM_INT32 NrChannels;
- LVM_INT32 ChMask;
-#endif
+ LVM_PSA_Mode_en PSA_Enable;
+ LVM_PSA_DecaySpeed_en PSA_PeakDecayRate; /* Peak value decay rate*/
+ LVM_INT32 NrChannels;
+ LVM_INT32 ChMask;
} LVM_ControlParams_t;
/* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
/* General */
- LVM_BufferMode_en BufferMode; /* Buffer management mode */
- LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
+ LVM_BufferMode_en BufferMode; /* Buffer management mode */
+ LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
/* N-Band Equaliser */
- LVM_UINT16 EQNB_NumBands; /* Maximum number of equaliser bands */
+ LVM_UINT16 EQNB_NumBands; /* Maximum number of equaliser bands */
/* PSA */
- LVM_PSA_Mode_en PSA_Included; /* Controls the instance memory allocation for PSA: ON/OFF */
+ LVM_PSA_Mode_en PSA_Included; /* Controls the instance memory allocation for PSA: ON/OFF */
} LVM_InstParams_t;
/* Headroom management parameter structure */
-typedef struct
-{
- LVM_Headroom_Mode_en Headroom_OperatingMode; /* Headroom Control On/Off */
- LVM_HeadroomBandDef_t *pHeadroomDefinition; /* Pointer to headroom bands definition */
- LVM_UINT16 NHeadroomBands; /* Number of headroom bands */
+typedef struct {
+ LVM_Headroom_Mode_en Headroom_OperatingMode; /* Headroom Control On/Off */
+ LVM_HeadroomBandDef_t* pHeadroomDefinition; /* Pointer to headroom bands definition */
+ LVM_UINT16 NHeadroomBands; /* Number of headroom bands */
} LVM_HeadroomParams_t;
@@ -339,55 +289,18 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st *pVersion);
-
-/****************************************************************************************/
-/* */
-/* FUNCTION: LVM_GetMemoryTable */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pInstParams Pointer to the instance parameters */
-/* */
-/* RETURNS: */
-/* LVM_SUCCESS Succeeded */
-/* LVM_NULLADDRESS When one of pMemoryTable or pInstParams is NULL */
-/* LVM_OUTOFRANGE When any of the Instance parameters are out of range */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVM_Process function */
-/* */
-/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t hInstance,
- LVM_MemTab_t *pMemoryTable,
- LVM_InstParams_t *pInstParams);
+LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st* pVersion);
/****************************************************************************************/
/* */
/* FUNCTION: LVM_GetInstanceHandle */
/* */
/* DESCRIPTION: */
-/* This function is used to create a bundle instance. It returns the created instance */
-/* handle through phInstance. All parameters are set to their default, inactive state. */
+/* This function is used to create a bundle instance. */
+/* All parameters are set to their default, inactive state. */
/* */
/* PARAMETERS: */
-/* phInstance pointer to the instance handle */
-/* pMemoryTable Pointer to the memory definition table */
+/* phInstance Pointer to the instance handle */
/* pInstParams Pointer to the instance parameters */
/* */
/* RETURNS: */
@@ -399,9 +312,24 @@
/* 1. This function must not be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t *phInstance,
- LVM_MemTab_t *pMemoryTable,
- LVM_InstParams_t *pInstParams);
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams);
+
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVM_DelInstanceHandle */
+/* */
+/* DESCRIPTION: */
+/* This function is used to delete a bundle instance. It frees all memory allocated */
+/* to the instance and invalidates the handle pointed to by phInstance. */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to the instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVM_Process function */
+/* */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance);
/****************************************************************************************/
/* */
@@ -421,7 +349,7 @@
/* 1. This function must not be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance);
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance);
/****************************************************************************************/
/* */
@@ -443,8 +371,7 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams);
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
/****************************************************************************************/
/* */
@@ -466,8 +393,7 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams);
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
/****************************************************************************************/
/* */
@@ -499,11 +425,8 @@
/* STEREO the number of sample pairs in the block */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples,
- LVM_UINT32 AudioTime);
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime);
/****************************************************************************************/
/* */
@@ -524,8 +447,8 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetHeadroomParams( LVM_Handle_t hInstance,
- LVM_HeadroomParams_t *pHeadroomParams);
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+ LVM_HeadroomParams_t* pHeadroomParams);
/****************************************************************************************/
/* */
@@ -546,8 +469,8 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetHeadroomParams( LVM_Handle_t hInstance,
- LVM_HeadroomParams_t *pHeadroomParams);
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+ LVM_HeadroomParams_t* pHeadroomParams);
/****************************************************************************************/
/* */
@@ -574,10 +497,8 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum( LVM_Handle_t hInstance,
- LVM_UINT8 *pCurrentPeaks,
- LVM_UINT8 *pPastPeaks,
- LVM_INT32 AudioTime);
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+ LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime);
/****************************************************************************************/
/* */
@@ -599,8 +520,6 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams);
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
-#endif /* __LVM_H__ */
-
+#endif /* __LVM_H__ */
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
index e241cdd..cea964c 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
@@ -47,69 +47,52 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum(
- LVM_Handle_t hInstance,
- LVM_UINT8 *pCurrentPeaks,
- LVM_UINT8 *pPastPeaks,
- LVM_INT32 AudioTime
- )
-{
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+ LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
- pLVPSA_Handle_t *hPSAInstance;
- LVPSA_RETURN LVPSA_Status;
+ pLVPSA_Handle_t* hPSAInstance;
+ LVPSA_RETURN LVPSA_Status;
- if(pInstance == LVM_NULL)
- {
+ if (pInstance == LVM_NULL) {
return LVM_NULLADDRESS;
}
/*If PSA is not included at the time of instance creation, return without any processing*/
- if(pInstance->InstParams.PSA_Included!=LVM_PSA_ON)
- {
+ if (pInstance->InstParams.PSA_Included != LVM_PSA_ON) {
return LVM_SUCCESS;
}
- hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+ hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
- if((pCurrentPeaks == LVM_NULL) ||
- (pPastPeaks == LVM_NULL))
- {
+ if ((pCurrentPeaks == LVM_NULL) || (pPastPeaks == LVM_NULL)) {
return LVM_NULLADDRESS;
}
/*
* Update new parameters if necessary
*/
- if (pInstance->ControlPending == LVM_TRUE)
- {
+ if (pInstance->ControlPending == LVM_TRUE) {
LVM_ApplyNewSettings(hInstance);
}
/* If PSA module is disabled, do nothing */
- if(pInstance->Params.PSA_Enable==LVM_PSA_OFF)
- {
+ if (pInstance->Params.PSA_Enable == LVM_PSA_OFF) {
return LVM_ALGORITHMDISABLED;
}
- LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance,
- (LVPSA_Time) (AudioTime),
- (LVM_UINT8*) pCurrentPeaks,
- (LVM_UINT8*) pPastPeaks );
+ LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance, (LVPSA_Time)(AudioTime),
+ (LVM_UINT8*)pCurrentPeaks, (LVM_UINT8*)pPastPeaks);
- if(LVPSA_Status != LVPSA_OK)
- {
- if(LVPSA_Status == LVPSA_ERROR_WRONGTIME)
- {
- return (LVM_ReturnStatus_en) LVM_WRONGAUDIOTIME;
- }
- else
- {
- return (LVM_ReturnStatus_en) LVM_NULLADDRESS;
+ if (LVPSA_Status != LVPSA_OK) {
+ if (LVPSA_Status == LVPSA_ERROR_WRONGTIME) {
+ return (LVM_ReturnStatus_en)LVM_WRONGAUDIOTIME;
+ } else {
+ return (LVM_ReturnStatus_en)LVM_NULLADDRESS;
}
}
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -132,15 +115,12 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
LVM_ReturnStatus_en Error;
/*Apply new controls*/
- Error = LVM_SetControlParameters(hInstance,pParams);
+ Error = LVM_SetControlParameters(hInstance, pParams);
pInstance->NoSmoothVolume = LVM_TRUE;
return Error;
}
-
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
index 3aeddbb..fbb0fe1 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
@@ -49,109 +49,90 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void LVM_BufferManagedIn(LVM_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT **pToProcess,
- LVM_FLOAT **pProcessed,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_INT16 SampleCount; /* Number of samples to be processed this call */
- LVM_INT16 NumSamples; /* Number of samples in scratch buffer */
- LVM_FLOAT *pStart;
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
- LVM_Buffer_t *pBuffer;
- LVM_FLOAT *pDest;
-#ifdef SUPPORT_MC
- LVM_INT16 NumChannels = pInstance->NrChannels;
-#else
- LVM_INT16 NumChannels = 2;
-#endif
+void LVM_BufferManagedIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+ LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+ LVM_INT16 SampleCount; /* Number of samples to be processed this call */
+ LVM_INT16 NumSamples; /* Number of samples in scratch buffer */
+ LVM_FLOAT* pStart;
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_Buffer_t* pBuffer;
+ LVM_FLOAT* pDest;
+ LVM_INT16 NumChannels = pInstance->NrChannels;
/*
* Set the processing address pointers
*/
- pBuffer = pInstance->pBufferManagement;
- pDest = pBuffer->pScratch;
+ pBuffer = pInstance->pBufferManagement;
+ pDest = pBuffer->pScratch;
*pToProcess = pBuffer->pScratch;
*pProcessed = pBuffer->pScratch;
/*
* Check if it is the first call of a block
*/
- if (pInstance->SamplesToProcess == 0)
- {
+ if (pInstance->SamplesToProcess == 0) {
/*
* First call for a new block of samples
*/
pInstance->SamplesToProcess = (LVM_INT16)(*pNumSamples + pBuffer->InDelaySamples);
- pInstance->pInputSamples = (LVM_FLOAT *)pInData;
- pBuffer->BufferState = LVM_FIRSTCALL;
+ pInstance->pInputSamples = (LVM_FLOAT*)pInData;
+ pBuffer->BufferState = LVM_FIRSTCALL;
}
- pStart = pInstance->pInputSamples; /* Pointer to the input samples */
- pBuffer->SamplesToOutput = 0; /* Samples to output is same as
- number read for inplace processing */
+ pStart = pInstance->pInputSamples; /* Pointer to the input samples */
+ pBuffer->SamplesToOutput = 0; /* Samples to output is same as
+ number read for inplace processing */
/*
* Calculate the number of samples to process this call and update the buffer state
*/
- if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
- {
+ if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
/*
* Process the maximum bock size of samples.
*/
SampleCount = pInstance->InternalBlockSize;
- NumSamples = pInstance->InternalBlockSize;
- }
- else
- {
+ NumSamples = pInstance->InternalBlockSize;
+ } else {
/*
* Last call for the block, so calculate how many frames and samples to process
- */
- LVM_INT16 NumFrames;
+ */
+ LVM_INT16 NumFrames;
- NumSamples = pInstance->SamplesToProcess;
- NumFrames = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
+ NumSamples = pInstance->SamplesToProcess;
+ NumFrames = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
SampleCount = (LVM_INT16)(NumFrames << MIN_INTERNAL_BLOCKSHIFT);
/*
* Update the buffer state
*/
- if (pBuffer->BufferState == LVM_FIRSTCALL)
- {
+ if (pBuffer->BufferState == LVM_FIRSTCALL) {
pBuffer->BufferState = LVM_FIRSTLASTCALL;
- }
- else
- {
+ } else {
pBuffer->BufferState = LVM_LASTCALL;
}
}
- *pNumSamples = (LVM_UINT16)SampleCount; /* Set the number of samples to process this call */
+ *pNumSamples = (LVM_UINT16)SampleCount; /* Set the number of samples to process this call */
/*
* Copy samples from the delay buffer as required
*/
- if (((pBuffer->BufferState == LVM_FIRSTCALL) ||
- (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
- (pBuffer->InDelaySamples != 0))
- {
- Copy_Float(&pBuffer->InDelayBuffer[0], /* Source */
- pDest, /* Destination */
- (LVM_INT16)(NumChannels * pBuffer->InDelaySamples)); /* Number of delay \
- samples, left and right */
+ if (((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
+ (pBuffer->InDelaySamples != 0)) {
+ Copy_Float(&pBuffer->InDelayBuffer[0], /* Source */
+ pDest, /* Destination */
+ (LVM_INT16)(NumChannels * pBuffer->InDelaySamples)); /* Number of delay \
+ samples, left and right */
NumSamples = (LVM_INT16)(NumSamples - pBuffer->InDelaySamples); /* Update sample count */
- pDest += NumChannels * pBuffer->InDelaySamples; /* Update the destination pointer */
+ pDest += NumChannels * pBuffer->InDelaySamples; /* Update the destination pointer */
}
/*
* Copy the rest of the samples for this call from the input buffer
*/
- if (NumSamples > 0)
- {
- Copy_Float(pStart, /* Source */
- pDest, /* Destination */
- (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
- pStart += NumChannels * NumSamples; /* Update the input pointer */
+ if (NumSamples > 0) {
+ Copy_Float(pStart, /* Source */
+ pDest, /* Destination */
+ (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
+ pStart += NumChannels * NumSamples; /* Update the input pointer */
/*
* Update the input data pointer and samples to output
@@ -161,33 +142,30 @@
}
/*
- * Update the sample count and input pointer
+ * Update the sample count and input pointer
*/
/* Update the count of samples */
- pInstance->SamplesToProcess = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
- pInstance->pInputSamples = pStart; /* Update input sample pointer */
+ pInstance->SamplesToProcess = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
+ pInstance->pInputSamples = pStart; /* Update input sample pointer */
/*
* Save samples to the delay buffer if any left unprocessed
*/
- if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) ||
- (pBuffer->BufferState == LVM_LASTCALL))
- {
+ if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) || (pBuffer->BufferState == LVM_LASTCALL)) {
NumSamples = pInstance->SamplesToProcess;
- pStart = pBuffer->pScratch; /* Start of the buffer */
- pStart += NumChannels * SampleCount; /* Offset by the number of processed samples */
- if (NumSamples != 0)
- {
- Copy_Float(pStart, /* Source */
- &pBuffer->InDelayBuffer[0], /* Destination */
- (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
+ pStart = pBuffer->pScratch; /* Start of the buffer */
+ pStart += NumChannels * SampleCount; /* Offset by the number of processed samples */
+ if (NumSamples != 0) {
+ Copy_Float(pStart, /* Source */
+ &pBuffer->InDelayBuffer[0], /* Destination */
+ (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
}
/*
* Update the delay sample count
*/
- pBuffer->InDelaySamples = NumSamples; /* Number of delay sample pairs */
- pInstance->SamplesToProcess = 0; /* All Samples used */
+ pBuffer->InDelaySamples = NumSamples; /* Number of delay sample pairs */
+ pInstance->SamplesToProcess = 0; /* All Samples used */
}
}
@@ -213,33 +191,25 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void LVM_BufferUnmanagedIn(LVM_Handle_t hInstance,
- LVM_FLOAT **pToProcess,
- LVM_FLOAT **pProcessed,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
+void LVM_BufferUnmanagedIn(LVM_Handle_t hInstance, LVM_FLOAT** pToProcess, LVM_FLOAT** pProcessed,
+ LVM_UINT16* pNumSamples) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
/*
* Check if this is the first call of a block
*/
- if (pInstance->SamplesToProcess == 0)
- {
- pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples; /* Get the number of samples
- on first call */
- pInstance->pInputSamples = *pToProcess; /* Get the I/O pointers */
- pInstance->pOutputSamples = *pProcessed;
+ if (pInstance->SamplesToProcess == 0) {
+ pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples; /* Get the number of samples
+ on first call */
+ pInstance->pInputSamples = *pToProcess; /* Get the I/O pointers */
+ pInstance->pOutputSamples = *pProcessed;
/*
* Set te block size to process
*/
- if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
- {
+ if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
*pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
- }
- else
- {
+ } else {
*pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
}
}
@@ -329,32 +299,17 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void LVM_BufferIn(LVM_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT **pToProcess,
- LVM_FLOAT **pProcessed,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+ LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
/*
* Check which mode, managed or unmanaged
*/
- if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
- {
- LVM_BufferManagedIn(hInstance,
- pInData,
- pToProcess,
- pProcessed,
- pNumSamples);
- }
- else
- {
- LVM_BufferUnmanagedIn(hInstance,
- pToProcess,
- pProcessed,
- pNumSamples);
+ if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+ LVM_BufferManagedIn(hInstance, pInData, pToProcess, pProcessed, pNumSamples);
+ } else {
+ LVM_BufferUnmanagedIn(hInstance, pToProcess, pProcessed, pNumSamples);
}
}
/****************************************************************************************/
@@ -377,196 +332,124 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void LVM_BufferManagedOut(LVM_Handle_t hInstance,
- LVM_FLOAT *pOutData,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
- LVM_Buffer_t *pBuffer = pInstance->pBufferManagement;
- LVM_INT16 SampleCount = (LVM_INT16)*pNumSamples;
- LVM_INT16 NumSamples;
- LVM_FLOAT *pStart;
- LVM_FLOAT *pDest;
-#ifdef SUPPORT_MC
- LVM_INT32 NrChannels = pInstance->NrChannels;
+void LVM_BufferManagedOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_Buffer_t* pBuffer = pInstance->pBufferManagement;
+ LVM_INT16 SampleCount = (LVM_INT16)*pNumSamples;
+ LVM_INT16 NumSamples;
+ LVM_FLOAT* pStart;
+ LVM_FLOAT* pDest;
+ LVM_INT32 NrChannels = pInstance->NrChannels;
#define NrFrames NumSamples // alias for clarity
#define FrameCount SampleCount
-#endif
/*
* Set the pointers
*/
NumSamples = pBuffer->SamplesToOutput;
- pStart = pBuffer->pScratch;
+ pStart = pBuffer->pScratch;
/*
* check if it is the first call of a block
- */
- if ((pBuffer->BufferState == LVM_FIRSTCALL) ||
- (pBuffer->BufferState == LVM_FIRSTLASTCALL))
- {
+ */
+ if ((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) {
/* First call for a new block */
- pInstance->pOutputSamples = pOutData; /* Initialise the destination */
+ pInstance->pOutputSamples = pOutData; /* Initialise the destination */
}
- pDest = pInstance->pOutputSamples; /* Set the output address */
+ pDest = pInstance->pOutputSamples; /* Set the output address */
/*
* If the number of samples is non-zero then there are still samples to send to
* the output buffer
*/
- if ((NumSamples != 0) &&
- (pBuffer->OutDelaySamples != 0))
- {
+ if ((NumSamples != 0) && (pBuffer->OutDelaySamples != 0)) {
/*
* Copy the delayed output buffer samples to the output
*/
- if (pBuffer->OutDelaySamples <= NumSamples)
- {
+ if (pBuffer->OutDelaySamples <= NumSamples) {
/*
* Copy all output delay samples to the output
*/
-#ifdef SUPPORT_MC
- Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
- pDest, /* Destination */
+ Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
+ pDest, /* Destination */
/* Number of delay samples */
(LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
- Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
- pDest, /* Destination */
- (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of delay samples */
-#endif
/*
* Update the pointer and sample counts
*/
-#ifdef SUPPORT_MC
pDest += NrChannels * pBuffer->OutDelaySamples; /* Output sample pointer */
-#else
- pDest += 2 * pBuffer->OutDelaySamples; /* Output sample pointer */
-#endif
NumSamples = (LVM_INT16)(NumSamples - pBuffer->OutDelaySamples); /* Samples left \
to send */
pBuffer->OutDelaySamples = 0; /* No samples left in the buffer */
- }
- else
- {
+ } else {
/*
- * Copy only some of the ouput delay samples to the output
+ * Copy only some of the output delay samples to the output
*/
-#ifdef SUPPORT_MC
- Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
- pDest, /* Destination */
- (LVM_INT16)(NrChannels * NrFrames)); /* Number of delay samples */
-#else
- Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
- pDest, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Number of delay samples */
-#endif
+ Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
+ pDest, /* Destination */
+ (LVM_INT16)(NrChannels * NrFrames)); /* Number of delay samples */
/*
* Update the pointer and sample counts
*/
-#ifdef SUPPORT_MC
pDest += NrChannels * NrFrames; /* Output sample pointer */
-#else
- pDest += 2 * NumSamples; /* Output sample pointer */
-#endif
/* No samples left in the buffer */
pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples - NumSamples);
/*
* Realign the delay buffer data to avoid using circular buffer management
*/
-#ifdef SUPPORT_MC
- Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames], /* Source */
- &pBuffer->OutDelayBuffer[0], /* Destination */
+ Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames], /* Source */
+ &pBuffer->OutDelayBuffer[0], /* Destination */
/* Number of samples to move */
(LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
- Copy_Float(&pBuffer->OutDelayBuffer[2 * NumSamples], /* Source */
- &pBuffer->OutDelayBuffer[0], /* Destination */
- (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of samples to move */
-#endif
- NumSamples = 0; /* Samples left to send */
+ NumSamples = 0; /* Samples left to send */
}
}
/*
* Copy the processed results to the output
*/
- if ((NumSamples != 0) &&
- (SampleCount != 0))
- {
- if (SampleCount <= NumSamples)
- {
+ if ((NumSamples != 0) && (SampleCount != 0)) {
+ if (SampleCount <= NumSamples) {
/*
* Copy all processed samples to the output
*/
-#ifdef SUPPORT_MC
- Copy_Float(pStart, /* Source */
- pDest, /* Destination */
+ Copy_Float(pStart, /* Source */
+ pDest, /* Destination */
(LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
-#else
- Copy_Float(pStart, /* Source */
- pDest, /* Destination */
- (LVM_INT16)(2 * SampleCount)); /* Number of processed samples */
-#endif
/*
* Update the pointer and sample counts
*/
-#ifdef SUPPORT_MC
- pDest += NrChannels * FrameCount; /* Output sample pointer */
-#else
- pDest += 2 * SampleCount; /* Output sample pointer */
-#endif
- NumSamples = (LVM_INT16)(NumSamples - SampleCount); /* Samples left to send */
- SampleCount = 0; /* No samples left in the buffer */
- }
- else
- {
+ pDest += NrChannels * FrameCount; /* Output sample pointer */
+ NumSamples = (LVM_INT16)(NumSamples - SampleCount); /* Samples left to send */
+ SampleCount = 0; /* No samples left in the buffer */
+ } else {
/*
* Copy only some processed samples to the output
*/
-#ifdef SUPPORT_MC
- Copy_Float(pStart, /* Source */
- pDest, /* Destination */
- (LVM_INT16)(NrChannels * NrFrames)); /* Number of processed samples */
-#else
- Copy_Float(pStart, /* Source */
- pDest, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Number of processed samples */
-#endif
+ Copy_Float(pStart, /* Source */
+ pDest, /* Destination */
+ (LVM_INT16)(NrChannels * NrFrames)); /* Number of processed samples */
/*
* Update the pointers and sample counts
- */
-#ifdef SUPPORT_MC
- pStart += NrChannels * NrFrames; /* Processed sample pointer */
- pDest += NrChannels * NrFrames; /* Output sample pointer */
-#else
- pStart += 2 * NumSamples; /* Processed sample pointer */
- pDest += 2 * NumSamples; /* Output sample pointer */
-#endif
- SampleCount = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
- NumSamples = 0; /* Clear the sample count */
+ */
+ pStart += NrChannels * NrFrames; /* Processed sample pointer */
+ pDest += NrChannels * NrFrames; /* Output sample pointer */
+ SampleCount = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
+ NumSamples = 0; /* Clear the sample count */
}
}
/*
* Copy the remaining processed data to the output delay buffer
*/
- if (SampleCount != 0)
- {
-#ifdef SUPPORT_MC
- Copy_Float(pStart, /* Source */
+ if (SampleCount != 0) {
+ Copy_Float(pStart, /* Source */
/* Destination */
&pBuffer->OutDelayBuffer[NrChannels * pBuffer->OutDelaySamples],
- (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
-#else
- Copy_Float(pStart, /* Source */
- &pBuffer->OutDelayBuffer[2 * pBuffer->OutDelaySamples], /* Destination */
- (LVM_INT16)(2 * SampleCount)); /* Number of processed samples */
-#endif
+ (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
/* Update the buffer count */
pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples + SampleCount);
}
@@ -574,10 +457,10 @@
/*
* pointers, counts and set default buffer processing
*/
- pBuffer->SamplesToOutput = NumSamples; /* Samples left to send */
- pInstance->pOutputSamples = pDest; /* Output sample pointer */
- pBuffer->BufferState = LVM_MAXBLOCKCALL; /* Set for the default call \
- block size */
+ pBuffer->SamplesToOutput = NumSamples; /* Samples left to send */
+ pInstance->pOutputSamples = pDest; /* Output sample pointer */
+ pBuffer->BufferState = LVM_MAXBLOCKCALL; /* Set for the default call \
+ block size */
/* This will terminate the loop when all samples processed */
*pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
}
@@ -601,44 +484,31 @@
/* */
/****************************************************************************************/
-void LVM_BufferUnmanagedOut(LVM_Handle_t hInstance,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
-#ifdef SUPPORT_MC
- LVM_INT16 NumChannels = pInstance->NrChannels;
- if (NumChannels == 1)
- {
+void LVM_BufferUnmanagedOut(LVM_Handle_t hInstance, LVM_UINT16* pNumSamples) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_INT16 NumChannels = pInstance->NrChannels;
+ if (NumChannels == 1) {
/* Mono input is processed as stereo by LVM module */
NumChannels = 2;
}
#undef NrFrames
-#define NrFrames (*pNumSamples) // alias for clarity
-#else
- LVM_INT16 NumChannels = 2;
-#endif
+#define NrFrames (*pNumSamples) // alias for clarity
/*
* Update sample counts
*/
- pInstance->pInputSamples += (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
-#ifdef SUPPORT_MC
- pInstance->pOutputSamples += (LVM_INT16)(NrFrames * NumChannels);
-#else
- pInstance->pOutputSamples += (LVM_INT16)(*pNumSamples * 2);
-#endif
- pInstance->SamplesToProcess = (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
+ pInstance->pInputSamples +=
+ (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
+ pInstance->pOutputSamples += (LVM_INT16)(NrFrames * NumChannels);
+ pInstance->SamplesToProcess =
+ (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
/*
* Set te block size to process
*/
- if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
- {
+ if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
*pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
- }
- else
- {
+ } else {
*pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
}
}
@@ -698,25 +568,15 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void LVM_BufferOut(LVM_Handle_t hInstance,
- LVM_FLOAT *pOutData,
- LVM_UINT16 *pNumSamples)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
/*
* Check which mode, managed or unmanaged
*/
- if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
- {
- LVM_BufferManagedOut(hInstance,
- pOutData,
- pNumSamples);
- }
- else
- {
- LVM_BufferUnmanagedOut(hInstance,
- pNumSamples);
+ if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+ LVM_BufferManagedOut(hInstance, pOutData, pNumSamples);
+ } else {
+ LVM_BufferUnmanagedOut(hInstance, pNumSamples);
}
}
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
index 812f8e5..c02caa1 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
@@ -24,832 +24,832 @@
/* */
/************************************************************************************/
-#define TrebleBoostCorner 8000
-#define TrebleBoostMinRate 4
-#define TrebleBoostSteps 15
+#define TrebleBoostCorner 8000
+#define TrebleBoostMinRate 4
+#define TrebleBoostSteps 15
/* Coefficients for sample rate 22050Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs22050_Gain1_A0 1.038434
-#define HPF_Fs22050_Gain1_A1 0.331599
-#define HPF_Fs22050_Gain1_A2 0.000000
-#define HPF_Fs22050_Gain1_B1 0.370033
-#define HPF_Fs22050_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs22050_Gain2_A0 1.081557
-#define HPF_Fs22050_Gain2_A1 0.288475
-#define HPF_Fs22050_Gain2_A2 0.000000
-#define HPF_Fs22050_Gain2_B1 0.370033
-#define HPF_Fs22050_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs22050_Gain3_A0 1.129943
-#define HPF_Fs22050_Gain3_A1 0.240090
-#define HPF_Fs22050_Gain3_A2 0.000000
-#define HPF_Fs22050_Gain3_B1 0.370033
-#define HPF_Fs22050_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs22050_Gain4_A0 1.184232
-#define HPF_Fs22050_Gain4_A1 0.185801
-#define HPF_Fs22050_Gain4_A2 0.000000
-#define HPF_Fs22050_Gain4_B1 0.370033
-#define HPF_Fs22050_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs22050_Gain5_A0 1.245145
-#define HPF_Fs22050_Gain5_A1 0.124887
-#define HPF_Fs22050_Gain5_A2 0.000000
-#define HPF_Fs22050_Gain5_B1 0.370033
-#define HPF_Fs22050_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs22050_Gain6_A0 1.313491
-#define HPF_Fs22050_Gain6_A1 0.056541
-#define HPF_Fs22050_Gain6_A2 0.000000
-#define HPF_Fs22050_Gain6_B1 0.370033
-#define HPF_Fs22050_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs22050_Gain7_A0 1.390177
-#define HPF_Fs22050_Gain7_A1 (-0.020144)
-#define HPF_Fs22050_Gain7_A2 0.000000
-#define HPF_Fs22050_Gain7_B1 0.370033
-#define HPF_Fs22050_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs22050_Gain8_A0 1.476219
-#define HPF_Fs22050_Gain8_A1 (-0.106187)
-#define HPF_Fs22050_Gain8_A2 0.000000
-#define HPF_Fs22050_Gain8_B1 0.370033
-#define HPF_Fs22050_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs22050_Gain9_A0 1.572761
-#define HPF_Fs22050_Gain9_A1 (-0.202728)
-#define HPF_Fs22050_Gain9_A2 0.000000
-#define HPF_Fs22050_Gain9_B1 0.370033
-#define HPF_Fs22050_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs22050_Gain10_A0 1.681082
-#define HPF_Fs22050_Gain10_A1 (-0.311049)
-#define HPF_Fs22050_Gain10_A2 0.000000
-#define HPF_Fs22050_Gain10_B1 0.370033
-#define HPF_Fs22050_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs22050_Gain11_A0 1.802620
-#define HPF_Fs22050_Gain11_A1 (-0.432588)
-#define HPF_Fs22050_Gain11_A2 0.000000
-#define HPF_Fs22050_Gain11_B1 0.370033
-#define HPF_Fs22050_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs22050_Gain12_A0 1.938989
-#define HPF_Fs22050_Gain12_A1 (-0.568956)
-#define HPF_Fs22050_Gain12_A2 0.000000
-#define HPF_Fs22050_Gain12_B1 0.370033
-#define HPF_Fs22050_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs22050_Gain13_A0 2.091997
-#define HPF_Fs22050_Gain13_A1 (-0.721964)
-#define HPF_Fs22050_Gain13_A2 0.000000
-#define HPF_Fs22050_Gain13_B1 0.370033
-#define HPF_Fs22050_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs22050_Gain14_A0 2.263674
-#define HPF_Fs22050_Gain14_A1 (-0.893641)
-#define HPF_Fs22050_Gain14_A2 0.000000
-#define HPF_Fs22050_Gain14_B1 0.370033
-#define HPF_Fs22050_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs22050_Gain15_A0 2.456300
-#define HPF_Fs22050_Gain15_A1 (-1.086267)
-#define HPF_Fs22050_Gain15_A2 0.000000
-#define HPF_Fs22050_Gain15_B1 0.370033
-#define HPF_Fs22050_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs22050_Gain1_A0 1.038434
+#define HPF_Fs22050_Gain1_A1 0.331599
+#define HPF_Fs22050_Gain1_A2 0.000000
+#define HPF_Fs22050_Gain1_B1 0.370033
+#define HPF_Fs22050_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs22050_Gain2_A0 1.081557
+#define HPF_Fs22050_Gain2_A1 0.288475
+#define HPF_Fs22050_Gain2_A2 0.000000
+#define HPF_Fs22050_Gain2_B1 0.370033
+#define HPF_Fs22050_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs22050_Gain3_A0 1.129943
+#define HPF_Fs22050_Gain3_A1 0.240090
+#define HPF_Fs22050_Gain3_A2 0.000000
+#define HPF_Fs22050_Gain3_B1 0.370033
+#define HPF_Fs22050_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs22050_Gain4_A0 1.184232
+#define HPF_Fs22050_Gain4_A1 0.185801
+#define HPF_Fs22050_Gain4_A2 0.000000
+#define HPF_Fs22050_Gain4_B1 0.370033
+#define HPF_Fs22050_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs22050_Gain5_A0 1.245145
+#define HPF_Fs22050_Gain5_A1 0.124887
+#define HPF_Fs22050_Gain5_A2 0.000000
+#define HPF_Fs22050_Gain5_B1 0.370033
+#define HPF_Fs22050_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs22050_Gain6_A0 1.313491
+#define HPF_Fs22050_Gain6_A1 0.056541
+#define HPF_Fs22050_Gain6_A2 0.000000
+#define HPF_Fs22050_Gain6_B1 0.370033
+#define HPF_Fs22050_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs22050_Gain7_A0 1.390177
+#define HPF_Fs22050_Gain7_A1 (-0.020144)
+#define HPF_Fs22050_Gain7_A2 0.000000
+#define HPF_Fs22050_Gain7_B1 0.370033
+#define HPF_Fs22050_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs22050_Gain8_A0 1.476219
+#define HPF_Fs22050_Gain8_A1 (-0.106187)
+#define HPF_Fs22050_Gain8_A2 0.000000
+#define HPF_Fs22050_Gain8_B1 0.370033
+#define HPF_Fs22050_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs22050_Gain9_A0 1.572761
+#define HPF_Fs22050_Gain9_A1 (-0.202728)
+#define HPF_Fs22050_Gain9_A2 0.000000
+#define HPF_Fs22050_Gain9_B1 0.370033
+#define HPF_Fs22050_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs22050_Gain10_A0 1.681082
+#define HPF_Fs22050_Gain10_A1 (-0.311049)
+#define HPF_Fs22050_Gain10_A2 0.000000
+#define HPF_Fs22050_Gain10_B1 0.370033
+#define HPF_Fs22050_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs22050_Gain11_A0 1.802620
+#define HPF_Fs22050_Gain11_A1 (-0.432588)
+#define HPF_Fs22050_Gain11_A2 0.000000
+#define HPF_Fs22050_Gain11_B1 0.370033
+#define HPF_Fs22050_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs22050_Gain12_A0 1.938989
+#define HPF_Fs22050_Gain12_A1 (-0.568956)
+#define HPF_Fs22050_Gain12_A2 0.000000
+#define HPF_Fs22050_Gain12_B1 0.370033
+#define HPF_Fs22050_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs22050_Gain13_A0 2.091997
+#define HPF_Fs22050_Gain13_A1 (-0.721964)
+#define HPF_Fs22050_Gain13_A2 0.000000
+#define HPF_Fs22050_Gain13_B1 0.370033
+#define HPF_Fs22050_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs22050_Gain14_A0 2.263674
+#define HPF_Fs22050_Gain14_A1 (-0.893641)
+#define HPF_Fs22050_Gain14_A2 0.000000
+#define HPF_Fs22050_Gain14_B1 0.370033
+#define HPF_Fs22050_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs22050_Gain15_A0 2.456300
+#define HPF_Fs22050_Gain15_A1 (-1.086267)
+#define HPF_Fs22050_Gain15_A2 0.000000
+#define HPF_Fs22050_Gain15_B1 0.370033
+#define HPF_Fs22050_Gain15_B2 0.000000
/* Coefficients for sample rate 24000Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs24000_Gain1_A0 1.044662
-#define HPF_Fs24000_Gain1_A1 0.223287
-#define HPF_Fs24000_Gain1_A2 0.000000
-#define HPF_Fs24000_Gain1_B1 0.267949
-#define HPF_Fs24000_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs24000_Gain2_A0 1.094773
-#define HPF_Fs24000_Gain2_A1 0.173176
-#define HPF_Fs24000_Gain2_A2 0.000000
-#define HPF_Fs24000_Gain2_B1 0.267949
-#define HPF_Fs24000_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs24000_Gain3_A0 1.150999
-#define HPF_Fs24000_Gain3_A1 0.116950
-#define HPF_Fs24000_Gain3_A2 0.000000
-#define HPF_Fs24000_Gain3_B1 0.267949
-#define HPF_Fs24000_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs24000_Gain4_A0 1.214086
-#define HPF_Fs24000_Gain4_A1 0.053863
-#define HPF_Fs24000_Gain4_A2 0.000000
-#define HPF_Fs24000_Gain4_B1 0.267949
-#define HPF_Fs24000_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs24000_Gain5_A0 1.284870
-#define HPF_Fs24000_Gain5_A1 (-0.016921)
-#define HPF_Fs24000_Gain5_A2 0.000000
-#define HPF_Fs24000_Gain5_B1 0.267949
-#define HPF_Fs24000_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs24000_Gain6_A0 1.364291
-#define HPF_Fs24000_Gain6_A1 (-0.096342)
-#define HPF_Fs24000_Gain6_A2 0.000000
-#define HPF_Fs24000_Gain6_B1 0.267949
-#define HPF_Fs24000_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs24000_Gain7_A0 1.453403
-#define HPF_Fs24000_Gain7_A1 (-0.185454)
-#define HPF_Fs24000_Gain7_A2 0.000000
-#define HPF_Fs24000_Gain7_B1 0.267949
-#define HPF_Fs24000_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs24000_Gain8_A0 1.553389
-#define HPF_Fs24000_Gain8_A1 (-0.285440)
-#define HPF_Fs24000_Gain8_A2 0.000000
-#define HPF_Fs24000_Gain8_B1 0.267949
-#define HPF_Fs24000_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs24000_Gain9_A0 1.665574
-#define HPF_Fs24000_Gain9_A1 (-0.397625)
-#define HPF_Fs24000_Gain9_A2 0.000000
-#define HPF_Fs24000_Gain9_B1 0.267949
-#define HPF_Fs24000_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs24000_Gain10_A0 1.791449
-#define HPF_Fs24000_Gain10_A1 (-0.523499)
-#define HPF_Fs24000_Gain10_A2 0.000000
-#define HPF_Fs24000_Gain10_B1 0.267949
-#define HPF_Fs24000_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs24000_Gain11_A0 1.932682
-#define HPF_Fs24000_Gain11_A1 (-0.664733)
-#define HPF_Fs24000_Gain11_A2 0.000000
-#define HPF_Fs24000_Gain11_B1 0.267949
-#define HPF_Fs24000_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs24000_Gain12_A0 2.091148
-#define HPF_Fs24000_Gain12_A1 (-0.823199)
-#define HPF_Fs24000_Gain12_A2 0.000000
-#define HPF_Fs24000_Gain12_B1 0.267949
-#define HPF_Fs24000_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs24000_Gain13_A0 2.268950
-#define HPF_Fs24000_Gain13_A1 (-1.001001)
-#define HPF_Fs24000_Gain13_A2 0.000000
-#define HPF_Fs24000_Gain13_B1 0.267949
-#define HPF_Fs24000_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs24000_Gain14_A0 2.468447
-#define HPF_Fs24000_Gain14_A1 (-1.200498)
-#define HPF_Fs24000_Gain14_A2 0.000000
-#define HPF_Fs24000_Gain14_B1 0.267949
-#define HPF_Fs24000_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs24000_Gain15_A0 2.692287
-#define HPF_Fs24000_Gain15_A1 (-1.424338)
-#define HPF_Fs24000_Gain15_A2 0.000000
-#define HPF_Fs24000_Gain15_B1 0.267949
-#define HPF_Fs24000_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs24000_Gain1_A0 1.044662
+#define HPF_Fs24000_Gain1_A1 0.223287
+#define HPF_Fs24000_Gain1_A2 0.000000
+#define HPF_Fs24000_Gain1_B1 0.267949
+#define HPF_Fs24000_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs24000_Gain2_A0 1.094773
+#define HPF_Fs24000_Gain2_A1 0.173176
+#define HPF_Fs24000_Gain2_A2 0.000000
+#define HPF_Fs24000_Gain2_B1 0.267949
+#define HPF_Fs24000_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs24000_Gain3_A0 1.150999
+#define HPF_Fs24000_Gain3_A1 0.116950
+#define HPF_Fs24000_Gain3_A2 0.000000
+#define HPF_Fs24000_Gain3_B1 0.267949
+#define HPF_Fs24000_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs24000_Gain4_A0 1.214086
+#define HPF_Fs24000_Gain4_A1 0.053863
+#define HPF_Fs24000_Gain4_A2 0.000000
+#define HPF_Fs24000_Gain4_B1 0.267949
+#define HPF_Fs24000_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs24000_Gain5_A0 1.284870
+#define HPF_Fs24000_Gain5_A1 (-0.016921)
+#define HPF_Fs24000_Gain5_A2 0.000000
+#define HPF_Fs24000_Gain5_B1 0.267949
+#define HPF_Fs24000_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs24000_Gain6_A0 1.364291
+#define HPF_Fs24000_Gain6_A1 (-0.096342)
+#define HPF_Fs24000_Gain6_A2 0.000000
+#define HPF_Fs24000_Gain6_B1 0.267949
+#define HPF_Fs24000_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs24000_Gain7_A0 1.453403
+#define HPF_Fs24000_Gain7_A1 (-0.185454)
+#define HPF_Fs24000_Gain7_A2 0.000000
+#define HPF_Fs24000_Gain7_B1 0.267949
+#define HPF_Fs24000_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs24000_Gain8_A0 1.553389
+#define HPF_Fs24000_Gain8_A1 (-0.285440)
+#define HPF_Fs24000_Gain8_A2 0.000000
+#define HPF_Fs24000_Gain8_B1 0.267949
+#define HPF_Fs24000_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs24000_Gain9_A0 1.665574
+#define HPF_Fs24000_Gain9_A1 (-0.397625)
+#define HPF_Fs24000_Gain9_A2 0.000000
+#define HPF_Fs24000_Gain9_B1 0.267949
+#define HPF_Fs24000_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs24000_Gain10_A0 1.791449
+#define HPF_Fs24000_Gain10_A1 (-0.523499)
+#define HPF_Fs24000_Gain10_A2 0.000000
+#define HPF_Fs24000_Gain10_B1 0.267949
+#define HPF_Fs24000_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs24000_Gain11_A0 1.932682
+#define HPF_Fs24000_Gain11_A1 (-0.664733)
+#define HPF_Fs24000_Gain11_A2 0.000000
+#define HPF_Fs24000_Gain11_B1 0.267949
+#define HPF_Fs24000_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs24000_Gain12_A0 2.091148
+#define HPF_Fs24000_Gain12_A1 (-0.823199)
+#define HPF_Fs24000_Gain12_A2 0.000000
+#define HPF_Fs24000_Gain12_B1 0.267949
+#define HPF_Fs24000_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs24000_Gain13_A0 2.268950
+#define HPF_Fs24000_Gain13_A1 (-1.001001)
+#define HPF_Fs24000_Gain13_A2 0.000000
+#define HPF_Fs24000_Gain13_B1 0.267949
+#define HPF_Fs24000_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs24000_Gain14_A0 2.468447
+#define HPF_Fs24000_Gain14_A1 (-1.200498)
+#define HPF_Fs24000_Gain14_A2 0.000000
+#define HPF_Fs24000_Gain14_B1 0.267949
+#define HPF_Fs24000_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs24000_Gain15_A0 2.692287
+#define HPF_Fs24000_Gain15_A1 (-1.424338)
+#define HPF_Fs24000_Gain15_A2 0.000000
+#define HPF_Fs24000_Gain15_B1 0.267949
+#define HPF_Fs24000_Gain15_B2 0.000000
/* Coefficients for sample rate 32000Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs32000_Gain1_A0 1.061009
-#define HPF_Fs32000_Gain1_A1 (-0.061009)
-#define HPF_Fs32000_Gain1_A2 0.000000
-#define HPF_Fs32000_Gain1_B1 (-0.000000)
-#define HPF_Fs32000_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs32000_Gain2_A0 1.129463
-#define HPF_Fs32000_Gain2_A1 (-0.129463)
-#define HPF_Fs32000_Gain2_A2 0.000000
-#define HPF_Fs32000_Gain2_B1 (-0.000000)
-#define HPF_Fs32000_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs32000_Gain3_A0 1.206267
-#define HPF_Fs32000_Gain3_A1 (-0.206267)
-#define HPF_Fs32000_Gain3_A2 0.000000
-#define HPF_Fs32000_Gain3_B1 (-0.000000)
-#define HPF_Fs32000_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs32000_Gain4_A0 1.292447
-#define HPF_Fs32000_Gain4_A1 (-0.292447)
-#define HPF_Fs32000_Gain4_A2 0.000000
-#define HPF_Fs32000_Gain4_B1 (-0.000000)
-#define HPF_Fs32000_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs32000_Gain5_A0 1.389140
-#define HPF_Fs32000_Gain5_A1 (-0.389140)
-#define HPF_Fs32000_Gain5_A2 0.000000
-#define HPF_Fs32000_Gain5_B1 (-0.000000)
-#define HPF_Fs32000_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs32000_Gain6_A0 1.497631
-#define HPF_Fs32000_Gain6_A1 (-0.497631)
-#define HPF_Fs32000_Gain6_A2 0.000000
-#define HPF_Fs32000_Gain6_B1 (-0.000000)
-#define HPF_Fs32000_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs32000_Gain7_A0 1.619361
-#define HPF_Fs32000_Gain7_A1 (-0.619361)
-#define HPF_Fs32000_Gain7_A2 0.000000
-#define HPF_Fs32000_Gain7_B1 (-0.000000)
-#define HPF_Fs32000_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs32000_Gain8_A0 1.755943
-#define HPF_Fs32000_Gain8_A1 (-0.755943)
-#define HPF_Fs32000_Gain8_A2 0.000000
-#define HPF_Fs32000_Gain8_B1 (-0.000000)
-#define HPF_Fs32000_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs32000_Gain9_A0 1.909191
-#define HPF_Fs32000_Gain9_A1 (-0.909191)
-#define HPF_Fs32000_Gain9_A2 0.000000
-#define HPF_Fs32000_Gain9_B1 (-0.000000)
-#define HPF_Fs32000_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs32000_Gain10_A0 2.081139
-#define HPF_Fs32000_Gain10_A1 (-1.081139)
-#define HPF_Fs32000_Gain10_A2 0.000000
-#define HPF_Fs32000_Gain10_B1 (-0.000000)
-#define HPF_Fs32000_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs32000_Gain11_A0 2.274067
-#define HPF_Fs32000_Gain11_A1 (-1.274067)
-#define HPF_Fs32000_Gain11_A2 0.000000
-#define HPF_Fs32000_Gain11_B1 (-0.000000)
-#define HPF_Fs32000_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs32000_Gain12_A0 2.490536
-#define HPF_Fs32000_Gain12_A1 (-1.490536)
-#define HPF_Fs32000_Gain12_A2 0.000000
-#define HPF_Fs32000_Gain12_B1 (-0.000000)
-#define HPF_Fs32000_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs32000_Gain13_A0 2.733418
-#define HPF_Fs32000_Gain13_A1 (-1.733418)
-#define HPF_Fs32000_Gain13_A2 0.000000
-#define HPF_Fs32000_Gain13_B1 (-0.000000)
-#define HPF_Fs32000_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs32000_Gain14_A0 3.005936
-#define HPF_Fs32000_Gain14_A1 (-2.005936)
-#define HPF_Fs32000_Gain14_A2 0.000000
-#define HPF_Fs32000_Gain14_B1 (-0.000000)
-#define HPF_Fs32000_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs32000_Gain15_A0 3.311707
-#define HPF_Fs32000_Gain15_A1 (-2.311707)
-#define HPF_Fs32000_Gain15_A2 0.000000
-#define HPF_Fs32000_Gain15_B1 (-0.000000)
-#define HPF_Fs32000_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs32000_Gain1_A0 1.061009
+#define HPF_Fs32000_Gain1_A1 (-0.061009)
+#define HPF_Fs32000_Gain1_A2 0.000000
+#define HPF_Fs32000_Gain1_B1 (-0.000000)
+#define HPF_Fs32000_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs32000_Gain2_A0 1.129463
+#define HPF_Fs32000_Gain2_A1 (-0.129463)
+#define HPF_Fs32000_Gain2_A2 0.000000
+#define HPF_Fs32000_Gain2_B1 (-0.000000)
+#define HPF_Fs32000_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs32000_Gain3_A0 1.206267
+#define HPF_Fs32000_Gain3_A1 (-0.206267)
+#define HPF_Fs32000_Gain3_A2 0.000000
+#define HPF_Fs32000_Gain3_B1 (-0.000000)
+#define HPF_Fs32000_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs32000_Gain4_A0 1.292447
+#define HPF_Fs32000_Gain4_A1 (-0.292447)
+#define HPF_Fs32000_Gain4_A2 0.000000
+#define HPF_Fs32000_Gain4_B1 (-0.000000)
+#define HPF_Fs32000_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs32000_Gain5_A0 1.389140
+#define HPF_Fs32000_Gain5_A1 (-0.389140)
+#define HPF_Fs32000_Gain5_A2 0.000000
+#define HPF_Fs32000_Gain5_B1 (-0.000000)
+#define HPF_Fs32000_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs32000_Gain6_A0 1.497631
+#define HPF_Fs32000_Gain6_A1 (-0.497631)
+#define HPF_Fs32000_Gain6_A2 0.000000
+#define HPF_Fs32000_Gain6_B1 (-0.000000)
+#define HPF_Fs32000_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs32000_Gain7_A0 1.619361
+#define HPF_Fs32000_Gain7_A1 (-0.619361)
+#define HPF_Fs32000_Gain7_A2 0.000000
+#define HPF_Fs32000_Gain7_B1 (-0.000000)
+#define HPF_Fs32000_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs32000_Gain8_A0 1.755943
+#define HPF_Fs32000_Gain8_A1 (-0.755943)
+#define HPF_Fs32000_Gain8_A2 0.000000
+#define HPF_Fs32000_Gain8_B1 (-0.000000)
+#define HPF_Fs32000_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs32000_Gain9_A0 1.909191
+#define HPF_Fs32000_Gain9_A1 (-0.909191)
+#define HPF_Fs32000_Gain9_A2 0.000000
+#define HPF_Fs32000_Gain9_B1 (-0.000000)
+#define HPF_Fs32000_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs32000_Gain10_A0 2.081139
+#define HPF_Fs32000_Gain10_A1 (-1.081139)
+#define HPF_Fs32000_Gain10_A2 0.000000
+#define HPF_Fs32000_Gain10_B1 (-0.000000)
+#define HPF_Fs32000_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs32000_Gain11_A0 2.274067
+#define HPF_Fs32000_Gain11_A1 (-1.274067)
+#define HPF_Fs32000_Gain11_A2 0.000000
+#define HPF_Fs32000_Gain11_B1 (-0.000000)
+#define HPF_Fs32000_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs32000_Gain12_A0 2.490536
+#define HPF_Fs32000_Gain12_A1 (-1.490536)
+#define HPF_Fs32000_Gain12_A2 0.000000
+#define HPF_Fs32000_Gain12_B1 (-0.000000)
+#define HPF_Fs32000_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs32000_Gain13_A0 2.733418
+#define HPF_Fs32000_Gain13_A1 (-1.733418)
+#define HPF_Fs32000_Gain13_A2 0.000000
+#define HPF_Fs32000_Gain13_B1 (-0.000000)
+#define HPF_Fs32000_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs32000_Gain14_A0 3.005936
+#define HPF_Fs32000_Gain14_A1 (-2.005936)
+#define HPF_Fs32000_Gain14_A2 0.000000
+#define HPF_Fs32000_Gain14_B1 (-0.000000)
+#define HPF_Fs32000_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs32000_Gain15_A0 3.311707
+#define HPF_Fs32000_Gain15_A1 (-2.311707)
+#define HPF_Fs32000_Gain15_A2 0.000000
+#define HPF_Fs32000_Gain15_B1 (-0.000000)
+#define HPF_Fs32000_Gain15_B2 0.000000
/* Coefficients for sample rate 44100Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs44100_Gain1_A0 1.074364
-#define HPF_Fs44100_Gain1_A1 (-0.293257)
-#define HPF_Fs44100_Gain1_A2 0.000000
-#define HPF_Fs44100_Gain1_B1 (-0.218894)
-#define HPF_Fs44100_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs44100_Gain2_A0 1.157801
-#define HPF_Fs44100_Gain2_A1 (-0.376695)
-#define HPF_Fs44100_Gain2_A2 0.000000
-#define HPF_Fs44100_Gain2_B1 (-0.218894)
-#define HPF_Fs44100_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs44100_Gain3_A0 1.251420
-#define HPF_Fs44100_Gain3_A1 (-0.470313)
-#define HPF_Fs44100_Gain3_A2 0.000000
-#define HPF_Fs44100_Gain3_B1 (-0.218894)
-#define HPF_Fs44100_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs44100_Gain4_A0 1.356461
-#define HPF_Fs44100_Gain4_A1 (-0.575355)
-#define HPF_Fs44100_Gain4_A2 0.000000
-#define HPF_Fs44100_Gain4_B1 (-0.218894)
-#define HPF_Fs44100_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs44100_Gain5_A0 1.474320
-#define HPF_Fs44100_Gain5_A1 (-0.693213)
-#define HPF_Fs44100_Gain5_A2 0.000000
-#define HPF_Fs44100_Gain5_B1 (-0.218894)
-#define HPF_Fs44100_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs44100_Gain6_A0 1.606559
-#define HPF_Fs44100_Gain6_A1 (-0.825453)
-#define HPF_Fs44100_Gain6_A2 0.000000
-#define HPF_Fs44100_Gain6_B1 (-0.218894)
-#define HPF_Fs44100_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs44100_Gain7_A0 1.754935
-#define HPF_Fs44100_Gain7_A1 (-0.973828)
-#define HPF_Fs44100_Gain7_A2 0.000000
-#define HPF_Fs44100_Gain7_B1 (-0.218894)
-#define HPF_Fs44100_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs44100_Gain8_A0 1.921414
-#define HPF_Fs44100_Gain8_A1 (-1.140308)
-#define HPF_Fs44100_Gain8_A2 0.000000
-#define HPF_Fs44100_Gain8_B1 (-0.218894)
-#define HPF_Fs44100_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs44100_Gain9_A0 2.108208
-#define HPF_Fs44100_Gain9_A1 (-1.327101)
-#define HPF_Fs44100_Gain9_A2 0.000000
-#define HPF_Fs44100_Gain9_B1 (-0.218894)
-#define HPF_Fs44100_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs44100_Gain10_A0 2.317793
-#define HPF_Fs44100_Gain10_A1 (-1.536687)
-#define HPF_Fs44100_Gain10_A2 0.000000
-#define HPF_Fs44100_Gain10_B1 (-0.218894)
-#define HPF_Fs44100_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs44100_Gain11_A0 2.552952
-#define HPF_Fs44100_Gain11_A1 (-1.771846)
-#define HPF_Fs44100_Gain11_A2 0.000000
-#define HPF_Fs44100_Gain11_B1 (-0.218894)
-#define HPF_Fs44100_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs44100_Gain12_A0 2.816805
-#define HPF_Fs44100_Gain12_A1 (-2.035698)
-#define HPF_Fs44100_Gain12_A2 0.000000
-#define HPF_Fs44100_Gain12_B1 (-0.218894)
-#define HPF_Fs44100_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs44100_Gain13_A0 3.112852
-#define HPF_Fs44100_Gain13_A1 (-2.331746)
-#define HPF_Fs44100_Gain13_A2 0.000000
-#define HPF_Fs44100_Gain13_B1 (-0.218894)
-#define HPF_Fs44100_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs44100_Gain14_A0 3.445023
-#define HPF_Fs44100_Gain14_A1 (-2.663916)
-#define HPF_Fs44100_Gain14_A2 0.000000
-#define HPF_Fs44100_Gain14_B1 (-0.218894)
-#define HPF_Fs44100_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs44100_Gain15_A0 3.817724
-#define HPF_Fs44100_Gain15_A1 (-3.036618)
-#define HPF_Fs44100_Gain15_A2 0.000000
-#define HPF_Fs44100_Gain15_B1 (-0.218894)
-#define HPF_Fs44100_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs44100_Gain1_A0 1.074364
+#define HPF_Fs44100_Gain1_A1 (-0.293257)
+#define HPF_Fs44100_Gain1_A2 0.000000
+#define HPF_Fs44100_Gain1_B1 (-0.218894)
+#define HPF_Fs44100_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs44100_Gain2_A0 1.157801
+#define HPF_Fs44100_Gain2_A1 (-0.376695)
+#define HPF_Fs44100_Gain2_A2 0.000000
+#define HPF_Fs44100_Gain2_B1 (-0.218894)
+#define HPF_Fs44100_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs44100_Gain3_A0 1.251420
+#define HPF_Fs44100_Gain3_A1 (-0.470313)
+#define HPF_Fs44100_Gain3_A2 0.000000
+#define HPF_Fs44100_Gain3_B1 (-0.218894)
+#define HPF_Fs44100_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs44100_Gain4_A0 1.356461
+#define HPF_Fs44100_Gain4_A1 (-0.575355)
+#define HPF_Fs44100_Gain4_A2 0.000000
+#define HPF_Fs44100_Gain4_B1 (-0.218894)
+#define HPF_Fs44100_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs44100_Gain5_A0 1.474320
+#define HPF_Fs44100_Gain5_A1 (-0.693213)
+#define HPF_Fs44100_Gain5_A2 0.000000
+#define HPF_Fs44100_Gain5_B1 (-0.218894)
+#define HPF_Fs44100_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs44100_Gain6_A0 1.606559
+#define HPF_Fs44100_Gain6_A1 (-0.825453)
+#define HPF_Fs44100_Gain6_A2 0.000000
+#define HPF_Fs44100_Gain6_B1 (-0.218894)
+#define HPF_Fs44100_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs44100_Gain7_A0 1.754935
+#define HPF_Fs44100_Gain7_A1 (-0.973828)
+#define HPF_Fs44100_Gain7_A2 0.000000
+#define HPF_Fs44100_Gain7_B1 (-0.218894)
+#define HPF_Fs44100_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs44100_Gain8_A0 1.921414
+#define HPF_Fs44100_Gain8_A1 (-1.140308)
+#define HPF_Fs44100_Gain8_A2 0.000000
+#define HPF_Fs44100_Gain8_B1 (-0.218894)
+#define HPF_Fs44100_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs44100_Gain9_A0 2.108208
+#define HPF_Fs44100_Gain9_A1 (-1.327101)
+#define HPF_Fs44100_Gain9_A2 0.000000
+#define HPF_Fs44100_Gain9_B1 (-0.218894)
+#define HPF_Fs44100_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs44100_Gain10_A0 2.317793
+#define HPF_Fs44100_Gain10_A1 (-1.536687)
+#define HPF_Fs44100_Gain10_A2 0.000000
+#define HPF_Fs44100_Gain10_B1 (-0.218894)
+#define HPF_Fs44100_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs44100_Gain11_A0 2.552952
+#define HPF_Fs44100_Gain11_A1 (-1.771846)
+#define HPF_Fs44100_Gain11_A2 0.000000
+#define HPF_Fs44100_Gain11_B1 (-0.218894)
+#define HPF_Fs44100_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs44100_Gain12_A0 2.816805
+#define HPF_Fs44100_Gain12_A1 (-2.035698)
+#define HPF_Fs44100_Gain12_A2 0.000000
+#define HPF_Fs44100_Gain12_B1 (-0.218894)
+#define HPF_Fs44100_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs44100_Gain13_A0 3.112852
+#define HPF_Fs44100_Gain13_A1 (-2.331746)
+#define HPF_Fs44100_Gain13_A2 0.000000
+#define HPF_Fs44100_Gain13_B1 (-0.218894)
+#define HPF_Fs44100_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs44100_Gain14_A0 3.445023
+#define HPF_Fs44100_Gain14_A1 (-2.663916)
+#define HPF_Fs44100_Gain14_A2 0.000000
+#define HPF_Fs44100_Gain14_B1 (-0.218894)
+#define HPF_Fs44100_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs44100_Gain15_A0 3.817724
+#define HPF_Fs44100_Gain15_A1 (-3.036618)
+#define HPF_Fs44100_Gain15_A2 0.000000
+#define HPF_Fs44100_Gain15_B1 (-0.218894)
+#define HPF_Fs44100_Gain15_B2 0.000000
/* Coefficients for sample rate 48000Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs48000_Gain1_A0 1.077357
-#define HPF_Fs48000_Gain1_A1 (-0.345306)
-#define HPF_Fs48000_Gain1_A2 0.000000
-#define HPF_Fs48000_Gain1_B1 (-0.267949)
-#define HPF_Fs48000_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs48000_Gain2_A0 1.164152
-#define HPF_Fs48000_Gain2_A1 (-0.432101)
-#define HPF_Fs48000_Gain2_A2 0.000000
-#define HPF_Fs48000_Gain2_B1 (-0.267949)
-#define HPF_Fs48000_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs48000_Gain3_A0 1.261538
-#define HPF_Fs48000_Gain3_A1 (-0.529488)
-#define HPF_Fs48000_Gain3_A2 0.000000
-#define HPF_Fs48000_Gain3_B1 (-0.267949)
-#define HPF_Fs48000_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs48000_Gain4_A0 1.370807
-#define HPF_Fs48000_Gain4_A1 (-0.638757)
-#define HPF_Fs48000_Gain4_A2 0.000000
-#define HPF_Fs48000_Gain4_B1 (-0.267949)
-#define HPF_Fs48000_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs48000_Gain5_A0 1.493409
-#define HPF_Fs48000_Gain5_A1 (-0.761359)
-#define HPF_Fs48000_Gain5_A2 0.000000
-#define HPF_Fs48000_Gain5_B1 (-0.267949)
-#define HPF_Fs48000_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs48000_Gain6_A0 1.630971
-#define HPF_Fs48000_Gain6_A1 (-0.898920)
-#define HPF_Fs48000_Gain6_A2 0.000000
-#define HPF_Fs48000_Gain6_B1 (-0.267949)
-#define HPF_Fs48000_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs48000_Gain7_A0 1.785318
-#define HPF_Fs48000_Gain7_A1 (-1.053267)
-#define HPF_Fs48000_Gain7_A2 0.000000
-#define HPF_Fs48000_Gain7_B1 (-0.267949)
-#define HPF_Fs48000_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs48000_Gain8_A0 1.958498
-#define HPF_Fs48000_Gain8_A1 (-1.226447)
-#define HPF_Fs48000_Gain8_A2 0.000000
-#define HPF_Fs48000_Gain8_B1 (-0.267949)
-#define HPF_Fs48000_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs48000_Gain9_A0 2.152809
-#define HPF_Fs48000_Gain9_A1 (-1.420758)
-#define HPF_Fs48000_Gain9_A2 0.000000
-#define HPF_Fs48000_Gain9_B1 (-0.267949)
-#define HPF_Fs48000_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs48000_Gain10_A0 2.370829
-#define HPF_Fs48000_Gain10_A1 (-1.638778)
-#define HPF_Fs48000_Gain10_A2 0.000000
-#define HPF_Fs48000_Gain10_B1 (-0.267949)
-#define HPF_Fs48000_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs48000_Gain11_A0 2.615452
-#define HPF_Fs48000_Gain11_A1 (-1.883401)
-#define HPF_Fs48000_Gain11_A2 0.000000
-#define HPF_Fs48000_Gain11_B1 (-0.267949)
-#define HPF_Fs48000_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs48000_Gain12_A0 2.889924
-#define HPF_Fs48000_Gain12_A1 (-2.157873)
-#define HPF_Fs48000_Gain12_A2 0.000000
-#define HPF_Fs48000_Gain12_B1 (-0.267949)
-#define HPF_Fs48000_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs48000_Gain13_A0 3.197886
-#define HPF_Fs48000_Gain13_A1 (-2.465835)
-#define HPF_Fs48000_Gain13_A2 0.000000
-#define HPF_Fs48000_Gain13_B1 (-0.267949)
-#define HPF_Fs48000_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs48000_Gain14_A0 3.543425
-#define HPF_Fs48000_Gain14_A1 (-2.811374)
-#define HPF_Fs48000_Gain14_A2 0.000000
-#define HPF_Fs48000_Gain14_B1 (-0.267949)
-#define HPF_Fs48000_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs48000_Gain15_A0 3.931127
-#define HPF_Fs48000_Gain15_A1 (-3.199076)
-#define HPF_Fs48000_Gain15_A2 0.000000
-#define HPF_Fs48000_Gain15_B1 (-0.267949)
-#define HPF_Fs48000_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs48000_Gain1_A0 1.077357
+#define HPF_Fs48000_Gain1_A1 (-0.345306)
+#define HPF_Fs48000_Gain1_A2 0.000000
+#define HPF_Fs48000_Gain1_B1 (-0.267949)
+#define HPF_Fs48000_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs48000_Gain2_A0 1.164152
+#define HPF_Fs48000_Gain2_A1 (-0.432101)
+#define HPF_Fs48000_Gain2_A2 0.000000
+#define HPF_Fs48000_Gain2_B1 (-0.267949)
+#define HPF_Fs48000_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs48000_Gain3_A0 1.261538
+#define HPF_Fs48000_Gain3_A1 (-0.529488)
+#define HPF_Fs48000_Gain3_A2 0.000000
+#define HPF_Fs48000_Gain3_B1 (-0.267949)
+#define HPF_Fs48000_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs48000_Gain4_A0 1.370807
+#define HPF_Fs48000_Gain4_A1 (-0.638757)
+#define HPF_Fs48000_Gain4_A2 0.000000
+#define HPF_Fs48000_Gain4_B1 (-0.267949)
+#define HPF_Fs48000_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs48000_Gain5_A0 1.493409
+#define HPF_Fs48000_Gain5_A1 (-0.761359)
+#define HPF_Fs48000_Gain5_A2 0.000000
+#define HPF_Fs48000_Gain5_B1 (-0.267949)
+#define HPF_Fs48000_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs48000_Gain6_A0 1.630971
+#define HPF_Fs48000_Gain6_A1 (-0.898920)
+#define HPF_Fs48000_Gain6_A2 0.000000
+#define HPF_Fs48000_Gain6_B1 (-0.267949)
+#define HPF_Fs48000_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs48000_Gain7_A0 1.785318
+#define HPF_Fs48000_Gain7_A1 (-1.053267)
+#define HPF_Fs48000_Gain7_A2 0.000000
+#define HPF_Fs48000_Gain7_B1 (-0.267949)
+#define HPF_Fs48000_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs48000_Gain8_A0 1.958498
+#define HPF_Fs48000_Gain8_A1 (-1.226447)
+#define HPF_Fs48000_Gain8_A2 0.000000
+#define HPF_Fs48000_Gain8_B1 (-0.267949)
+#define HPF_Fs48000_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs48000_Gain9_A0 2.152809
+#define HPF_Fs48000_Gain9_A1 (-1.420758)
+#define HPF_Fs48000_Gain9_A2 0.000000
+#define HPF_Fs48000_Gain9_B1 (-0.267949)
+#define HPF_Fs48000_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs48000_Gain10_A0 2.370829
+#define HPF_Fs48000_Gain10_A1 (-1.638778)
+#define HPF_Fs48000_Gain10_A2 0.000000
+#define HPF_Fs48000_Gain10_B1 (-0.267949)
+#define HPF_Fs48000_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs48000_Gain11_A0 2.615452
+#define HPF_Fs48000_Gain11_A1 (-1.883401)
+#define HPF_Fs48000_Gain11_A2 0.000000
+#define HPF_Fs48000_Gain11_B1 (-0.267949)
+#define HPF_Fs48000_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs48000_Gain12_A0 2.889924
+#define HPF_Fs48000_Gain12_A1 (-2.157873)
+#define HPF_Fs48000_Gain12_A2 0.000000
+#define HPF_Fs48000_Gain12_B1 (-0.267949)
+#define HPF_Fs48000_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs48000_Gain13_A0 3.197886
+#define HPF_Fs48000_Gain13_A1 (-2.465835)
+#define HPF_Fs48000_Gain13_A2 0.000000
+#define HPF_Fs48000_Gain13_B1 (-0.267949)
+#define HPF_Fs48000_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs48000_Gain14_A0 3.543425
+#define HPF_Fs48000_Gain14_A1 (-2.811374)
+#define HPF_Fs48000_Gain14_A2 0.000000
+#define HPF_Fs48000_Gain14_B1 (-0.267949)
+#define HPF_Fs48000_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs48000_Gain15_A0 3.931127
+#define HPF_Fs48000_Gain15_A1 (-3.199076)
+#define HPF_Fs48000_Gain15_A2 0.000000
+#define HPF_Fs48000_Gain15_B1 (-0.267949)
+#define HPF_Fs48000_Gain15_B2 0.000000
/* Coefficients for sample rate 88200 */
/* Gain = 1.000000 dB */
-#define HPF_Fs88200_Gain1_A0 1.094374f
-#define HPF_Fs88200_Gain1_A1 (-0.641256f)
-#define HPF_Fs88200_Gain1_A2 0.000000f
-#define HPF_Fs88200_Gain1_B1 (-0.546882f)
-#define HPF_Fs88200_Gain1_B2 0.000000f
+#define HPF_Fs88200_Gain1_A0 1.094374f
+#define HPF_Fs88200_Gain1_A1 (-0.641256f)
+#define HPF_Fs88200_Gain1_A2 0.000000f
+#define HPF_Fs88200_Gain1_B1 (-0.546882f)
+#define HPF_Fs88200_Gain1_B2 0.000000f
/* Gain = 2.000000 dB */
-#define HPF_Fs88200_Gain2_A0 1.200264f
-#define HPF_Fs88200_Gain2_A1 (-0.747146f)
-#define HPF_Fs88200_Gain2_A2 0.000000f
-#define HPF_Fs88200_Gain2_B1 (-0.546882f)
-#define HPF_Fs88200_Gain2_B2 0.000000f
+#define HPF_Fs88200_Gain2_A0 1.200264f
+#define HPF_Fs88200_Gain2_A1 (-0.747146f)
+#define HPF_Fs88200_Gain2_A2 0.000000f
+#define HPF_Fs88200_Gain2_B1 (-0.546882f)
+#define HPF_Fs88200_Gain2_B2 0.000000f
/* Gain = 3.000000 dB */
-#define HPF_Fs88200_Gain3_A0 1.319074f
-#define HPF_Fs88200_Gain3_A1 (-0.865956f)
-#define HPF_Fs88200_Gain3_A2 0.000000f
-#define HPF_Fs88200_Gain3_B1 (-0.546882f)
-#define HPF_Fs88200_Gain3_B2 0.000000f
+#define HPF_Fs88200_Gain3_A0 1.319074f
+#define HPF_Fs88200_Gain3_A1 (-0.865956f)
+#define HPF_Fs88200_Gain3_A2 0.000000f
+#define HPF_Fs88200_Gain3_B1 (-0.546882f)
+#define HPF_Fs88200_Gain3_B2 0.000000f
/* Gain = 4.000000 dB */
-#define HPF_Fs88200_Gain4_A0 1.452380f
-#define HPF_Fs88200_Gain4_A1 (-0.999263f)
-#define HPF_Fs88200_Gain4_A2 0.000000f
-#define HPF_Fs88200_Gain4_B1 (-0.546882f)
-#define HPF_Fs88200_Gain4_B2 0.000000f
+#define HPF_Fs88200_Gain4_A0 1.452380f
+#define HPF_Fs88200_Gain4_A1 (-0.999263f)
+#define HPF_Fs88200_Gain4_A2 0.000000f
+#define HPF_Fs88200_Gain4_B1 (-0.546882f)
+#define HPF_Fs88200_Gain4_B2 0.000000f
/* Gain = 5.000000 dB */
-#define HPF_Fs88200_Gain5_A0 1.601953f
-#define HPF_Fs88200_Gain5_A1 (-1.148836f)
-#define HPF_Fs88200_Gain5_A2 0.000000f
-#define HPF_Fs88200_Gain5_B1 (-0.546882f)
-#define HPF_Fs88200_Gain5_B2 0.000000f
+#define HPF_Fs88200_Gain5_A0 1.601953f
+#define HPF_Fs88200_Gain5_A1 (-1.148836f)
+#define HPF_Fs88200_Gain5_A2 0.000000f
+#define HPF_Fs88200_Gain5_B1 (-0.546882f)
+#define HPF_Fs88200_Gain5_B2 0.000000f
/* Gain = 6.000000 dB */
-#define HPF_Fs88200_Gain6_A0 1.769777f
-#define HPF_Fs88200_Gain6_A1 (-1.316659f)
-#define HPF_Fs88200_Gain6_A2 0.000000f
-#define HPF_Fs88200_Gain6_B1 (-0.546882f)
-#define HPF_Fs88200_Gain6_B2 0.000000f
+#define HPF_Fs88200_Gain6_A0 1.769777f
+#define HPF_Fs88200_Gain6_A1 (-1.316659f)
+#define HPF_Fs88200_Gain6_A2 0.000000f
+#define HPF_Fs88200_Gain6_B1 (-0.546882f)
+#define HPF_Fs88200_Gain6_B2 0.000000f
/* Gain = 7.000000 dB */
-#define HPF_Fs88200_Gain7_A0 1.958078f
-#define HPF_Fs88200_Gain7_A1 (-1.504960f)
-#define HPF_Fs88200_Gain7_A2 0.000000f
-#define HPF_Fs88200_Gain7_B1 (-0.546882f)
-#define HPF_Fs88200_Gain7_B2 0.000000f
+#define HPF_Fs88200_Gain7_A0 1.958078f
+#define HPF_Fs88200_Gain7_A1 (-1.504960f)
+#define HPF_Fs88200_Gain7_A2 0.000000f
+#define HPF_Fs88200_Gain7_B1 (-0.546882f)
+#define HPF_Fs88200_Gain7_B2 0.000000f
/* Gain = 8.000000 dB */
-#define HPF_Fs88200_Gain8_A0 2.169355f
-#define HPF_Fs88200_Gain8_A1 (-1.716238f)
-#define HPF_Fs88200_Gain8_A2 0.000000f
-#define HPF_Fs88200_Gain8_B1 (-0.546882f)
-#define HPF_Fs88200_Gain8_B2 0.000000f
+#define HPF_Fs88200_Gain8_A0 2.169355f
+#define HPF_Fs88200_Gain8_A1 (-1.716238f)
+#define HPF_Fs88200_Gain8_A2 0.000000f
+#define HPF_Fs88200_Gain8_B1 (-0.546882f)
+#define HPF_Fs88200_Gain8_B2 0.000000f
/* Gain = 9.000000 dB */
-#define HPF_Fs88200_Gain9_A0 2.406412f
-#define HPF_Fs88200_Gain9_A1 (-1.953295f)
-#define HPF_Fs88200_Gain9_A2 0.000000f
-#define HPF_Fs88200_Gain9_B1 (-0.546882f)
-#define HPF_Fs88200_Gain9_B2 0.000000f
+#define HPF_Fs88200_Gain9_A0 2.406412f
+#define HPF_Fs88200_Gain9_A1 (-1.953295f)
+#define HPF_Fs88200_Gain9_A2 0.000000f
+#define HPF_Fs88200_Gain9_B1 (-0.546882f)
+#define HPF_Fs88200_Gain9_B2 0.000000f
/* Gain = 10.000000 dB */
-#define HPF_Fs88200_Gain10_A0 2.672395f
-#define HPF_Fs88200_Gain10_A1 (-2.219277f)
-#define HPF_Fs88200_Gain10_A2 0.000000f
-#define HPF_Fs88200_Gain10_B1 (-0.546882f)
-#define HPF_Fs88200_Gain10_B2 0.000000f
+#define HPF_Fs88200_Gain10_A0 2.672395f
+#define HPF_Fs88200_Gain10_A1 (-2.219277f)
+#define HPF_Fs88200_Gain10_A2 0.000000f
+#define HPF_Fs88200_Gain10_B1 (-0.546882f)
+#define HPF_Fs88200_Gain10_B2 0.000000f
/* Gain = 11.000000 dB */
-#define HPF_Fs88200_Gain11_A0 2.970832f
-#define HPF_Fs88200_Gain11_A1 (-2.517714f)
-#define HPF_Fs88200_Gain11_A2 0.000000f
-#define HPF_Fs88200_Gain11_B1 (-0.546882f)
-#define HPF_Fs88200_Gain11_B2 0.000000f
+#define HPF_Fs88200_Gain11_A0 2.970832f
+#define HPF_Fs88200_Gain11_A1 (-2.517714f)
+#define HPF_Fs88200_Gain11_A2 0.000000f
+#define HPF_Fs88200_Gain11_B1 (-0.546882f)
+#define HPF_Fs88200_Gain11_B2 0.000000f
/* Gain = 12.000000 dB */
-#define HPF_Fs88200_Gain12_A0 3.305684f
-#define HPF_Fs88200_Gain12_A1 (-2.852566f)
-#define HPF_Fs88200_Gain12_A2 0.000000f
-#define HPF_Fs88200_Gain12_B1 (-0.546882f)
-#define HPF_Fs88200_Gain12_B2 0.000000f
+#define HPF_Fs88200_Gain12_A0 3.305684f
+#define HPF_Fs88200_Gain12_A1 (-2.852566f)
+#define HPF_Fs88200_Gain12_A2 0.000000f
+#define HPF_Fs88200_Gain12_B1 (-0.546882f)
+#define HPF_Fs88200_Gain12_B2 0.000000f
/* Gain = 13.000000 dB */
-#define HPF_Fs88200_Gain13_A0 3.681394f
-#define HPF_Fs88200_Gain13_A1 (-3.228276f)
-#define HPF_Fs88200_Gain13_A2 0.000000f
-#define HPF_Fs88200_Gain13_B1 (-0.546882f)
-#define HPF_Fs88200_Gain13_B2 0.000000f
+#define HPF_Fs88200_Gain13_A0 3.681394f
+#define HPF_Fs88200_Gain13_A1 (-3.228276f)
+#define HPF_Fs88200_Gain13_A2 0.000000f
+#define HPF_Fs88200_Gain13_B1 (-0.546882f)
+#define HPF_Fs88200_Gain13_B2 0.000000f
/* Gain = 14.000000 dB */
-#define HPF_Fs88200_Gain14_A0 4.102947f
-#define HPF_Fs88200_Gain14_A1 (-3.649830f)
-#define HPF_Fs88200_Gain14_A2 0.000000f
-#define HPF_Fs88200_Gain14_B1 (-0.546882f)
-#define HPF_Fs88200_Gain14_B2 0.000000f
+#define HPF_Fs88200_Gain14_A0 4.102947f
+#define HPF_Fs88200_Gain14_A1 (-3.649830f)
+#define HPF_Fs88200_Gain14_A2 0.000000f
+#define HPF_Fs88200_Gain14_B1 (-0.546882f)
+#define HPF_Fs88200_Gain14_B2 0.000000f
/* Gain = 15.000000 dB */
-#define HPF_Fs88200_Gain15_A0 4.575938f
-#define HPF_Fs88200_Gain15_A1 (-4.122820f)
-#define HPF_Fs88200_Gain15_A2 0.000000f
-#define HPF_Fs88200_Gain15_B1 (-0.546882f)
-#define HPF_Fs88200_Gain15_B2 0.000000f
+#define HPF_Fs88200_Gain15_A0 4.575938f
+#define HPF_Fs88200_Gain15_A1 (-4.122820f)
+#define HPF_Fs88200_Gain15_A2 0.000000f
+#define HPF_Fs88200_Gain15_B1 (-0.546882f)
+#define HPF_Fs88200_Gain15_B2 0.000000f
/* Coefficients for sample rate 96000Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs96000_Gain1_A0 1.096233
-#define HPF_Fs96000_Gain1_A1 (-0.673583)
-#define HPF_Fs96000_Gain1_A2 0.000000
-#define HPF_Fs96000_Gain1_B1 (-0.577350)
-#define HPF_Fs96000_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs96000_Gain2_A0 1.204208
-#define HPF_Fs96000_Gain2_A1 (-0.781558)
-#define HPF_Fs96000_Gain2_A2 0.000000
-#define HPF_Fs96000_Gain2_B1 (-0.577350)
-#define HPF_Fs96000_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs96000_Gain3_A0 1.325358
-#define HPF_Fs96000_Gain3_A1 (-0.902708)
-#define HPF_Fs96000_Gain3_A2 0.000000
-#define HPF_Fs96000_Gain3_B1 (-0.577350)
-#define HPF_Fs96000_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs96000_Gain4_A0 1.461291
-#define HPF_Fs96000_Gain4_A1 (-1.038641)
-#define HPF_Fs96000_Gain4_A2 0.000000
-#define HPF_Fs96000_Gain4_B1 (-0.577350)
-#define HPF_Fs96000_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs96000_Gain5_A0 1.613810
-#define HPF_Fs96000_Gain5_A1 (-1.191160)
-#define HPF_Fs96000_Gain5_A2 0.000000
-#define HPF_Fs96000_Gain5_B1 (-0.577350)
-#define HPF_Fs96000_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs96000_Gain6_A0 1.784939
-#define HPF_Fs96000_Gain6_A1 (-1.362289)
-#define HPF_Fs96000_Gain6_A2 0.000000
-#define HPF_Fs96000_Gain6_B1 (-0.577350)
-#define HPF_Fs96000_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs96000_Gain7_A0 1.976949
-#define HPF_Fs96000_Gain7_A1 (-1.554299)
-#define HPF_Fs96000_Gain7_A2 0.000000
-#define HPF_Fs96000_Gain7_B1 (-0.577350)
-#define HPF_Fs96000_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs96000_Gain8_A0 2.192387
-#define HPF_Fs96000_Gain8_A1 (-1.769738)
-#define HPF_Fs96000_Gain8_A2 0.000000
-#define HPF_Fs96000_Gain8_B1 (-0.577350)
-#define HPF_Fs96000_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs96000_Gain9_A0 2.434113
-#define HPF_Fs96000_Gain9_A1 (-2.011464)
-#define HPF_Fs96000_Gain9_A2 0.000000
-#define HPF_Fs96000_Gain9_B1 (-0.577350)
-#define HPF_Fs96000_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs96000_Gain10_A0 2.705335
-#define HPF_Fs96000_Gain10_A1 (-2.282685)
-#define HPF_Fs96000_Gain10_A2 0.000000
-#define HPF_Fs96000_Gain10_B1 (-0.577350)
-#define HPF_Fs96000_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs96000_Gain11_A0 3.009650
-#define HPF_Fs96000_Gain11_A1 (-2.587000)
-#define HPF_Fs96000_Gain11_A2 0.000000
-#define HPF_Fs96000_Gain11_B1 (-0.577350)
-#define HPF_Fs96000_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs96000_Gain12_A0 3.351097
-#define HPF_Fs96000_Gain12_A1 (-2.928447)
-#define HPF_Fs96000_Gain12_A2 0.000000
-#define HPF_Fs96000_Gain12_B1 (-0.577350)
-#define HPF_Fs96000_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs96000_Gain13_A0 3.734207
-#define HPF_Fs96000_Gain13_A1 (-3.311558)
-#define HPF_Fs96000_Gain13_A2 0.000000
-#define HPF_Fs96000_Gain13_B1 (-0.577350)
-#define HPF_Fs96000_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs96000_Gain14_A0 4.164064
-#define HPF_Fs96000_Gain14_A1 (-3.741414)
-#define HPF_Fs96000_Gain14_A2 0.000000
-#define HPF_Fs96000_Gain14_B1 (-0.577350)
-#define HPF_Fs96000_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs96000_Gain15_A0 4.646371
-#define HPF_Fs96000_Gain15_A1 (-4.223721)
-#define HPF_Fs96000_Gain15_A2 0.000000
-#define HPF_Fs96000_Gain15_B1 (-0.577350)
-#define HPF_Fs96000_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs96000_Gain1_A0 1.096233
+#define HPF_Fs96000_Gain1_A1 (-0.673583)
+#define HPF_Fs96000_Gain1_A2 0.000000
+#define HPF_Fs96000_Gain1_B1 (-0.577350)
+#define HPF_Fs96000_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs96000_Gain2_A0 1.204208
+#define HPF_Fs96000_Gain2_A1 (-0.781558)
+#define HPF_Fs96000_Gain2_A2 0.000000
+#define HPF_Fs96000_Gain2_B1 (-0.577350)
+#define HPF_Fs96000_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs96000_Gain3_A0 1.325358
+#define HPF_Fs96000_Gain3_A1 (-0.902708)
+#define HPF_Fs96000_Gain3_A2 0.000000
+#define HPF_Fs96000_Gain3_B1 (-0.577350)
+#define HPF_Fs96000_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs96000_Gain4_A0 1.461291
+#define HPF_Fs96000_Gain4_A1 (-1.038641)
+#define HPF_Fs96000_Gain4_A2 0.000000
+#define HPF_Fs96000_Gain4_B1 (-0.577350)
+#define HPF_Fs96000_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs96000_Gain5_A0 1.613810
+#define HPF_Fs96000_Gain5_A1 (-1.191160)
+#define HPF_Fs96000_Gain5_A2 0.000000
+#define HPF_Fs96000_Gain5_B1 (-0.577350)
+#define HPF_Fs96000_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs96000_Gain6_A0 1.784939
+#define HPF_Fs96000_Gain6_A1 (-1.362289)
+#define HPF_Fs96000_Gain6_A2 0.000000
+#define HPF_Fs96000_Gain6_B1 (-0.577350)
+#define HPF_Fs96000_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs96000_Gain7_A0 1.976949
+#define HPF_Fs96000_Gain7_A1 (-1.554299)
+#define HPF_Fs96000_Gain7_A2 0.000000
+#define HPF_Fs96000_Gain7_B1 (-0.577350)
+#define HPF_Fs96000_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs96000_Gain8_A0 2.192387
+#define HPF_Fs96000_Gain8_A1 (-1.769738)
+#define HPF_Fs96000_Gain8_A2 0.000000
+#define HPF_Fs96000_Gain8_B1 (-0.577350)
+#define HPF_Fs96000_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs96000_Gain9_A0 2.434113
+#define HPF_Fs96000_Gain9_A1 (-2.011464)
+#define HPF_Fs96000_Gain9_A2 0.000000
+#define HPF_Fs96000_Gain9_B1 (-0.577350)
+#define HPF_Fs96000_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs96000_Gain10_A0 2.705335
+#define HPF_Fs96000_Gain10_A1 (-2.282685)
+#define HPF_Fs96000_Gain10_A2 0.000000
+#define HPF_Fs96000_Gain10_B1 (-0.577350)
+#define HPF_Fs96000_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs96000_Gain11_A0 3.009650
+#define HPF_Fs96000_Gain11_A1 (-2.587000)
+#define HPF_Fs96000_Gain11_A2 0.000000
+#define HPF_Fs96000_Gain11_B1 (-0.577350)
+#define HPF_Fs96000_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs96000_Gain12_A0 3.351097
+#define HPF_Fs96000_Gain12_A1 (-2.928447)
+#define HPF_Fs96000_Gain12_A2 0.000000
+#define HPF_Fs96000_Gain12_B1 (-0.577350)
+#define HPF_Fs96000_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs96000_Gain13_A0 3.734207
+#define HPF_Fs96000_Gain13_A1 (-3.311558)
+#define HPF_Fs96000_Gain13_A2 0.000000
+#define HPF_Fs96000_Gain13_B1 (-0.577350)
+#define HPF_Fs96000_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs96000_Gain14_A0 4.164064
+#define HPF_Fs96000_Gain14_A1 (-3.741414)
+#define HPF_Fs96000_Gain14_A2 0.000000
+#define HPF_Fs96000_Gain14_B1 (-0.577350)
+#define HPF_Fs96000_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs96000_Gain15_A0 4.646371
+#define HPF_Fs96000_Gain15_A1 (-4.223721)
+#define HPF_Fs96000_Gain15_A2 0.000000
+#define HPF_Fs96000_Gain15_B1 (-0.577350)
+#define HPF_Fs96000_Gain15_B2 0.000000
/* Coefficients for sample rate 176400 */
/* Gain = 1.000000 dB */
-#define HPF_Fs176400_Gain1_A0 1.106711f
-#define HPF_Fs176400_Gain1_A1 (-0.855807f)
-#define HPF_Fs176400_Gain1_A2 0.000000f
-#define HPF_Fs176400_Gain1_B1 (-0.749096f)
-#define HPF_Fs176400_Gain1_B2 0.000000f
+#define HPF_Fs176400_Gain1_A0 1.106711f
+#define HPF_Fs176400_Gain1_A1 (-0.855807f)
+#define HPF_Fs176400_Gain1_A2 0.000000f
+#define HPF_Fs176400_Gain1_B1 (-0.749096f)
+#define HPF_Fs176400_Gain1_B2 0.000000f
/* Gain = 2.000000 dB */
-#define HPF_Fs176400_Gain2_A0 1.226443f
-#define HPF_Fs176400_Gain2_A1 (-0.975539f)
-#define HPF_Fs176400_Gain2_A2 0.000000f
-#define HPF_Fs176400_Gain2_B1 (-0.749096f)
-#define HPF_Fs176400_Gain2_B2 0.000000f
+#define HPF_Fs176400_Gain2_A0 1.226443f
+#define HPF_Fs176400_Gain2_A1 (-0.975539f)
+#define HPF_Fs176400_Gain2_A2 0.000000f
+#define HPF_Fs176400_Gain2_B1 (-0.749096f)
+#define HPF_Fs176400_Gain2_B2 0.000000f
/* Gain = 3.000000 dB */
-#define HPF_Fs176400_Gain3_A0 1.360784f
-#define HPF_Fs176400_Gain3_A1 (-1.109880f)
-#define HPF_Fs176400_Gain3_A2 0.000000f
-#define HPF_Fs176400_Gain3_B1 (-0.749096f)
-#define HPF_Fs176400_Gain3_B2 0.000000f
+#define HPF_Fs176400_Gain3_A0 1.360784f
+#define HPF_Fs176400_Gain3_A1 (-1.109880f)
+#define HPF_Fs176400_Gain3_A2 0.000000f
+#define HPF_Fs176400_Gain3_B1 (-0.749096f)
+#define HPF_Fs176400_Gain3_B2 0.000000f
/* Gain = 4.000000 dB */
-#define HPF_Fs176400_Gain4_A0 1.511517f
-#define HPF_Fs176400_Gain4_A1 (-1.260613f)
-#define HPF_Fs176400_Gain4_A2 0.000000f
-#define HPF_Fs176400_Gain4_B1 (-0.749096f)
-#define HPF_Fs176400_Gain4_B2 0.000000f
+#define HPF_Fs176400_Gain4_A0 1.511517f
+#define HPF_Fs176400_Gain4_A1 (-1.260613f)
+#define HPF_Fs176400_Gain4_A2 0.000000f
+#define HPF_Fs176400_Gain4_B1 (-0.749096f)
+#define HPF_Fs176400_Gain4_B2 0.000000f
/* Gain = 5.000000 dB */
-#define HPF_Fs176400_Gain5_A0 1.680643f
-#define HPF_Fs176400_Gain5_A1 (-1.429739f)
-#define HPF_Fs176400_Gain5_A2 0.000000f
-#define HPF_Fs176400_Gain5_B1 (-0.749096f)
-#define HPF_Fs176400_Gain5_B2 0.000000f
+#define HPF_Fs176400_Gain5_A0 1.680643f
+#define HPF_Fs176400_Gain5_A1 (-1.429739f)
+#define HPF_Fs176400_Gain5_A2 0.000000f
+#define HPF_Fs176400_Gain5_B1 (-0.749096f)
+#define HPF_Fs176400_Gain5_B2 0.000000f
/* Gain = 6.000000 dB */
-#define HPF_Fs176400_Gain6_A0 1.870405f
-#define HPF_Fs176400_Gain6_A1 (-1.619501f)
-#define HPF_Fs176400_Gain6_A2 0.000000f
-#define HPF_Fs176400_Gain6_B1 (-0.749096f)
-#define HPF_Fs176400_Gain6_B2 0.000000f
+#define HPF_Fs176400_Gain6_A0 1.870405f
+#define HPF_Fs176400_Gain6_A1 (-1.619501f)
+#define HPF_Fs176400_Gain6_A2 0.000000f
+#define HPF_Fs176400_Gain6_B1 (-0.749096f)
+#define HPF_Fs176400_Gain6_B2 0.000000f
/* Gain = 7.000000 dB */
-#define HPF_Fs176400_Gain7_A0 2.083321f
-#define HPF_Fs176400_Gain7_A1 (-1.832417f)
-#define HPF_Fs176400_Gain7_A2 0.000000f
-#define HPF_Fs176400_Gain7_B1 (-0.749096f)
-#define HPF_Fs176400_Gain7_B2 0.000000f
+#define HPF_Fs176400_Gain7_A0 2.083321f
+#define HPF_Fs176400_Gain7_A1 (-1.832417f)
+#define HPF_Fs176400_Gain7_A2 0.000000f
+#define HPF_Fs176400_Gain7_B1 (-0.749096f)
+#define HPF_Fs176400_Gain7_B2 0.000000f
/* Gain = 8.000000 dB */
-#define HPF_Fs176400_Gain8_A0 2.322217f
-#define HPF_Fs176400_Gain8_A1 (-2.071313f)
-#define HPF_Fs176400_Gain8_A2 0.000000f
-#define HPF_Fs176400_Gain8_B1 (-0.749096f)
-#define HPF_Fs176400_Gain8_B2 0.000000f
+#define HPF_Fs176400_Gain8_A0 2.322217f
+#define HPF_Fs176400_Gain8_A1 (-2.071313f)
+#define HPF_Fs176400_Gain8_A2 0.000000f
+#define HPF_Fs176400_Gain8_B1 (-0.749096f)
+#define HPF_Fs176400_Gain8_B2 0.000000f
/* Gain = 9.000000 dB */
-#define HPF_Fs176400_Gain9_A0 2.590263f
-#define HPF_Fs176400_Gain9_A1 (-2.339359f)
-#define HPF_Fs176400_Gain9_A2 0.000000f
-#define HPF_Fs176400_Gain9_B1 (-0.749096f)
-#define HPF_Fs176400_Gain9_B2 0.000000f
+#define HPF_Fs176400_Gain9_A0 2.590263f
+#define HPF_Fs176400_Gain9_A1 (-2.339359f)
+#define HPF_Fs176400_Gain9_A2 0.000000f
+#define HPF_Fs176400_Gain9_B1 (-0.749096f)
+#define HPF_Fs176400_Gain9_B2 0.000000f
/* Gain = 10.000000 dB */
-#define HPF_Fs176400_Gain10_A0 2.891016f
-#define HPF_Fs176400_Gain10_A1 (-2.640112f)
-#define HPF_Fs176400_Gain10_A2 0.000000f
-#define HPF_Fs176400_Gain10_B1 (-0.749096f)
-#define HPF_Fs176400_Gain10_B2 0.000000f
+#define HPF_Fs176400_Gain10_A0 2.891016f
+#define HPF_Fs176400_Gain10_A1 (-2.640112f)
+#define HPF_Fs176400_Gain10_A2 0.000000f
+#define HPF_Fs176400_Gain10_B1 (-0.749096f)
+#define HPF_Fs176400_Gain10_B2 0.000000f
/* Gain = 11.000000 dB */
-#define HPF_Fs176400_Gain11_A0 3.228465f
-#define HPF_Fs176400_Gain11_A1 (-2.977561f)
-#define HPF_Fs176400_Gain11_A2 0.000000f
-#define HPF_Fs176400_Gain11_B1 (-0.749096f)
-#define HPF_Fs176400_Gain11_B2 0.000000f
+#define HPF_Fs176400_Gain11_A0 3.228465f
+#define HPF_Fs176400_Gain11_A1 (-2.977561f)
+#define HPF_Fs176400_Gain11_A2 0.000000f
+#define HPF_Fs176400_Gain11_B1 (-0.749096f)
+#define HPF_Fs176400_Gain11_B2 0.000000f
/* Gain = 12.000000 dB */
-#define HPF_Fs176400_Gain12_A0 3.607090f
-#define HPF_Fs176400_Gain12_A1 (-3.356186f)
-#define HPF_Fs176400_Gain12_A2 0.000000f
-#define HPF_Fs176400_Gain12_B1 (-0.749096f)
-#define HPF_Fs176400_Gain12_B2 0.000000f
+#define HPF_Fs176400_Gain12_A0 3.607090f
+#define HPF_Fs176400_Gain12_A1 (-3.356186f)
+#define HPF_Fs176400_Gain12_A2 0.000000f
+#define HPF_Fs176400_Gain12_B1 (-0.749096f)
+#define HPF_Fs176400_Gain12_B2 0.000000f
/* Gain = 13.000000 dB */
-#define HPF_Fs176400_Gain13_A0 4.031914f
-#define HPF_Fs176400_Gain13_A1 (-3.781010f)
-#define HPF_Fs176400_Gain13_A2 0.000000f
-#define HPF_Fs176400_Gain13_B1 (-0.749096f)
-#define HPF_Fs176400_Gain13_B2 0.000000f
+#define HPF_Fs176400_Gain13_A0 4.031914f
+#define HPF_Fs176400_Gain13_A1 (-3.781010f)
+#define HPF_Fs176400_Gain13_A2 0.000000f
+#define HPF_Fs176400_Gain13_B1 (-0.749096f)
+#define HPF_Fs176400_Gain13_B2 0.000000f
/* Gain = 14.000000 dB */
-#define HPF_Fs176400_Gain14_A0 4.508575f
-#define HPF_Fs176400_Gain14_A1 (-4.257671f)
-#define HPF_Fs176400_Gain14_A2 0.000000f
-#define HPF_Fs176400_Gain14_B1 (-0.749096f)
-#define HPF_Fs176400_Gain14_B2 0.000000f
+#define HPF_Fs176400_Gain14_A0 4.508575f
+#define HPF_Fs176400_Gain14_A1 (-4.257671f)
+#define HPF_Fs176400_Gain14_A2 0.000000f
+#define HPF_Fs176400_Gain14_B1 (-0.749096f)
+#define HPF_Fs176400_Gain14_B2 0.000000f
/* Gain = 15.000000 dB */
-#define HPF_Fs176400_Gain15_A0 5.043397f
-#define HPF_Fs176400_Gain15_A1 (-4.792493f)
-#define HPF_Fs176400_Gain15_A2 0.000000f
-#define HPF_Fs176400_Gain15_B1 (-0.749096f)
-#define HPF_Fs176400_Gain15_B2 0.000000f
+#define HPF_Fs176400_Gain15_A0 5.043397f
+#define HPF_Fs176400_Gain15_A1 (-4.792493f)
+#define HPF_Fs176400_Gain15_A2 0.000000f
+#define HPF_Fs176400_Gain15_B1 (-0.749096f)
+#define HPF_Fs176400_Gain15_B2 0.000000f
/* Coefficients for sample rate 192000Hz */
- /* Gain = 1.000000 dB */
-#define HPF_Fs192000_Gain1_A0 1.107823
-#define HPF_Fs192000_Gain1_A1 (-0.875150)
-#define HPF_Fs192000_Gain1_A2 0.000000
-#define HPF_Fs192000_Gain1_B1 (-0.767327)
-#define HPF_Fs192000_Gain1_B2 0.000000
- /* Gain = 2.000000 dB */
-#define HPF_Fs192000_Gain2_A0 1.228803
-#define HPF_Fs192000_Gain2_A1 (-0.996130)
-#define HPF_Fs192000_Gain2_A2 0.000000
-#define HPF_Fs192000_Gain2_B1 (-0.767327)
-#define HPF_Fs192000_Gain2_B2 0.000000
- /* Gain = 3.000000 dB */
-#define HPF_Fs192000_Gain3_A0 1.364544
-#define HPF_Fs192000_Gain3_A1 (-1.131871)
-#define HPF_Fs192000_Gain3_A2 0.000000
-#define HPF_Fs192000_Gain3_B1 (-0.767327)
-#define HPF_Fs192000_Gain3_B2 0.000000
- /* Gain = 4.000000 dB */
-#define HPF_Fs192000_Gain4_A0 1.516849
-#define HPF_Fs192000_Gain4_A1 (-1.284176)
-#define HPF_Fs192000_Gain4_A2 0.000000
-#define HPF_Fs192000_Gain4_B1 (-0.767327)
-#define HPF_Fs192000_Gain4_B2 0.000000
- /* Gain = 5.000000 dB */
-#define HPF_Fs192000_Gain5_A0 1.687737
-#define HPF_Fs192000_Gain5_A1 (-1.455064)
-#define HPF_Fs192000_Gain5_A2 0.000000
-#define HPF_Fs192000_Gain5_B1 (-0.767327)
-#define HPF_Fs192000_Gain5_B2 0.000000
- /* Gain = 6.000000 dB */
-#define HPF_Fs192000_Gain6_A0 1.879477
-#define HPF_Fs192000_Gain6_A1 (-1.646804)
-#define HPF_Fs192000_Gain6_A2 0.000000
-#define HPF_Fs192000_Gain6_B1 (-0.767327)
-#define HPF_Fs192000_Gain6_B2 0.000000
- /* Gain = 7.000000 dB */
-#define HPF_Fs192000_Gain7_A0 2.094613
-#define HPF_Fs192000_Gain7_A1 (-1.861940)
-#define HPF_Fs192000_Gain7_A2 0.000000
-#define HPF_Fs192000_Gain7_B1 (-0.767327)
-#define HPF_Fs192000_Gain7_B2 0.000000
- /* Gain = 8.000000 dB */
-#define HPF_Fs192000_Gain8_A0 2.335999
-#define HPF_Fs192000_Gain8_A1 (-2.103326)
-#define HPF_Fs192000_Gain8_A2 0.000000
-#define HPF_Fs192000_Gain8_B1 (-0.767327)
-#define HPF_Fs192000_Gain8_B2 0.000000
- /* Gain = 9.000000 dB */
-#define HPF_Fs192000_Gain9_A0 2.606839
-#define HPF_Fs192000_Gain9_A1 (-2.374166)
-#define HPF_Fs192000_Gain9_A2 0.000000
-#define HPF_Fs192000_Gain9_B1 (-0.767327)
-#define HPF_Fs192000_Gain9_B2 0.000000
- /* Gain = 10.000000 dB */
-#define HPF_Fs192000_Gain10_A0 2.910726
-#define HPF_Fs192000_Gain10_A1 (-2.678053)
-#define HPF_Fs192000_Gain10_A2 0.000000
-#define HPF_Fs192000_Gain10_B1 (-0.767327)
-#define HPF_Fs192000_Gain10_B2 0.000000
- /* Gain = 11.000000 dB */
-#define HPF_Fs192000_Gain11_A0 3.251693
-#define HPF_Fs192000_Gain11_A1 (-3.019020)
-#define HPF_Fs192000_Gain11_A2 0.000000
-#define HPF_Fs192000_Gain11_B1 (-0.767327)
-#define HPF_Fs192000_Gain11_B2 0.000000
- /* Gain = 12.000000 dB */
-#define HPF_Fs192000_Gain12_A0 3.634264
-#define HPF_Fs192000_Gain12_A1 (-3.401591)
-#define HPF_Fs192000_Gain12_A2 0.000000
-#define HPF_Fs192000_Gain12_B1 (-0.767327)
-#define HPF_Fs192000_Gain12_B2 0.000000
- /* Gain = 13.000000 dB */
-#define HPF_Fs192000_Gain13_A0 4.063516
-#define HPF_Fs192000_Gain13_A1 (-3.830843)
-#define HPF_Fs192000_Gain13_A2 0.000000
-#define HPF_Fs192000_Gain13_B1 (-0.767327)
-#define HPF_Fs192000_Gain13_B2 0.000000
- /* Gain = 14.000000 dB */
-#define HPF_Fs192000_Gain14_A0 4.545145
-#define HPF_Fs192000_Gain14_A1 (-4.312472)
-#define HPF_Fs192000_Gain14_A2 0.000000
-#define HPF_Fs192000_Gain14_B1 (-0.767327)
-#define HPF_Fs192000_Gain14_B2 0.000000
- /* Gain = 15.000000 dB */
-#define HPF_Fs192000_Gain15_A0 5.085542
-#define HPF_Fs192000_Gain15_A1 (-4.852868)
-#define HPF_Fs192000_Gain15_A2 0.000000
-#define HPF_Fs192000_Gain15_B1 (-0.767327)
-#define HPF_Fs192000_Gain15_B2 0.000000
+/* Gain = 1.000000 dB */
+#define HPF_Fs192000_Gain1_A0 1.107823
+#define HPF_Fs192000_Gain1_A1 (-0.875150)
+#define HPF_Fs192000_Gain1_A2 0.000000
+#define HPF_Fs192000_Gain1_B1 (-0.767327)
+#define HPF_Fs192000_Gain1_B2 0.000000
+/* Gain = 2.000000 dB */
+#define HPF_Fs192000_Gain2_A0 1.228803
+#define HPF_Fs192000_Gain2_A1 (-0.996130)
+#define HPF_Fs192000_Gain2_A2 0.000000
+#define HPF_Fs192000_Gain2_B1 (-0.767327)
+#define HPF_Fs192000_Gain2_B2 0.000000
+/* Gain = 3.000000 dB */
+#define HPF_Fs192000_Gain3_A0 1.364544
+#define HPF_Fs192000_Gain3_A1 (-1.131871)
+#define HPF_Fs192000_Gain3_A2 0.000000
+#define HPF_Fs192000_Gain3_B1 (-0.767327)
+#define HPF_Fs192000_Gain3_B2 0.000000
+/* Gain = 4.000000 dB */
+#define HPF_Fs192000_Gain4_A0 1.516849
+#define HPF_Fs192000_Gain4_A1 (-1.284176)
+#define HPF_Fs192000_Gain4_A2 0.000000
+#define HPF_Fs192000_Gain4_B1 (-0.767327)
+#define HPF_Fs192000_Gain4_B2 0.000000
+/* Gain = 5.000000 dB */
+#define HPF_Fs192000_Gain5_A0 1.687737
+#define HPF_Fs192000_Gain5_A1 (-1.455064)
+#define HPF_Fs192000_Gain5_A2 0.000000
+#define HPF_Fs192000_Gain5_B1 (-0.767327)
+#define HPF_Fs192000_Gain5_B2 0.000000
+/* Gain = 6.000000 dB */
+#define HPF_Fs192000_Gain6_A0 1.879477
+#define HPF_Fs192000_Gain6_A1 (-1.646804)
+#define HPF_Fs192000_Gain6_A2 0.000000
+#define HPF_Fs192000_Gain6_B1 (-0.767327)
+#define HPF_Fs192000_Gain6_B2 0.000000
+/* Gain = 7.000000 dB */
+#define HPF_Fs192000_Gain7_A0 2.094613
+#define HPF_Fs192000_Gain7_A1 (-1.861940)
+#define HPF_Fs192000_Gain7_A2 0.000000
+#define HPF_Fs192000_Gain7_B1 (-0.767327)
+#define HPF_Fs192000_Gain7_B2 0.000000
+/* Gain = 8.000000 dB */
+#define HPF_Fs192000_Gain8_A0 2.335999
+#define HPF_Fs192000_Gain8_A1 (-2.103326)
+#define HPF_Fs192000_Gain8_A2 0.000000
+#define HPF_Fs192000_Gain8_B1 (-0.767327)
+#define HPF_Fs192000_Gain8_B2 0.000000
+/* Gain = 9.000000 dB */
+#define HPF_Fs192000_Gain9_A0 2.606839
+#define HPF_Fs192000_Gain9_A1 (-2.374166)
+#define HPF_Fs192000_Gain9_A2 0.000000
+#define HPF_Fs192000_Gain9_B1 (-0.767327)
+#define HPF_Fs192000_Gain9_B2 0.000000
+/* Gain = 10.000000 dB */
+#define HPF_Fs192000_Gain10_A0 2.910726
+#define HPF_Fs192000_Gain10_A1 (-2.678053)
+#define HPF_Fs192000_Gain10_A2 0.000000
+#define HPF_Fs192000_Gain10_B1 (-0.767327)
+#define HPF_Fs192000_Gain10_B2 0.000000
+/* Gain = 11.000000 dB */
+#define HPF_Fs192000_Gain11_A0 3.251693
+#define HPF_Fs192000_Gain11_A1 (-3.019020)
+#define HPF_Fs192000_Gain11_A2 0.000000
+#define HPF_Fs192000_Gain11_B1 (-0.767327)
+#define HPF_Fs192000_Gain11_B2 0.000000
+/* Gain = 12.000000 dB */
+#define HPF_Fs192000_Gain12_A0 3.634264
+#define HPF_Fs192000_Gain12_A1 (-3.401591)
+#define HPF_Fs192000_Gain12_A2 0.000000
+#define HPF_Fs192000_Gain12_B1 (-0.767327)
+#define HPF_Fs192000_Gain12_B2 0.000000
+/* Gain = 13.000000 dB */
+#define HPF_Fs192000_Gain13_A0 4.063516
+#define HPF_Fs192000_Gain13_A1 (-3.830843)
+#define HPF_Fs192000_Gain13_A2 0.000000
+#define HPF_Fs192000_Gain13_B1 (-0.767327)
+#define HPF_Fs192000_Gain13_B2 0.000000
+/* Gain = 14.000000 dB */
+#define HPF_Fs192000_Gain14_A0 4.545145
+#define HPF_Fs192000_Gain14_A1 (-4.312472)
+#define HPF_Fs192000_Gain14_A2 0.000000
+#define HPF_Fs192000_Gain14_B1 (-0.767327)
+#define HPF_Fs192000_Gain14_B2 0.000000
+/* Gain = 15.000000 dB */
+#define HPF_Fs192000_Gain15_A0 5.085542
+#define HPF_Fs192000_Gain15_A1 (-4.852868)
+#define HPF_Fs192000_Gain15_A2 0.000000
+#define HPF_Fs192000_Gain15_B1 (-0.767327)
+#define HPF_Fs192000_Gain15_B2 0.000000
#endif
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
index ff2c90a..3118e77 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
@@ -50,78 +50,62 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
- if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
- {
+ if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
pInstance->NewParams = *pParams;
- if(
- /* General parameters */
- ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON)) ||
- ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) && (pParams->SampleRate != LVM_FS_12000) &&
- (pParams->SampleRate != LVM_FS_16000) && (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000) &&
- (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) && (pParams->SampleRate != LVM_FS_48000) &&
- (pParams->SampleRate != LVM_FS_88200) && (pParams->SampleRate != LVM_FS_96000) &&
- (pParams->SampleRate != LVM_FS_176400) && (pParams->SampleRate != LVM_FS_192000)) ||
-#ifdef SUPPORT_MC
- ((pParams->SourceFormat != LVM_STEREO) &&
- (pParams->SourceFormat != LVM_MONOINSTEREO) &&
- (pParams->SourceFormat != LVM_MONO) &&
- (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
-#else
- ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) && (pParams->SourceFormat != LVM_MONO)) ||
-#endif
- (pParams->SpeakerType > LVM_EX_HEADPHONES))
- {
+ if (
+ /* General parameters */
+ ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON)) ||
+ ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) &&
+ (pParams->SampleRate != LVM_FS_12000) && (pParams->SampleRate != LVM_FS_16000) &&
+ (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000) &&
+ (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) &&
+ (pParams->SampleRate != LVM_FS_48000) && (pParams->SampleRate != LVM_FS_88200) &&
+ (pParams->SampleRate != LVM_FS_96000) && (pParams->SampleRate != LVM_FS_176400) &&
+ (pParams->SampleRate != LVM_FS_192000)) ||
+ ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) &&
+ (pParams->SourceFormat != LVM_MONO) && (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
+ (pParams->SpeakerType > LVM_EX_HEADPHONES)) {
return (LVM_OUTOFRANGE);
}
-#ifdef SUPPORT_MC
pInstance->Params.NrChannels = pParams->NrChannels;
- pInstance->Params.ChMask = pParams->ChMask;
-#endif
+ pInstance->Params.ChMask = pParams->ChMask;
/*
* Cinema Sound parameters
*/
- if((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) && (pParams->VirtualizerOperatingMode != LVM_MODE_ON))
- {
+ if ((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) &&
+ (pParams->VirtualizerOperatingMode != LVM_MODE_ON)) {
return (LVM_OUTOFRANGE);
}
- if(pParams->VirtualizerType != LVM_CONCERTSOUND)
- {
+ if (pParams->VirtualizerType != LVM_CONCERTSOUND) {
return (LVM_OUTOFRANGE);
}
- if(pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL)
- {
+ if (pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL) {
return (LVM_OUTOFRANGE);
}
- if(pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL)
- {
+ if (pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL) {
return (LVM_OUTOFRANGE);
}
/*
* N-Band Equalizer
*/
- if(pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)
- {
+ if (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands) {
return (LVM_OUTOFRANGE);
}
/* Definition pointer */
- if ((pParams->pEQNB_BandDefinition == LVM_NULL) &&
- (pParams->EQNB_NBands != 0))
- {
+ if ((pParams->pEQNB_BandDefinition == LVM_NULL) && (pParams->EQNB_NBands != 0)) {
return (LVM_NULLADDRESS);
}
@@ -129,35 +113,31 @@
* Copy the filter definitions for the Equaliser
*/
{
- LVM_INT16 i;
+ LVM_INT16 i;
- if (pParams->EQNB_NBands != 0)
- {
- for (i=0; i<pParams->EQNB_NBands; i++)
- {
+ if (pParams->EQNB_NBands != 0) {
+ for (i = 0; i < pParams->EQNB_NBands; i++) {
pInstance->pEQNB_BandDefs[i] = pParams->pEQNB_BandDefinition[i];
}
pInstance->NewParams.pEQNB_BandDefinition = pInstance->pEQNB_BandDefs;
}
}
- if( /* N-Band Equaliser parameters */
- ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) && (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
- (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands))
- {
+ if (/* N-Band Equaliser parameters */
+ ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) &&
+ (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
+ (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)) {
return (LVM_OUTOFRANGE);
}
/* Band parameters*/
{
LVM_INT16 i;
- for(i = 0; i < pParams->EQNB_NBands; i++)
- {
- if(((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ) ||
- (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
- ((pParams->pEQNB_BandDefinition[i].Gain < LVM_EQNB_MIN_BAND_GAIN) ||
- (pParams->pEQNB_BandDefinition[i].Gain > LVM_EQNB_MAX_BAND_GAIN)) ||
- ((pParams->pEQNB_BandDefinition[i].QFactor < LVM_EQNB_MIN_QFACTOR) ||
- (pParams->pEQNB_BandDefinition[i].QFactor > LVM_EQNB_MAX_QFACTOR)))
- {
+ for (i = 0; i < pParams->EQNB_NBands; i++) {
+ if (((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ) ||
+ (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
+ ((pParams->pEQNB_BandDefinition[i].Gain < LVM_EQNB_MIN_BAND_GAIN) ||
+ (pParams->pEQNB_BandDefinition[i].Gain > LVM_EQNB_MAX_BAND_GAIN)) ||
+ ((pParams->pEQNB_BandDefinition[i].QFactor < LVM_EQNB_MIN_QFACTOR) ||
+ (pParams->pEQNB_BandDefinition[i].QFactor > LVM_EQNB_MAX_QFACTOR))) {
return (LVM_OUTOFRANGE);
}
}
@@ -166,24 +146,25 @@
/*
* Bass Enhancement parameters
*/
- if(((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON)) ||
- ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL ) || (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL ))||
- ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz) &&
- (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz)) ||
- ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON)))
- {
+ if (((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON)) ||
+ ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL) ||
+ (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL)) ||
+ ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) &&
+ (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz) &&
+ (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) &&
+ (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz)) ||
+ ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON))) {
return (LVM_OUTOFRANGE);
}
/*
* Volume Control parameters
*/
- if((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL ) || (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL ))
- {
+ if ((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL) ||
+ (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL)) {
return (LVM_OUTOFRANGE);
}
- if((pParams->VC_Balance < LVM_VC_BALANCE_MIN ) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX ))
- {
+ if ((pParams->VC_Balance < LVM_VC_BALANCE_MIN) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX)) {
return (LVM_OUTOFRANGE);
}
@@ -191,22 +172,21 @@
* PSA parameters
*/
if (((LVPSA_LevelDetectSpeed_en)pParams->PSA_PeakDecayRate > LVPSA_SPEED_HIGH) ||
- (pParams->PSA_Enable > LVM_PSA_ON))
- {
+ (pParams->PSA_Enable > LVM_PSA_ON)) {
return (LVM_OUTOFRANGE);
}
/*
- * Set the flag to indicate there are new parameters to use
- *
- * Protect the copy of the new parameters from interrupts to avoid possible problems
- * with loss control parameters. This problem can occur if this control function is called more
- * than once before a call to the process function. If the process function interrupts
- * the copy to NewParams then one frame may have mixed parameters, some old and some new.
- */
+ * Set the flag to indicate there are new parameters to use
+ *
+ * Protect the copy of the new parameters from interrupts to avoid possible problems
+ * with loss control parameters. This problem can occur if this control function is called more
+ * than once before a call to the process function. If the process function interrupts
+ * the copy to NewParams then one frame may have mixed parameters, some old and some new.
+ */
pInstance->ControlPending = LVM_TRUE;
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -230,16 +210,13 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance,
- LVM_ControlParams_t *pParams)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
/*
* Check pointer
*/
- if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
- {
+ if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
*pParams = pInstance->NewParams;
@@ -248,17 +225,16 @@
* Copy the filter definitions for the Equaliser
*/
{
- LVM_INT16 i;
+ LVM_INT16 i;
if (pInstance->NewParams.EQNB_NBands != 0)
- for (i=0; i<pInstance->NewParams.EQNB_NBands; i++)
- {
- pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
- }
+ for (i = 0; i < pInstance->NewParams.EQNB_NBands; i++) {
+ pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
+ }
pParams->pEQNB_BandDefinition = pInstance->pEQNB_UserDefs;
}
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -274,56 +250,46 @@
/* pParams Pointer to the parameters to use */
/* */
/****************************************************************************************/
-void LVM_SetTrebleBoost(LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams)
-{
- extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+ extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
- LVM_INT16 Offset;
- LVM_INT16 EffectLevel = 0;
+ LVM_INT16 Offset;
+ LVM_INT16 EffectLevel = 0;
/*
* Load the coefficients
*/
- if ((pParams->TE_OperatingMode == LVM_TE_ON) &&
- (pParams->SampleRate >= TrebleBoostMinRate) &&
- (pParams->OperatingMode == LVM_MODE_ON) &&
- (pParams->TE_EffectLevel > 0))
- {
- if((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
- ((pParams->SpeakerType == LVM_HEADPHONES)||
- (pParams->SpeakerType == LVM_EX_HEADPHONES)))
- {
+ if ((pParams->TE_OperatingMode == LVM_TE_ON) && (pParams->SampleRate >= TrebleBoostMinRate) &&
+ (pParams->OperatingMode == LVM_MODE_ON) && (pParams->TE_EffectLevel > 0)) {
+ if ((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
+ ((pParams->SpeakerType == LVM_HEADPHONES) ||
+ (pParams->SpeakerType == LVM_EX_HEADPHONES))) {
pInstance->TE_Active = LVM_FALSE;
- }
- else
- {
+ } else {
EffectLevel = pParams->TE_EffectLevel;
pInstance->TE_Active = LVM_TRUE;
}
- if(pInstance->TE_Active == LVM_TRUE)
- {
+ if (pInstance->TE_Active == LVM_TRUE) {
/*
* Load the coefficients and enabled the treble boost
*/
- Offset = (LVM_INT16)(EffectLevel - 1 + TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
+ Offset = (LVM_INT16)(EffectLevel - 1 +
+ TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(&pInstance->pTE_State->TrebleBoost_State,
- &pInstance->pTE_Taps->TrebleBoost_Taps,
- &LVM_TrebleBoostCoefs[Offset]);
+ &pInstance->pTE_Taps->TrebleBoost_Taps,
+ &LVM_TrebleBoostCoefs[Offset]);
/*
* Clear the taps
*/
- LoadConst_Float((LVM_FLOAT)0, /* Value */
- (LVM_FLOAT *)&pInstance->pTE_Taps->TrebleBoost_Taps, /* Destination.\
- Cast to void: no dereferencing in function */
- (LVM_UINT16)(sizeof(pInstance->pTE_Taps->TrebleBoost_Taps) / \
- sizeof(LVM_FLOAT))); /* Number of words */
+ LoadConst_Float((LVM_FLOAT)0, /* Value */
+ (LVM_FLOAT*)&pInstance->pTE_Taps->TrebleBoost_Taps, /* Destination.\
+ Cast to void: no dereferencing in function */
+ (LVM_UINT16)(sizeof(pInstance->pTE_Taps->TrebleBoost_Taps) /
+ sizeof(LVM_FLOAT))); /* Number of words */
}
- }
- else
- {
+ } else {
/*
* Disable the treble boost
*/
@@ -345,94 +311,76 @@
/* pParams Initialisation parameters */
/* */
/************************************************************************************/
-void LVM_SetVolume(LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams)
-{
-
- LVM_UINT16 dBShifts; /* 6dB shifts */
- LVM_UINT16 dBOffset; /* Table offset */
- LVM_INT16 Volume = 0; /* Required volume in dBs */
- LVM_FLOAT Temp;
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+ LVM_UINT16 dBShifts; /* 6dB shifts */
+ LVM_UINT16 dBOffset; /* Table offset */
+ LVM_INT16 Volume = 0; /* Required volume in dBs */
+ LVM_FLOAT Temp;
/*
* Limit the gain to the maximum allowed
*/
- if (pParams->VC_EffectLevel > 0)
- {
- Volume = 0;
- }
- else
- {
- Volume = pParams->VC_EffectLevel;
- }
+ if (pParams->VC_EffectLevel > 0) {
+ Volume = 0;
+ } else {
+ Volume = pParams->VC_EffectLevel;
+ }
- /* Compensate this volume in PSA plot */
- if(Volume > -60) /* Limit volume loss to PSA Limits*/
- pInstance->PSA_GainOffset=(LVM_INT16)(-Volume);/* Loss is compensated by Gain*/
- else
- pInstance->PSA_GainOffset=(LVM_INT16)60;/* Loss is compensated by Gain*/
+ /* Compensate this volume in PSA plot */
+ if (Volume > -60) /* Limit volume loss to PSA Limits*/
+ pInstance->PSA_GainOffset = (LVM_INT16)(-Volume); /* Loss is compensated by Gain*/
+ else
+ pInstance->PSA_GainOffset = (LVM_INT16)60; /* Loss is compensated by Gain*/
pInstance->VC_AVLFixedVolume = 0;
/*
* Set volume control and AVL volumes according to headroom and volume user setting
*/
- if(pParams->OperatingMode == LVM_MODE_ON)
- {
+ if (pParams->OperatingMode == LVM_MODE_ON) {
/* Default Situation with no AVL and no RS */
- if(pParams->EQNB_OperatingMode == LVM_EQNB_ON)
- {
- if(Volume > -pInstance->Headroom)
- Volume = (LVM_INT16)-pInstance->Headroom;
+ if (pParams->EQNB_OperatingMode == LVM_EQNB_ON) {
+ if (Volume > -pInstance->Headroom) Volume = (LVM_INT16)-pInstance->Headroom;
}
}
/*
* Activate volume control if necessary
*/
- pInstance->VC_Active = LVM_TRUE;
- if (Volume != 0)
- {
+ pInstance->VC_Active = LVM_TRUE;
+ if (Volume != 0) {
pInstance->VC_VolumedB = Volume;
- }
- else
- {
+ } else {
pInstance->VC_VolumedB = 0;
}
/*
* Calculate the required gain and shifts
*/
- dBOffset = (LVM_UINT16)((-Volume) % 6); /* Get the dBs 0-5 */
- dBShifts = (LVM_UINT16)(Volume / -6); /* Get the 6dB shifts */
+ dBOffset = (LVM_UINT16)((-Volume) % 6); /* Get the dBs 0-5 */
+ dBShifts = (LVM_UINT16)(Volume / -6); /* Get the 6dB shifts */
/*
* Set the parameters
*/
- if(dBShifts == 0)
- {
+ if (dBShifts == 0) {
LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0],
- (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
- }
- else
- {
+ (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
+ } else {
Temp = LVM_VolumeTable[dBOffset];
- while(dBShifts) {
+ while (dBShifts) {
Temp = Temp / 2.0f;
dBShifts--;
}
LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0], Temp);
}
pInstance->VC_Volume.MixerStream[0].CallbackSet = 1;
- if(pInstance->NoSmoothVolume == LVM_TRUE)
- {
+ if (pInstance->NoSmoothVolume == LVM_TRUE) {
LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0,
pInstance->Params.SampleRate, 2);
- }
- else
- {
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],
- LVM_VC_MIXER_TIME, pInstance->Params.SampleRate, 2);
+ } else {
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], LVM_VC_MIXER_TIME,
+ pInstance->Params.SampleRate, 2);
}
}
@@ -453,43 +401,39 @@
/* NOTES: */
/* */
/************************************************************************************/
-void LVM_SetHeadroom(LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams)
-{
- LVM_INT16 ii, jj;
- LVM_INT16 Headroom = 0;
- LVM_INT16 MaxGain = 0;
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+ LVM_INT16 ii, jj;
+ LVM_INT16 Headroom = 0;
+ LVM_INT16 MaxGain = 0;
- if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON)
- && (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON))
- {
+ if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON) &&
+ (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON)) {
/* Find typical headroom value */
- for(jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++)
- {
+ for (jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++) {
MaxGain = 0;
- for( ii = 0; ii < pParams->EQNB_NBands; ii++)
- {
- if((pParams->pEQNB_BandDefinition[ii].Frequency >= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
- (pParams->pEQNB_BandDefinition[ii].Frequency <= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High))
- {
- if(pParams->pEQNB_BandDefinition[ii].Gain > MaxGain)
- {
+ for (ii = 0; ii < pParams->EQNB_NBands; ii++) {
+ if ((pParams->pEQNB_BandDefinition[ii].Frequency >=
+ pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
+ (pParams->pEQNB_BandDefinition[ii].Frequency <=
+ pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High)) {
+ if (pParams->pEQNB_BandDefinition[ii].Gain > MaxGain) {
MaxGain = pParams->pEQNB_BandDefinition[ii].Gain;
}
}
}
- if((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) > Headroom){
- Headroom = (LVM_INT16)(MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
+ if ((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) >
+ Headroom) {
+ Headroom = (LVM_INT16)(
+ MaxGain -
+ pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
}
}
/* Saturate */
- if(Headroom < 0)
- Headroom = 0;
+ if (Headroom < 0) Headroom = 0;
}
- pInstance->Headroom = (LVM_UINT16)Headroom ;
-
+ pInstance->Headroom = (LVM_UINT16)Headroom;
}
/****************************************************************************************/
@@ -510,32 +454,26 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
- LVM_ControlParams_t LocalParams;
- LVM_INT16 Count = 5;
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_ControlParams_t LocalParams;
+ LVM_INT16 Count = 5;
/*
* Copy the new parameters but make sure they didn't change while copying
*/
- do
- {
+ do {
pInstance->ControlPending = LVM_FALSE;
LocalParams = pInstance->NewParams;
pInstance->HeadroomParams = pInstance->NewHeadroomParams;
Count--;
- } while ((pInstance->ControlPending != LVM_FALSE) &&
- (Count > 0));
+ } while ((pInstance->ControlPending != LVM_FALSE) && (Count > 0));
-#ifdef SUPPORT_MC
pInstance->NrChannels = LocalParams.NrChannels;
pInstance->ChMask = LocalParams.ChMask;
-#endif
/* Clear all internal data if format change*/
- if(LocalParams.SourceFormat != pInstance->Params.SourceFormat)
- {
+ if (LocalParams.SourceFormat != pInstance->Params.SourceFormat) {
LVM_ClearAudioBuffers(pInstance);
pInstance->ControlPending = LVM_FALSE;
}
@@ -547,31 +485,27 @@
(pInstance->Params.TE_EffectLevel != LocalParams.TE_EffectLevel) ||
(pInstance->Params.TE_OperatingMode != LocalParams.TE_OperatingMode) ||
(pInstance->Params.OperatingMode != LocalParams.OperatingMode) ||
- (pInstance->Params.SpeakerType != LocalParams.SpeakerType))
- {
- LVM_SetTrebleBoost(pInstance,
- &LocalParams);
+ (pInstance->Params.SpeakerType != LocalParams.SpeakerType)) {
+ LVM_SetTrebleBoost(pInstance, &LocalParams);
}
/*
* Update the headroom if required
*/
- LVM_SetHeadroom(pInstance, /* Instance pointer */
- &LocalParams); /* New parameters */
+ LVM_SetHeadroom(pInstance, /* Instance pointer */
+ &LocalParams); /* New parameters */
/*
* Update the volume if required
*/
{
- LVM_SetVolume(pInstance, /* Instance pointer */
- &LocalParams); /* New parameters */
+ LVM_SetVolume(pInstance, /* Instance pointer */
+ &LocalParams); /* New parameters */
}
/* Apply balance changes*/
- if(pInstance->Params.VC_Balance != LocalParams.VC_Balance)
- {
+ if (pInstance->Params.VC_Balance != LocalParams.VC_Balance) {
/* Configure Mixer module for gradual changes to volume*/
- if(LocalParams.VC_Balance < 0)
- {
+ if (LocalParams.VC_Balance < 0) {
LVM_FLOAT Target_Float;
/* Drop in right channel volume*/
Target_Float = LVM_MAXFLOAT;
@@ -583,9 +517,7 @@
LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
- }
- else if(LocalParams.VC_Balance >0)
- {
+ } else if (LocalParams.VC_Balance > 0) {
LVM_FLOAT Target_Float;
/* Drop in left channel volume*/
Target_Float = dB_to_LinFloat((LVM_INT16)((-LocalParams.VC_Balance) << 4));
@@ -597,63 +529,54 @@
LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
- }
- else
- {
+ } else {
LVM_FLOAT Target_Float;
/* No drop*/
Target_Float = LVM_MAXFLOAT;
- LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0],Target_Float);
+ LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0], Target_Float);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],
- LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+ LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
- LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1],Target_Float);
+ LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
- LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+ LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
}
}
/*
* Update the bass enhancement
*/
{
- LVDBE_ReturnStatus_en DBE_Status;
- LVDBE_Params_t DBE_Params;
- LVDBE_Handle_t *hDBEInstance = (LVDBE_Handle_t *)pInstance->hDBEInstance;
+ LVDBE_ReturnStatus_en DBE_Status;
+ LVDBE_Params_t DBE_Params;
+ LVDBE_Handle_t* hDBEInstance = (LVDBE_Handle_t*)pInstance->hDBEInstance;
/*
* Set the new parameters
*/
- if(LocalParams.OperatingMode == LVM_MODE_OFF)
- {
+ if (LocalParams.OperatingMode == LVM_MODE_OFF) {
DBE_Params.OperatingMode = LVDBE_OFF;
+ } else {
+ DBE_Params.OperatingMode = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
}
- else
- {
- DBE_Params.OperatingMode = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
- }
- DBE_Params.SampleRate = (LVDBE_Fs_en)LocalParams.SampleRate;
- DBE_Params.EffectLevel = LocalParams.BE_EffectLevel;
- DBE_Params.CentreFrequency = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
- DBE_Params.HPFSelect = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
- DBE_Params.HeadroomdB = 0;
- DBE_Params.VolumeControl = LVDBE_VOLUME_OFF;
- DBE_Params.VolumedB = 0;
-#ifdef SUPPORT_MC
- DBE_Params.NrChannels = LocalParams.NrChannels;
-#endif
+ DBE_Params.SampleRate = (LVDBE_Fs_en)LocalParams.SampleRate;
+ DBE_Params.EffectLevel = LocalParams.BE_EffectLevel;
+ DBE_Params.CentreFrequency = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
+ DBE_Params.HPFSelect = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
+ DBE_Params.HeadroomdB = 0;
+ DBE_Params.VolumeControl = LVDBE_VOLUME_OFF;
+ DBE_Params.VolumedB = 0;
+ DBE_Params.NrChannels = LocalParams.NrChannels;
/*
* Make the changes
*/
- DBE_Status = LVDBE_Control(hDBEInstance,
- &DBE_Params);
+ DBE_Status = LVDBE_Control(hDBEInstance, &DBE_Params);
/*
* Quit if the changes were not accepted
*/
- if (DBE_Status != LVDBE_SUCCESS)
- {
- return((LVM_ReturnStatus_en)DBE_Status);
+ if (DBE_Status != LVDBE_SUCCESS) {
+ return ((LVM_ReturnStatus_en)DBE_Status);
}
/*
@@ -666,168 +589,132 @@
* Update the N-Band Equaliser
*/
{
- LVEQNB_ReturnStatus_en EQNB_Status;
- LVEQNB_Params_t EQNB_Params;
- LVEQNB_Handle_t *hEQNBInstance = (LVEQNB_Handle_t *)pInstance->hEQNBInstance;
+ LVEQNB_ReturnStatus_en EQNB_Status;
+ LVEQNB_Params_t EQNB_Params;
+ LVEQNB_Handle_t* hEQNBInstance = (LVEQNB_Handle_t*)pInstance->hEQNBInstance;
/*
* Set the new parameters
*/
- if(LocalParams.OperatingMode == LVM_MODE_OFF)
- {
- EQNB_Params.OperatingMode = LVEQNB_BYPASS;
- }
- else
- {
- EQNB_Params.OperatingMode = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
+ if (LocalParams.OperatingMode == LVM_MODE_OFF) {
+ EQNB_Params.OperatingMode = LVEQNB_BYPASS;
+ } else {
+ EQNB_Params.OperatingMode = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
}
- EQNB_Params.SampleRate = (LVEQNB_Fs_en)LocalParams.SampleRate;
- EQNB_Params.NBands = LocalParams.EQNB_NBands;
- EQNB_Params.pBandDefinition = (LVEQNB_BandDef_t *)LocalParams.pEQNB_BandDefinition;
- if (LocalParams.SourceFormat == LVM_STEREO) /* Mono format not supported */
+ EQNB_Params.SampleRate = (LVEQNB_Fs_en)LocalParams.SampleRate;
+ EQNB_Params.NBands = LocalParams.EQNB_NBands;
+ EQNB_Params.pBandDefinition = (LVEQNB_BandDef_t*)LocalParams.pEQNB_BandDefinition;
+ if (LocalParams.SourceFormat == LVM_STEREO) /* Mono format not supported */
{
EQNB_Params.SourceFormat = LVEQNB_STEREO;
}
-#ifdef SUPPORT_MC
/* Note: Currently SourceFormat field of EQNB is not been
* used by the module.
*/
- else if (LocalParams.SourceFormat == LVM_MULTICHANNEL)
- {
+ else if (LocalParams.SourceFormat == LVM_MULTICHANNEL) {
EQNB_Params.SourceFormat = LVEQNB_MULTICHANNEL;
+ } else {
+ EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
}
-#endif
- else
- {
- EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
- }
-#ifdef SUPPORT_MC
- EQNB_Params.NrChannels = LocalParams.NrChannels;
-#endif
+ EQNB_Params.NrChannels = LocalParams.NrChannels;
/*
* Set the control flag
*/
if ((LocalParams.OperatingMode == LVM_MODE_ON) &&
- (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON))
- {
+ (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON)) {
pInstance->EQNB_Active = LVM_TRUE;
- }
- else
- {
+ } else {
EQNB_Params.OperatingMode = LVEQNB_BYPASS;
}
/*
* Make the changes
*/
- EQNB_Status = LVEQNB_Control(hEQNBInstance,
- &EQNB_Params);
+ EQNB_Status = LVEQNB_Control(hEQNBInstance, &EQNB_Params);
/*
* Quit if the changes were not accepted
*/
- if (EQNB_Status != LVEQNB_SUCCESS)
- {
- return((LVM_ReturnStatus_en)EQNB_Status);
+ if (EQNB_Status != LVEQNB_SUCCESS) {
+ return ((LVM_ReturnStatus_en)EQNB_Status);
}
-
}
/*
* Update concert sound
*/
{
- LVCS_ReturnStatus_en CS_Status;
- LVCS_Params_t CS_Params;
- LVCS_Handle_t *hCSInstance = (LVCS_Handle_t *)pInstance->hCSInstance;
- LVM_Mode_en CompressorMode=LVM_MODE_ON;
+ LVCS_ReturnStatus_en CS_Status;
+ LVCS_Params_t CS_Params;
+ LVCS_Handle_t* hCSInstance = (LVCS_Handle_t*)pInstance->hCSInstance;
+ LVM_Mode_en CompressorMode = LVM_MODE_ON;
/*
* Set the new parameters
*/
- if(LocalParams.VirtualizerOperatingMode == LVM_MODE_ON)
- {
- CS_Params.OperatingMode = LVCS_ON;
- }
- else
- {
- CS_Params.OperatingMode = LVCS_OFF;
+ if (LocalParams.VirtualizerOperatingMode == LVM_MODE_ON) {
+ CS_Params.OperatingMode = LVCS_ON;
+ } else {
+ CS_Params.OperatingMode = LVCS_OFF;
}
- if((LocalParams.TE_OperatingMode == LVM_TE_ON) && (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS))
- {
- CS_Params.SpeakerType = LVCS_EX_HEADPHONES;
- }
- else
- {
- CS_Params.SpeakerType = LVCS_HEADPHONES;
+ if ((LocalParams.TE_OperatingMode == LVM_TE_ON) &&
+ (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS)) {
+ CS_Params.SpeakerType = LVCS_EX_HEADPHONES;
+ } else {
+ CS_Params.SpeakerType = LVCS_HEADPHONES;
}
-#ifdef SUPPORT_MC
/* Concert sound module processes only the left and right channels
* data. So the Source Format is set to LVCS_STEREO for multichannel
* input also.
*/
if (LocalParams.SourceFormat == LVM_STEREO ||
- LocalParams.SourceFormat == LVM_MULTICHANNEL)
-#else
- if (LocalParams.SourceFormat == LVM_STEREO) /* Mono format not supported */
-#endif
- {
+ LocalParams.SourceFormat == LVM_MULTICHANNEL) {
CS_Params.SourceFormat = LVCS_STEREO;
+ } else {
+ CS_Params.SourceFormat = LVCS_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
}
- else
- {
- CS_Params.SourceFormat = LVCS_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
- }
- CS_Params.SampleRate = LocalParams.SampleRate;
+ CS_Params.SampleRate = LocalParams.SampleRate;
CS_Params.ReverbLevel = LocalParams.VirtualizerReverbLevel;
CS_Params.EffectLevel = LocalParams.CS_EffectLevel;
-#ifdef SUPPORT_MC
- CS_Params.NrChannels = LocalParams.NrChannels;
-#endif
+ CS_Params.NrChannels = LocalParams.NrChannels;
/*
* Set the control flag
*/
if (((LVM_Mode_en)LocalParams.OperatingMode == LVM_MODE_ON) &&
- ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF))
- {
+ ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF)) {
pInstance->CS_Active = LVM_TRUE;
- }
- else
- {
+ } else {
CS_Params.OperatingMode = LVCS_OFF;
}
- CS_Params.CompressorMode=CompressorMode;
+ CS_Params.CompressorMode = CompressorMode;
/*
* Make the changes
*/
- CS_Status = LVCS_Control(hCSInstance,
- &CS_Params);
+ CS_Status = LVCS_Control(hCSInstance, &CS_Params);
/*
* Quit if the changes were not accepted
*/
- if (CS_Status != LVCS_SUCCESS)
- {
- return((LVM_ReturnStatus_en)CS_Status);
+ if (CS_Status != LVCS_SUCCESS) {
+ return ((LVM_ReturnStatus_en)CS_Status);
}
-
}
/*
* Update the Power Spectrum Analyser
*/
{
- LVPSA_RETURN PSA_Status;
- LVPSA_ControlParams_t PSA_Params;
- pLVPSA_Handle_t *hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+ LVPSA_RETURN PSA_Status;
+ LVPSA_ControlParams_t PSA_Params;
+ pLVPSA_Handle_t* hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
/*
* Set the new parameters
@@ -838,23 +725,19 @@
/*
* Make the changes
*/
- if(pInstance->InstParams.PSA_Included==LVM_PSA_ON)
- {
- PSA_Status = LVPSA_Control(hPSAInstance,
- &PSA_Params);
+ if (pInstance->InstParams.PSA_Included == LVM_PSA_ON) {
+ PSA_Status = LVPSA_Control(hPSAInstance, &PSA_Params);
- if (PSA_Status != LVPSA_OK)
- {
- return((LVM_ReturnStatus_en)PSA_Status);
+ if (PSA_Status != LVPSA_OK) {
+ return ((LVM_ReturnStatus_en)PSA_Status);
}
/*
* Apply new settings
*/
- PSA_Status = LVPSA_ApplyNewSettings ((LVPSA_InstancePr_t*)hPSAInstance);
- if(PSA_Status != LVPSA_OK)
- {
- return((LVM_ReturnStatus_en)PSA_Status);
+ PSA_Status = LVPSA_ApplyNewSettings((LVPSA_InstancePr_t*)hPSAInstance);
+ if (PSA_Status != LVPSA_OK) {
+ return ((LVM_ReturnStatus_en)PSA_Status);
}
}
}
@@ -863,9 +746,9 @@
* Update the parameters and clear the flag
*/
pInstance->NoSmoothVolume = LVM_FALSE;
- pInstance->Params = LocalParams;
+ pInstance->Params = LocalParams;
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -887,36 +770,30 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
- LVM_HeadroomParams_t *pHeadroomParams)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
- LVM_UINT16 ii, NBands;
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+ LVM_HeadroomParams_t* pHeadroomParams) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_UINT16 ii, NBands;
/* Check for NULL pointers */
- if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
- if ((pHeadroomParams->NHeadroomBands != 0) && (pHeadroomParams->pHeadroomDefinition == LVM_NULL))
- {
+ if ((pHeadroomParams->NHeadroomBands != 0) &&
+ (pHeadroomParams->pHeadroomDefinition == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
/* Consider only the LVM_HEADROOM_MAX_NBANDS first bands*/
- if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS)
- {
+ if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS) {
NBands = LVM_HEADROOM_MAX_NBANDS;
- }
- else
- {
+ } else {
NBands = pHeadroomParams->NHeadroomBands;
}
pInstance->NewHeadroomParams.NHeadroomBands = NBands;
/* Copy settings in memory */
- for(ii = 0; ii < NBands; ii++)
- {
+ for (ii = 0; ii < NBands; ii++) {
pInstance->pHeadroom_BandDefs[ii] = pHeadroomParams->pHeadroomDefinition[ii];
}
@@ -924,7 +801,7 @@
pInstance->NewHeadroomParams.Headroom_OperatingMode = pHeadroomParams->Headroom_OperatingMode;
pInstance->ControlPending = LVM_TRUE;
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -947,29 +824,26 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
- LVM_HeadroomParams_t *pHeadroomParams)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)hInstance;
- LVM_UINT16 ii;
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+ LVM_HeadroomParams_t* pHeadroomParams) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_UINT16 ii;
/* Check for NULL pointers */
- if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
pHeadroomParams->NHeadroomBands = pInstance->NewHeadroomParams.NHeadroomBands;
/* Copy settings in memory */
- for(ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++)
- {
+ for (ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++) {
pInstance->pHeadroom_UserDefs[ii] = pInstance->pHeadroom_BandDefs[ii];
}
pHeadroomParams->pHeadroomDefinition = pInstance->pHeadroom_UserDefs;
pHeadroomParams->Headroom_OperatingMode = pInstance->NewHeadroomParams.Headroom_OperatingMode;
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
/****************************************************************************************/
@@ -988,18 +862,14 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_INT32 LVM_AlgoCallBack( void *pBundleHandle,
- void *pData,
- LVM_INT16 callbackId)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)pBundleHandle;
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
- (void) pData;
+ (void)pData;
- switch(callbackId & 0xFF00){
+ switch (callbackId & 0xFF00) {
case ALGORITHM_CS_ID:
- switch(callbackId & 0x00FF)
- {
+ switch (callbackId & 0x00FF) {
case LVCS_EVENT_ALGOFF:
pInstance->CS_Active = LVM_FALSE;
break;
@@ -1008,8 +878,7 @@
}
break;
case ALGORITHM_EQNB_ID:
- switch(callbackId & 0x00FF)
- {
+ switch (callbackId & 0x00FF) {
case LVEQNB_EVENT_ALGOFF:
pInstance->EQNB_Active = LVM_FALSE;
break;
@@ -1040,21 +909,17 @@
/* 1. This function may be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVM_INT32 LVM_VCCallBack(void* pBundleHandle,
- void* pGeneralPurpose,
- short CallBackParam)
-{
- LVM_Instance_t *pInstance =(LVM_Instance_t *)pBundleHandle;
- LVM_FLOAT Target;
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
+ LVM_FLOAT Target;
- (void) pGeneralPurpose;
- (void) CallBackParam;
+ (void)pGeneralPurpose;
+ (void)CallBackParam;
/* When volume mixer has reached 0 dB target then stop it to avoid
unnecessary processing. */
Target = LVC_Mixer_GetTarget(&pInstance->VC_Volume.MixerStream[0]);
- if(Target == 1.0f)
- {
+ if (Target == 1.0f) {
pInstance->VC_Active = LVM_FALSE;
}
return 1;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 5620529..bb962df 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -20,6 +20,7 @@
/* Includes */
/* */
/************************************************************************************/
+#include <stdlib.h>
#include "LVM_Private.h"
#include "LVM_Tables.h"
@@ -28,570 +29,90 @@
/****************************************************************************************/
/* */
-/* FUNCTION: LVM_GetMemoryTable */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the default capabilities */
-/* */
-/* RETURNS: */
-/* LVM_SUCCESS Succeeded */
-/* LVM_NULLADDRESS When one of pMemoryTable or pInstParams is NULL */
-/* LVM_OUTOFRANGE When any of the Instance parameters are out of range */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVM_Process function */
-/* 2. The scratch memory is the largest required by any of the sub-modules plus any */
-/* additional scratch requirements of the bundle */
-/* */
-/****************************************************************************************/
-
-/*
- * 4 Types of Memory Regions of LVM
- * TODO: Allocate on the fly.
- * i) LVM_MEMREGION_PERSISTENT_SLOW_DATA - For Instance Handles
- * ii) LVM_MEMREGION_PERSISTENT_FAST_DATA - Persistent Buffers
- * iii) LVM_MEMREGION_PERSISTENT_FAST_COEF - For Holding Structure values
- * iv) LVM_MEMREGION_TEMPORARY_FAST - For Holding Structure values
- *
- * LVM_MEMREGION_PERSISTENT_SLOW_DATA:
- * Total Memory size:
- * sizeof(LVM_Instance_t) + \
- * sizeof(LVM_Buffer_t) + \
- * sizeof(LVPSA_InstancePr_t) + \
- * sizeof(LVM_Buffer_t) - needed if buffer mode is LVM_MANAGED_BUFFER
- *
- * LVM_MEMREGION_PERSISTENT_FAST_DATA:
- * Total Memory size:
- * sizeof(LVM_TE_Data_t) + \
- * 2 * pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t) + \
- * sizeof(LVCS_Data_t) + \
- * sizeof(LVDBE_Data_FLOAT_t) + \
- * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- * pInstParams->EQNB_NumBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- * pInstParams->EQNB_NumBands * sizeof(LVEQNB_BandDef_t) + \
- * pInstParams->EQNB_NumBands * sizeof(LVEQNB_BiquadType_en) + \
- * 2 * LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t) + \
- * PSA_InitParams.nBands * sizeof(Biquad_1I_Order2_Taps_t) + \
- * PSA_InitParams.nBands * sizeof(QPD_Taps_t)
- *
- * LVM_MEMREGION_PERSISTENT_FAST_COEF:
- * Total Memory size:
- * sizeof(LVM_TE_Coefs_t) + \
- * sizeof(LVCS_Coefficient_t) + \
- * sizeof(LVDBE_Coef_FLOAT_t) + \
- * sizeof(Biquad_FLOAT_Instance_t) + \
- * sizeof(Biquad_FLOAT_Instance_t) + \
- * pInstParams->EQNB_NumBands * sizeof(Biquad_FLOAT_Instance_t) + \
- * PSA_InitParams.nBands * sizeof(Biquad_Instance_t) + \
- * PSA_InitParams.nBands * sizeof(QPD_State_t)
- *
- * LVM_MEMREGION_TEMPORARY_FAST (Scratch):
- * Total Memory Size:
- * BundleScratchSize + \
- * MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT) + \
- * MaxScratchOf (CS, EQNB, DBE, PSA)
- *
- * a)BundleScratchSize:
- * 3 * LVM_MAX_CHANNELS \
- * * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_FLOAT)
- * This Memory is allocated only when Buffer mode is LVM_MANAGED_BUFFER.
- * b)MaxScratchOf (CS, EQNB, DBE, PSA)
- * This Memory is needed for scratch usage for CS, EQNB, DBE, PSA.
- * CS = (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- * * pCapabilities->MaxBlockSize)
- * EQNB = (LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- * * pCapabilities->MaxBlockSize)
- * DBE = (LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT)
- * * pCapabilities->MaxBlockSize)
- * PSA = (2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT))
- * one MaxInputBlockSize for input and another for filter output
- * c)MAX_INTERNAL_BLOCKSIZE
- * This Memory is needed for PSAInput - Temp memory to store output
- * from McToMono block and given as input to PSA block
- */
-
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t hInstance,
- LVM_MemTab_t *pMemoryTable,
- LVM_InstParams_t *pInstParams)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
- LVM_UINT32 AlgScratchSize;
- LVM_UINT32 BundleScratchSize;
- LVM_UINT16 InternalBlockSize;
- INST_ALLOC AllocMem[LVM_NR_MEMORY_REGIONS];
- LVM_INT16 i;
-
- /*
- * Check parameters
- */
- if(pMemoryTable == LVM_NULL)
- {
- return LVM_NULLADDRESS;
- }
-
- /*
- * Return memory table if the instance has already been created
- */
- if (hInstance != LVM_NULL)
- {
- /* Read back memory allocation table */
- *pMemoryTable = pInstance->MemoryTable;
- return(LVM_SUCCESS);
- }
-
- if(pInstParams == LVM_NULL)
- {
- return LVM_NULLADDRESS;
- }
-
- /*
- * Power Spectrum Analyser
- */
- if(pInstParams->PSA_Included > LVM_PSA_ON)
- {
- return (LVM_OUTOFRANGE);
- }
-
- /*
- * Check the instance parameters
- */
- if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
- {
- return (LVM_OUTOFRANGE);
- }
-
- /* N-Band Equalizer */
- if( pInstParams->EQNB_NumBands > 32 )
- {
- return (LVM_OUTOFRANGE);
- }
-
- if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
- {
- if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
- {
- return (LVM_OUTOFRANGE);
- }
- }
- else
- {
- if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
- {
- return (LVM_OUTOFRANGE);
- }
- }
-
- /*
- * Initialise the AllocMem structures
- */
- for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
- {
- InstAlloc_Init(&AllocMem[i], LVM_NULL);
- }
- InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
-
- if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
- {
- InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
- }
-
- /* Maximum Internal Black Size should not be more than MAX_INTERNAL_BLOCKSIZE*/
- if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
- {
- InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
- }
-
- /*
- * Bundle requirements
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- sizeof(LVM_Instance_t));
-
- /*
- * Set the algorithm and bundle scratch requirements
- */
- AlgScratchSize = 0;
- if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
- {
- BundleScratchSize = 3 * LVM_MAX_CHANNELS \
- * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
- * sizeof(LVM_FLOAT);
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch buffer */
- BundleScratchSize);
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- sizeof(LVM_Buffer_t));
- }
-
- /*
- * Treble Enhancement requirements
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- sizeof(LVM_TE_Data_t));
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- sizeof(LVM_TE_Coefs_t));
-
- /*
- * N-Band Equalizer requirements
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA], /* Local storage */
- (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA], /* User storage */
- (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-
- /*
- * Concert Sound requirements
- */
- {
- LVCS_MemTab_t CS_MemTab;
- LVCS_Capabilities_t CS_Capabilities;
-
- /*
- * Set the capabilities
- */
- CS_Capabilities.MaxBlockSize = InternalBlockSize;
-
- /*
- * Get the memory requirements
- */
- LVCS_Memory(LVM_NULL,
- &CS_MemTab,
- &CS_Capabilities);
-
- /*
- * Update the memory allocation structures
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
- if (CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
- }
-
- /*
- * Dynamic Bass Enhancement requirements
- */
- {
- LVDBE_MemTab_t DBE_MemTab;
- LVDBE_Capabilities_t DBE_Capabilities;
-
- /*
- * Set the capabilities
- */
- DBE_Capabilities.SampleRate = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
- LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
- LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
- LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
- LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
- LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
- LVDBE_CAP_FS_192000;
- DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
- DBE_Capabilities.MaxBlockSize = InternalBlockSize;
-
- /*
- * Get the memory requirements
- */
- LVDBE_Memory(LVM_NULL,
- &DBE_MemTab,
-
- &DBE_Capabilities);
- /*
- * Update the bundle table
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
- if (DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
- }
-
- /*
- * N-Band equaliser requirements
- */
- {
- LVEQNB_MemTab_t EQNB_MemTab; /* For N-Band Equaliser */
- LVEQNB_Capabilities_t EQNB_Capabilities;
-
- /*
- * Set the capabilities
- */
- EQNB_Capabilities.SampleRate = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
- LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
- LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
- LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
- LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
- LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
- LVEQNB_CAP_FS_192000;
- EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
- EQNB_Capabilities.MaxBlockSize = InternalBlockSize;
- EQNB_Capabilities.MaxBands = pInstParams->EQNB_NumBands;
-
- /*
- * Get the memory requirements
- */
- LVEQNB_Memory(LVM_NULL,
- &EQNB_MemTab,
- &EQNB_Capabilities);
-
- /*
- * Update the bundle table
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
- if (EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
- }
-
- /*
- * Headroom management memory allocation
- */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-
- /*
- * Spectrum Analyzer memory requirements
- */
- {
- pLVPSA_Handle_t hPSAInst = LVM_NULL;
- LVPSA_MemTab_t PSA_MemTab;
- LVPSA_InitParams_t PSA_InitParams;
- LVPSA_FilterParam_t FiltersParams[9];
- LVPSA_RETURN PSA_Status;
-
- if(pInstParams->PSA_Included == LVM_PSA_ON)
- {
- PSA_InitParams.SpectralDataBufferDuration = (LVM_UINT16) 500;
- PSA_InitParams.MaxInputBlockSize = (LVM_UINT16) 1000;
- PSA_InitParams.nBands = (LVM_UINT16) 9;
-
- PSA_InitParams.pFiltersParams = &FiltersParams[0];
- for(i = 0; i < PSA_InitParams.nBands; i++)
- {
- FiltersParams[i].CenterFrequency = (LVM_UINT16) 1000;
- FiltersParams[i].QFactor = (LVM_UINT16) 25;
- FiltersParams[i].PostGain = (LVM_INT16) 0;
- }
-
- /*
- * Get the memory requirements
- */
- PSA_Status = LVPSA_Memory (hPSAInst,
- &PSA_MemTab,
- &PSA_InitParams);
-
- if (PSA_Status != LVPSA_OK)
- {
- return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
- }
-
- /*
- * Update the bundle table
- */
- /* Slow Data */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
- /* Fast Data */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
- /* Fast Coef */
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
- /* Fast Temporary */
- InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
- MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT));
-
- if (PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size > AlgScratchSize)
- {
- AlgScratchSize = PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size;
- }
- }
- }
-
- /*
- * Return the memory table
- */
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA]);
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA]);
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
- if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size < 4)
- {
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size = 0;
- }
-
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF]);
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
- if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size < 4)
- {
- pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size = 0;
- }
-
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
- AlgScratchSize);
- pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST]);
- pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
- pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
- if (pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size < 4)
- {
- pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size = 0;
- }
-
- return(LVM_SUCCESS);
-
-}
-
-/****************************************************************************************/
-/* */
/* FUNCTION: LVM_GetInstanceHandle */
/* */
/* DESCRIPTION: */
-/* This function is used to create a bundle instance. It returns the created instance */
-/* handle through phInstance. All parameters are set to their default, inactive state. */
+/* This function is used to create a bundle instance. */
+/* All parameters are set to their default, inactive state. */
/* */
/* PARAMETERS: */
-/* phInstance pointer to the instance handle */
-/* pMemoryTable Pointer to the memory definition table */
-/* pInstParams Pointer to the initialisation capabilities */
+/* phInstance Pointer to the instance handle */
+/* pInstParams Pointer to the instance parameters */
/* */
/* RETURNS: */
/* LVM_SUCCESS Initialisation succeeded */
+/* LVM_NULLADDRESS One or more memory has a NULL pointer */
/* LVM_OUTOFRANGE When any of the Instance parameters are out of range */
-/* LVM_NULLADDRESS When one of phInstance, pMemoryTable or pInstParams are NULL*/
/* */
/* NOTES: */
/* 1. This function must not be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t *phInstance,
- LVM_MemTab_t *pMemoryTable,
- LVM_InstParams_t *pInstParams)
-{
-
- LVM_ReturnStatus_en Status = LVM_SUCCESS;
- LVM_Instance_t *pInstance;
- INST_ALLOC AllocMem[LVM_NR_MEMORY_REGIONS];
- LVM_INT16 i;
- LVM_UINT16 InternalBlockSize;
- LVM_INT32 BundleScratchSize;
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams) {
+ LVM_ReturnStatus_en Status = LVM_SUCCESS;
+ LVM_Instance_t* pInstance;
+ LVM_INT16 i;
+ LVM_UINT16 InternalBlockSize;
+ LVM_INT32 BundleScratchSize;
/*
* Check valid points have been given
*/
- if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstParams == LVM_NULL))
- {
+ if ((phInstance == LVM_NULL) || (pInstParams == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
/*
- * Check the memory table for NULL pointers
- */
- for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
- {
- if ((pMemoryTable->Region[i].Size != 0) &&
- (pMemoryTable->Region[i].pBaseAddress==LVM_NULL))
- {
- return(LVM_NULLADDRESS);
- }
- }
-
- /*
* Check the instance parameters
*/
- if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
- {
+ if ((pInstParams->BufferMode != LVM_MANAGED_BUFFERS) &&
+ (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS)) {
return (LVM_OUTOFRANGE);
}
- if( pInstParams->EQNB_NumBands > 32 )
- {
+ if (pInstParams->EQNB_NumBands > 32) {
return (LVM_OUTOFRANGE);
}
- if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
- {
- if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
- {
+ if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
+ if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+ (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE)) {
return (LVM_OUTOFRANGE);
}
- }
- else
- {
- if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
- {
+ } else {
+ if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+ (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE)) {
return (LVM_OUTOFRANGE);
}
}
- if(pInstParams->PSA_Included > LVM_PSA_ON)
- {
+ if (pInstParams->PSA_Included > LVM_PSA_ON) {
return (LVM_OUTOFRANGE);
}
/*
- * Initialise the AllocMem structures
+ * Create the instance handle
*/
- for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
- {
- InstAlloc_Init(&AllocMem[i],
- pMemoryTable->Region[i].pBaseAddress);
+ *phInstance = (LVM_Handle_t)calloc(1, sizeof(*pInstance));
+ if (*phInstance == LVM_NULL) {
+ return LVM_NULLADDRESS;
}
+ pInstance = (LVM_Instance_t*)*phInstance;
+
+ pInstance->InstParams = *pInstParams;
/*
- * Set the instance handle
+ * Create the bundle scratch memory and initialse the buffer management
*/
- *phInstance = (LVM_Handle_t)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- sizeof(LVM_Instance_t));
- pInstance =(LVM_Instance_t *)*phInstance;
-
- /*
- * Save the memory table, parameters and capabilities
- */
- pInstance->MemoryTable = *pMemoryTable;
- pInstance->InstParams = *pInstParams;
-
- /*
- * Set the bundle scratch memory and initialse the buffer management
- */
- InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
- if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
- {
+ InternalBlockSize = (LVM_UINT16)(
+ (pInstParams->MaxBlockSize) &
+ MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
+ if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE) {
InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
}
/* Maximum Internal Black Size should not be more than MAX_INTERNAL_BLOCKSIZE*/
- if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
- {
+ if (InternalBlockSize > MAX_INTERNAL_BLOCKSIZE) {
InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
}
pInstance->InternalBlockSize = (LVM_INT16)InternalBlockSize;
@@ -599,40 +120,46 @@
/*
* Common settings for managed and unmanaged buffers
*/
- pInstance->SamplesToProcess = 0; /* No samples left to process */
- if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
- {
+ pInstance->SamplesToProcess = 0; /* No samples left to process */
+ BundleScratchSize =
+ (LVM_INT32)(3 * LVM_MAX_CHANNELS * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) *
+ sizeof(LVM_FLOAT));
+ pInstance->pScratch = calloc(1, BundleScratchSize);
+ if (pInstance->pScratch == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+
+ if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
/*
* Managed buffers required
*/
- pInstance->pBufferManagement = (LVM_Buffer_t *)
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- sizeof(LVM_Buffer_t));
- BundleScratchSize = (LVM_INT32)
- (3 * LVM_MAX_CHANNELS \
- * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
- * sizeof(LVM_FLOAT));
- pInstance->pBufferManagement->pScratch = (LVM_FLOAT *)
- InstAlloc_AddMember(
- &AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch 1 buffer */
- (LVM_UINT32)BundleScratchSize);
- LoadConst_Float(0, /* Clear the input delay buffer */
- (LVM_FLOAT *)&pInstance->pBufferManagement->InDelayBuffer,
+ pInstance->pBufferManagement =
+ (LVM_Buffer_t*)calloc(1, sizeof(*(pInstance->pBufferManagement)));
+ if (pInstance->pBufferManagement == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+
+ pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
+
+ LoadConst_Float(0, /* Clear the input delay buffer */
+ (LVM_FLOAT*)&pInstance->pBufferManagement->InDelayBuffer,
(LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
- pInstance->pBufferManagement->InDelaySamples = MIN_INTERNAL_BLOCKSIZE; /* Set the number of delay samples */
- pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
- pInstance->pBufferManagement->BufferState = LVM_FIRSTCALL; /* Set the state ready for the first call */
+ pInstance->pBufferManagement->InDelaySamples =
+ MIN_INTERNAL_BLOCKSIZE; /* Set the number of delay samples */
+ pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
+ pInstance->pBufferManagement->BufferState =
+ LVM_FIRSTCALL; /* Set the state ready for the first call */
}
/*
* Set default parameters
*/
- pInstance->Params.OperatingMode = LVM_MODE_OFF;
- pInstance->Params.SampleRate = LVM_FS_8000;
- pInstance->Params.SourceFormat = LVM_MONO;
- pInstance->Params.SpeakerType = LVM_HEADPHONES;
- pInstance->Params.VC_EffectLevel = 0;
- pInstance->Params.VC_Balance = 0;
+ pInstance->Params.OperatingMode = LVM_MODE_OFF;
+ pInstance->Params.SampleRate = LVM_FS_8000;
+ pInstance->Params.SourceFormat = LVM_MONO;
+ pInstance->Params.SpeakerType = LVM_HEADPHONES;
+ pInstance->Params.VC_EffectLevel = 0;
+ pInstance->Params.VC_Balance = 0;
/*
* Set callback
@@ -642,338 +169,265 @@
/*
* DC removal filter
*/
-#ifdef SUPPORT_MC
DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
- DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
/*
* Treble Enhancement
*/
- pInstance->pTE_Taps = (LVM_TE_Data_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- sizeof(LVM_TE_Data_t));
-
- pInstance->pTE_State = (LVM_TE_Coefs_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- sizeof(LVM_TE_Coefs_t));
+ pInstance->pTE_Taps = (LVM_TE_Data_t*)calloc(1, sizeof(*(pInstance->pTE_Taps)));
+ if (pInstance->pTE_Taps == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+ pInstance->pTE_State = (LVM_TE_Coefs_t*)calloc(1, sizeof(*(pInstance->pTE_State)));
+ if (pInstance->pTE_State == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
pInstance->Params.TE_OperatingMode = LVM_TE_OFF;
- pInstance->Params.TE_EffectLevel = 0;
- pInstance->TE_Active = LVM_FALSE;
+ pInstance->Params.TE_EffectLevel = 0;
+ pInstance->TE_Active = LVM_FALSE;
/*
* Set the volume control and initialise Current to Target
*/
- pInstance->VC_Volume.MixerStream[0].CallbackParam = 0;
- pInstance->VC_Volume.MixerStream[0].CallbackSet = 0;
- pInstance->VC_Volume.MixerStream[0].pCallbackHandle = pInstance;
- pInstance->VC_Volume.MixerStream[0].pCallBack = LVM_VCCallBack;
+ pInstance->VC_Volume.MixerStream[0].CallbackParam = 0;
+ pInstance->VC_Volume.MixerStream[0].CallbackSet = 0;
+ pInstance->VC_Volume.MixerStream[0].pCallbackHandle = pInstance;
+ pInstance->VC_Volume.MixerStream[0].pCallBack = LVM_VCCallBack;
- /* In managed buffering, start with low signal level as delay in buffer management causes a click*/
- if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
- {
+ /* In managed buffering, start with low signal level as delay in buffer management causes a
+ * click*/
+ if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], 0, 0);
- }
- else
- {
+ } else {
LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
}
- LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],0,LVM_FS_8000,2);
+ LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0, LVM_FS_8000, 2);
- pInstance->VC_VolumedB = 0;
- pInstance->VC_AVLFixedVolume = 0;
- pInstance->VC_Active = LVM_FALSE;
+ pInstance->VC_VolumedB = 0;
+ pInstance->VC_AVLFixedVolume = 0;
+ pInstance->VC_Active = LVM_FALSE;
- pInstance->VC_BalanceMix.MixerStream[0].CallbackParam = 0;
- pInstance->VC_BalanceMix.MixerStream[0].CallbackSet = 0;
- pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle = pInstance;
- pInstance->VC_BalanceMix.MixerStream[0].pCallBack = LVM_VCCallBack;
+ pInstance->VC_BalanceMix.MixerStream[0].CallbackParam = 0;
+ pInstance->VC_BalanceMix.MixerStream[0].CallbackSet = 0;
+ pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle = pInstance;
+ pInstance->VC_BalanceMix.MixerStream[0].pCallBack = LVM_VCCallBack;
LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0], LVM_VC_MIXER_TIME,
+ LVM_FS_8000, 2);
- pInstance->VC_BalanceMix.MixerStream[1].CallbackParam = 0;
- pInstance->VC_BalanceMix.MixerStream[1].CallbackSet = 0;
- pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle = pInstance;
- pInstance->VC_BalanceMix.MixerStream[1].pCallBack = LVM_VCCallBack;
+ pInstance->VC_BalanceMix.MixerStream[1].CallbackParam = 0;
+ pInstance->VC_BalanceMix.MixerStream[1].CallbackSet = 0;
+ pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle = pInstance;
+ pInstance->VC_BalanceMix.MixerStream[1].pCallBack = LVM_VCCallBack;
LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[1], LVM_MAXFLOAT, LVM_MAXFLOAT);
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1], LVM_VC_MIXER_TIME,
+ LVM_FS_8000, 2);
/*
- * Set the default EQNB pre-gain and pointer to the band definitions
+ * Create the default EQNB pre-gain and pointer to the band definitions
*/
- pInstance->pEQNB_BandDefs =
- (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
- pInstance->pEQNB_UserDefs =
- (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
+ pInstance->pEQNB_BandDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+ sizeof(*(pInstance->pEQNB_BandDefs)));
+ if (pInstance->pEQNB_BandDefs == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+ pInstance->pEQNB_UserDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+ sizeof(*(pInstance->pEQNB_UserDefs)));
+ if (pInstance->pEQNB_UserDefs == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
/*
* Initialise the Concert Sound module
*/
{
- LVCS_Handle_t hCSInstance; /* Instance handle */
- LVCS_MemTab_t CS_MemTab; /* Memory table */
- LVCS_Capabilities_t CS_Capabilities; /* Initial capabilities */
- LVCS_ReturnStatus_en LVCS_Status; /* Function call status */
+ LVCS_Handle_t hCSInstance; /* Instance handle */
+ LVCS_Capabilities_t CS_Capabilities; /* Initial capabilities */
+ LVCS_ReturnStatus_en LVCS_Status; /* Function call status */
/*
* Set default parameters
*/
- pInstance->Params.VirtualizerReverbLevel = 100;
- pInstance->Params.VirtualizerType = LVM_CONCERTSOUND;
- pInstance->Params.VirtualizerOperatingMode = LVM_MODE_OFF;
- pInstance->CS_Active = LVM_FALSE;
+ pInstance->Params.VirtualizerReverbLevel = 100;
+ pInstance->Params.VirtualizerType = LVM_CONCERTSOUND;
+ pInstance->Params.VirtualizerOperatingMode = LVM_MODE_OFF;
+ pInstance->CS_Active = LVM_FALSE;
/*
* Set the initialisation capabilities
*/
- CS_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
+ CS_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
CS_Capabilities.CallBack = pInstance->CallBack;
CS_Capabilities.pBundleInstance = (void*)pInstance;
/*
- * Get the memory requirements and then set the address pointers, forcing alignment
- */
- LVCS_Status = LVCS_Memory(LVM_NULL, /* Get the memory requirements */
- &CS_MemTab,
- &CS_Capabilities);
- CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = &pInstance->CS_Instance;
- CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size);
- CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size);
- CS_MemTab.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
- 0);
-
- /*
* Initialise the Concert Sound instance and save the instance handle
*/
- hCSInstance = LVM_NULL; /* Set to NULL to return handle */
- LVCS_Status = LVCS_Init(&hCSInstance, /* Initiailse */
- &CS_MemTab,
- &CS_Capabilities);
- if (LVCS_Status != LVCS_SUCCESS) return((LVM_ReturnStatus_en)LVCS_Status);
- pInstance->hCSInstance = hCSInstance; /* Save the instance handle */
-
+ hCSInstance = LVM_NULL; /* Set to NULL to return handle */
+ LVCS_Status = LVCS_Init(&hCSInstance, /* Create and initialise */
+ &CS_Capabilities, pInstance->pScratch);
+ if (LVCS_Status != LVCS_SUCCESS) return ((LVM_ReturnStatus_en)LVCS_Status);
+ pInstance->hCSInstance = hCSInstance; /* Save the instance handle */
}
/*
* Initialise the Bass Enhancement module
*/
{
- LVDBE_Handle_t hDBEInstance; /* Instance handle */
- LVDBE_MemTab_t DBE_MemTab; /* Memory table */
- LVDBE_Capabilities_t DBE_Capabilities; /* Initial capabilities */
- LVDBE_ReturnStatus_en LVDBE_Status; /* Function call status */
+ LVDBE_Handle_t hDBEInstance; /* Instance handle */
+ LVDBE_Capabilities_t DBE_Capabilities; /* Initial capabilities */
+ LVDBE_ReturnStatus_en LVDBE_Status; /* Function call status */
/*
* Set the initialisation parameters
*/
pInstance->Params.BE_OperatingMode = LVM_BE_OFF;
- pInstance->Params.BE_CentreFreq = LVM_BE_CENTRE_55Hz;
- pInstance->Params.BE_EffectLevel = 0;
- pInstance->Params.BE_HPF = LVM_BE_HPF_OFF;
+ pInstance->Params.BE_CentreFreq = LVM_BE_CENTRE_55Hz;
+ pInstance->Params.BE_EffectLevel = 0;
+ pInstance->Params.BE_HPF = LVM_BE_HPF_OFF;
- pInstance->DBE_Active = LVM_FALSE;
+ pInstance->DBE_Active = LVM_FALSE;
/*
* Set the initialisation capabilities
*/
- DBE_Capabilities.SampleRate = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
- LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
- LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
- LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
- LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
- LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
- LVDBE_CAP_FS_192000;
- DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
- DBE_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
+ DBE_Capabilities.SampleRate = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 | LVDBE_CAP_FS_12000 |
+ LVDBE_CAP_FS_16000 | LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
+ LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 | LVDBE_CAP_FS_48000 |
+ LVDBE_CAP_FS_88200 | LVDBE_CAP_FS_96000 |
+ LVDBE_CAP_FS_176400 | LVDBE_CAP_FS_192000;
- /*
- * Get the memory requirements and then set the address pointers
- */
- LVDBE_Status = LVDBE_Memory(LVM_NULL, /* Get the memory requirements */
- &DBE_MemTab,
- &DBE_Capabilities);
- DBE_MemTab.Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress = &pInstance->DBE_Instance;
- DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size);
- DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size);
- DBE_MemTab.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
- 0);
+ DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz |
+ LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz |
+ LVDBE_CAP_CENTRE_90Hz;
+ DBE_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
/*
* Initialise the Dynamic Bass Enhancement instance and save the instance handle
*/
- hDBEInstance = LVM_NULL; /* Set to NULL to return handle */
- LVDBE_Status = LVDBE_Init(&hDBEInstance, /* Initiailse */
- &DBE_MemTab,
- &DBE_Capabilities);
- if (LVDBE_Status != LVDBE_SUCCESS) return((LVM_ReturnStatus_en)LVDBE_Status);
- pInstance->hDBEInstance = hDBEInstance; /* Save the instance handle */
+ hDBEInstance = LVM_NULL; /* Set to NULL to return handle */
+ LVDBE_Status = LVDBE_Init(&hDBEInstance, /* Create and initialise */
+ &DBE_Capabilities, pInstance->pScratch);
+ if (LVDBE_Status != LVDBE_SUCCESS) return ((LVM_ReturnStatus_en)LVDBE_Status);
+ pInstance->hDBEInstance = hDBEInstance; /* Save the instance handle */
}
/*
* Initialise the N-Band Equaliser module
*/
{
- LVEQNB_Handle_t hEQNBInstance; /* Instance handle */
- LVEQNB_MemTab_t EQNB_MemTab; /* Memory table */
- LVEQNB_Capabilities_t EQNB_Capabilities; /* Initial capabilities */
- LVEQNB_ReturnStatus_en LVEQNB_Status; /* Function call status */
+ LVEQNB_Handle_t hEQNBInstance; /* Instance handle */
+ LVEQNB_Capabilities_t EQNB_Capabilities; /* Initial capabilities */
+ LVEQNB_ReturnStatus_en LVEQNB_Status; /* Function call status */
/*
* Set the initialisation parameters
*/
- pInstance->Params.EQNB_OperatingMode = LVM_EQNB_OFF;
- pInstance->Params.EQNB_NBands = 0;
+ pInstance->Params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ pInstance->Params.EQNB_NBands = 0;
pInstance->Params.pEQNB_BandDefinition = LVM_NULL;
- pInstance->EQNB_Active = LVM_FALSE;
+ pInstance->EQNB_Active = LVM_FALSE;
/*
* Set the initialisation capabilities
*/
- EQNB_Capabilities.SampleRate = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
- LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
- LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
- LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
- LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
- LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
- LVEQNB_CAP_FS_192000;
- EQNB_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
- EQNB_Capabilities.MaxBands = pInstParams->EQNB_NumBands;
- EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
- EQNB_Capabilities.CallBack = pInstance->CallBack;
- EQNB_Capabilities.pBundleInstance = (void*)pInstance;
+ EQNB_Capabilities.SampleRate =
+ LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 | LVEQNB_CAP_FS_12000 |
+ LVEQNB_CAP_FS_16000 | LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
+ LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 | LVEQNB_CAP_FS_48000 |
+ LVEQNB_CAP_FS_88200 | LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
+ LVEQNB_CAP_FS_192000;
- /*
- * Get the memory requirements and then set the address pointers, forcing alignment
- */
- LVEQNB_Status = LVEQNB_Memory(LVM_NULL, /* Get the memory requirements */
- &EQNB_MemTab,
- &EQNB_Capabilities);
- EQNB_MemTab.Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress = &pInstance->EQNB_Instance;
- EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size);
- EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size);
- EQNB_MemTab.Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
- 0);
+ EQNB_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
+ EQNB_Capabilities.MaxBands = pInstParams->EQNB_NumBands;
+ EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
+ EQNB_Capabilities.CallBack = pInstance->CallBack;
+ EQNB_Capabilities.pBundleInstance = (void*)pInstance;
/*
* Initialise the Dynamic Bass Enhancement instance and save the instance handle
*/
- hEQNBInstance = LVM_NULL; /* Set to NULL to return handle */
- LVEQNB_Status = LVEQNB_Init(&hEQNBInstance, /* Initiailse */
- &EQNB_MemTab,
- &EQNB_Capabilities);
- if (LVEQNB_Status != LVEQNB_SUCCESS) return((LVM_ReturnStatus_en)LVEQNB_Status);
- pInstance->hEQNBInstance = hEQNBInstance; /* Save the instance handle */
+ hEQNBInstance = LVM_NULL; /* Set to NULL to return handle */
+ LVEQNB_Status = LVEQNB_Init(&hEQNBInstance, /* Create and initialise */
+ &EQNB_Capabilities, pInstance->pScratch);
+ if (LVEQNB_Status != LVEQNB_SUCCESS) return ((LVM_ReturnStatus_en)LVEQNB_Status);
+ pInstance->hEQNBInstance = hEQNBInstance; /* Save the instance handle */
}
/*
* Headroom management memory allocation
*/
{
- pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t *)
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
- pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t *)
- InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
+ pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t*)calloc(
+ LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_BandDefs)));
+ if (pInstance->pHeadroom_BandDefs == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+ pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t*)calloc(
+ LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_UserDefs)));
+ if (pInstance->pHeadroom_UserDefs == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
/* Headroom management parameters initialisation */
pInstance->NewHeadroomParams.NHeadroomBands = 2;
pInstance->NewHeadroomParams.pHeadroomDefinition = pInstance->pHeadroom_BandDefs;
- pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low = 20;
- pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High = 4999;
- pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset = 3;
- pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low = 5000;
- pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High = 24000;
- pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset = 4;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low = 20;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High = 4999;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset = 3;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low = 5000;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High = 24000;
+ pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset = 4;
pInstance->NewHeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
- pInstance->Headroom =0;
+ pInstance->Headroom = 0;
}
/*
* Initialise the PSA module
*/
{
- pLVPSA_Handle_t hPSAInstance = LVM_NULL; /* Instance handle */
- LVPSA_MemTab_t PSA_MemTab;
- LVPSA_RETURN PSA_Status; /* Function call status */
+ pLVPSA_Handle_t hPSAInstance = LVM_NULL; /* Instance handle */
+ LVPSA_RETURN PSA_Status; /* Function call status */
LVPSA_FilterParam_t FiltersParams[9];
- if(pInstParams->PSA_Included==LVM_PSA_ON)
- {
- pInstance->PSA_InitParams.SpectralDataBufferDuration = (LVM_UINT16) 500;
- pInstance->PSA_InitParams.MaxInputBlockSize = (LVM_UINT16) 2048;
- pInstance->PSA_InitParams.nBands = (LVM_UINT16) 9;
- pInstance->PSA_InitParams.pFiltersParams = &FiltersParams[0];
- for(i = 0; i < pInstance->PSA_InitParams.nBands; i++)
- {
- FiltersParams[i].CenterFrequency = (LVM_UINT16) 1000;
- FiltersParams[i].QFactor = (LVM_UINT16) 100;
- FiltersParams[i].PostGain = (LVM_INT16) 0;
+ if (pInstParams->PSA_Included == LVM_PSA_ON) {
+ pInstance->PSA_InitParams.SpectralDataBufferDuration = (LVM_UINT16)500;
+ pInstance->PSA_InitParams.MaxInputBlockSize = (LVM_UINT16)2048;
+ pInstance->PSA_InitParams.nBands = (LVM_UINT16)9;
+ pInstance->PSA_InitParams.pFiltersParams = &FiltersParams[0];
+ for (i = 0; i < pInstance->PSA_InitParams.nBands; i++) {
+ FiltersParams[i].CenterFrequency = (LVM_UINT16)1000;
+ FiltersParams[i].QFactor = (LVM_UINT16)100;
+ FiltersParams[i].PostGain = (LVM_INT16)0;
}
- /*Get the memory requirements and then set the address pointers*/
- PSA_Status = LVPSA_Memory (hPSAInstance,
- &PSA_MemTab,
- &pInstance->PSA_InitParams);
-
- if (PSA_Status != LVPSA_OK)
- {
- return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
- }
-
- /* Slow Data */
- PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
- PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
- /* Fast Data */
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
- /* Fast Coef */
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
- PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
- /* Fast Temporary */
- pInstance->pPSAInput = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
- (LVM_UINT32) MAX_INTERNAL_BLOCKSIZE * \
- sizeof(LVM_FLOAT));
- PSA_MemTab.Region[LVM_TEMPORARY_FAST].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],0);
-
/*Initialise PSA instance and save the instance handle*/
pInstance->PSA_ControlParams.Fs = LVM_FS_48000;
- pInstance->PSA_ControlParams.LevelDetectionSpeed = LVPSA_SPEED_MEDIUM;
- PSA_Status = LVPSA_Init (&hPSAInstance,
- &pInstance->PSA_InitParams,
- &pInstance->PSA_ControlParams,
- &PSA_MemTab);
+ pInstance->PSA_ControlParams.LevelDetectionSpeed = LVPSA_SPEED_MEDIUM;
+ pInstance->pPSAInput = (LVM_FLOAT*)calloc(MAX_INTERNAL_BLOCKSIZE, sizeof(LVM_FLOAT));
+ if (pInstance->pPSAInput == LVM_NULL) {
+ return LVM_NULLADDRESS;
+ }
+ PSA_Status = LVPSA_Init(&hPSAInstance, &pInstance->PSA_InitParams,
+ &pInstance->PSA_ControlParams, pInstance->pScratch);
- if (PSA_Status != LVPSA_OK)
- {
- return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
+ if (PSA_Status != LVPSA_OK) {
+ return ((LVM_ReturnStatus_en)LVM_ALGORITHMPSA);
}
- pInstance->hPSAInstance = hPSAInstance; /* Save the instance handle */
+ pInstance->hPSAInstance = hPSAInstance; /* Save the instance handle */
pInstance->PSA_GainOffset = 0;
- }
- else
- {
+ } else {
pInstance->hPSAInstance = LVM_NULL;
}
/*
* Set the initialisation parameters.
*/
- pInstance->Params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
- pInstance->Params.PSA_Enable = LVM_PSA_OFF;
+ pInstance->Params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+ pInstance->Params.PSA_Enable = LVM_PSA_OFF;
}
/*
@@ -992,20 +446,121 @@
pInstance->ConfigurationNumber += LVM_VC_MASK;
pInstance->ConfigurationNumber += LVM_PSA_MASK;
- if(((pInstance->ConfigurationNumber & LVM_CS_MASK)!=0) ||
- ((pInstance->ConfigurationNumber & LVM_DBE_MASK)!=0) ||
- ((pInstance->ConfigurationNumber & LVM_EQNB_MASK)!=0)||
- ((pInstance->ConfigurationNumber & LVM_TE_MASK)!=0) ||
- ((pInstance->ConfigurationNumber & LVM_VC_MASK)!=0))
- {
- pInstance->BlickSizeMultiple = 4;
- }
- else
- {
- pInstance->BlickSizeMultiple = 1;
+ if (((pInstance->ConfigurationNumber & LVM_CS_MASK) != 0) ||
+ ((pInstance->ConfigurationNumber & LVM_DBE_MASK) != 0) ||
+ ((pInstance->ConfigurationNumber & LVM_EQNB_MASK) != 0) ||
+ ((pInstance->ConfigurationNumber & LVM_TE_MASK) != 0) ||
+ ((pInstance->ConfigurationNumber & LVM_VC_MASK) != 0)) {
+ pInstance->BlickSizeMultiple = 4;
+ } else {
+ pInstance->BlickSizeMultiple = 1;
}
- return(Status);
+ return (Status);
+}
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVM_DelInstanceHandle */
+/* */
+/* DESCRIPTION: */
+/* This function frees all memory allocated for a bundle instance, including the */
+/* sub-module instances, and releases the instance handle passed through phInstance. */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to the instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVM_Process function */
+/* */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)*phInstance;
+
+ if (pInstance->pScratch != LVM_NULL) {
+ free(pInstance->pScratch);
+ pInstance->pScratch = LVM_NULL;
+ }
+
+ if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+ /*
+ * Managed buffers required
+ */
+ if (pInstance->pBufferManagement != LVM_NULL) {
+ free(pInstance->pBufferManagement);
+ pInstance->pBufferManagement = LVM_NULL;
+ }
+ }
+
+ /*
+ * Treble Enhancement
+ */
+ if (pInstance->pTE_Taps != LVM_NULL) {
+ free(pInstance->pTE_Taps);
+ pInstance->pTE_Taps = LVM_NULL;
+ }
+ if (pInstance->pTE_State != LVM_NULL) {
+ free(pInstance->pTE_State);
+ pInstance->pTE_State = LVM_NULL;
+ }
+
+ /*
+ * Free the default EQNB pre-gain and pointer to the band definitions
+ */
+ if (pInstance->pEQNB_BandDefs != LVM_NULL) {
+ free(pInstance->pEQNB_BandDefs);
+ pInstance->pEQNB_BandDefs = LVM_NULL;
+ }
+ if (pInstance->pEQNB_UserDefs != LVM_NULL) {
+ free(pInstance->pEQNB_UserDefs);
+ pInstance->pEQNB_UserDefs = LVM_NULL;
+ }
+
+ /*
+ * De-initialise the Concert Sound module
+ */
+ if (pInstance->hCSInstance != LVM_NULL) {
+ LVCS_DeInit(&pInstance->hCSInstance);
+ }
+
+ /*
+ * De-initialise the Bass Enhancement module
+ */
+ if (pInstance->hDBEInstance != LVM_NULL) {
+ LVDBE_DeInit(&pInstance->hDBEInstance);
+ }
+
+ /*
+ * De-initialise the N-Band Equaliser module
+ */
+ if (pInstance->hEQNBInstance != LVM_NULL) {
+ LVEQNB_DeInit(&pInstance->hEQNBInstance);
+ }
+
+ /*
+ * Free Headroom management memory.
+ */
+ if (pInstance->pHeadroom_BandDefs != LVM_NULL) {
+ free(pInstance->pHeadroom_BandDefs);
+ pInstance->pHeadroom_BandDefs = LVM_NULL;
+ }
+ if (pInstance->pHeadroom_UserDefs != LVM_NULL) {
+ free(pInstance->pHeadroom_UserDefs);
+ pInstance->pHeadroom_UserDefs = LVM_NULL;
+ }
+
+ /*
+ * De-initialise the PSA module
+ */
+ if (pInstance->hPSAInstance != LVM_NULL) {
+ LVPSA_DeInit(&pInstance->hPSAInstance);
+ }
+ if (pInstance->pPSAInput != LVM_NULL) {
+ free(pInstance->pPSAInput);
+ pInstance->pPSAInput = LVM_NULL;
+ }
+
+ free(*phInstance);
+ return;
}
/****************************************************************************************/
@@ -1027,48 +582,36 @@
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance)
-{
- LVM_MemTab_t MemTab; /* Memory table */
- LVM_InstParams_t InstParams; /* Instance parameters */
- LVM_ControlParams_t Params; /* Control Parameters */
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance; /* Pointer to Instance */
- LVM_HeadroomParams_t HeadroomParams;
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance) {
+ LVM_InstParams_t InstParams; /* Instance parameters */
+ LVM_ControlParams_t Params; /* Control Parameters */
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance; /* Pointer to Instance */
+ LVM_HeadroomParams_t HeadroomParams;
- if(hInstance == LVM_NULL){
+ if (hInstance == LVM_NULL) {
return LVM_NULLADDRESS;
}
- /* Save the control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+ /* Save the control parameters */ /* coverity[unchecked_value] */ /* Do not check return value
+ internal function calls */
LVM_GetControlParameters(hInstance, &Params);
/*Save the headroom parameters*/
LVM_GetHeadroomParams(hInstance, &HeadroomParams);
- /* Retrieve allocated buffers in memtab */
- LVM_GetMemoryTable(hInstance, &MemTab, LVM_NULL);
-
/* Save the instance parameters */
InstParams = pInstance->InstParams;
/* Call LVM_GetInstanceHandle to re-initialise the bundle */
- LVM_GetInstanceHandle( &hInstance,
- &MemTab,
- &InstParams);
-
- /* Restore control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+ /* Restore control parameters */ /* coverity[unchecked_value] */ /* Do not check return value
+ internal function calls */
LVM_SetControlParameters(hInstance, &Params);
/*Restore the headroom parameters*/
LVM_SetHeadroomParams(hInstance, &HeadroomParams);
/* DC removal filter */
-#ifdef SUPPORT_MC
DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
- DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
return LVM_SUCCESS;
}
-
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
index ddaac99..90a1f19 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
@@ -33,14 +33,14 @@
/* */
/************************************************************************************/
-#include "LVM.h" /* LifeVibes */
-#include "LVM_Common.h" /* LifeVibes common */
-#include "BIQUAD.h" /* Biquad library */
-#include "LVC_Mixer.h" /* Mixer library */
-#include "LVCS_Private.h" /* Concert Sound */
-#include "LVDBE_Private.h" /* Dynamic Bass Enhancement */
-#include "LVEQNB_Private.h" /* N-Band equaliser */
-#include "LVPSA_Private.h" /* Parametric Spectrum Analyzer */
+#include "LVM.h" /* LifeVibes */
+#include "LVM_Common.h" /* LifeVibes common */
+#include "BIQUAD.h" /* Biquad library */
+#include "LVC_Mixer.h" /* Mixer library */
+#include "LVCS_Private.h" /* Concert Sound */
+#include "LVDBE_Private.h" /* Dynamic Bass Enhancement */
+#include "LVEQNB_Private.h" /* N-Band equaliser */
+#include "LVPSA_Private.h" /* Parametric Spectrum Analyzer */
/************************************************************************************/
/* */
@@ -49,63 +49,64 @@
/************************************************************************************/
/* General */
-#define LVM_INVALID 0xFFFF /* Invalid init parameter */
+#define LVM_INVALID 0xFFFF /* Invalid init parameter */
/* Memory */
-#define LVM_INSTANCE_ALIGN 4 /* 32-bit for structures */
-#define LVM_FIRSTCALL 0 /* First call to the buffer */
-#define LVM_MAXBLOCKCALL 1 /* Maximum block size calls to the buffer */
-#define LVM_LASTCALL 2 /* Last call to the buffer */
-#define LVM_FIRSTLASTCALL 3 /* Single call for small number of samples */
+#define LVM_INSTANCE_ALIGN 4 /* 32-bit for structures */
+#define LVM_FIRSTCALL 0 /* First call to the buffer */
+#define LVM_MAXBLOCKCALL 1 /* Maximum block size calls to the buffer */
+#define LVM_LASTCALL 2 /* Last call to the buffer */
+#define LVM_FIRSTLASTCALL 3 /* Single call for small number of samples */
/* Block Size */
-#define LVM_MIN_MAXBLOCKSIZE 16 /* Minimum MaxBlockSize Limit*/
-#define LVM_MANAGED_MAX_MAXBLOCKSIZE 8191 /* Maximum MaxBlockSzie Limit for Managed Buffer Mode*/
-#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE 4096 /* Maximum MaxBlockSzie Limit for Unmanaged Buffer Mode */
+#define LVM_MIN_MAXBLOCKSIZE 16 /* Minimum MaxBlockSize Limit*/
+#define LVM_MANAGED_MAX_MAXBLOCKSIZE 8191 /* Maximum MaxBlockSzie Limit for Managed Buffer Mode*/
+#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE \
+ 4096 /* Maximum MaxBlockSzie Limit for Unmanaged Buffer Mode */
-#define MAX_INTERNAL_BLOCKSIZE 8128 /* Maximum multiple of 64 below 8191*/
+#define MAX_INTERNAL_BLOCKSIZE 8128 /* Maximum multiple of 64 below 8191*/
-#define MIN_INTERNAL_BLOCKSIZE 16 /* Minimum internal block size */
-#define MIN_INTERNAL_BLOCKSHIFT 4 /* Minimum internal block size as a power of 2 */
-#define MIN_INTERNAL_BLOCKMASK 0xFFF0 /* Minimum internal block size mask */
+#define MIN_INTERNAL_BLOCKSIZE 16 /* Minimum internal block size */
+#define MIN_INTERNAL_BLOCKSHIFT 4 /* Minimum internal block size as a power of 2 */
+#define MIN_INTERNAL_BLOCKMASK 0xFFF0 /* Minimum internal block size mask */
-#define LVM_PSA_DYNAMICRANGE 60 /* Spectral Dynamic range: used for offseting output*/
-#define LVM_PSA_BARHEIGHT 127 /* Spectral Bar Height*/
+#define LVM_PSA_DYNAMICRANGE 60 /* Spectral Dynamic range: used for offseting output*/
+#define LVM_PSA_BARHEIGHT 127 /* Spectral Bar Height*/
-#define LVM_TE_MIN_EFFECTLEVEL 0 /*TE Minimum EffectLevel*/
-#define LVM_TE_MAX_EFFECTLEVEL 15 /*TE Maximum Effect level*/
+#define LVM_TE_MIN_EFFECTLEVEL 0 /*TE Minimum EffectLevel*/
+#define LVM_TE_MAX_EFFECTLEVEL 15 /*TE Maximum Effect level*/
-#define LVM_VC_MIN_EFFECTLEVEL (-96) /*VC Minimum EffectLevel*/
-#define LVM_VC_MAX_EFFECTLEVEL 0 /*VC Maximum Effect level*/
+#define LVM_VC_MIN_EFFECTLEVEL (-96) /*VC Minimum EffectLevel*/
+#define LVM_VC_MAX_EFFECTLEVEL 0 /*VC Maximum Effect level*/
-#define LVM_BE_MIN_EFFECTLEVEL 0 /*BE Minimum EffectLevel*/
-#define LVM_BE_MAX_EFFECTLEVEL 15 /*BE Maximum Effect level*/
+#define LVM_BE_MIN_EFFECTLEVEL 0 /*BE Minimum EffectLevel*/
+#define LVM_BE_MAX_EFFECTLEVEL 15 /*BE Maximum Effect level*/
-#define LVM_EQNB_MIN_BAND_FREQ 20 /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_FREQ 24000 /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_BAND_GAIN (-15) /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_GAIN 15 /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_QFACTOR 25 /*EQNB Minimum Q Factor*/
-#define LVM_EQNB_MAX_QFACTOR 1200 /*EQNB Maximum Q Factor*/
-#define LVM_EQNB_MIN_LPF_FREQ 1000 /*EQNB Minimum Low Pass Corner frequency*/
-#define LVM_EQNB_MIN_HPF_FREQ 20 /*EQNB Minimum High Pass Corner frequency*/
-#define LVM_EQNB_MAX_HPF_FREQ 1000 /*EQNB Maximum High Pass Corner frequency*/
+#define LVM_EQNB_MIN_BAND_FREQ 20 /*EQNB Minimum Band Frequency*/
+#define LVM_EQNB_MAX_BAND_FREQ 24000 /*EQNB Maximum Band Frequency*/
+#define LVM_EQNB_MIN_BAND_GAIN (-15) /*EQNB Minimum Band Frequency*/
+#define LVM_EQNB_MAX_BAND_GAIN 15 /*EQNB Maximum Band Frequency*/
+#define LVM_EQNB_MIN_QFACTOR 25 /*EQNB Minimum Q Factor*/
+#define LVM_EQNB_MAX_QFACTOR 1200 /*EQNB Maximum Q Factor*/
+#define LVM_EQNB_MIN_LPF_FREQ 1000 /*EQNB Minimum Low Pass Corner frequency*/
+#define LVM_EQNB_MIN_HPF_FREQ 20 /*EQNB Minimum High Pass Corner frequency*/
+#define LVM_EQNB_MAX_HPF_FREQ 1000 /*EQNB Maximum High Pass Corner frequency*/
-#define LVM_CS_MIN_EFFECT_LEVEL 0 /*CS Minimum Effect Level*/
-#define LVM_CS_MAX_REVERB_LEVEL 100 /*CS Maximum Reverb Level*/
-#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100 /*Vitrualizer Maximum Reverb Level*/
+#define LVM_CS_MIN_EFFECT_LEVEL 0 /*CS Minimum Effect Level*/
+#define LVM_CS_MAX_REVERB_LEVEL 100 /*CS Maximum Reverb Level*/
+#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100 /*Vitrualizer Maximum Reverb Level*/
-#define LVM_VC_MIXER_TIME 100 /*VC mixer time*/
-#define LVM_VC_BALANCE_MAX 96 /*VC balance max value*/
-#define LVM_VC_BALANCE_MIN (-96) /*VC balance min value*/
+#define LVM_VC_MIXER_TIME 100 /*VC mixer time*/
+#define LVM_VC_BALANCE_MAX 96 /*VC balance max value*/
+#define LVM_VC_BALANCE_MIN (-96) /*VC balance min value*/
/* Algorithm masks */
-#define LVM_CS_MASK 1
-#define LVM_EQNB_MASK 2
-#define LVM_DBE_MASK 4
-#define LVM_VC_MASK 16
-#define LVM_TE_MASK 32
-#define LVM_PSA_MASK 2048
+#define LVM_CS_MASK 1
+#define LVM_EQNB_MASK 2
+#define LVM_DBE_MASK 4
+#define LVM_VC_MASK 16
+#define LVM_TE_MASK 32
+#define LVM_PSA_MASK 2048
/************************************************************************************/
/* */
@@ -113,133 +114,102 @@
/* */
/************************************************************************************/
-/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVM_UINT16 Alignment; /* Byte alignment */
- LVM_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
-} LVM_IntMemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVM_IntMemoryRegion_t Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVM_IntMemTab_t;
-
/* Buffer Management */
-typedef struct
-{
- LVM_FLOAT *pScratch; /* Bundle scratch buffer */
+typedef struct {
+ LVM_FLOAT* pScratch; /* Bundle scratch buffer */
- LVM_INT16 BufferState; /* Buffer status */
-#ifdef SUPPORT_MC
- LVM_FLOAT InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
- LVM_FLOAT InDelayBuffer[6 * MIN_INTERNAL_BLOCKSIZE]; /* Input buffer delay line, \
- left and right */
-#endif
- LVM_INT16 InDelaySamples; /* Number of samples in the input delay buffer */
-#ifdef SUPPORT_MC
- LVM_FLOAT OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
- LVM_FLOAT OutDelayBuffer[2 * MIN_INTERNAL_BLOCKSIZE]; /* Output buffer delay \
- line */
-#endif
- LVM_INT16 OutDelaySamples; /* Number of samples in the output delay buffer, \
- left and right */
- LVM_INT16 SamplesToOutput; /* Samples to write to the output */
+ LVM_INT16 BufferState; /* Buffer status */
+ LVM_FLOAT InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+ LVM_INT16 InDelaySamples; /* Number of samples in the input delay buffer */
+ LVM_FLOAT OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+ LVM_INT16 OutDelaySamples; /* Number of samples in the output delay buffer, \
+ left and right */
+ LVM_INT16 SamplesToOutput; /* Samples to write to the output */
} LVM_Buffer_t;
/* Filter taps */
-typedef struct
-{
- Biquad_2I_Order1_FLOAT_Taps_t TrebleBoost_Taps; /* Treble boost Taps */
+typedef struct {
+ Biquad_2I_Order1_FLOAT_Taps_t TrebleBoost_Taps; /* Treble boost Taps */
} LVM_TE_Data_t;
/* Coefficients */
-typedef struct
-{
- Biquad_FLOAT_Instance_t TrebleBoost_State; /* State for the treble boost filter */
+typedef struct {
+ Biquad_FLOAT_Instance_t TrebleBoost_State; /* State for the treble boost filter */
} LVM_TE_Coefs_t;
-typedef struct
-{
+typedef struct {
/* Public parameters */
- LVM_MemTab_t MemoryTable; /* Instance memory allocation table */
- LVM_ControlParams_t Params; /* Control parameters */
- LVM_InstParams_t InstParams; /* Instance parameters */
+ LVM_ControlParams_t Params; /* Control parameters */
+ LVM_InstParams_t InstParams; /* Instance parameters */
/* Private parameters */
- LVM_UINT16 ControlPending; /* Control flag to indicate update pending */
- LVM_ControlParams_t NewParams; /* New control parameters pending update */
+ LVM_UINT16 ControlPending; /* Control flag to indicate update pending */
+ LVM_ControlParams_t NewParams; /* New control parameters pending update */
/* Buffer control */
- LVM_INT16 InternalBlockSize; /* Maximum internal block size */
- LVM_Buffer_t *pBufferManagement; /* Buffer management variables */
- LVM_INT16 SamplesToProcess; /* Input samples left to process */
- LVM_FLOAT *pInputSamples; /* External input sample pointer */
- LVM_FLOAT *pOutputSamples; /* External output sample pointer */
+ LVM_INT16 InternalBlockSize; /* Maximum internal block size */
+ LVM_Buffer_t* pBufferManagement; /* Buffer management variables */
+ LVM_INT16 SamplesToProcess; /* Input samples left to process */
+ LVM_FLOAT* pInputSamples; /* External input sample pointer */
+ LVM_FLOAT* pOutputSamples; /* External output sample pointer */
/* Configuration number */
- LVM_INT32 ConfigurationNumber;
- LVM_INT32 BlickSizeMultiple;
+ LVM_INT32 ConfigurationNumber;
+ LVM_INT32 BlickSizeMultiple;
/* DC removal */
- Biquad_FLOAT_Instance_t DC_RemovalInstance; /* DC removal filter instance */
+ Biquad_FLOAT_Instance_t DC_RemovalInstance; /* DC removal filter instance */
/* Concert Sound */
- LVCS_Handle_t hCSInstance; /* Concert Sound instance handle */
- LVCS_Instance_t CS_Instance; /* Concert Sound instance */
- LVM_INT16 CS_Active; /* Control flag */
+ LVCS_Handle_t hCSInstance; /* Concert Sound instance handle */
+ LVCS_Instance_t CS_Instance; /* Concert Sound instance */
+ LVM_INT16 CS_Active; /* Control flag */
/* Equalizer */
- LVEQNB_Handle_t hEQNBInstance; /* N-Band Equaliser instance handle */
- LVEQNB_Instance_t EQNB_Instance; /* N-Band Equaliser instance */
- LVM_EQNB_BandDef_t *pEQNB_BandDefs; /* Local storage for new definitions */
- LVM_EQNB_BandDef_t *pEQNB_UserDefs; /* Local storage for the user's definitions */
- LVM_INT16 EQNB_Active; /* Control flag */
+ LVEQNB_Handle_t hEQNBInstance; /* N-Band Equaliser instance handle */
+ LVEQNB_Instance_t EQNB_Instance; /* N-Band Equaliser instance */
+ LVM_EQNB_BandDef_t* pEQNB_BandDefs; /* Local storage for new definitions */
+ LVM_EQNB_BandDef_t* pEQNB_UserDefs; /* Local storage for the user's definitions */
+ LVM_INT16 EQNB_Active; /* Control flag */
/* Dynamic Bass Enhancement */
- LVDBE_Handle_t hDBEInstance; /* Dynamic Bass Enhancement instance handle */
- LVDBE_Instance_t DBE_Instance; /* Dynamic Bass Enhancement instance */
- LVM_INT16 DBE_Active; /* Control flag */
+ LVDBE_Handle_t hDBEInstance; /* Dynamic Bass Enhancement instance handle */
+ LVDBE_Instance_t DBE_Instance; /* Dynamic Bass Enhancement instance */
+ LVM_INT16 DBE_Active; /* Control flag */
/* Volume Control */
- LVMixer3_1St_FLOAT_st VC_Volume; /* Volume scaler */
- LVMixer3_2St_FLOAT_st VC_BalanceMix; /* VC balance mixer */
- LVM_INT16 VC_VolumedB; /* Gain in dB */
- LVM_INT16 VC_Active; /* Control flag */
- LVM_INT16 VC_AVLFixedVolume; /* AVL fixed volume */
+ LVMixer3_1St_FLOAT_st VC_Volume; /* Volume scaler */
+ LVMixer3_2St_FLOAT_st VC_BalanceMix; /* VC balance mixer */
+ LVM_INT16 VC_VolumedB; /* Gain in dB */
+ LVM_INT16 VC_Active; /* Control flag */
+ LVM_INT16 VC_AVLFixedVolume; /* AVL fixed volume */
/* Treble Enhancement */
- LVM_TE_Data_t *pTE_Taps; /* Treble boost Taps */
- LVM_TE_Coefs_t *pTE_State; /* State for the treble boost filter */
- LVM_INT16 TE_Active; /* Control flag */
+ LVM_TE_Data_t* pTE_Taps; /* Treble boost Taps */
+ LVM_TE_Coefs_t* pTE_State; /* State for the treble boost filter */
+ LVM_INT16 TE_Active; /* Control flag */
/* Headroom */
- LVM_HeadroomParams_t NewHeadroomParams; /* New headroom parameters pending update */
- LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
- LVM_HeadroomBandDef_t *pHeadroom_BandDefs; /* Local storage for new definitions */
- LVM_HeadroomBandDef_t *pHeadroom_UserDefs; /* Local storage for the user's definitions */
- LVM_UINT16 Headroom; /* Value of the current headroom */
+ LVM_HeadroomParams_t NewHeadroomParams; /* New headroom parameters pending update */
+ LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
+ LVM_HeadroomBandDef_t* pHeadroom_BandDefs; /* Local storage for new definitions */
+ LVM_HeadroomBandDef_t* pHeadroom_UserDefs; /* Local storage for the user's definitions */
+ LVM_UINT16 Headroom; /* Value of the current headroom */
/* Spectrum Analyzer */
- pLVPSA_Handle_t hPSAInstance; /* Spectrum Analyzer instance handle */
- LVPSA_InstancePr_t PSA_Instance; /* Spectrum Analyzer instance */
- LVPSA_InitParams_t PSA_InitParams; /* Spectrum Analyzer initialization parameters */
- LVPSA_ControlParams_t PSA_ControlParams; /* Spectrum Analyzer control parameters */
- LVM_INT16 PSA_GainOffset; /* Tone control flag */
- LVM_Callback CallBack;
- LVM_FLOAT *pPSAInput; /* PSA input pointer */
+ pLVPSA_Handle_t hPSAInstance; /* Spectrum Analyzer instance handle */
+ LVPSA_InstancePr_t PSA_Instance; /* Spectrum Analyzer instance */
+ LVPSA_InitParams_t PSA_InitParams; /* Spectrum Analyzer initialization parameters */
+ LVPSA_ControlParams_t PSA_ControlParams; /* Spectrum Analyzer control parameters */
+ LVM_INT16 PSA_GainOffset; /* Tone control flag */
+ LVM_Callback CallBack;
+ LVM_FLOAT* pPSAInput; /* PSA input pointer */
- LVM_INT16 NoSmoothVolume; /* Enable or disable smooth volume changes*/
+ LVM_INT16 NoSmoothVolume; /* Enable or disable smooth volume changes*/
-#ifdef SUPPORT_MC
- LVM_INT16 NrChannels;
- LVM_INT32 ChMask;
-#endif
+ LVM_INT16 NrChannels;
+ LVM_INT32 ChMask;
+ void* pScratch; /* Pointer to bundle scratch buffer*/
} LVM_Instance_t;
@@ -249,32 +219,19 @@
/* */
/************************************************************************************/
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance);
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance);
-void LVM_SetTrebleBoost( LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams);
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
-void LVM_SetVolume( LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams);
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
-LVM_INT32 LVM_VCCallBack(void* pBundleHandle,
- void* pGeneralPurpose,
- short CallBackParam);
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam);
-void LVM_SetHeadroom( LVM_Instance_t *pInstance,
- LVM_ControlParams_t *pParams);
-void LVM_BufferIn( LVM_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT **pToProcess,
- LVM_FLOAT **pProcessed,
- LVM_UINT16 *pNumSamples);
-void LVM_BufferOut( LVM_Handle_t hInstance,
- LVM_FLOAT *pOutData,
- LVM_UINT16 *pNumSamples);
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+ LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples);
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples);
-LVM_INT32 LVM_AlgoCallBack( void *pBundleHandle,
- void *pData,
- LVM_INT16 callbackId);
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId);
-#endif /* __LVM_PRIVATE_H__ */
-
+#endif /* __LVM_PRIVATE_H__ */
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index dc86cfd..c94c469 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -51,77 +51,61 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples,
- LVM_UINT32 AudioTime)
-{
-
- LVM_Instance_t *pInstance = (LVM_Instance_t *)hInstance;
- LVM_UINT16 SampleCount = NumSamples;
- LVM_FLOAT *pInput = (LVM_FLOAT *)pInData;
- LVM_FLOAT *pToProcess = (LVM_FLOAT *)pInData;
- LVM_FLOAT *pProcessed = pOutData;
- LVM_ReturnStatus_en Status;
-#ifdef SUPPORT_MC
- LVM_INT32 NrChannels = pInstance->NrChannels;
- LVM_INT32 ChMask = pInstance->ChMask;
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime) {
+ LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+ LVM_UINT16 SampleCount = NumSamples;
+ LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+ LVM_FLOAT* pToProcess = (LVM_FLOAT*)pInData;
+ LVM_FLOAT* pProcessed = pOutData;
+ LVM_ReturnStatus_en Status;
+ LVM_INT32 NrChannels = pInstance->NrChannels;
+ LVM_INT32 ChMask = pInstance->ChMask;
#define NrFrames SampleCount // alias for clarity
-#endif
/*
* Check if the number of samples is zero
*/
- if (NumSamples == 0)
- {
- return(LVM_SUCCESS);
+ if (NumSamples == 0) {
+ return (LVM_SUCCESS);
}
/*
* Check valid points have been given
*/
- if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
return (LVM_NULLADDRESS);
}
/*
* For unmanaged mode only
*/
- if(pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS)
- {
- /*
+ if (pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS) {
+ /*
* Check if the number of samples is a good multiple (unmanaged mode only)
*/
- if((NumSamples % pInstance->BlickSizeMultiple) != 0)
- {
- return(LVM_INVALIDNUMSAMPLES);
+ if ((NumSamples % pInstance->BlickSizeMultiple) != 0) {
+ return (LVM_INVALIDNUMSAMPLES);
}
/*
* Check the buffer alignment
*/
- if((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
- {
- return(LVM_ALIGNMENTERROR);
+ if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
+ return (LVM_ALIGNMENTERROR);
}
}
/*
* Update new parameters if necessary
*/
- if (pInstance->ControlPending == LVM_TRUE)
- {
+ if (pInstance->ControlPending == LVM_TRUE) {
Status = LVM_ApplyNewSettings(hInstance);
-#ifdef SUPPORT_MC
/* Update the local variable NrChannels from pInstance->NrChannels value */
NrChannels = pInstance->NrChannels;
- ChMask = pInstance->ChMask;
-#endif
+ ChMask = pInstance->ChMask;
- if(Status != LVM_SUCCESS)
- {
+ if (Status != LVM_SUCCESS) {
return Status;
}
}
@@ -129,201 +113,116 @@
/*
* Convert from Mono if necessary
*/
- if (pInstance->Params.SourceFormat == LVM_MONO)
- {
- MonoTo2I_Float(pInData, /* Source */
- pOutData, /* Destination */
- (LVM_INT16)NumSamples); /* Number of input samples */
- pInput = pOutData;
+ if (pInstance->Params.SourceFormat == LVM_MONO) {
+ MonoTo2I_Float(pInData, /* Source */
+ pOutData, /* Destination */
+ (LVM_INT16)NumSamples); /* Number of input samples */
+ pInput = pOutData;
pToProcess = pOutData;
-#ifdef SUPPORT_MC
NrChannels = 2;
- ChMask = AUDIO_CHANNEL_OUT_STEREO;
-#endif
+ ChMask = AUDIO_CHANNEL_OUT_STEREO;
}
/*
* Process the data with managed buffers
*/
- while (SampleCount != 0)
- {
+ while (SampleCount != 0) {
/*
* Manage the input buffer and frame processing
*/
- LVM_BufferIn(hInstance,
- pInput,
- &pToProcess,
- &pProcessed,
- &SampleCount);
+ LVM_BufferIn(hInstance, pInput, &pToProcess, &pProcessed, &SampleCount);
/*
* Only process data when SampleCount is none zero, a zero count can occur when
* the BufferIn routine is working in managed mode.
*/
- if (SampleCount != 0)
- {
+ if (SampleCount != 0) {
/*
* Apply ConcertSound if required
*/
- if (pInstance->CS_Active == LVM_TRUE)
- {
- (void)LVCS_Process(pInstance->hCSInstance, /* Concert Sound instance handle */
- pToProcess,
- pProcessed,
- SampleCount);
+ if (pInstance->CS_Active == LVM_TRUE) {
+ (void)LVCS_Process(pInstance->hCSInstance, /* Concert Sound instance handle */
+ pToProcess, pProcessed, SampleCount);
pToProcess = pProcessed;
}
/*
* Apply volume if required
*/
- if (pInstance->VC_Active!=0)
- {
-#ifdef SUPPORT_MC
- LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume,
- pToProcess,
- pProcessed,
- (LVM_INT16)(NrFrames),
- NrChannels);
-#else
- LVC_MixSoft_1St_D16C31_SAT(&pInstance->VC_Volume,
- pToProcess,
- pProcessed,
- (LVM_INT16)(2 * SampleCount)); /* Left and right*/
-#endif
+ if (pInstance->VC_Active != 0) {
+ LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume, pToProcess, pProcessed,
+ (LVM_INT16)(NrFrames), NrChannels);
pToProcess = pProcessed;
}
/*
* Call N-Band equaliser if enabled
*/
- if (pInstance->EQNB_Active == LVM_TRUE)
- {
- LVEQNB_Process(pInstance->hEQNBInstance, /* N-Band equaliser instance handle */
- pToProcess,
- pProcessed,
- SampleCount);
+ if (pInstance->EQNB_Active == LVM_TRUE) {
+ LVEQNB_Process(pInstance->hEQNBInstance, /* N-Band equaliser instance handle */
+ pToProcess, pProcessed, SampleCount);
pToProcess = pProcessed;
}
/*
* Call bass enhancement if enabled
*/
- if (pInstance->DBE_Active == LVM_TRUE)
- {
- LVDBE_Process(pInstance->hDBEInstance, /* Dynamic Bass Enhancement \
- instance handle */
- pToProcess,
- pProcessed,
- SampleCount);
+ if (pInstance->DBE_Active == LVM_TRUE) {
+ LVDBE_Process(pInstance->hDBEInstance, /* Dynamic Bass Enhancement \
+ instance handle */
+ pToProcess, pProcessed, SampleCount);
pToProcess = pProcessed;
}
/*
* Bypass mode or everything off, so copy the input to the output
*/
- if (pToProcess != pProcessed)
- {
-#ifdef SUPPORT_MC
- Copy_Float(pToProcess, /* Source */
- pProcessed, /* Destination */
- (LVM_INT16)(NrChannels * NrFrames)); /* Copy all samples */
-#else
- Copy_Float(pToProcess, /* Source */
- pProcessed, /* Destination */
- (LVM_INT16)(2 * SampleCount)); /* Left and right */
-#endif
+ if (pToProcess != pProcessed) {
+ Copy_Float(pToProcess, /* Source */
+ pProcessed, /* Destination */
+ (LVM_INT16)(NrChannels * NrFrames)); /* Copy all samples */
}
/*
* Apply treble boost if required
*/
- if (pInstance->TE_Active == LVM_TRUE)
- {
+ if (pInstance->TE_Active == LVM_TRUE) {
/*
* Apply the filter
*/
-#ifdef SUPPORT_MC
FO_Mc_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
- pProcessed,
- pProcessed,
- (LVM_INT16)NrFrames,
- (LVM_INT16)NrChannels);
-#else
- FO_2I_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
- pProcessed,
- pProcessed,
- (LVM_INT16)SampleCount);
-#endif
-
+ pProcessed, pProcessed, (LVM_INT16)NrFrames,
+ (LVM_INT16)NrChannels);
}
-#ifdef SUPPORT_MC
/*
* Volume balance
*/
- LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix,
- pProcessed,
- pProcessed,
- NrFrames,
- NrChannels,
- ChMask);
-#else
- /*
- * Volume balance
- */
- LVC_MixSoft_1St_2i_D16C31_SAT(&pInstance->VC_BalanceMix,
- pProcessed,
- pProcessed,
- SampleCount);
-#endif
+ LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix, pProcessed, pProcessed,
+ NrFrames, NrChannels, ChMask);
/*
* Perform Parametric Spectum Analysis
*/
if ((pInstance->Params.PSA_Enable == LVM_PSA_ON) &&
- (pInstance->InstParams.PSA_Included == LVM_PSA_ON))
- {
-#ifdef SUPPORT_MC
- FromMcToMono_Float(pProcessed,
- pInstance->pPSAInput,
- (LVM_INT16)(NrFrames),
+ (pInstance->InstParams.PSA_Included == LVM_PSA_ON)) {
+ FromMcToMono_Float(pProcessed, pInstance->pPSAInput, (LVM_INT16)(NrFrames),
NrChannels);
-#else
- From2iToMono_Float(pProcessed,
- pInstance->pPSAInput,
- (LVM_INT16)(SampleCount));
-#endif
- LVPSA_Process(pInstance->hPSAInstance,
- pInstance->pPSAInput,
- (LVM_UINT16)(SampleCount),
- AudioTime);
+ LVPSA_Process(pInstance->hPSAInstance, pInstance->pPSAInput,
+ (LVM_UINT16)(SampleCount), AudioTime);
}
/*
* DC removal
*/
-#ifdef SUPPORT_MC
- DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
- pProcessed,
- pProcessed,
- (LVM_INT16)NrFrames,
- NrChannels);
-#else
- DC_2I_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
- pProcessed,
- pProcessed,
- (LVM_INT16)SampleCount);
-#endif
+ DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance, pProcessed, pProcessed,
+ (LVM_INT16)NrFrames, NrChannels);
}
/*
* Manage the output buffer
*/
- LVM_BufferOut(hInstance,
- pOutData,
- &SampleCount);
-
+ LVM_BufferOut(hInstance, pOutData, &SampleCount);
}
- return(LVM_SUCCESS);
+ return (LVM_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
index 66392e2..860196b 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
@@ -30,431 +30,297 @@
/* */
/************************************************************************************/
-FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[] = {
+FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[] = {
- /* 22kHz sampling rate */
- {HPF_Fs22050_Gain1_A1, /* Gain setting 1 */
- HPF_Fs22050_Gain1_A0,
- -HPF_Fs22050_Gain1_B1},
- {HPF_Fs22050_Gain2_A1, /* Gain setting 2 */
- HPF_Fs22050_Gain2_A0,
- -HPF_Fs22050_Gain2_B1},
- {HPF_Fs22050_Gain3_A1, /* Gain setting 3 */
- HPF_Fs22050_Gain3_A0,
- -HPF_Fs22050_Gain3_B1},
- {HPF_Fs22050_Gain4_A1, /* Gain setting 4 */
- HPF_Fs22050_Gain4_A0,
- -HPF_Fs22050_Gain4_B1},
- {HPF_Fs22050_Gain5_A1, /* Gain setting 5 */
- HPF_Fs22050_Gain5_A0,
- -HPF_Fs22050_Gain5_B1},
- {HPF_Fs22050_Gain6_A1, /* Gain setting 6 */
- HPF_Fs22050_Gain6_A0,
- -HPF_Fs22050_Gain6_B1},
- {HPF_Fs22050_Gain7_A1, /* Gain setting 7 */
- HPF_Fs22050_Gain7_A0,
- -HPF_Fs22050_Gain7_B1},
- {HPF_Fs22050_Gain8_A1, /* Gain setting 8 */
- HPF_Fs22050_Gain8_A0,
- -HPF_Fs22050_Gain8_B1},
- {HPF_Fs22050_Gain9_A1, /* Gain setting 9 */
- HPF_Fs22050_Gain9_A0,
- -HPF_Fs22050_Gain9_B1},
- {HPF_Fs22050_Gain10_A1, /* Gain setting 10 */
- HPF_Fs22050_Gain10_A0,
- -HPF_Fs22050_Gain10_B1},
- {HPF_Fs22050_Gain11_A1, /* Gain setting 11 */
- HPF_Fs22050_Gain11_A0,
- -HPF_Fs22050_Gain11_B1},
- {HPF_Fs22050_Gain12_A1, /* Gain setting 12 */
- HPF_Fs22050_Gain12_A0,
- -HPF_Fs22050_Gain12_B1},
- {HPF_Fs22050_Gain13_A1, /* Gain setting 13 */
- HPF_Fs22050_Gain13_A0,
- -HPF_Fs22050_Gain13_B1},
- {HPF_Fs22050_Gain14_A1, /* Gain setting 14 */
- HPF_Fs22050_Gain14_A0,
- -HPF_Fs22050_Gain14_B1},
- {HPF_Fs22050_Gain15_A1, /* Gain setting 15 */
- HPF_Fs22050_Gain15_A0,
- -HPF_Fs22050_Gain15_B1},
+ /* 22kHz sampling rate */
+ {HPF_Fs22050_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs22050_Gain1_A0, -HPF_Fs22050_Gain1_B1},
+ {HPF_Fs22050_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs22050_Gain2_A0, -HPF_Fs22050_Gain2_B1},
+ {HPF_Fs22050_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs22050_Gain3_A0, -HPF_Fs22050_Gain3_B1},
+ {HPF_Fs22050_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs22050_Gain4_A0, -HPF_Fs22050_Gain4_B1},
+ {HPF_Fs22050_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs22050_Gain5_A0, -HPF_Fs22050_Gain5_B1},
+ {HPF_Fs22050_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs22050_Gain6_A0, -HPF_Fs22050_Gain6_B1},
+ {HPF_Fs22050_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs22050_Gain7_A0, -HPF_Fs22050_Gain7_B1},
+ {HPF_Fs22050_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs22050_Gain8_A0, -HPF_Fs22050_Gain8_B1},
+ {HPF_Fs22050_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs22050_Gain9_A0, -HPF_Fs22050_Gain9_B1},
+ {HPF_Fs22050_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs22050_Gain10_A0, -HPF_Fs22050_Gain10_B1},
+ {HPF_Fs22050_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs22050_Gain11_A0, -HPF_Fs22050_Gain11_B1},
+ {HPF_Fs22050_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs22050_Gain12_A0, -HPF_Fs22050_Gain12_B1},
+ {HPF_Fs22050_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs22050_Gain13_A0, -HPF_Fs22050_Gain13_B1},
+ {HPF_Fs22050_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs22050_Gain14_A0, -HPF_Fs22050_Gain14_B1},
+ {HPF_Fs22050_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs22050_Gain15_A0, -HPF_Fs22050_Gain15_B1},
- /* 24kHz sampling rate */
- {HPF_Fs24000_Gain1_A1, /* Gain setting 1 */
- HPF_Fs24000_Gain1_A0,
- -HPF_Fs24000_Gain1_B1},
- {HPF_Fs24000_Gain2_A1, /* Gain setting 2 */
- HPF_Fs24000_Gain2_A0,
- -HPF_Fs24000_Gain2_B1},
- {HPF_Fs24000_Gain3_A1, /* Gain setting 3 */
- HPF_Fs24000_Gain3_A0,
- -HPF_Fs24000_Gain3_B1},
- {HPF_Fs24000_Gain4_A1, /* Gain setting 4 */
- HPF_Fs24000_Gain4_A0,
- -HPF_Fs24000_Gain4_B1},
- {HPF_Fs24000_Gain5_A1, /* Gain setting 5 */
- HPF_Fs24000_Gain5_A0,
- -HPF_Fs24000_Gain5_B1},
- {HPF_Fs24000_Gain6_A1, /* Gain setting 6 */
- HPF_Fs24000_Gain6_A0,
- -HPF_Fs24000_Gain6_B1},
- {HPF_Fs24000_Gain7_A1, /* Gain setting 7 */
- HPF_Fs24000_Gain7_A0,
- -HPF_Fs24000_Gain7_B1},
- {HPF_Fs24000_Gain8_A1, /* Gain setting 8 */
- HPF_Fs24000_Gain8_A0,
- -HPF_Fs24000_Gain8_B1},
- {HPF_Fs24000_Gain9_A1, /* Gain setting 9 */
- HPF_Fs24000_Gain9_A0,
- -HPF_Fs24000_Gain9_B1},
- {HPF_Fs24000_Gain10_A1, /* Gain setting 10 */
- HPF_Fs24000_Gain10_A0,
- -HPF_Fs24000_Gain10_B1},
- {HPF_Fs24000_Gain11_A1, /* Gain setting 11 */
- HPF_Fs24000_Gain11_A0,
- -HPF_Fs24000_Gain11_B1},
- {HPF_Fs24000_Gain12_A1, /* Gain setting 12 */
- HPF_Fs24000_Gain12_A0,
- -HPF_Fs24000_Gain12_B1},
- {HPF_Fs24000_Gain13_A1, /* Gain setting 13 */
- HPF_Fs24000_Gain13_A0,
- -HPF_Fs24000_Gain13_B1},
- {HPF_Fs24000_Gain14_A1, /* Gain setting 14 */
- HPF_Fs24000_Gain14_A0,
- -HPF_Fs24000_Gain14_B1},
- {HPF_Fs24000_Gain15_A1, /* Gain setting 15 */
- HPF_Fs24000_Gain15_A0,
- -HPF_Fs24000_Gain15_B1},
+ /* 24kHz sampling rate */
+ {HPF_Fs24000_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs24000_Gain1_A0, -HPF_Fs24000_Gain1_B1},
+ {HPF_Fs24000_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs24000_Gain2_A0, -HPF_Fs24000_Gain2_B1},
+ {HPF_Fs24000_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs24000_Gain3_A0, -HPF_Fs24000_Gain3_B1},
+ {HPF_Fs24000_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs24000_Gain4_A0, -HPF_Fs24000_Gain4_B1},
+ {HPF_Fs24000_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs24000_Gain5_A0, -HPF_Fs24000_Gain5_B1},
+ {HPF_Fs24000_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs24000_Gain6_A0, -HPF_Fs24000_Gain6_B1},
+ {HPF_Fs24000_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs24000_Gain7_A0, -HPF_Fs24000_Gain7_B1},
+ {HPF_Fs24000_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs24000_Gain8_A0, -HPF_Fs24000_Gain8_B1},
+ {HPF_Fs24000_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs24000_Gain9_A0, -HPF_Fs24000_Gain9_B1},
+ {HPF_Fs24000_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs24000_Gain10_A0, -HPF_Fs24000_Gain10_B1},
+ {HPF_Fs24000_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs24000_Gain11_A0, -HPF_Fs24000_Gain11_B1},
+ {HPF_Fs24000_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs24000_Gain12_A0, -HPF_Fs24000_Gain12_B1},
+ {HPF_Fs24000_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs24000_Gain13_A0, -HPF_Fs24000_Gain13_B1},
+ {HPF_Fs24000_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs24000_Gain14_A0, -HPF_Fs24000_Gain14_B1},
+ {HPF_Fs24000_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs24000_Gain15_A0, -HPF_Fs24000_Gain15_B1},
- /* 32kHz sampling rate */
- {HPF_Fs32000_Gain1_A1, /* Gain setting 1 */
- HPF_Fs32000_Gain1_A0,
- -HPF_Fs32000_Gain1_B1},
- {HPF_Fs32000_Gain2_A1, /* Gain setting 2 */
- HPF_Fs32000_Gain2_A0,
- -HPF_Fs32000_Gain2_B1},
- {HPF_Fs32000_Gain3_A1, /* Gain setting 3 */
- HPF_Fs32000_Gain3_A0,
- -HPF_Fs32000_Gain3_B1},
- {HPF_Fs32000_Gain4_A1, /* Gain setting 4 */
- HPF_Fs32000_Gain4_A0,
- -HPF_Fs32000_Gain4_B1},
- {HPF_Fs32000_Gain5_A1, /* Gain setting 5 */
- HPF_Fs32000_Gain5_A0,
- -HPF_Fs32000_Gain5_B1},
- {HPF_Fs32000_Gain6_A1, /* Gain setting 6 */
- HPF_Fs32000_Gain6_A0,
- -HPF_Fs32000_Gain6_B1},
- {HPF_Fs32000_Gain7_A1, /* Gain setting 7 */
- HPF_Fs32000_Gain7_A0,
- -HPF_Fs32000_Gain7_B1},
- {HPF_Fs32000_Gain8_A1, /* Gain setting 8 */
- HPF_Fs32000_Gain8_A0,
- -HPF_Fs32000_Gain8_B1},
- {HPF_Fs32000_Gain9_A1, /* Gain setting 9 */
- HPF_Fs32000_Gain9_A0,
- -HPF_Fs32000_Gain9_B1},
- {HPF_Fs32000_Gain10_A1, /* Gain setting 10 */
- HPF_Fs32000_Gain10_A0,
- -HPF_Fs32000_Gain10_B1},
- {HPF_Fs32000_Gain11_A1, /* Gain setting 11 */
- HPF_Fs32000_Gain11_A0,
- -HPF_Fs32000_Gain11_B1},
- {HPF_Fs32000_Gain12_A1, /* Gain setting 12 */
- HPF_Fs32000_Gain12_A0,
- -HPF_Fs32000_Gain12_B1},
- {HPF_Fs32000_Gain13_A1, /* Gain setting 13 */
- HPF_Fs32000_Gain13_A0,
- -HPF_Fs32000_Gain13_B1},
- {HPF_Fs32000_Gain14_A1, /* Gain setting 14 */
- HPF_Fs32000_Gain14_A0,
- -HPF_Fs32000_Gain14_B1},
- {HPF_Fs32000_Gain15_A1, /* Gain setting 15 */
- HPF_Fs32000_Gain15_A0,
- -HPF_Fs32000_Gain15_B1},
+ /* 32kHz sampling rate */
+ {HPF_Fs32000_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs32000_Gain1_A0, -HPF_Fs32000_Gain1_B1},
+ {HPF_Fs32000_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs32000_Gain2_A0, -HPF_Fs32000_Gain2_B1},
+ {HPF_Fs32000_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs32000_Gain3_A0, -HPF_Fs32000_Gain3_B1},
+ {HPF_Fs32000_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs32000_Gain4_A0, -HPF_Fs32000_Gain4_B1},
+ {HPF_Fs32000_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs32000_Gain5_A0, -HPF_Fs32000_Gain5_B1},
+ {HPF_Fs32000_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs32000_Gain6_A0, -HPF_Fs32000_Gain6_B1},
+ {HPF_Fs32000_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs32000_Gain7_A0, -HPF_Fs32000_Gain7_B1},
+ {HPF_Fs32000_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs32000_Gain8_A0, -HPF_Fs32000_Gain8_B1},
+ {HPF_Fs32000_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs32000_Gain9_A0, -HPF_Fs32000_Gain9_B1},
+ {HPF_Fs32000_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs32000_Gain10_A0, -HPF_Fs32000_Gain10_B1},
+ {HPF_Fs32000_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs32000_Gain11_A0, -HPF_Fs32000_Gain11_B1},
+ {HPF_Fs32000_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs32000_Gain12_A0, -HPF_Fs32000_Gain12_B1},
+ {HPF_Fs32000_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs32000_Gain13_A0, -HPF_Fs32000_Gain13_B1},
+ {HPF_Fs32000_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs32000_Gain14_A0, -HPF_Fs32000_Gain14_B1},
+ {HPF_Fs32000_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs32000_Gain15_A0, -HPF_Fs32000_Gain15_B1},
- /* 44kHz sampling rate */
- {HPF_Fs44100_Gain1_A1, /* Gain setting 1 */
- HPF_Fs44100_Gain1_A0,
- -HPF_Fs44100_Gain1_B1,},
- {HPF_Fs44100_Gain2_A1, /* Gain setting 2 */
- HPF_Fs44100_Gain2_A0,
- -HPF_Fs44100_Gain2_B1},
- {HPF_Fs44100_Gain3_A1, /* Gain setting 3 */
- HPF_Fs44100_Gain3_A0,
- -HPF_Fs44100_Gain3_B1},
- {HPF_Fs44100_Gain4_A1, /* Gain setting 4 */
- HPF_Fs44100_Gain4_A0,
- -HPF_Fs44100_Gain4_B1},
- {HPF_Fs44100_Gain5_A1, /* Gain setting 5 */
- HPF_Fs44100_Gain5_A0,
- -HPF_Fs44100_Gain5_B1},
- {HPF_Fs44100_Gain6_A1, /* Gain setting 6 */
- HPF_Fs44100_Gain6_A0,
- -HPF_Fs44100_Gain6_B1},
- {HPF_Fs44100_Gain7_A1, /* Gain setting 7 */
- HPF_Fs44100_Gain7_A0,
- -HPF_Fs44100_Gain7_B1},
- {HPF_Fs44100_Gain8_A1, /* Gain setting 8 */
- HPF_Fs44100_Gain8_A0,
- -HPF_Fs44100_Gain8_B1},
- {HPF_Fs44100_Gain9_A1, /* Gain setting 9 */
- HPF_Fs44100_Gain9_A0,
- -HPF_Fs44100_Gain9_B1},
- {HPF_Fs44100_Gain10_A1, /* Gain setting 10 */
- HPF_Fs44100_Gain10_A0,
- -HPF_Fs44100_Gain10_B1},
- {HPF_Fs44100_Gain11_A1, /* Gain setting 11 */
- HPF_Fs44100_Gain11_A0,
- -HPF_Fs44100_Gain11_B1},
- {HPF_Fs44100_Gain12_A1, /* Gain setting 12 */
- HPF_Fs44100_Gain12_A0,
- -HPF_Fs44100_Gain12_B1},
- {HPF_Fs44100_Gain13_A1, /* Gain setting 13 */
- HPF_Fs44100_Gain13_A0,
- -HPF_Fs44100_Gain13_B1},
- {HPF_Fs44100_Gain14_A1, /* Gain setting 14 */
- HPF_Fs44100_Gain14_A0,
- -HPF_Fs44100_Gain14_B1},
- {HPF_Fs44100_Gain15_A1, /* Gain setting 15 */
- HPF_Fs44100_Gain15_A0,
- -HPF_Fs44100_Gain15_B1},
+ /* 44kHz sampling rate */
+ {
+ HPF_Fs44100_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs44100_Gain1_A0,
+ -HPF_Fs44100_Gain1_B1,
+ },
+ {HPF_Fs44100_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs44100_Gain2_A0, -HPF_Fs44100_Gain2_B1},
+ {HPF_Fs44100_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs44100_Gain3_A0, -HPF_Fs44100_Gain3_B1},
+ {HPF_Fs44100_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs44100_Gain4_A0, -HPF_Fs44100_Gain4_B1},
+ {HPF_Fs44100_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs44100_Gain5_A0, -HPF_Fs44100_Gain5_B1},
+ {HPF_Fs44100_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs44100_Gain6_A0, -HPF_Fs44100_Gain6_B1},
+ {HPF_Fs44100_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs44100_Gain7_A0, -HPF_Fs44100_Gain7_B1},
+ {HPF_Fs44100_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs44100_Gain8_A0, -HPF_Fs44100_Gain8_B1},
+ {HPF_Fs44100_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs44100_Gain9_A0, -HPF_Fs44100_Gain9_B1},
+ {HPF_Fs44100_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs44100_Gain10_A0, -HPF_Fs44100_Gain10_B1},
+ {HPF_Fs44100_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs44100_Gain11_A0, -HPF_Fs44100_Gain11_B1},
+ {HPF_Fs44100_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs44100_Gain12_A0, -HPF_Fs44100_Gain12_B1},
+ {HPF_Fs44100_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs44100_Gain13_A0, -HPF_Fs44100_Gain13_B1},
+ {HPF_Fs44100_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs44100_Gain14_A0, -HPF_Fs44100_Gain14_B1},
+ {HPF_Fs44100_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs44100_Gain15_A0, -HPF_Fs44100_Gain15_B1},
- /* 48kHz sampling rate */
- {HPF_Fs48000_Gain1_A1, /* Gain setting 1 */
- HPF_Fs48000_Gain1_A0,
- -HPF_Fs48000_Gain1_B1},
- {HPF_Fs48000_Gain2_A1, /* Gain setting 2 */
- HPF_Fs48000_Gain2_A0,
- -HPF_Fs48000_Gain2_B1},
- {HPF_Fs48000_Gain3_A1, /* Gain setting 3 */
- HPF_Fs48000_Gain3_A0,
- -HPF_Fs48000_Gain3_B1},
- {HPF_Fs48000_Gain4_A1, /* Gain setting 4 */
- HPF_Fs48000_Gain4_A0,
- -HPF_Fs48000_Gain4_B1},
- {HPF_Fs48000_Gain5_A1, /* Gain setting 5 */
- HPF_Fs48000_Gain5_A0,
- -HPF_Fs48000_Gain5_B1},
- {HPF_Fs48000_Gain6_A1, /* Gain setting 6 */
- HPF_Fs48000_Gain6_A0,
- -HPF_Fs48000_Gain6_B1},
- {HPF_Fs48000_Gain7_A1, /* Gain setting 7 */
- HPF_Fs48000_Gain7_A0,
- -HPF_Fs48000_Gain7_B1},
- {HPF_Fs48000_Gain8_A1, /* Gain setting 8 */
- HPF_Fs48000_Gain8_A0,
- -HPF_Fs48000_Gain8_B1},
- {HPF_Fs48000_Gain9_A1, /* Gain setting 9 */
- HPF_Fs48000_Gain9_A0,
- -HPF_Fs48000_Gain9_B1},
- {HPF_Fs48000_Gain10_A1, /* Gain setting 10 */
- HPF_Fs48000_Gain10_A0,
- -HPF_Fs48000_Gain10_B1},
- {HPF_Fs48000_Gain11_A1, /* Gain setting 11 */
- HPF_Fs48000_Gain11_A0,
- -HPF_Fs48000_Gain11_B1},
- {HPF_Fs48000_Gain12_A1, /* Gain setting 12 */
- HPF_Fs48000_Gain12_A0,
- -HPF_Fs48000_Gain12_B1},
- {HPF_Fs48000_Gain13_A1, /* Gain setting 13 */
- HPF_Fs48000_Gain13_A0,
- -HPF_Fs48000_Gain13_B1},
- {HPF_Fs48000_Gain14_A1, /* Gain setting 14 */
- HPF_Fs48000_Gain14_A0,
- -HPF_Fs48000_Gain14_B1},
- {HPF_Fs48000_Gain15_A1, /* Gain setting 15 */
- HPF_Fs48000_Gain15_A0,
- -HPF_Fs48000_Gain15_B1}
- ,
- /* 88kHz Sampling rate */
- {HPF_Fs88200_Gain1_A1, /* Gain Setting 1 */
- HPF_Fs88200_Gain1_A0,
- -HPF_Fs88200_Gain1_B1},
- {HPF_Fs88200_Gain2_A1, /* Gain Setting 2 */
- HPF_Fs88200_Gain2_A0,
- -HPF_Fs88200_Gain2_B1},
- {HPF_Fs88200_Gain3_A1, /* Gain Setting 3 */
- HPF_Fs88200_Gain3_A0,
- -HPF_Fs88200_Gain3_B1},
- {HPF_Fs88200_Gain4_A1, /* Gain Setting 4 */
- HPF_Fs88200_Gain4_A0,
- -HPF_Fs88200_Gain4_B1},
- {HPF_Fs88200_Gain5_A1, /* Gain Setting 5 */
- HPF_Fs88200_Gain5_A0,
- -HPF_Fs88200_Gain5_B1},
- {HPF_Fs88200_Gain6_A1, /* Gain Setting 6 */
- HPF_Fs88200_Gain6_A0,
- -HPF_Fs88200_Gain6_B1},
- {HPF_Fs88200_Gain7_A1, /* Gain Setting 7 */
- HPF_Fs88200_Gain7_A0,
- -HPF_Fs88200_Gain7_B1},
- {HPF_Fs88200_Gain8_A1, /* Gain Setting 8 */
- HPF_Fs88200_Gain8_A0,
- -HPF_Fs88200_Gain8_B1},
- {HPF_Fs88200_Gain9_A1, /* Gain Setting 9 */
- HPF_Fs88200_Gain9_A0,
- -HPF_Fs88200_Gain9_B1},
- {HPF_Fs88200_Gain10_A1, /* Gain Setting 10 */
- HPF_Fs88200_Gain10_A0,
- -HPF_Fs88200_Gain10_B1},
- {HPF_Fs88200_Gain11_A1, /* Gain Setting 11 */
- HPF_Fs88200_Gain11_A0,
- -HPF_Fs88200_Gain11_B1},
- {HPF_Fs88200_Gain12_A1, /* Gain Setting 12 */
- HPF_Fs88200_Gain12_A0,
- -HPF_Fs88200_Gain12_B1},
- {HPF_Fs88200_Gain13_A1, /* Gain Setting 13 */
- HPF_Fs88200_Gain13_A0,
- -HPF_Fs88200_Gain13_B1},
- {HPF_Fs88200_Gain14_A1, /* Gain Setting 14 */
- HPF_Fs88200_Gain14_A0,
- -HPF_Fs88200_Gain14_B1},
- {HPF_Fs88200_Gain15_A1, /* Gain Setting 15 */
- HPF_Fs88200_Gain15_A0,
- -HPF_Fs88200_Gain15_B1},
+ /* 48kHz sampling rate */
+ {HPF_Fs48000_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs48000_Gain1_A0, -HPF_Fs48000_Gain1_B1},
+ {HPF_Fs48000_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs48000_Gain2_A0, -HPF_Fs48000_Gain2_B1},
+ {HPF_Fs48000_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs48000_Gain3_A0, -HPF_Fs48000_Gain3_B1},
+ {HPF_Fs48000_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs48000_Gain4_A0, -HPF_Fs48000_Gain4_B1},
+ {HPF_Fs48000_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs48000_Gain5_A0, -HPF_Fs48000_Gain5_B1},
+ {HPF_Fs48000_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs48000_Gain6_A0, -HPF_Fs48000_Gain6_B1},
+ {HPF_Fs48000_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs48000_Gain7_A0, -HPF_Fs48000_Gain7_B1},
+ {HPF_Fs48000_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs48000_Gain8_A0, -HPF_Fs48000_Gain8_B1},
+ {HPF_Fs48000_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs48000_Gain9_A0, -HPF_Fs48000_Gain9_B1},
+ {HPF_Fs48000_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs48000_Gain10_A0, -HPF_Fs48000_Gain10_B1},
+ {HPF_Fs48000_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs48000_Gain11_A0, -HPF_Fs48000_Gain11_B1},
+ {HPF_Fs48000_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs48000_Gain12_A0, -HPF_Fs48000_Gain12_B1},
+ {HPF_Fs48000_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs48000_Gain13_A0, -HPF_Fs48000_Gain13_B1},
+ {HPF_Fs48000_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs48000_Gain14_A0, -HPF_Fs48000_Gain14_B1},
+ {HPF_Fs48000_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs48000_Gain15_A0, -HPF_Fs48000_Gain15_B1},
+ /* 88kHz Sampling rate */
+ {HPF_Fs88200_Gain1_A1, /* Gain Setting 1 */
+ HPF_Fs88200_Gain1_A0, -HPF_Fs88200_Gain1_B1},
+ {HPF_Fs88200_Gain2_A1, /* Gain Setting 2 */
+ HPF_Fs88200_Gain2_A0, -HPF_Fs88200_Gain2_B1},
+ {HPF_Fs88200_Gain3_A1, /* Gain Setting 3 */
+ HPF_Fs88200_Gain3_A0, -HPF_Fs88200_Gain3_B1},
+ {HPF_Fs88200_Gain4_A1, /* Gain Setting 4 */
+ HPF_Fs88200_Gain4_A0, -HPF_Fs88200_Gain4_B1},
+ {HPF_Fs88200_Gain5_A1, /* Gain Setting 5 */
+ HPF_Fs88200_Gain5_A0, -HPF_Fs88200_Gain5_B1},
+ {HPF_Fs88200_Gain6_A1, /* Gain Setting 6 */
+ HPF_Fs88200_Gain6_A0, -HPF_Fs88200_Gain6_B1},
+ {HPF_Fs88200_Gain7_A1, /* Gain Setting 7 */
+ HPF_Fs88200_Gain7_A0, -HPF_Fs88200_Gain7_B1},
+ {HPF_Fs88200_Gain8_A1, /* Gain Setting 8 */
+ HPF_Fs88200_Gain8_A0, -HPF_Fs88200_Gain8_B1},
+ {HPF_Fs88200_Gain9_A1, /* Gain Setting 9 */
+ HPF_Fs88200_Gain9_A0, -HPF_Fs88200_Gain9_B1},
+ {HPF_Fs88200_Gain10_A1, /* Gain Setting 10 */
+ HPF_Fs88200_Gain10_A0, -HPF_Fs88200_Gain10_B1},
+ {HPF_Fs88200_Gain11_A1, /* Gain Setting 11 */
+ HPF_Fs88200_Gain11_A0, -HPF_Fs88200_Gain11_B1},
+ {HPF_Fs88200_Gain12_A1, /* Gain Setting 12 */
+ HPF_Fs88200_Gain12_A0, -HPF_Fs88200_Gain12_B1},
+ {HPF_Fs88200_Gain13_A1, /* Gain Setting 13 */
+ HPF_Fs88200_Gain13_A0, -HPF_Fs88200_Gain13_B1},
+ {HPF_Fs88200_Gain14_A1, /* Gain Setting 14 */
+ HPF_Fs88200_Gain14_A0, -HPF_Fs88200_Gain14_B1},
+ {HPF_Fs88200_Gain15_A1, /* Gain Setting 15 */
+ HPF_Fs88200_Gain15_A0, -HPF_Fs88200_Gain15_B1},
- /* 96kHz sampling rate */
- {HPF_Fs96000_Gain1_A1, /* Gain setting 1 */
- HPF_Fs96000_Gain1_A0,
- -HPF_Fs96000_Gain1_B1},
- {HPF_Fs96000_Gain2_A1, /* Gain setting 2 */
- HPF_Fs96000_Gain2_A0,
- -HPF_Fs96000_Gain2_B1},
- {HPF_Fs96000_Gain3_A1, /* Gain setting 3 */
- HPF_Fs96000_Gain3_A0,
- -HPF_Fs96000_Gain3_B1},
- {HPF_Fs96000_Gain4_A1, /* Gain setting 4 */
- HPF_Fs96000_Gain4_A0,
- -HPF_Fs96000_Gain4_B1},
- {HPF_Fs96000_Gain5_A1, /* Gain setting 5 */
- HPF_Fs96000_Gain5_A0,
- -HPF_Fs96000_Gain5_B1},
- {HPF_Fs96000_Gain6_A1, /* Gain setting 6 */
- HPF_Fs96000_Gain6_A0,
- -HPF_Fs96000_Gain6_B1},
- {HPF_Fs96000_Gain7_A1, /* Gain setting 7 */
- HPF_Fs96000_Gain7_A0,
- -HPF_Fs96000_Gain7_B1},
- {HPF_Fs96000_Gain8_A1, /* Gain setting 8 */
- HPF_Fs96000_Gain8_A0,
- -HPF_Fs96000_Gain8_B1},
- {HPF_Fs96000_Gain9_A1, /* Gain setting 9 */
- HPF_Fs96000_Gain9_A0,
- -HPF_Fs96000_Gain9_B1},
- {HPF_Fs96000_Gain10_A1, /* Gain setting 10 */
- HPF_Fs96000_Gain10_A0,
- -HPF_Fs96000_Gain10_B1},
- {HPF_Fs96000_Gain11_A1, /* Gain setting 11 */
- HPF_Fs96000_Gain11_A0,
- -HPF_Fs96000_Gain11_B1},
- {HPF_Fs96000_Gain12_A1, /* Gain setting 12 */
- HPF_Fs96000_Gain12_A0,
- -HPF_Fs96000_Gain12_B1},
- {HPF_Fs96000_Gain13_A1, /* Gain setting 13 */
- HPF_Fs96000_Gain13_A0,
- -HPF_Fs96000_Gain13_B1},
- {HPF_Fs96000_Gain14_A1, /* Gain setting 14 */
- HPF_Fs96000_Gain14_A0,
- -HPF_Fs96000_Gain14_B1},
- {HPF_Fs96000_Gain15_A1, /* Gain setting 15 */
- HPF_Fs96000_Gain15_A0,
- -HPF_Fs96000_Gain15_B1},
+ /* 96kHz sampling rate */
+ {HPF_Fs96000_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs96000_Gain1_A0, -HPF_Fs96000_Gain1_B1},
+ {HPF_Fs96000_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs96000_Gain2_A0, -HPF_Fs96000_Gain2_B1},
+ {HPF_Fs96000_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs96000_Gain3_A0, -HPF_Fs96000_Gain3_B1},
+ {HPF_Fs96000_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs96000_Gain4_A0, -HPF_Fs96000_Gain4_B1},
+ {HPF_Fs96000_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs96000_Gain5_A0, -HPF_Fs96000_Gain5_B1},
+ {HPF_Fs96000_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs96000_Gain6_A0, -HPF_Fs96000_Gain6_B1},
+ {HPF_Fs96000_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs96000_Gain7_A0, -HPF_Fs96000_Gain7_B1},
+ {HPF_Fs96000_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs96000_Gain8_A0, -HPF_Fs96000_Gain8_B1},
+ {HPF_Fs96000_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs96000_Gain9_A0, -HPF_Fs96000_Gain9_B1},
+ {HPF_Fs96000_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs96000_Gain10_A0, -HPF_Fs96000_Gain10_B1},
+ {HPF_Fs96000_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs96000_Gain11_A0, -HPF_Fs96000_Gain11_B1},
+ {HPF_Fs96000_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs96000_Gain12_A0, -HPF_Fs96000_Gain12_B1},
+ {HPF_Fs96000_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs96000_Gain13_A0, -HPF_Fs96000_Gain13_B1},
+ {HPF_Fs96000_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs96000_Gain14_A0, -HPF_Fs96000_Gain14_B1},
+ {HPF_Fs96000_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs96000_Gain15_A0, -HPF_Fs96000_Gain15_B1},
- /* 176kHz Sampling rate */
- {HPF_Fs176400_Gain1_A1, /* Gain Setting 1 */
- HPF_Fs176400_Gain1_A0,
- -HPF_Fs176400_Gain1_B1},
- {HPF_Fs176400_Gain2_A1, /* Gain Setting 2 */
- HPF_Fs176400_Gain2_A0,
- -HPF_Fs176400_Gain2_B1},
- {HPF_Fs176400_Gain3_A1, /* Gain Setting 3 */
- HPF_Fs176400_Gain3_A0,
- -HPF_Fs176400_Gain3_B1},
- {HPF_Fs176400_Gain4_A1, /* Gain Setting 4 */
- HPF_Fs176400_Gain4_A0,
- -HPF_Fs176400_Gain4_B1},
- {HPF_Fs176400_Gain5_A1, /* Gain Setting 5 */
- HPF_Fs176400_Gain5_A0,
- -HPF_Fs176400_Gain5_B1},
- {HPF_Fs176400_Gain6_A1, /* Gain Setting 6 */
- HPF_Fs176400_Gain6_A0,
- -HPF_Fs176400_Gain6_B1},
- {HPF_Fs176400_Gain7_A1, /* Gain Setting 7 */
- HPF_Fs176400_Gain7_A0,
- -HPF_Fs176400_Gain7_B1},
- {HPF_Fs176400_Gain8_A1, /* Gain Setting 8 */
- HPF_Fs176400_Gain8_A0,
- -HPF_Fs176400_Gain8_B1},
- {HPF_Fs176400_Gain9_A1, /* Gain Setting 9 */
- HPF_Fs176400_Gain9_A0,
- -HPF_Fs176400_Gain9_B1},
- {HPF_Fs176400_Gain10_A1, /* Gain Setting 10 */
- HPF_Fs176400_Gain10_A0,
- -HPF_Fs176400_Gain10_B1},
- {HPF_Fs176400_Gain11_A1, /* Gain Setting 11 */
- HPF_Fs176400_Gain11_A0,
- -HPF_Fs176400_Gain11_B1},
- {HPF_Fs176400_Gain12_A1, /* Gain Setting 12 */
- HPF_Fs176400_Gain12_A0,
- -HPF_Fs176400_Gain12_B1},
- {HPF_Fs176400_Gain13_A1, /* Gain Setting 13 */
- HPF_Fs176400_Gain13_A0,
- -HPF_Fs176400_Gain13_B1},
- {HPF_Fs176400_Gain14_A1, /* Gain Setting 14 */
- HPF_Fs176400_Gain14_A0,
- -HPF_Fs176400_Gain14_B1},
- {HPF_Fs176400_Gain15_A1, /* Gain Setting 15 */
- HPF_Fs176400_Gain15_A0,
- -HPF_Fs176400_Gain15_B1},
+ /* 176kHz Sampling rate */
+ {HPF_Fs176400_Gain1_A1, /* Gain Setting 1 */
+ HPF_Fs176400_Gain1_A0, -HPF_Fs176400_Gain1_B1},
+ {HPF_Fs176400_Gain2_A1, /* Gain Setting 2 */
+ HPF_Fs176400_Gain2_A0, -HPF_Fs176400_Gain2_B1},
+ {HPF_Fs176400_Gain3_A1, /* Gain Setting 3 */
+ HPF_Fs176400_Gain3_A0, -HPF_Fs176400_Gain3_B1},
+ {HPF_Fs176400_Gain4_A1, /* Gain Setting 4 */
+ HPF_Fs176400_Gain4_A0, -HPF_Fs176400_Gain4_B1},
+ {HPF_Fs176400_Gain5_A1, /* Gain Setting 5 */
+ HPF_Fs176400_Gain5_A0, -HPF_Fs176400_Gain5_B1},
+ {HPF_Fs176400_Gain6_A1, /* Gain Setting 6 */
+ HPF_Fs176400_Gain6_A0, -HPF_Fs176400_Gain6_B1},
+ {HPF_Fs176400_Gain7_A1, /* Gain Setting 7 */
+ HPF_Fs176400_Gain7_A0, -HPF_Fs176400_Gain7_B1},
+ {HPF_Fs176400_Gain8_A1, /* Gain Setting 8 */
+ HPF_Fs176400_Gain8_A0, -HPF_Fs176400_Gain8_B1},
+ {HPF_Fs176400_Gain9_A1, /* Gain Setting 9 */
+ HPF_Fs176400_Gain9_A0, -HPF_Fs176400_Gain9_B1},
+ {HPF_Fs176400_Gain10_A1, /* Gain Setting 10 */
+ HPF_Fs176400_Gain10_A0, -HPF_Fs176400_Gain10_B1},
+ {HPF_Fs176400_Gain11_A1, /* Gain Setting 11 */
+ HPF_Fs176400_Gain11_A0, -HPF_Fs176400_Gain11_B1},
+ {HPF_Fs176400_Gain12_A1, /* Gain Setting 12 */
+ HPF_Fs176400_Gain12_A0, -HPF_Fs176400_Gain12_B1},
+ {HPF_Fs176400_Gain13_A1, /* Gain Setting 13 */
+ HPF_Fs176400_Gain13_A0, -HPF_Fs176400_Gain13_B1},
+ {HPF_Fs176400_Gain14_A1, /* Gain Setting 14 */
+ HPF_Fs176400_Gain14_A0, -HPF_Fs176400_Gain14_B1},
+ {HPF_Fs176400_Gain15_A1, /* Gain Setting 15 */
+ HPF_Fs176400_Gain15_A0, -HPF_Fs176400_Gain15_B1},
- /* 192kHz sampling rate */
- {HPF_Fs192000_Gain1_A1, /* Gain setting 1 */
- HPF_Fs192000_Gain1_A0,
- -HPF_Fs192000_Gain1_B1},
- {HPF_Fs192000_Gain2_A1, /* Gain setting 2 */
- HPF_Fs192000_Gain2_A0,
- -HPF_Fs192000_Gain2_B1},
- {HPF_Fs192000_Gain3_A1, /* Gain setting 3 */
- HPF_Fs192000_Gain3_A0,
- -HPF_Fs192000_Gain3_B1},
- {HPF_Fs192000_Gain4_A1, /* Gain setting 4 */
- HPF_Fs192000_Gain4_A0,
- -HPF_Fs192000_Gain4_B1},
- {HPF_Fs192000_Gain5_A1, /* Gain setting 5 */
- HPF_Fs192000_Gain5_A0,
- -HPF_Fs192000_Gain5_B1},
- {HPF_Fs192000_Gain6_A1, /* Gain setting 6 */
- HPF_Fs192000_Gain6_A0,
- -HPF_Fs192000_Gain6_B1},
- {HPF_Fs192000_Gain7_A1, /* Gain setting 7 */
- HPF_Fs192000_Gain7_A0,
- -HPF_Fs192000_Gain7_B1},
- {HPF_Fs192000_Gain8_A1, /* Gain setting 8 */
- HPF_Fs192000_Gain8_A0,
- -HPF_Fs192000_Gain8_B1},
- {HPF_Fs192000_Gain9_A1, /* Gain setting 9 */
- HPF_Fs192000_Gain9_A0,
- -HPF_Fs192000_Gain9_B1},
- {HPF_Fs192000_Gain10_A1, /* Gain setting 10 */
- HPF_Fs192000_Gain10_A0,
- -HPF_Fs192000_Gain10_B1},
- {HPF_Fs192000_Gain11_A1, /* Gain setting 11 */
- HPF_Fs192000_Gain11_A0,
- -HPF_Fs192000_Gain11_B1},
- {HPF_Fs192000_Gain12_A1, /* Gain setting 12 */
- HPF_Fs192000_Gain12_A0,
- -HPF_Fs192000_Gain12_B1},
- {HPF_Fs192000_Gain13_A1, /* Gain setting 13 */
- HPF_Fs192000_Gain13_A0,
- -HPF_Fs192000_Gain13_B1},
- {HPF_Fs192000_Gain14_A1, /* Gain setting 14 */
- HPF_Fs192000_Gain14_A0,
- -HPF_Fs192000_Gain14_B1},
- {HPF_Fs192000_Gain15_A1, /* Gain setting 15 */
- HPF_Fs192000_Gain15_A0,
- -HPF_Fs192000_Gain15_B1}
- };
+ /* 192kHz sampling rate */
+ {HPF_Fs192000_Gain1_A1, /* Gain setting 1 */
+ HPF_Fs192000_Gain1_A0, -HPF_Fs192000_Gain1_B1},
+ {HPF_Fs192000_Gain2_A1, /* Gain setting 2 */
+ HPF_Fs192000_Gain2_A0, -HPF_Fs192000_Gain2_B1},
+ {HPF_Fs192000_Gain3_A1, /* Gain setting 3 */
+ HPF_Fs192000_Gain3_A0, -HPF_Fs192000_Gain3_B1},
+ {HPF_Fs192000_Gain4_A1, /* Gain setting 4 */
+ HPF_Fs192000_Gain4_A0, -HPF_Fs192000_Gain4_B1},
+ {HPF_Fs192000_Gain5_A1, /* Gain setting 5 */
+ HPF_Fs192000_Gain5_A0, -HPF_Fs192000_Gain5_B1},
+ {HPF_Fs192000_Gain6_A1, /* Gain setting 6 */
+ HPF_Fs192000_Gain6_A0, -HPF_Fs192000_Gain6_B1},
+ {HPF_Fs192000_Gain7_A1, /* Gain setting 7 */
+ HPF_Fs192000_Gain7_A0, -HPF_Fs192000_Gain7_B1},
+ {HPF_Fs192000_Gain8_A1, /* Gain setting 8 */
+ HPF_Fs192000_Gain8_A0, -HPF_Fs192000_Gain8_B1},
+ {HPF_Fs192000_Gain9_A1, /* Gain setting 9 */
+ HPF_Fs192000_Gain9_A0, -HPF_Fs192000_Gain9_B1},
+ {HPF_Fs192000_Gain10_A1, /* Gain setting 10 */
+ HPF_Fs192000_Gain10_A0, -HPF_Fs192000_Gain10_B1},
+ {HPF_Fs192000_Gain11_A1, /* Gain setting 11 */
+ HPF_Fs192000_Gain11_A0, -HPF_Fs192000_Gain11_B1},
+ {HPF_Fs192000_Gain12_A1, /* Gain setting 12 */
+ HPF_Fs192000_Gain12_A0, -HPF_Fs192000_Gain12_B1},
+ {HPF_Fs192000_Gain13_A1, /* Gain setting 13 */
+ HPF_Fs192000_Gain13_A0, -HPF_Fs192000_Gain13_B1},
+ {HPF_Fs192000_Gain14_A1, /* Gain setting 14 */
+ HPF_Fs192000_Gain14_A0, -HPF_Fs192000_Gain14_B1},
+ {HPF_Fs192000_Gain15_A1, /* Gain setting 15 */
+ HPF_Fs192000_Gain15_A0, -HPF_Fs192000_Gain15_B1}};
/************************************************************************************/
/* */
@@ -463,14 +329,13 @@
/************************************************************************************/
/* dB to linear conversion table */
-const LVM_FLOAT LVM_VolumeTable[] = {
- 1.000f, /* 0dB */
- 0.891f, /* -1dB */
- 0.794f, /* -2dB */
- 0.708f, /* -3dB */
- 0.631f, /* -4dB */
- 0.562f, /* -5dB */
- 0.501f}; /* -6dB */
+const LVM_FLOAT LVM_VolumeTable[] = {1.000f, /* 0dB */
+ 0.891f, /* -1dB */
+ 0.794f, /* -2dB */
+ 0.708f, /* -3dB */
+ 0.631f, /* -4dB */
+ 0.562f, /* -5dB */
+ 0.501f}; /* -6dB */
/************************************************************************************/
/* */
@@ -478,24 +343,16 @@
/* */
/************************************************************************************/
-#define LVM_MIX_TC_Fs8000 32580 /* Floating point value 0.994262695 */
-#define LVM_MIX_TC_Fs11025 32632 /* Floating point value 0.995849609 */
-#define LVM_MIX_TC_Fs12000 32643 /* Floating point value 0.996185303 */
-#define LVM_MIX_TC_Fs16000 32674 /* Floating point value 0.997131348 */
-#define LVM_MIX_TC_Fs22050 32700 /* Floating point value 0.997924805 */
-#define LVM_MIX_TC_Fs24000 32705 /* Floating point value 0.998077393 */
-#define LVM_MIX_TC_Fs32000 32721 /* Floating point value 0.998565674 */
-#define LVM_MIX_TC_Fs44100 32734 /* Floating point value 0.998962402 */
-#define LVM_MIX_TC_Fs48000 32737 /* Floating point value 0.999053955 */
+#define LVM_MIX_TC_Fs8000 32580 /* Floating point value 0.994262695 */
+#define LVM_MIX_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVM_MIX_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVM_MIX_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVM_MIX_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVM_MIX_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVM_MIX_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVM_MIX_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVM_MIX_TC_Fs48000 32737 /* Floating point value 0.999053955 */
-const LVM_INT16 LVM_MixerTCTable[] = {
- LVM_MIX_TC_Fs8000,
- LVM_MIX_TC_Fs11025,
- LVM_MIX_TC_Fs12000,
- LVM_MIX_TC_Fs16000,
- LVM_MIX_TC_Fs22050,
- LVM_MIX_TC_Fs24000,
- LVM_MIX_TC_Fs32000,
- LVM_MIX_TC_Fs44100,
- LVM_MIX_TC_Fs48000};
-
+const LVM_INT16 LVM_MixerTCTable[] = {LVM_MIX_TC_Fs8000, LVM_MIX_TC_Fs11025, LVM_MIX_TC_Fs12000,
+ LVM_MIX_TC_Fs16000, LVM_MIX_TC_Fs22050, LVM_MIX_TC_Fs24000,
+ LVM_MIX_TC_Fs32000, LVM_MIX_TC_Fs44100, LVM_MIX_TC_Fs48000};
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
index fc82194..cf2fb5d 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
@@ -33,7 +33,7 @@
/* */
/************************************************************************************/
-extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
+extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
/************************************************************************************/
/* */
@@ -45,4 +45,3 @@
extern const LVM_INT16 LVM_MixerTCTable[];
#endif /* __LVM_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/lib/Common/lib/AGC.h b/media/libeffects/lvm/lib/Common/lib/AGC.h
index bef7fa1..c20b49a 100644
--- a/media/libeffects/lvm/lib/Common/lib/AGC.h
+++ b/media/libeffects/lvm/lib/Common/lib/AGC.h
@@ -31,16 +31,15 @@
/* Types */
/* */
/**********************************************************************************/
-typedef struct
-{
- LVM_FLOAT AGC_Gain; /* The current AGC gain */
- LVM_FLOAT AGC_MaxGain; /* The maximum AGC gain */
- LVM_FLOAT Volume; /* The current volume setting */
- LVM_FLOAT Target; /* The target volume setting */
- LVM_FLOAT AGC_Target; /* AGC target level */
- LVM_FLOAT AGC_Attack; /* AGC attack scaler */
- LVM_FLOAT AGC_Decay; /* AGC decay scaler */
- LVM_FLOAT VolumeTC; /* Volume update time constant */
+typedef struct {
+ LVM_FLOAT AGC_Gain; /* The current AGC gain */
+ LVM_FLOAT AGC_MaxGain; /* The maximum AGC gain */
+ LVM_FLOAT Volume; /* The current volume setting */
+ LVM_FLOAT Target; /* The target volume setting */
+ LVM_FLOAT AGC_Target; /* AGC target level */
+ LVM_FLOAT AGC_Attack; /* AGC attack scaler */
+ LVM_FLOAT AGC_Decay; /* AGC decay scaler */
+ LVM_FLOAT VolumeTC; /* Volume update time constant */
} AGC_MIX_VOL_2St1Mon_FLOAT_t;
@@ -49,19 +48,16 @@
/* Function Prototypes */
/* */
/**********************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t *pInstance, /* Instance pointer */
- const LVM_FLOAT *pStSrc, /* Stereo source */
- const LVM_FLOAT *pMonoSrc, /* Mono source */
- LVM_FLOAT *pDst, /* Stereo destination */
- LVM_UINT16 n); /* Number of samples */
-#ifdef SUPPORT_MC
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t *pInstance, /* Instance pointer */
- const LVM_FLOAT *pStSrc, /* Source */
- const LVM_FLOAT *pMonoSrc, /* Mono source */
- LVM_FLOAT *pDst, /* Destination */
- LVM_UINT16 NrFrames, /* Number of frames */
- LVM_UINT16 NrChannels); /* Number of channels */
-#endif
+void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, /* Instance pointer */
+ const LVM_FLOAT* pStSrc, /* Stereo source */
+ const LVM_FLOAT* pMonoSrc, /* Mono source */
+ LVM_FLOAT* pDst, /* Stereo destination */
+ LVM_UINT16 n); /* Number of samples */
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, /* Instance pointer */
+ const LVM_FLOAT* pStSrc, /* Source */
+ const LVM_FLOAT* pMonoSrc, /* Mono source */
+ LVM_FLOAT* pDst, /* Destination */
+ LVM_UINT16 NrFrames, /* Number of frames */
+ LVM_UINT16 NrChannels); /* Number of channels */
-#endif /* __AGC_H__ */
-
+#endif /* __AGC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
index c050cd0..b38e9fb 100644
--- a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
+++ b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
@@ -22,64 +22,54 @@
/**********************************************************************************
INSTANCE MEMORY TYPE DEFINITION
***********************************************************************************/
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
/* The memory region created by this structure instance is typecast
* into another structure containing a pointer and an array of filter
* coefficients. In one case this memory region is used for storing
* DC component of channels
*/
- LVM_FLOAT *pStorage;
+ LVM_FLOAT* pStorage;
LVM_FLOAT Storage[LVM_MAX_CHANNELS];
-#else
- LVM_FLOAT Storage[6];
-#endif
} Biquad_FLOAT_Instance_t;
/**********************************************************************************
COEFFICIENT TYPE DEFINITIONS
***********************************************************************************/
/*** Biquad coefficients **********************************************************/
-typedef struct
-{
- LVM_FLOAT A2; /* a2 */
- LVM_FLOAT A1; /* a1 */
- LVM_FLOAT A0; /* a0 */
- LVM_FLOAT B2; /* -b2! */
- LVM_FLOAT B1; /* -b1! */
+typedef struct {
+ LVM_FLOAT A2; /* a2 */
+ LVM_FLOAT A1; /* a1 */
+ LVM_FLOAT A0; /* a0 */
+ LVM_FLOAT B2; /* -b2! */
+ LVM_FLOAT B1; /* -b1! */
} BQ_FLOAT_Coefs_t;
/*** First order coefficients *****************************************************/
-typedef struct
-{
- LVM_FLOAT A1; /* a1 */
- LVM_FLOAT A0; /* a0 */
- LVM_FLOAT B1; /* -b1! */
+typedef struct {
+ LVM_FLOAT A1; /* a1 */
+ LVM_FLOAT A0; /* a0 */
+ LVM_FLOAT B1; /* -b1! */
} FO_FLOAT_Coefs_t;
/*** First order coefficients with Shift*****************************************************/
-typedef struct
-{
- LVM_FLOAT A1; /* a1 */
- LVM_FLOAT A0; /* a0 */
- LVM_FLOAT B1; /* -b1! */
+typedef struct {
+ LVM_FLOAT A1; /* a1 */
+ LVM_FLOAT A0; /* a0 */
+ LVM_FLOAT B1; /* -b1! */
} FO_FLOAT_LShx_Coefs_t;
/*** Band pass coefficients *******************************************************/
-typedef struct
-{
- LVM_FLOAT A0; /* a0 */
- LVM_FLOAT B2; /* -b2! */
- LVM_FLOAT B1; /* -b1! */
+typedef struct {
+ LVM_FLOAT A0; /* a0 */
+ LVM_FLOAT B2; /* -b2! */
+ LVM_FLOAT B1; /* -b1! */
} BP_FLOAT_Coefs_t;
/*** Peaking coefficients *********************************************************/
-typedef struct
-{
- LVM_FLOAT A0; /* a0 */
- LVM_FLOAT B2; /* -b2! */
- LVM_FLOAT B1; /* -b1! */
- LVM_FLOAT G; /* Gain */
+typedef struct {
+ LVM_FLOAT A0; /* a0 */
+ LVM_FLOAT B2; /* -b2! */
+ LVM_FLOAT B1; /* -b1! */
+ LVM_FLOAT G; /* Gain */
} PK_FLOAT_Coefs_t;
/**********************************************************************************
@@ -87,39 +77,28 @@
***********************************************************************************/
/*** Types used for first order and shelving filter *******************************/
-typedef struct
-{
- LVM_FLOAT Storage[ (1 * 2) ]; /* One channel, two taps of size LVM_INT32 */
+typedef struct {
+ LVM_FLOAT Storage[(1 * 2)]; /* One channel, two taps of size LVM_INT32 */
} Biquad_1I_Order1_FLOAT_Taps_t;
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
/* LVM_MAX_CHANNELS channels, two taps of size LVM_FLOAT */
- LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 2) ];
-#else
- LVM_FLOAT Storage[ (2 * 2) ]; /* Two channels, two taps of size LVM_FLOAT */
-#endif
+ LVM_FLOAT Storage[(LVM_MAX_CHANNELS * 2)];
} Biquad_2I_Order1_FLOAT_Taps_t;
/*** Types used for biquad, band pass and peaking filter **************************/
-typedef struct
-{
- LVM_FLOAT Storage[ (1 * 4) ]; /* One channel, four taps of size LVM_FLOAT */
+typedef struct {
+ LVM_FLOAT Storage[(1 * 4)]; /* One channel, four taps of size LVM_FLOAT */
} Biquad_1I_Order2_FLOAT_Taps_t;
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
/* LVM_MAX_CHANNELS, four taps of size LVM_FLOAT */
- LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 4) ];
-#else
- LVM_FLOAT Storage[ (2 * 4) ]; /* Two channels, four taps of size LVM_FLOAT */
-#endif
+ LVM_FLOAT Storage[(LVM_MAX_CHANNELS * 4)];
} Biquad_2I_Order2_FLOAT_Taps_t;
-/* The names of the functions are changed to satisfy QAC rules: Name should be Unique withing 16 characters*/
-#define BQ_2I_D32F32Cll_TRC_WRA_01_Init Init_BQ_2I_D32F32Cll_TRC_WRA_01
-#define BP_1I_D32F32C30_TRC_WRA_02 TWO_BP_1I_D32F32C30_TRC_WRA_02
+/* The names of the functions are changed to satisfy QAC rules: Name should be Unique within 16
+ * characters*/
+#define BQ_2I_D32F32Cll_TRC_WRA_01_Init Init_BQ_2I_D32F32Cll_TRC_WRA_01
+#define BP_1I_D32F32C30_TRC_WRA_02 TWO_BP_1I_D32F32C30_TRC_WRA_02
/**********************************************************************************
FUNCTION PROTOTYPES: BIQUAD FILTERS
@@ -127,176 +106,108 @@
/*** 16 bit data path *************************************************************/
-void BQ_2I_D16F32Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef);
+void BQ_2I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
-void BQ_2I_D16F32C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_2I_D16F32C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_2I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_2I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_2I_D16F32C13_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_2I_D16F32C13_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_2I_D16F16Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef);
+void BQ_2I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
-void BQ_2I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_2I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_2I_D16F16C14_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_2I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_1I_D16F16Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef);
+void BQ_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void BQ_1I_D16F32Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef);
+void BQ_1I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BQ_1I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
/*** 32 bit data path *************************************************************/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef);
-void BQ_2I_D32F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
-#ifdef SUPPORT_MC
-void BQ_MC_D32F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void BQ_2I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps, BQ_FLOAT_Coefs_t* pCoef);
+void BQ_2I_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void BQ_MC_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
/**********************************************************************************
FUNCTION PROTOTYPES: FIRST ORDER FILTERS
***********************************************************************************/
/*** 16 bit data path *************************************************************/
-void FO_1I_D16F16Css_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_Coefs_t *pCoef);
+void FO_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order1_FLOAT_Taps_t* pTaps, FO_FLOAT_Coefs_t* pCoef);
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void FO_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_LShx_Coefs_t *pCoef);
+void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order1_FLOAT_Taps_t* pTaps,
+ FO_FLOAT_LShx_Coefs_t* pCoef);
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
/*** 32 bit data path *************************************************************/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_Coefs_t *pCoef);
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
-#ifdef SUPPORT_MC
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void FO_1I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order1_FLOAT_Taps_t* pTaps, FO_FLOAT_Coefs_t* pCoef);
+void FO_1I_D32F32C31_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
/**********************************************************************************
FUNCTION PROTOTYPES: BAND PASS FILTERS
***********************************************************************************/
/*** 16 bit data path *************************************************************/
-void BP_1I_D16F16Css_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef);
-void BP_1I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
-void BP_1I_D16F32Cll_TRC_WRA_01_Init (Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef);
-void BP_1I_D16F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BP_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void BP_1I_D16F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D16F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
/*** 32 bit data path *************************************************************/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef);
-void BP_1I_D32F32C30_TRC_WRA_02( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
+void BP_1I_D32F32Cll_TRC_WRA_02_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps, BP_FLOAT_Coefs_t* pCoef);
+void BP_1I_D32F32C30_TRC_WRA_02(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
/*** 32 bit data path STEREO ******************************************************/
-void PK_2I_D32F32CssGss_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- PK_FLOAT_Coefs_t *pCoef);
-void PK_2I_D32F32C14G11_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
-#ifdef SUPPORT_MC
-void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+ PK_FLOAT_Coefs_t* pCoef);
+void PK_2I_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples);
+void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
/**********************************************************************************
FUNCTION PROTOTYPES: DC REMOVAL FILTERS
***********************************************************************************/
/*** 16 bit data path STEREO ******************************************************/
-#ifdef SUPPORT_MC
-void DC_Mc_D16_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance);
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance);
-void DC_Mc_D16_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#else
-void DC_2I_D16_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance);
-
-void DC_2I_D16_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples);
-#endif
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
/**********************************************************************************/
-#endif /** _BIQUAD_H_ **/
-
+#endif /** _BIQUAD_H_ **/
diff --git a/media/libeffects/lvm/lib/Common/lib/CompLim.h b/media/libeffects/lvm/lib/Common/lib/CompLim.h
index 5b7cb1b..2fc78b4 100644
--- a/media/libeffects/lvm/lib/Common/lib/CompLim.h
+++ b/media/libeffects/lvm/lib/Common/lib/CompLim.h
@@ -35,21 +35,21 @@
typedef struct /* Compressor state */
{
/* Normaliser */
- LVM_INT16 Norm_Attack; /* Attack time constant of the Normaliser integrator */
- LVM_INT16 Norm_Decay; /* Decay time constant of the Normaliser integrator */
- LVM_INT32 NormInt; /* Normaliser integrator current value */
- LVM_INT16 Shift; /* Shift gain */
- LVM_INT16 Threshold; /* Target threshold */
+ LVM_INT16 Norm_Attack; /* Attack time constant of the Normaliser integrator */
+ LVM_INT16 Norm_Decay; /* Decay time constant of the Normaliser integrator */
+ LVM_INT32 NormInt; /* Normaliser integrator current value */
+ LVM_INT16 Shift; /* Shift gain */
+ LVM_INT16 Threshold; /* Target threshold */
/* Compressor */
- LVM_INT16 Comp_Atten; /* Attenuation applied before soft knee compressor */
- LVM_INT16 Comp_Attack_S; /* Attack time constant of the slow integrator */
- LVM_INT16 Comp_Decay_S; /* Decay time constant of slow the integrator */
- LVM_INT16 Comp_Attack_F; /* Attack time constant of fast the integrator */
- LVM_INT16 Comp_Decay_F; /* Decay time constant of fast the integrator */
- LVM_INT16 SoftClipGain; /* Soft clip gain control */
- LVM_INT32 CompIntSlow; /* Compressor slow integrator current value */
- LVM_INT32 CompIntFast; /* Compressor fast integrator current value */
+ LVM_INT16 Comp_Atten; /* Attenuation applied before soft knee compressor */
+ LVM_INT16 Comp_Attack_S; /* Attack time constant of the slow integrator */
+ LVM_INT16 Comp_Decay_S; /* Decay time constant of the slow integrator */
+ LVM_INT16 Comp_Attack_F; /* Attack time constant of the fast integrator */
+ LVM_INT16 Comp_Decay_F; /* Decay time constant of the fast integrator */
+ LVM_INT16 SoftClipGain; /* Soft clip gain control */
+ LVM_INT32 CompIntSlow; /* Compressor slow integrator current value */
+ LVM_INT32 CompIntFast; /* Compressor fast integrator current value */
} CompLim_Instance_t;
@@ -58,10 +58,7 @@
/* Function Prototypes */
/* */
/************************************************************************************/
-void NonLinComp_Float(LVM_FLOAT Gain,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT32 BlockLength);
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+ LVM_INT32 BlockLength);
#endif /* #ifndef _COMP_LIM_H */
-
diff --git a/media/libeffects/lvm/lib/Common/lib/Filter.h b/media/libeffects/lvm/lib/Common/lib/Filter.h
index 1eeb321..0ba5223 100644
--- a/media/libeffects/lvm/lib/Common/lib/Filter.h
+++ b/media/libeffects/lvm/lib/Common/lib/Filter.h
@@ -27,26 +27,20 @@
/**********************************************************************************
DEFINES
***********************************************************************************/
-#define FILTER_LOSS 32730 /* -0.01dB loss to avoid wrapping due to band ripple */
-#define FILTER_LOSS_FLOAT 0.998849f
+#define FILTER_LOSS 32730 /* -0.01dB loss to avoid wrapping due to band ripple */
+#define FILTER_LOSS_FLOAT 0.998849f
/**********************************************************************************
FUNCTION PROTOTYPES
***********************************************************************************/
-LVM_FLOAT LVM_Power10( LVM_FLOAT X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X);
-LVM_FLOAT LVM_Polynomial(LVM_UINT16 N,
- LVM_FLOAT *pCoefficients,
- LVM_FLOAT X);
-LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc,
- LVM_Fs_en SampleRate);
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X);
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en SampleRate);
-LVM_FLOAT LVM_FO_LPF( LVM_FLOAT w,
- FO_FLOAT_Coefs_t *pCoeffs);
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
-LVM_FLOAT LVM_FO_HPF( LVM_FLOAT w,
- FO_FLOAT_Coefs_t *pCoeffs);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
/**********************************************************************************/
-#endif /** _FILTER_H_ **/
-
+#endif /** _FILTER_H_ **/
diff --git a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
index bae84e7..17699ef 100644
--- a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
+++ b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
@@ -22,11 +22,10 @@
/*######################################################################################*/
/* Type declarations */
/*######################################################################################*/
-typedef struct
-{
- LVM_UINT32 TotalSize; /* Accumulative total memory size */
- uintptr_t pNextMember; /* Pointer to the next instance member to be allocated */
-} INST_ALLOC;
+typedef struct {
+ LVM_UINT32 TotalSize; /* Accumulative total memory size */
+ uintptr_t pNextMember; /* Pointer to the next instance member to be allocated */
+} INST_ALLOC;
/*######################################################################################*/
/* Function prototypes */
@@ -41,7 +40,7 @@
* Remarks :
****************************************************************************************/
-void InstAlloc_Init( INST_ALLOC *pms, void *StartAddr );
+void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr);
/****************************************************************************************
* Name : InstAlloc_AddMember()
@@ -54,7 +53,7 @@
* Remarks :
****************************************************************************************/
-void* InstAlloc_AddMember( INST_ALLOC *pms, LVM_UINT32 Size );
+void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size);
/****************************************************************************************
* Name : InstAlloc_GetTotal()
@@ -64,19 +63,14 @@
* Remarks :
****************************************************************************************/
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms);
+LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms);
-void* InstAlloc_AddMemberAllRet( INST_ALLOC *pms,
- LVM_UINT32 Size[],
- void **ptr);
+void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr);
-void* InstAlloc_AddMemberAll( INST_ALLOC *pms,
- LVM_UINT32 Size[],
- LVM_MemoryTable_st *pMemoryTable);
+void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable);
-void InstAlloc_InitAll( INST_ALLOC *pms,
- LVM_MemoryTable_st *pMemoryTable);
+void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable);
-void InstAlloc_InitAll_NULL( INST_ALLOC *pms);
+void InstAlloc_InitAll_NULL(INST_ALLOC* pms);
#endif /* __JBS_INSTALLOC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
index 49f16ad..d3d128a 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
@@ -39,12 +39,11 @@
/* */
/****************************************************************************************/
/* Algorithm identification */
-#define ALGORITHM_NONE_ID 0x0000
-#define ALGORITHM_CS_ID 0x0100
-#define ALGORITHM_EQNB_ID 0x0200
-#define ALGORITHM_DBE_ID 0x0300
-#define ALGORITHM_VC_ID 0x0500
-#define ALGORITHM_TE_ID 0x0600
+#define ALGORITHM_NONE_ID 0x0000
+#define ALGORITHM_CS_ID 0x0100
+#define ALGORITHM_EQNB_ID 0x0200
+#define ALGORITHM_DBE_ID 0x0300
+#define ALGORITHM_VC_ID 0x0500
+#define ALGORITHM_TE_ID 0x0600
-#endif /* __LVM_COMMON_H__ */
-
+#endif /* __LVM_COMMON_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
index 1a15125..b984745 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
@@ -28,31 +28,36 @@
of overflow is undefined.
***********************************************************************************/
-#define MUL32x32INTO32(A,B,C,ShiftR) \
- {LVM_INT32 MUL32x32INTO32_temp,MUL32x32INTO32_temp2,MUL32x32INTO32_mask,MUL32x32INTO32_HH,MUL32x32INTO32_HL,MUL32x32INTO32_LH,MUL32x32INTO32_LL;\
- LVM_INT32 shiftValue;\
- shiftValue = (ShiftR);\
- MUL32x32INTO32_mask=0x0000FFFF;\
- MUL32x32INTO32_HH= ((LVM_INT32)((LVM_INT16)((A)>>16))*((LVM_INT16)((B)>>16)) );\
- MUL32x32INTO32_HL= ((LVM_INT32)((B)&MUL32x32INTO32_mask)*((LVM_INT16)((A)>>16))) ;\
- MUL32x32INTO32_LH= ((LVM_INT32)((A)&MUL32x32INTO32_mask)*((LVM_INT16)((B)>>16)));\
- MUL32x32INTO32_LL= (LVM_INT32)((A)&MUL32x32INTO32_mask)*(LVM_INT32)((B)&MUL32x32INTO32_mask);\
- MUL32x32INTO32_temp= (LVM_INT32)(MUL32x32INTO32_HL&MUL32x32INTO32_mask)+(LVM_INT32)(MUL32x32INTO32_LH&MUL32x32INTO32_mask)+(LVM_INT32)((MUL32x32INTO32_LL>>16)&MUL32x32INTO32_mask);\
- MUL32x32INTO32_HH= MUL32x32INTO32_HH+(LVM_INT32)(MUL32x32INTO32_HL>>16)+(LVM_INT32)(MUL32x32INTO32_LH>>16)+(LVM_INT32)(MUL32x32INTO32_temp>>16);\
- MUL32x32INTO32_LL=MUL32x32INTO32_LL+(LVM_INT32)(MUL32x32INTO32_HL<<16)+(LVM_INT32)(MUL32x32INTO32_LH<<16);\
- if(shiftValue<32)\
- {\
- MUL32x32INTO32_HH=MUL32x32INTO32_HH<<(32-shiftValue);\
- MUL32x32INTO32_mask=((LVM_INT32)1<<(32-shiftValue))-1;\
- MUL32x32INTO32_LL=(MUL32x32INTO32_LL>>shiftValue)&MUL32x32INTO32_mask;\
- MUL32x32INTO32_temp2=MUL32x32INTO32_HH|MUL32x32INTO32_LL;\
- }\
- else\
- {\
- MUL32x32INTO32_temp2=(LVM_INT32)MUL32x32INTO32_HH>>(shiftValue-32);\
- }\
- (C) = MUL32x32INTO32_temp2;\
- }
+#define MUL32x32INTO32(A, B, C, ShiftR) \
+ { \
+ LVM_INT32 MUL32x32INTO32_temp, MUL32x32INTO32_temp2, MUL32x32INTO32_mask, \
+ MUL32x32INTO32_HH, MUL32x32INTO32_HL, MUL32x32INTO32_LH, MUL32x32INTO32_LL; \
+ LVM_INT32 shiftValue; \
+ shiftValue = (ShiftR); \
+ MUL32x32INTO32_mask = 0x0000FFFF; \
+ MUL32x32INTO32_HH = ((LVM_INT32)((LVM_INT16)((A) >> 16)) * ((LVM_INT16)((B) >> 16))); \
+ MUL32x32INTO32_HL = ((LVM_INT32)((B)&MUL32x32INTO32_mask) * ((LVM_INT16)((A) >> 16))); \
+ MUL32x32INTO32_LH = ((LVM_INT32)((A)&MUL32x32INTO32_mask) * ((LVM_INT16)((B) >> 16))); \
+ MUL32x32INTO32_LL = \
+ (LVM_INT32)((A)&MUL32x32INTO32_mask) * (LVM_INT32)((B)&MUL32x32INTO32_mask); \
+ MUL32x32INTO32_temp = (LVM_INT32)(MUL32x32INTO32_HL & MUL32x32INTO32_mask) + \
+ (LVM_INT32)(MUL32x32INTO32_LH & MUL32x32INTO32_mask) + \
+ (LVM_INT32)((MUL32x32INTO32_LL >> 16) & MUL32x32INTO32_mask); \
+ MUL32x32INTO32_HH = MUL32x32INTO32_HH + (LVM_INT32)(MUL32x32INTO32_HL >> 16) + \
+ (LVM_INT32)(MUL32x32INTO32_LH >> 16) + \
+ (LVM_INT32)(MUL32x32INTO32_temp >> 16); \
+ MUL32x32INTO32_LL = MUL32x32INTO32_LL + (LVM_INT32)(MUL32x32INTO32_HL << 16) + \
+ (LVM_INT32)(MUL32x32INTO32_LH << 16); \
+ if (shiftValue < 32) { \
+ MUL32x32INTO32_HH = MUL32x32INTO32_HH << (32 - shiftValue); \
+ MUL32x32INTO32_mask = ((LVM_INT32)1 << (32 - shiftValue)) - 1; \
+ MUL32x32INTO32_LL = (MUL32x32INTO32_LL >> shiftValue) & MUL32x32INTO32_mask; \
+ MUL32x32INTO32_temp2 = MUL32x32INTO32_HH | MUL32x32INTO32_LL; \
+ } else { \
+ MUL32x32INTO32_temp2 = (LVM_INT32)MUL32x32INTO32_HH >> (shiftValue - 32); \
+ } \
+ (C) = MUL32x32INTO32_temp2; \
+ }
/**********************************************************************************
MUL32x16INTO32(A,B,C,ShiftR)
@@ -65,25 +70,24 @@
of overflow is undefined.
***********************************************************************************/
-#define MUL32x16INTO32(A,B,C,ShiftR) \
- {LVM_INT32 MUL32x16INTO32_mask,MUL32x16INTO32_HH,MUL32x16INTO32_LL;\
- LVM_INT32 shiftValue;\
- shiftValue = (ShiftR);\
- MUL32x16INTO32_mask=0x0000FFFF;\
- MUL32x16INTO32_HH= ((LVM_INT32)(B)*((LVM_INT16)((A)>>16)));\
- MUL32x16INTO32_LL= ((LVM_INT32)((A)&MUL32x16INTO32_mask)*(B));\
- if(shiftValue<16)\
- {\
- MUL32x16INTO32_HH=(LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH<<(16-shiftValue));\
- (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
- }\
- else if(shiftValue<32) {\
- MUL32x16INTO32_HH=(LVM_INT32)(MUL32x16INTO32_HH>>(shiftValue-16));\
- (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
- }\
- else {\
- (C)=MUL32x16INTO32_HH>>(shiftValue-16);}\
- }
+#define MUL32x16INTO32(A, B, C, ShiftR) \
+ { \
+ LVM_INT32 MUL32x16INTO32_mask, MUL32x16INTO32_HH, MUL32x16INTO32_LL; \
+ LVM_INT32 shiftValue; \
+ shiftValue = (ShiftR); \
+ MUL32x16INTO32_mask = 0x0000FFFF; \
+ MUL32x16INTO32_HH = ((LVM_INT32)(B) * ((LVM_INT16)((A) >> 16))); \
+ MUL32x16INTO32_LL = ((LVM_INT32)((A)&MUL32x16INTO32_mask) * (B)); \
+ if (shiftValue < 16) { \
+ MUL32x16INTO32_HH = (LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH << (16 - shiftValue)); \
+ (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue); \
+ } else if (shiftValue < 32) { \
+ MUL32x16INTO32_HH = (LVM_INT32)(MUL32x16INTO32_HH >> (shiftValue - 16)); \
+ (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue); \
+ } else { \
+ (C) = MUL32x16INTO32_HH >> (shiftValue - 16); \
+ } \
+ }
/**********************************************************************************
ADD2_SAT_32x32(A,B,C)
@@ -91,16 +95,16 @@
A,B and C are 32 bit SIGNED numbers.
***********************************************************************************/
-#define ADD2_SAT_32x32(A,B,C) \
- {(C)=(A)+(B);\
- if ((((C) ^ (A)) & ((C) ^ (B))) >> 31)\
- {\
- if((A)<0)\
- (C)=0x80000000l;\
- else\
- (C)=0x7FFFFFFFl;\
- }\
- }
+#define ADD2_SAT_32x32(A, B, C) \
+ { \
+ (C) = (A) + (B); \
+ if ((((C) ^ (A)) & ((C) ^ (B))) >> 31) { \
+ if ((A) < 0) \
+ (C) = 0x80000000l; \
+ else \
+ (C) = 0x7FFFFFFFl; \
+ } \
+ }
#endif /* _LVM_MACROS_H_ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
index dbf9e6a..75f4785 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
@@ -37,8 +37,7 @@
/* TYPE DEFINITIONS */
/****************************************************************************************/
-typedef struct
-{
+typedef struct {
/*
* The memory area created using this structure is internally
* typecast to LVM_Timer_Instance_Private_t and used.
@@ -51,14 +50,13 @@
} LVM_Timer_Instance_t;
-typedef struct
-{
- LVM_INT32 SamplingRate;
- LVM_INT16 TimeInMs;
- LVM_INT32 CallBackParam;
- void *pCallBackParams;
- void *pCallbackInstance;
- void (*pCallBack)(void*,void*,LVM_INT32);
+typedef struct {
+ LVM_INT32 SamplingRate;
+ LVM_INT16 TimeInMs;
+ LVM_INT32 CallBackParam;
+ void* pCallBackParams;
+ void* pCallbackInstance;
+ void (*pCallBack)(void*, void*, LVM_INT32);
} LVM_Timer_Params_t;
@@ -66,14 +64,12 @@
/* FUNCTION PROTOTYPES */
/****************************************************************************************/
-void LVM_Timer_Init ( LVM_Timer_Instance_t *pInstance,
- LVM_Timer_Params_t *pParams );
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams);
-void LVM_Timer ( LVM_Timer_Instance_t *pInstance,
- LVM_INT16 BlockSize );
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize);
/****************************************************************************************/
/* END OF HEADER */
/****************************************************************************************/
-#endif /* __LVM_TIMER_H__ */
+#endif /* __LVM_TIMER_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index 8b687f6..fb797be 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -33,46 +33,27 @@
/* */
/****************************************************************************************/
-#define LVM_NULL 0 /* NULL pointer */
+#define LVM_NULL 0 /* NULL pointer */
-#define LVM_TRUE 1 /* Booleans */
-#define LVM_FALSE 0
+#define LVM_TRUE 1 /* Booleans */
+#define LVM_FALSE 0
-#define LVM_MAXINT_8 127 /* Maximum positive integer size */
-#define LVM_MAXINT_16 32767
-#define LVM_MAXINT_32 2147483647
-#define LVM_MAXENUM 2147483647
+#define LVM_MAXINT_8 127 /* Maximum positive integer size */
+#define LVM_MAXINT_16 32767
+#define LVM_MAXINT_32 2147483647
+#define LVM_MAXENUM 2147483647
-#define LVM_MODULEID_MASK 0xFF00 /* Mask to extract the calling module ID from callbackId */
-#define LVM_EVENTID_MASK 0x00FF /* Mask to extract the callback event from callbackId */
+#define LVM_MODULEID_MASK 0xFF00 /* Mask to extract the calling module ID from callbackId */
+#define LVM_EVENTID_MASK 0x00FF /* Mask to extract the callback event from callbackId */
/* Memory table*/
-#define LVM_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_COEF 2 /* Offset to the persistent coefficient memory region */
-#define LVM_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */
+#define LVM_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_COEF \
+ 2 /* Offset to the persistent coefficient memory region */
+#define LVM_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */
-#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
-/* Memory partition type */
-#define LVM_MEM_PARTITION0 0 /* 1st memory partition */
-#define LVM_MEM_PARTITION1 1 /* 2nd memory partition */
-#define LVM_MEM_PARTITION2 2 /* 3rd memory partition */
-#define LVM_MEM_PARTITION3 3 /* 4th memory partition */
-
-/* Use type */
-#define LVM_MEM_PERSISTENT 0 /* Persistent memory type */
-#define LVM_MEM_SCRATCH 4 /* Scratch memory type */
-
-/* Access type */
-#define LVM_MEM_INTERNAL 0 /* Internal (fast) access memory */
-#define LVM_MEM_EXTERNAL 8 /* External (slow) access memory */
-
-/* Platform specific */
-#define LVM_PERSISTENT (LVM_MEM_PARTITION0+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_DATA (LVM_MEM_PARTITION1+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_COEF (LVM_MEM_PARTITION2+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_SCRATCH (LVM_MEM_PARTITION3+LVM_MEM_SCRATCH+LVM_MEM_INTERNAL)
+#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */
/****************************************************************************************/
/* */
@@ -80,33 +61,28 @@
/* */
/****************************************************************************************/
-typedef char LVM_CHAR; /* ASCII character */
+typedef char LVM_CHAR; /* ASCII character */
-typedef int8_t LVM_INT8; /* Signed 8-bit word */
-typedef uint8_t LVM_UINT8; /* Unsigned 8-bit word */
+typedef int8_t LVM_INT8; /* Signed 8-bit word */
+typedef uint8_t LVM_UINT8; /* Unsigned 8-bit word */
-typedef int16_t LVM_INT16; /* Signed 16-bit word */
-typedef uint16_t LVM_UINT16; /* Unsigned 16-bit word */
+typedef int16_t LVM_INT16; /* Signed 16-bit word */
+typedef uint16_t LVM_UINT16; /* Unsigned 16-bit word */
-typedef int32_t LVM_INT32; /* Signed 32-bit word */
-typedef uint32_t LVM_UINT32; /* Unsigned 32-bit word */
-typedef int64_t LVM_INT64; /* Signed 64-bit word */
+typedef int32_t LVM_INT32; /* Signed 32-bit word */
+typedef uint32_t LVM_UINT32; /* Unsigned 32-bit word */
+typedef int64_t LVM_INT64; /* Signed 64-bit word */
-#define LVM_MAXFLOAT 1.f
+#define LVM_MAXFLOAT 1.f
-typedef float LVM_FLOAT; /* single precision floating point */
+typedef float LVM_FLOAT; /* single precision floating point */
// Select whether we expose int16_t or float buffers.
-#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
-typedef float effect_buffer_t;
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+typedef float effect_buffer_t;
-
-#ifdef SUPPORT_MC
-#define LVM_MAX_CHANNELS 8 // FCC_8
-#else
-#define LVM_MAX_CHANNELS 2 // FCC_2
-#endif
+#define LVM_MAX_CHANNELS 8 // FCC_8
/****************************************************************************************/
/* */
@@ -115,29 +91,20 @@
/****************************************************************************************/
/* Operating mode */
-typedef enum
-{
- LVM_MODE_OFF = 0,
- LVM_MODE_ON = 1,
- LVM_MODE_DUMMY = LVM_MAXENUM
-} LVM_Mode_en;
+typedef enum { LVM_MODE_OFF = 0, LVM_MODE_ON = 1, LVM_MODE_DUMMY = LVM_MAXENUM } LVM_Mode_en;
/* Format */
-typedef enum
-{
- LVM_STEREO = 0,
- LVM_MONOINSTEREO = 1,
- LVM_MONO = 2,
-#ifdef SUPPORT_MC
- LVM_MULTICHANNEL = 3,
-#endif
- LVM_SOURCE_DUMMY = LVM_MAXENUM
+typedef enum {
+ LVM_STEREO = 0,
+ LVM_MONOINSTEREO = 1,
+ LVM_MONO = 2,
+ LVM_MULTICHANNEL = 3,
+ LVM_SOURCE_DUMMY = LVM_MAXENUM
} LVM_Format_en;
/* LVM sampling rates */
-typedef enum
-{
- LVM_FS_8000 = 0,
+typedef enum {
+ LVM_FS_8000 = 0,
LVM_FS_11025 = 1,
LVM_FS_12000 = 2,
LVM_FS_16000 = 3,
@@ -150,32 +117,29 @@
LVM_FS_96000 = 10,
LVM_FS_176400 = 11,
LVM_FS_192000 = 12,
- LVM_FS_INVALID = LVM_MAXENUM-1,
+ LVM_FS_INVALID = LVM_MAXENUM - 1,
LVM_FS_DUMMY = LVM_MAXENUM
} LVM_Fs_en;
/* Memory Types */
-typedef enum
-{
- LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
- LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA,
- LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF,
- LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST,
- LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM
+typedef enum {
+ LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
+ LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA,
+ LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF,
+ LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST,
+ LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM
} LVM_MemoryTypes_en;
/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVM_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
+typedef struct {
+ LVM_UINT32 Size; /* Region size in bytes */
+ LVM_MemoryTypes_en Type; /* Region type */
+ void* pBaseAddress; /* Pointer to the region base address */
} LVM_MemoryRegion_st;
/* Memory table containing the region definitions */
-typedef struct
-{
- LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
+typedef struct {
+ LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */
} LVM_MemoryTable_st;
/****************************************************************************************/
@@ -183,9 +147,11 @@
/* Standard Function Prototypes */
/* */
/****************************************************************************************/
-typedef LVM_INT32 (*LVM_Callback)(void *pCallbackData, /* Pointer to the callback data structure */
- void *pGeneralPurpose, /* General purpose pointer (e.g. to a data structure needed in the callback) */
- LVM_INT16 GeneralPurpose ); /* General purpose variable (e.g. to be used as callback ID) */
+typedef LVM_INT32 (*LVM_Callback)(
+ void* pCallbackData, /* Pointer to the callback data structure */
+ void* pGeneralPurpose, /* General purpose pointer (e.g. to a data structure needed in the
+ callback) */
+ LVM_INT16 GeneralPurpose); /* General purpose variable (e.g. to be used as callback ID) */
/****************************************************************************************/
/* */
@@ -193,4 +159,4 @@
/* */
/****************************************************************************************/
-#endif /* LVM_TYPES_H */
+#endif /* LVM_TYPES_H */
diff --git a/media/libeffects/lvm/lib/Common/lib/Mixer.h b/media/libeffects/lvm/lib/Common/lib/Mixer.h
index b2e0195..ba605e5 100644
--- a/media/libeffects/lvm/lib/Common/lib/Mixer.h
+++ b/media/libeffects/lvm/lib/Common/lib/Mixer.h
@@ -24,80 +24,62 @@
INSTANCE MEMORY TYPE DEFINITION
***********************************************************************************/
-typedef struct
-{
- LVM_FLOAT Alpha; /* Time constant. Set by calling application. \
- Can be changed at any time */
- LVM_FLOAT Target; /* Target value. Set by calling application. \
- Can be changed at any time */
- LVM_FLOAT Current; /* Current value. Set by the mixer function. */
- LVM_INT16 CallbackSet; /* Boolean. Should be set by calling application \
- each time the target value is updated */
- LVM_INT16 CallbackParam; /* Parameter that will be used in the calback function */
- void *pCallbackHandle; /* Pointer to the instance of the callback function */
- void *pGeneralPurpose; /* Pointer for general purpose usage */
- LVM_Callback pCallBack; /* Pointer to the callback function */
+typedef struct {
+ LVM_FLOAT Alpha; /* Time constant. Set by calling application. \
+ Can be changed at any time */
+ LVM_FLOAT Target; /* Target value. Set by calling application. \
+ Can be changed at any time */
+ LVM_FLOAT Current; /* Current value. Set by the mixer function. */
+ LVM_INT16 CallbackSet; /* Boolean. Should be set by calling application \
+ each time the target value is updated */
+ LVM_INT16 CallbackParam; /* Parameter that will be used in the calback function */
+ void* pCallbackHandle; /* Pointer to the instance of the callback function */
+ void* pGeneralPurpose; /* Pointer for general purpose usage */
+ LVM_Callback pCallBack; /* Pointer to the callback function */
} Mix_1St_Cll_FLOAT_t;
-typedef struct
-{
- LVM_FLOAT Alpha1;
- LVM_FLOAT Target1;
- LVM_FLOAT Current1;
- LVM_INT16 CallbackSet1;
- LVM_INT16 CallbackParam1;
- void *pCallbackHandle1;
- void *pGeneralPurpose1;
+typedef struct {
+ LVM_FLOAT Alpha1;
+ LVM_FLOAT Target1;
+ LVM_FLOAT Current1;
+ LVM_INT16 CallbackSet1;
+ LVM_INT16 CallbackParam1;
+ void* pCallbackHandle1;
+ void* pGeneralPurpose1;
LVM_Callback pCallBack1;
- LVM_FLOAT Alpha2; /* Warning the address of this location is passed as a \
- pointer to Mix_1St_Cll_t in some functions */
- LVM_FLOAT Target2;
- LVM_FLOAT Current2;
- LVM_INT16 CallbackSet2;
- LVM_INT16 CallbackParam2;
- void *pCallbackHandle2;
- void *pGeneralPurpose2;
+ LVM_FLOAT Alpha2; /* Warning the address of this location is passed as a \
+ pointer to Mix_1St_Cll_t in some functions */
+ LVM_FLOAT Target2;
+ LVM_FLOAT Current2;
+ LVM_INT16 CallbackSet2;
+ LVM_INT16 CallbackParam2;
+ void* pCallbackHandle2;
+ void* pGeneralPurpose2;
LVM_Callback pCallBack2;
} Mix_2St_Cll_FLOAT_t;
/*** General functions ************************************************************/
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc,
- LVM_UINT32 Fs,
- LVM_UINT16 NumChannels);
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels);
-void MixSoft_1St_D32C31_WRA( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n);
-void MixSoft_2St_D32C31_SAT( Mix_2St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
-void MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n);
/**********************************************************************************
FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-void Core_MixHard_2St_D32C31_SAT( Mix_2St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-void Core_MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n);
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
index ae54419..04b180c 100644
--- a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
@@ -30,7 +30,7 @@
/* Absolute value including the corner case for the extreme negative value */
-LVM_FLOAT Abs_Float(LVM_FLOAT input);
+LVM_FLOAT Abs_Float(LVM_FLOAT input);
/****************************************************************************************
* Name : dB_to_Lin32()
@@ -44,7 +44,6 @@
* (15->01) = decimal part
* Returns : Lin value format 1.16.15
****************************************************************************************/
-LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix);
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix);
-#endif /* __SCALARARITHMETIC_H__ */
-
+#endif /* __SCALARARITHMETIC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index b27bac5..66e3e79 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,24 +24,13 @@
VARIOUS FUNCTIONS
***********************************************************************************/
-void LoadConst_Float( const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n );
+void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
-void Copy_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n );
-#ifdef SUPPORT_MC
-void Copy_Float_Mc_Stereo( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT32 NrChannels);
-void Copy_Float_Stereo_Mc( const LVM_FLOAT *src,
- LVM_FLOAT *StereoOut,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT32 NrChannels);
-#endif
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+ LVM_INT32 NrChannels);
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+ LVM_INT16 NrFrames, LVM_INT32 NrChannels);
/*********************************************************************************
* note: In Mult3s_16x16() saturation of result is not taken care when *
@@ -51,10 +40,7 @@
* This is the only case which will give wrong result. *
* For more information refer to Vector_Arithmetic.doc in /doc folder *
*********************************************************************************/
-void Mult3s_Float( const LVM_FLOAT *src,
- const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
/*********************************************************************************
* note: In Mult3s_32x16() saturation of result is not taken care when *
@@ -64,87 +50,54 @@
* This is the only extreme condition which is giving unexpected result *
* For more information refer to Vector_Arithmetic.doc in /doc folder *
*********************************************************************************/
-void Mult3s_32x16( const LVM_INT32 *src,
- const LVM_INT16 val,
- LVM_INT32 *dst,
- LVM_INT16 n);
-void DelayMix_Float(const LVM_FLOAT *src, /* Source 1, to be delayed */
- LVM_FLOAT *delay, /* Delay buffer */
- LVM_INT16 size, /* Delay size */
- LVM_FLOAT *dst, /* Source/destination */
- LVM_INT16 *pOffset, /* Delay offset */
- LVM_INT16 n) ; /* Number of stereo samples */
-void DelayWrite_32( const LVM_INT32 *src, /* Source 1, to be delayed */
- LVM_INT32 *delay, /* Delay buffer */
- LVM_UINT16 size, /* Delay size */
- LVM_UINT16 *pOffset, /* Delay offset */
- LVM_INT16 n);
-void Add2_Sat_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n );
-void Mac3s_Sat_Float( const LVM_FLOAT *src,
- const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-void DelayAllPass_Sat_32x16To32( LVM_INT32 *delay, /* Delay buffer */
- LVM_UINT16 size, /* Delay size */
- LVM_INT16 coeff, /* All pass filter coefficient */
- LVM_UINT16 DelayOffset, /* Simple delay offset */
- LVM_UINT16 *pAllPassOffset, /* All pass filter delay offset */
- LVM_INT32 *dst, /* Source/destination */
- LVM_INT16 n);
+void Mult3s_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n);
+void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
+ LVM_FLOAT* delay, /* Delay buffer */
+ LVM_INT16 size, /* Delay size */
+ LVM_FLOAT* dst, /* Source/destination */
+ LVM_INT16* pOffset, /* Delay offset */
+ LVM_INT16 n); /* Number of stereo samples */
+void DelayWrite_32(const LVM_INT32* src, /* Source 1, to be delayed */
+ LVM_INT32* delay, /* Delay buffer */
+ LVM_UINT16 size, /* Delay size */
+ LVM_UINT16* pOffset, /* Delay offset */
+ LVM_INT16 n);
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
+void DelayAllPass_Sat_32x16To32(LVM_INT32* delay, /* Delay buffer */
+ LVM_UINT16 size, /* Delay size */
+ LVM_INT16 coeff, /* All pass filter coefficient */
+ LVM_UINT16 DelayOffset, /* Simple delay offset */
+ LVM_UINT16* pAllPassOffset, /* All pass filter delay offset */
+ LVM_INT32* dst, /* Source/destination */
+ LVM_INT16 n);
/**********************************************************************************
SHIFT FUNCTIONS
***********************************************************************************/
-void Shift_Sat_Float (const LVM_INT16 val,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
/**********************************************************************************
AUDIO FORMAT CONVERSION FUNCTIONS
***********************************************************************************/
-void MonoTo2I_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-void From2iToMono_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void FromMcToMono_Float(const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
LVM_INT16 NrChannels);
-#endif
-void MSTo2i_Sat_Float( const LVM_FLOAT *srcM,
- const LVM_FLOAT *srcS,
- LVM_FLOAT *dst,
- LVM_INT16 n );
-void From2iToMS_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dstM,
- LVM_FLOAT *dstS,
- LVM_INT16 n );
-void JoinTo2i_Float( const LVM_FLOAT *srcL,
- const LVM_FLOAT *srcR,
- LVM_FLOAT *dst,
- LVM_INT16 n );
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n);
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n);
/**********************************************************************************
DATA TYPE CONVERSION FUNCTIONS
***********************************************************************************/
-void Int16LShiftToInt32_16x32(const LVM_INT16 *src,
- LVM_INT32 *dst,
- LVM_INT16 n,
- LVM_INT16 shift );
+void Int16LShiftToInt32_16x32(const LVM_INT16* src, LVM_INT32* dst, LVM_INT16 n, LVM_INT16 shift);
-void Int32RShiftToInt16_Sat_32x16(const LVM_INT32 *src,
- LVM_INT16 *dst,
- LVM_INT16 n,
- LVM_INT16 shift );
+void Int32RShiftToInt16_Sat_32x16(const LVM_INT32* src, LVM_INT16* dst, LVM_INT16 n,
+ LVM_INT16 shift);
/**********************************************************************************/
-#endif /* _VECTOR_ARITHMETIC_H_ */
+#endif /* _VECTOR_ARITHMETIC_H_ */
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
index e18aa78..ae8cdad 100644
--- a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
@@ -30,10 +30,10 @@
/* */
/****************************************************************************************/
-#define VOL_TC_SHIFT 21 /* As a power of 2 */
-#define DECAY_SHIFT 10 /* As a power of 2 */
-#define VOL_TC_FLOAT 2.0f /* As a power of 2 */
-#define DECAY_FAC_FLOAT 64.0f /* As a power of 2 */
+#define VOL_TC_SHIFT 21 /* As a power of 2 */
+#define DECAY_SHIFT 10 /* As a power of 2 */
+#define VOL_TC_FLOAT 2.0f /* As a power of 2 */
+#define DECAY_FAC_FLOAT 64.0f /* As a power of 2 */
/****************************************************************************************/
/* */
@@ -69,91 +69,83 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t *pInstance, /* Instance pointer */
- const LVM_FLOAT *pStSrc, /* Stereo source */
- const LVM_FLOAT *pMonoSrc, /* Mono source */
- LVM_FLOAT *pDst, /* Stereo destination */
- LVM_UINT16 NumSamples) /* Number of samples */
+void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, /* Instance pointer */
+ const LVM_FLOAT* pStSrc, /* Stereo source */
+ const LVM_FLOAT* pMonoSrc, /* Mono source */
+ LVM_FLOAT* pDst, /* Stereo destination */
+ LVM_UINT16 NumSamples) /* Number of samples */
{
-
/*
* General variables
*/
- LVM_UINT16 i; /* Sample index */
- LVM_FLOAT Left; /* Left sample */
- LVM_FLOAT Right; /* Right sample */
- LVM_FLOAT Mono; /* Mono sample */
- LVM_FLOAT AbsPeak; /* Absolute peak signal */
- LVM_FLOAT AGC_Mult; /* Short AGC gain */
- LVM_FLOAT Vol_Mult; /* Short volume */
+ LVM_UINT16 i; /* Sample index */
+ LVM_FLOAT Left; /* Left sample */
+ LVM_FLOAT Right; /* Right sample */
+ LVM_FLOAT Mono; /* Mono sample */
+ LVM_FLOAT AbsPeak; /* Absolute peak signal */
+ LVM_FLOAT AGC_Mult; /* Short AGC gain */
+ LVM_FLOAT Vol_Mult; /* Short volume */
/*
* Instance control variables
*/
- LVM_FLOAT AGC_Gain = pInstance->AGC_Gain; /* Get the current AGC gain */
- LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
- LVM_FLOAT AGC_Attack = pInstance->AGC_Attack; /* Attack scaler */
- LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));/* Decay scaler */
- LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
- LVM_FLOAT Vol_Current = pInstance->Volume; /* Actual volume setting */
- LVM_FLOAT Vol_Target = pInstance->Target; /* Target volume setting */
- LVM_FLOAT Vol_TC = pInstance->VolumeTC; /* Time constant */
+ LVM_FLOAT AGC_Gain = pInstance->AGC_Gain; /* Get the current AGC gain */
+ LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
+ LVM_FLOAT AGC_Attack = pInstance->AGC_Attack; /* Attack scaler */
+ LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT))); /* Decay scaler */
+ LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
+ LVM_FLOAT Vol_Current = pInstance->Volume; /* Actual volume setting */
+ LVM_FLOAT Vol_Target = pInstance->Target; /* Target volume setting */
+ LVM_FLOAT Vol_TC = pInstance->VolumeTC; /* Time constant */
/*
* Process on a sample by sample basis
*/
- for (i = 0; i < NumSamples; i++) /* For each sample */
+ for (i = 0; i < NumSamples; i++) /* For each sample */
{
-
/*
* Get the short scalers
*/
- AGC_Mult = (LVM_FLOAT)(AGC_Gain); /* Get the short AGC gain */
- Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the short volume gain */
+ AGC_Mult = (LVM_FLOAT)(AGC_Gain); /* Get the short AGC gain */
+ Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the short volume gain */
/*
* Get the input samples
*/
- Left = *pStSrc++; /* Get the left sample */
- Right = *pStSrc++; /* Get the right sample */
- Mono = *pMonoSrc++; /* Get the mono sample */
+ Left = *pStSrc++; /* Get the left sample */
+ Right = *pStSrc++; /* Get the right sample */
+ Mono = *pMonoSrc++; /* Get the mono sample */
/*
* Apply the AGC gain to the mono input and mix with the stereo signal
*/
- Left += (Mono * AGC_Mult); /* Mix in the mono signal */
+ Left += (Mono * AGC_Mult); /* Mix in the mono signal */
Right += (Mono * AGC_Mult);
/*
* Apply the volume and write to the output stream
*/
- Left = Left * Vol_Mult;
+ Left = Left * Vol_Mult;
Right = Right * Vol_Mult;
- *pDst++ = Left; /* Save the results */
+ *pDst++ = Left; /* Save the results */
*pDst++ = Right;
/*
* Update the AGC gain
*/
AbsPeak = Abs_Float(Left) > Abs_Float(Right) ? Abs_Float(Left) : Abs_Float(Right);
- if (AbsPeak > AGC_Target)
- {
+ if (AbsPeak > AGC_Target) {
/*
* The signal is too large so decrease the gain
*/
AGC_Gain = AGC_Gain * AGC_Attack;
- }
- else
- {
+ } else {
/*
* The signal is too small so increase the gain
*/
- if (AGC_Gain > AGC_MaxGain)
- {
+ if (AGC_Gain > AGC_MaxGain) {
AGC_Gain -= (AGC_Decay);
- }
- else
- {
+ } else {
AGC_Gain += (AGC_Decay);
}
}
@@ -161,18 +153,17 @@
/*
* Update the gain
*/
- Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
+ Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
}
/*
* Update the parameters
*/
- pInstance->Volume = Vol_Current; /* Actual volume setting */
+ pInstance->Volume = Vol_Current; /* Actual volume setting */
pInstance->AGC_Gain = AGC_Gain;
return;
}
-#ifdef SUPPORT_MC
/****************************************************************************************/
/* */
/* FUNCTION: AGC_MIX_VOL_Mc1Mon_D32_WRA */
@@ -209,93 +200,80 @@
/* NOTES: */
/* */
/****************************************************************************************/
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t *pInstance,
- const LVM_FLOAT *pMcSrc,
- const LVM_FLOAT *pMonoSrc,
- LVM_FLOAT *pDst,
- LVM_UINT16 NrFrames,
- LVM_UINT16 NrChannels)
-{
-
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, const LVM_FLOAT* pMcSrc,
+ const LVM_FLOAT* pMonoSrc, LVM_FLOAT* pDst, LVM_UINT16 NrFrames,
+ LVM_UINT16 NrChannels) {
/*
* General variables
*/
- LVM_UINT16 i, jj; /* Sample index */
- LVM_FLOAT SampleVal; /* Sample value */
- LVM_FLOAT Mono; /* Mono sample */
- LVM_FLOAT AbsPeak; /* Absolute peak signal */
- LVM_FLOAT AGC_Mult; /* Short AGC gain */
- LVM_FLOAT Vol_Mult; /* Short volume */
+ LVM_UINT16 i, jj; /* Sample index */
+ LVM_FLOAT SampleVal; /* Sample value */
+ LVM_FLOAT Mono; /* Mono sample */
+ LVM_FLOAT AbsPeak; /* Absolute peak signal */
+ LVM_FLOAT AGC_Mult; /* Short AGC gain */
+ LVM_FLOAT Vol_Mult; /* Short volume */
/*
* Instance control variables
*/
- LVM_FLOAT AGC_Gain = pInstance->AGC_Gain; /* Get the current AGC gain */
- LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
- LVM_FLOAT AGC_Attack = pInstance->AGC_Attack; /* Attack scaler */
+ LVM_FLOAT AGC_Gain = pInstance->AGC_Gain; /* Get the current AGC gain */
+ LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
+ LVM_FLOAT AGC_Attack = pInstance->AGC_Attack; /* Attack scaler */
/* Decay scaler */
- LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
- LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
- LVM_FLOAT Vol_Current = pInstance->Volume; /* Actual volume setting */
- LVM_FLOAT Vol_Target = pInstance->Target; /* Target volume setting */
- LVM_FLOAT Vol_TC = pInstance->VolumeTC; /* Time constant */
+ LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
+ LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
+ LVM_FLOAT Vol_Current = pInstance->Volume; /* Actual volume setting */
+ LVM_FLOAT Vol_Target = pInstance->Target; /* Target volume setting */
+ LVM_FLOAT Vol_TC = pInstance->VolumeTC; /* Time constant */
/*
* Process on a sample by sample basis
*/
- for (i = 0; i < NrFrames; i++) /* For each frame */
+ for (i = 0; i < NrFrames; i++) /* For each frame */
{
-
/*
* Get the scalers
*/
- AGC_Mult = (LVM_FLOAT)(AGC_Gain); /* Get the AGC gain */
- Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the volume gain */
+ AGC_Mult = (LVM_FLOAT)(AGC_Gain); /* Get the AGC gain */
+ Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the volume gain */
AbsPeak = 0.0f;
/*
* Get the input samples
*/
- for (jj = 0; jj < NrChannels; jj++)
- {
- SampleVal = *pMcSrc++; /* Get the sample value of jj Channel*/
- Mono = *pMonoSrc; /* Get the mono sample */
+ for (jj = 0; jj < NrChannels; jj++) {
+ SampleVal = *pMcSrc++; /* Get the sample value of jj Channel*/
+ Mono = *pMonoSrc; /* Get the mono sample */
/*
* Apply the AGC gain to the mono input and mix with the input signal
*/
- SampleVal += (Mono * AGC_Mult); /* Mix in the mono signal */
+ SampleVal += (Mono * AGC_Mult); /* Mix in the mono signal */
/*
* Apply the volume and write to the output stream
*/
- SampleVal = SampleVal * Vol_Mult;
+ SampleVal = SampleVal * Vol_Mult;
- *pDst++ = SampleVal; /* Save the results */
+ *pDst++ = SampleVal; /* Save the results */
/*
* Update the AGC gain
*/
AbsPeak = Abs_Float(SampleVal) > AbsPeak ? Abs_Float(SampleVal) : AbsPeak;
}
- if (AbsPeak > AGC_Target)
- {
+ if (AbsPeak > AGC_Target) {
/*
* The signal is too large so decrease the gain
*/
AGC_Gain = AGC_Gain * AGC_Attack;
- }
- else
- {
+ } else {
/*
* The signal is too small so increase the gain
*/
- if (AGC_Gain > AGC_MaxGain)
- {
+ if (AGC_Gain > AGC_MaxGain) {
AGC_Gain -= (AGC_Decay);
- }
- else
- {
+ } else {
AGC_Gain += (AGC_Decay);
}
}
@@ -303,15 +281,14 @@
/*
* Update the gain
*/
- Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
+ Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
}
/*
* Update the parameters
*/
- pInstance->Volume = Vol_Current; /* Actual volume setting */
+ pInstance->Volume = Vol_Current; /* Actual volume setting */
pInstance->AGC_Gain = AGC_Gain;
return;
}
-#endif /*SUPPORT_MC*/
diff --git a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp b/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
index e013809..3e37d89 100644
--- a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
@@ -19,7 +19,7 @@
/* Include files */
/*######################################################################################*/
-#include "ScalarArithmetic.h"
+#include "ScalarArithmetic.h"
/****************************************************************************************
* Name : Abs_32()
@@ -30,27 +30,20 @@
* Remarks :
****************************************************************************************/
-LVM_INT32 Abs_32(LVM_INT32 input)
-{
- if(input < 0)
- {
- if (input == (LVM_INT32)(0x80000000U))
- {
+LVM_INT32 Abs_32(LVM_INT32 input) {
+ if (input < 0) {
+ if (input == (LVM_INT32)(0x80000000U)) {
/* The corner case, so set to the maximum positive value */
- input=(LVM_INT32) 0x7fffffff;
- }
- else
- {
+ input = (LVM_INT32)0x7fffffff;
+ } else {
/* Negative input, so invert */
input = (LVM_INT32)(-input);
}
}
return input;
}
-LVM_FLOAT Abs_Float(LVM_FLOAT input)
-{
- if(input < 0)
- {
+LVM_FLOAT Abs_Float(LVM_FLOAT input) {
+ if (input < 0) {
/* Negative input, so invert */
input = (LVM_FLOAT)(-input);
}
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
index 6978fe7..be20521 100644
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
@@ -25,27 +25,18 @@
FUNCTION ADD2_SAT_16X16
***********************************************************************************/
-void Add2_Sat_16x16( const LVM_INT16 *src,
- LVM_INT16 *dst,
- LVM_INT16 n )
-{
+void Add2_Sat_16x16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT32 Temp;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
- Temp = ((LVM_INT32) *src) + ((LVM_INT32) *dst);
+ for (ii = n; ii != 0; ii--) {
+ Temp = ((LVM_INT32)*src) + ((LVM_INT32)*dst);
src++;
- if (Temp > 0x00007FFF)
- {
+ if (Temp > 0x00007FFF) {
*dst = 0x7FFF;
- }
- else if (Temp < -0x00008000)
- {
- *dst = - 0x8000;
- }
- else
- {
+ } else if (Temp < -0x00008000) {
+ *dst = -0x8000;
+ } else {
*dst = (LVM_INT16)Temp;
}
dst++;
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
index a48e668..420f93e 100644
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
@@ -25,28 +25,21 @@
FUNCTION ADD2_SAT_32X32
***********************************************************************************/
-void Add2_Sat_32x32( const LVM_INT32 *src,
- LVM_INT32 *dst,
- LVM_INT16 n )
-{
- LVM_INT32 a,b,c;
+void Add2_Sat_32x32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
+ LVM_INT32 a, b, c;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
- a=*src;
+ for (ii = n; ii != 0; ii--) {
+ a = *src;
src++;
- b=*dst;
- c=a+b;
- if ((((c ^ a) & (c ^ b)) >> 31)!=0) /* overflow / underflow */
+ b = *dst;
+ c = a + b;
+ if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
{
- if(a<0)
- {
- c=0x80000000L;
- }
- else
- {
- c=0x7FFFFFFFL;
+ if (a < 0) {
+ c = 0x80000000L;
+ } else {
+ c = 0x7FFFFFFFL;
}
}
@@ -56,27 +49,18 @@
return;
}
-void Add2_Sat_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n )
-{
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_FLOAT Temp;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
- Temp = ((LVM_FLOAT) *src) + ((LVM_FLOAT) *dst);
+ for (ii = n; ii != 0; ii--) {
+ Temp = ((LVM_FLOAT)*src) + ((LVM_FLOAT)*dst);
src++;
- if (Temp > 1.000000f)
- {
+ if (Temp > 1.000000f) {
*dst = 1.000000f;
- }
- else if (Temp < -1.000000f)
- {
+ } else if (Temp < -1.000000f) {
*dst = -1.000000f;
- }
- else
- {
+ } else {
*dst = Temp;
}
dst++;
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
index 1a5e07f..198a6a1 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
@@ -32,45 +32,38 @@
pBiquadState->pDelays[2] is y(n-1)L in Q0 format
pBiquadState->pDelays[3] is y(n-2)L in Q0 format
***************************************************************************/
-void BP_1I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
+void BP_1I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+ LVM_FLOAT ynL;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL= (A0 * (x(n)L - x(n-2)L ) )
+ ynL = pBiquadState->coefs[0] * ((*pDataIn) - pBiquadState->pDelays[1]);
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL= (A0 * (x(n)L - x(n-2)L ) )
- ynL = pBiquadState->coefs[0] * ((*pDataIn)-pBiquadState->pDelays[1]);
+ // ynL+= ((-B2 * y(n-2)L ) )
+ ynL += pBiquadState->coefs[1] * pBiquadState->pDelays[3];
- // ynL+= ((-B2 * y(n-2)L ) )
- ynL += pBiquadState->coefs[1] * pBiquadState->pDelays[3];
+ // ynL+= ((-B1 * y(n-1)L ) )
+ ynL += pBiquadState->coefs[2] * pBiquadState->pDelays[2];
- // ynL+= ((-B1 * y(n-1)L ) )
- ynL += pBiquadState->coefs[2] * pBiquadState->pDelays[2];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++=ynL; // Write Left output
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = ynL; // Write Left output
}
-
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
index 60b6c16..6d36302 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,12 +37,11 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BP_1I_D16F16Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef)
-{
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps;
+void BP_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+ BP_FLOAT_Coefs_t* pCoef) {
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
pBiquadState->coefs[0] = pCoef->A0;
pBiquadState->coefs[1] = pCoef->B2;
@@ -50,4 +49,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BP_1I_D16F16Css_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
index 8a000b6..a41c855 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -19,19 +19,16 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
-
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /*_BP_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
index c844d03..d4d4eb1 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
@@ -32,22 +32,18 @@
pBiquadState->pDelays[2] is y(n-1)L in Q16 format
pBiquadState->pDelays[3] is y(n-2)L in Q16 format
***************************************************************************/
-void BP_1I_D16F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
-{
- LVM_FLOAT ynL,templ;
+void BP_1I_D16F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, templ;
LVM_INT16 ii;
PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
/**************************************************************************
PROCESSING OF THE LEFT CHANNEL
***************************************************************************/
// ynL= (A0 * (x(n)L - x(n-2)L ))
- templ = (LVM_FLOAT) *pDataIn - pBiquadState->pDelays[1];
+ templ = (LVM_FLOAT)*pDataIn - pBiquadState->pDelays[1];
ynL = pBiquadState->coefs[0] * templ;
// ynL+= ((-B2 * y(n-2)L ) )
@@ -61,14 +57,14 @@
/**************************************************************************
UPDATING THE DELAYS
***************************************************************************/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[2] = ynL; // Update y(n-1)L in Q16
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L in Q0
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[2] = ynL; // Update y(n-1)L in Q16
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L in Q0
/**************************************************************************
WRITING THE OUTPUT
***************************************************************************/
- *pDataOut++ = (ynL); // Write Left output
- }
+ *pDataOut++ = (ynL); // Write Left output
+ }
}
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
index eb15032..d322a8e 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
@@ -47,17 +47,15 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BP_1I_D16F32Cll_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef)
-{
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays =(LVM_FLOAT *) pTaps;
+void BP_1I_D16F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+ BP_FLOAT_Coefs_t* pCoef) {
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
- pBiquadState->coefs[0] = pCoef->A0;
- pBiquadState->coefs[1] = pCoef->B2;
- pBiquadState->coefs[2] = pCoef->B1;
+ pBiquadState->coefs[0] = pCoef->A0;
+ pBiquadState->coefs[1] = pCoef->B2;
+ pBiquadState->coefs[2] = pCoef->B1;
}
/*-------------------------------------------------------------------------*/
/* End Of File: BP_1I_D16F32Cll_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
index 6d754e2..0603256 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
@@ -19,17 +19,15 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float * PFilter_State_FLOAT ;
+typedef Filter_State* PFilter_State;
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_Float;
+typedef Filter_State_Float* PFilter_State_FLOAT;
#endif /*_BP_1I_D16F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
index d0ba206..0670334 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
@@ -32,46 +32,39 @@
pBiquadState->pDelays[2] is y(n-1)L in Q0 format
pBiquadState->pDelays[3] is y(n-2)L in Q0 format
***************************************************************************/
-void BP_1I_D32F32C30_TRC_WRA_02 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,templ;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BP_1I_D32F32C30_TRC_WRA_02(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, templ;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL= (A0 * (x(n)L - x(n-2)L ) )
+ templ = (*pDataIn) - pBiquadState->pDelays[1];
+ ynL = pBiquadState->coefs[0] * templ;
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL= (A0 * (x(n)L - x(n-2)L ) )
- templ = (*pDataIn) - pBiquadState->pDelays[1];
- ynL = pBiquadState->coefs[0] * templ;
+ // ynL+= ((-B2 * y(n-2)L ) )
+ templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
+ ynL += templ;
- // ynL+= ((-B2 * y(n-2)L ) )
- templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
- ynL += templ;
+ // ynL+= ((-B1 * y(n-1)L ) )
+ templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
+ ynL += templ;
- // ynL+= ((-B1 * y(n-1)L ) )
- templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
- ynL += templ;
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = ynL; // Write Left output in Q0
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = ynL; // Write Left output in Q0
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
index 6f7d0b5..146cc63 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
@@ -37,12 +37,11 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BP_FLOAT_Coefs_t *pCoef)
-{
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays =(LVM_FLOAT *) pTaps;
+void BP_1I_D32F32Cll_TRC_WRA_02_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+ BP_FLOAT_Coefs_t* pCoef) {
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
pBiquadState->coefs[0] = pCoef->A0;
@@ -52,4 +51,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BP_1I_D32F32Cll_TRC_WRA_02_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
index 9f1c66a..ea83c0b 100644
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
@@ -19,18 +19,16 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float* PFilter_State_FLOAT ;
+typedef Filter_State* PFilter_State;
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_Float;
+typedef Filter_State_Float* PFilter_State_FLOAT;
#endif /*_BP_1I_D32F32CLL_TRC_WRA_02_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
index 9aecc40..a46b1ef 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
@@ -32,49 +32,42 @@
pBiquadState->pDelays[2] is y(n-1)L in Q0 format
pBiquadState->pDelays[3] is y(n-2)L in Q0 format
***************************************************************************/
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A2 * x(n-2)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A2 * x(n-2)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
+ // ynL+=A1 * x(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- // ynL+=A1 * x(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ // ynL+= (-B2 * y(n-2)L )
+ ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[3];
- // ynL+= (-B2 * y(n-2)L )
- ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[3];
+ // ynL+= (-B1 * y(n-1)L )
+ ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[2];
- // ynL+= (-B1 * y(n-1)L )
- ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[2];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
index f0b5d06..e8bfcd8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BQ_1I_D16F16Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef)
-{
+void BQ_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+ BQ_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps ;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A2;
pBiquadState->coefs[0] = temp;
temp = pCoef->A1;
@@ -57,4 +56,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BQ_1I_D16F16Css_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
index fad345d..ac2819e 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -19,19 +19,17 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /*_BQ_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
index 043bc5f..c60dcf8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
@@ -32,48 +32,42 @@
pBiquadState->pDelays[2] is y(n-1)L in Q16 format
pBiquadState->pDelays[3] is y(n-2)L in Q16 format
***************************************************************************/
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_1I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A2 * x(n-2)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A2 * x(n-2)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
+ // ynL+=A1 * x(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- // ynL+=A1 * x(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ // ynL+= ( (-B2 * y(n-2)L )
+ ynL += pBiquadState->pDelays[3] * pBiquadState->coefs[3];
- // ynL+= ( (-B2 * y(n-2)L )
- ynL += pBiquadState->pDelays[3] * pBiquadState->coefs[3];
+ // ynL+= -B1 * y(n-1)L
+ ynL += pBiquadState->pDelays[2] * pBiquadState->coefs[4];
- // ynL+= -B1 * y(n-1)L
- ynL += pBiquadState->pDelays[2] * pBiquadState->coefs[4];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)(ynL); // Write Left output
-
- }
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)(ynL); // Write Left output
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
index 6a61d9a..af0efc8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
@@ -19,19 +19,17 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /*_BQ_1I_D16F32CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
index 2b80691..ecf44ca 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BQ_1I_D16F32Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef)
-{
+void BQ_1I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order2_FLOAT_Taps_t* pTaps,
+ BQ_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *)pTaps;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A2;
pBiquadState->coefs[0] = temp;
@@ -58,4 +57,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BQ_1I_D16F32Css_TRC_WRA_01_Init */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
index 51cd918..d047e91 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
@@ -36,72 +36,65 @@
pBiquadState->pDelays[6] is y(n-2)L in Q0 format
pBiquadState->pDelays[7] is y(n-2)R in Q0 format
***************************************************************************/
-void BQ_2I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F16C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A2 * x(n-2)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A2 * x(n-2)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ // ynL+=A1 * x(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- // ynL+=A1 * x(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ // ynL+= ( -B2 * y(n-2)L )
+ ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
- // ynL+= ( -B2 * y(n-2)L )
- ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+ // ynL+=( -B1 * y(n-1)L )
+ ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
- // ynL+=( -B1 * y(n-1)L )
- ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ // ynR=A2 * x(n-2)R
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- // ynR=A2 * x(n-2)R
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ // ynR+=A1 * x(n-1)R
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- // ynR+=A1 * x(n-1)R
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ // ynR+=A0 * x(n)R
+ ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
- // ynR+=A0 * x(n)R
- ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+ // ynR+= ( -B2 * y(n-2)R )
+ ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
- // ynR+= ( -B2 * y(n-2)R )
- ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+ // ynR+=( -B1 * y(n-1)R )
+ ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
- // ynR+=( -B1 * y(n-1)R )
- ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; // y(n-2)R=y(n-1)R
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; // x(n-2)R=x(n-1)R
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[5] = ynR; // Update y(n-1)R
+ pBiquadState->pDelays[4] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
+ pBiquadState->pDelays[1] = (*pDataIn++); // Update x(n-1)R
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; // y(n-2)R=y(n-1)R
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; // x(n-2)R=x(n-1)R
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[5] = ynR; // Update y(n-1)R
- pBiquadState->pDelays[4] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- pBiquadState->pDelays[1] = (*pDataIn++); // Update x(n-1)R
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
- *pDataOut++ = (LVM_FLOAT)ynR; // Write Right ouput
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
+ *pDataOut++ = (LVM_FLOAT)ynR; // Write Right output
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
index 8f74749..399b5ec 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
@@ -36,72 +36,65 @@
pBiquadState->pDelays[6] is y(n-2)L in Q0 format
pBiquadState->pDelays[7] is y(n-2)R in Q0 format
***************************************************************************/
-void BQ_2I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A2 * x(n-2)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A2 * x(n-2)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ // ynL+=A1 * x(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- // ynL+=A1 * x(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ // ynL+= ( -B2 * y(n-2)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
- // ynL+= ( -B2 * y(n-2)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+ // ynL+=( -B1 * y(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
- // ynL+=( -B1 * y(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ // ynR=A2 * x(n-2)R
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- // ynR=A2 * x(n-2)R
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ // ynR+=A1 * x(n-1)R
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- // ynR+=A1 * x(n-1)R
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ // ynR+=A0 * x(n)R
+ ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
- // ynR+=A0 * x(n)R
- ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+ // ynR+= ( -B2 * y(n-2)R )
+ ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
- // ynR+= ( -B2 * y(n-2)R )
- ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+ // ynR+=( -B1 * y(n-1)R )
+ ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
- // ynR+=( -B1 * y(n-1)R )
- ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; // y(n-2)R=y(n-1)R
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; // y(n-2)L=y(n-1)L
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; // x(n-2)R=x(n-1)R
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
+ pBiquadState->pDelays[5] = ynR; // Update y(n-1)R
+ pBiquadState->pDelays[4] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
+ pBiquadState->pDelays[1] = (*pDataIn++); // Update x(n-1)R
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; // y(n-2)R=y(n-1)R
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; // y(n-2)L=y(n-1)L
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; // x(n-2)R=x(n-1)R
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
- pBiquadState->pDelays[5] = ynR; // Update y(n-1)R
- pBiquadState->pDelays[4] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- pBiquadState->pDelays[1] = (*pDataIn++); // Update x(n-1)R
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
- *pDataOut++ = (LVM_FLOAT)ynR; // Write Right ouput
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
+ *pDataOut++ = (LVM_FLOAT)ynR; // Write Right output
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
index 987cbcf..e0cd934 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BQ_2I_D16F16Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef)
-{
+void BQ_2I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+ BQ_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps ;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A2;
pBiquadState->coefs[0] = temp;
@@ -58,4 +57,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BQ_2I_D16F16Css_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
index 5a9a0e9..94cc794 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+    LVM_INT16 coefs[5];  /* filter coefficients (inline array, not a pointer) */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+    LVM_FLOAT coefs[5];  /* filter coefficients (inline array, not a pointer) */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /* _BQ_2I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
index 331c97f..3b7eb5e 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
@@ -36,74 +36,69 @@
pBiquadState->pDelays[6] is y(n-2)L in Q16 format
pBiquadState->pDelays[7] is y(n-2)R in Q16 format
***************************************************************************/
-void BQ_2I_D16F32C13_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C13_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ /* ynL=A2 * x(n-2)L */
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- /* ynL=A2 * x(n-2)L */
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ /* ynL+=A1* x(n-1)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- /* ynL+=A1* x(n-1)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ /* ynL+=A0* x(n)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- /* ynL+=A0* x(n)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ /* ynL+=-B2*y(n-2)L */
+ ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
- /* ynL+=-B2*y(n-2)L */
- ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+ /* ynL+=-B1*y(n-1)L */
+ ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
- /* ynL+=-B1*y(n-1)L */
- ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ /* ynR=A2 * x(n-2)R */
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- /* ynR=A2 * x(n-2)R */
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ /* ynR+=A1* x(n-1)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- /* ynR+=A1* x(n-1)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ /* ynR+=A0* x(n)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
- /* ynR+=A0* x(n)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+ /* ynR+=-B2 * y(n-2)R */
+ ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
- /* ynR+=-B2 * y(n-2)R */
- ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+ /* ynR+=-B1 * y(n-1)R */
+ ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
- /* ynR+=-B1 * y(n-1)R */
- ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+ pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
+ pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
+ pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
+ pDataIn++;
+ pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+ pDataIn++;
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
- pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
- pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
- pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
- pDataIn++;
- pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
- pDataIn++;
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
- pDataOut++;
- *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput */
- pDataOut++;
- }
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
+ pDataOut++;
+ *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
+ pDataOut++;
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
index 3a396df..8c43430 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
@@ -36,75 +36,69 @@
pBiquadState->pDelays[6] is y(n-2)L in Q16 format
pBiquadState->pDelays[7] is y(n-2)R in Q16 format
***************************************************************************/
-void BQ_2I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C14_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ /* ynL=A2 * x(n-2)L */
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- /* ynL=A2 * x(n-2)L */
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ /* ynL+=A1 * x(n-1)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- /* ynL+=A1 * x(n-1)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ /* ynL+=A0 * x(n)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- /* ynL+=A0 * x(n)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ /* ynL+= ( (-B2 * y(n-2)L ))*/
+ ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
- /* ynL+= ( (-B2 * y(n-2)L ))*/
- ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+ /* ynL+=( (-B1 * y(n-1)L )) */
+ ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
- /* ynL+=( (-B1 * y(n-1)L )) */
- ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ /* ynR=A2 * x(n-2)R */
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- /* ynR=A2 * x(n-2)R */
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ /* ynR+=A1 * x(n-1)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- /* ynR+=A1 * x(n-1)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ /* ynR+=A0 * x(n)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
- /* ynR+=A0 * x(n)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+ /* ynR+= ( (-B2 * y(n-2)R ))*/
+ ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
- /* ynR+= ( (-B2 * y(n-2)R ))*/
- ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+ /* ynR+=( (-B1 * y(n-1)R )) */
+ ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
- /* ynR+=( (-B1 * y(n-1)R )) */
- ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+ pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
+ pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
+ pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
+ pDataIn++;
+ pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+ pDataIn++;
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
- pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
- pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
- pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
- pDataIn++;
- pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
- pDataIn++;
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
- pDataOut++;
- *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput */
- pDataOut++;
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
+ pDataOut++;
+ *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
+ pDataOut++;
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
index 1cbff1a..84fbadf 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
@@ -36,75 +36,69 @@
pBiquadState->pDelays[6] is y(n-2)L in Q16 format
pBiquadState->pDelays[7] is y(n-2)R in Q16 format
***************************************************************************/
-void BQ_2I_D16F32C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void BQ_2I_D16F32C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ /* ynL=A2 * x(n-2)L */
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- /* ynL=A2 * x(n-2)L */
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ /* ynL+=A1 * x(n-1)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- /* ynL+=A1 * x(n-1)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ /* ynL+=A0 * x(n)L */
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
- /* ynL+=A0 * x(n)L */
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
+ /* ynL+= ( (-B2 * y(n-2)L ) */
+ ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
- /* ynL+= ( (-B2 * y(n-2)L ) */
- ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
+ /* ynL+=( (-B1 * y(n-1)L )) */
+ ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
- /* ynL+=( (-B1 * y(n-1)L )) */
- ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ /* ynR=A2 * x(n-2)R */
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- /* ynR=A2 * x(n-2)R */
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ /* ynR+=A1 * x(n-1)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- /* ynR+=A1 * x(n-1)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ /* ynR+=A0 * x(n)R */
+ ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn + 1));
- /* ynR+=A0 * x(n)R */
- ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
+ /* ynR+= ( (-B2 * y(n-2)R ) */
+ ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
- /* ynR+= ( (-B2 * y(n-2)R ) */
- ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
+ /* ynR+=( (-B1 * y(n-1)R )) in Q15 */
+ ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
- /* ynR+=( (-B1 * y(n-1)R )) in Q15 */
- ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+ pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R*/
+ pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L*/
+ pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L*/
+ pDataIn++;
+ pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R*/
+ pDataIn++;
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
- pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R*/
- pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L*/
- pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L*/
- pDataIn++;
- pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R*/
- pDataIn++;
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output*/
- pDataOut++;
- *pDataOut = (LVM_FLOAT)(ynR); /* Write Right ouput*/
- pDataOut++;
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output*/
+ pDataOut++;
+ *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output*/
+ pDataOut++;
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
index 314388a..1cc7618 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT16 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+    LVM_INT16 coefs[5];  /* filter coefficients (inline array, not a pointer) */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples \
- (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+    LVM_FLOAT* pDelays; /* pointer to the delayed samples
+                           (data of 32 bits) */
+    LVM_FLOAT coefs[5]; /* filter coefficients (inline array, not a pointer) */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /* _BQ_2I_D16F32CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
index 058541a..6817d9f 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
@@ -36,13 +36,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BQ_2I_D16F32Css_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef)
-{
+void BQ_2I_D16F32Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+ BQ_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A2;
pBiquadState->coefs[0] = temp;
temp = pCoef->A1;
@@ -56,4 +55,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BQ_2I_D16F32Css_TRC_WRA_01_Init */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
index 78d1ba1..4eeaaa8 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
@@ -36,91 +36,84 @@
pBiquadState->pDelays[6] is y(n-2)L in Q0 format
pBiquadState->pDelays[7] is y(n-2)R in Q0 format
***************************************************************************/
-void BQ_2I_D32F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
+void BQ_2I_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR,templ,tempd;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+ LVM_FLOAT ynL, ynR, templ, tempd;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ /* ynL= ( A2 * x(n-2)L ) */
+ ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- /* ynL= ( A2 * x(n-2)L ) */
- ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ /* ynL+= ( A1 * x(n-1)L )*/
+ templ = pBiquadState->coefs[1] * pBiquadState->pDelays[0];
+ ynL += templ;
- /* ynL+= ( A1 * x(n-1)L )*/
- templ = pBiquadState->coefs[1] * pBiquadState->pDelays[0];
- ynL += templ;
+ /* ynL+= ( A0 * x(n)L ) */
+ templ = pBiquadState->coefs[2] * (*pDataIn);
+ ynL += templ;
- /* ynL+= ( A0 * x(n)L ) */
- templ = pBiquadState->coefs[2] * (*pDataIn);
- ynL += templ;
+ /* ynL+= (-B2 * y(n-2)L ) */
+ templ = pBiquadState->coefs[3] * pBiquadState->pDelays[6];
+ ynL += templ;
- /* ynL+= (-B2 * y(n-2)L ) */
- templ = pBiquadState->coefs[3] * pBiquadState->pDelays[6];
- ynL += templ;
+ /* ynL+= (-B1 * y(n-1)L )*/
+ templ = pBiquadState->coefs[4] * pBiquadState->pDelays[4];
+ ynL += templ;
- /* ynL+= (-B1 * y(n-1)L )*/
- templ = pBiquadState->coefs[4] * pBiquadState->pDelays[4];
- ynL += templ;
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ /* ynR= ( A2 * x(n-2)R ) */
+ ynR = pBiquadState->coefs[0] * pBiquadState->pDelays[3];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- /* ynR= ( A2 * x(n-2)R ) */
- ynR = pBiquadState->coefs[0] * pBiquadState->pDelays[3];
+ /* ynR+= ( A1 * x(n-1)R ) */
+ templ = pBiquadState->coefs[1] * pBiquadState->pDelays[1];
+ ynR += templ;
- /* ynR+= ( A1 * x(n-1)R ) */
- templ = pBiquadState->coefs[1] * pBiquadState->pDelays[1];
- ynR += templ;
+ /* ynR+= ( A0 * x(n)R ) */
+ tempd = *(pDataIn + 1);
+ templ = pBiquadState->coefs[2] * tempd;
+ ynR += templ;
- /* ynR+= ( A0 * x(n)R ) */
- tempd =* (pDataIn+1);
- templ = pBiquadState->coefs[2] * tempd;
- ynR += templ;
+ /* ynR+= (-B2 * y(n-2)R ) */
+ templ = pBiquadState->coefs[3] * pBiquadState->pDelays[7];
+ ynR += templ;
- /* ynR+= (-B2 * y(n-2)R ) */
- templ = pBiquadState->coefs[3] * pBiquadState->pDelays[7];
- ynR += templ;
+ /* ynR+= (-B1 * y(n-1)R ) */
+ templ = pBiquadState->coefs[4] * pBiquadState->pDelays[5];
+ ynR += templ;
- /* ynR+= (-B1 * y(n-1)R ) */
- templ = pBiquadState->coefs[4] * pBiquadState->pDelays[5];
- ynR += templ;
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+ pBiquadState->pDelays[5] = (LVM_FLOAT)ynR; /* Update y(n-1)R */
+ pBiquadState->pDelays[4] = (LVM_FLOAT)ynL; /* Update y(n-1)L */
+ pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
+ pDataIn++;
+ pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+ pDataIn++;
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
- pBiquadState->pDelays[5] = (LVM_FLOAT)ynR; /* Update y(n-1)R */
- pBiquadState->pDelays[4] = (LVM_FLOAT)ynL; /* Update y(n-1)L */
- pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
- pDataIn++;
- pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
- pDataIn++;
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = (LVM_FLOAT)ynL; /* Write Left output */
- pDataOut++;
- *pDataOut = (LVM_FLOAT)ynR; /* Write Right ouput */
- pDataOut++;
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = (LVM_FLOAT)ynL; /* Write Left output */
+ pDataOut++;
+ *pDataOut = (LVM_FLOAT)ynR; /* Write Right output */
+ pDataOut++;
}
+}
-#ifdef SUPPORT_MC
/**************************************************************************
ASSUMPTIONS:
COEFS-
@@ -141,61 +134,53 @@
pBiquadState->pDelays[3*NrChannels] to
pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
***************************************************************************/
-void BQ_MC_D32F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
+void BQ_MC_D32F32C30_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels)
- {
- LVM_FLOAT yn, temp;
- LVM_INT16 ii, jj;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+{
+ LVM_FLOAT yn, temp;
+ LVM_INT16 ii, jj;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING CHANNEL-WISE
+ ***************************************************************************/
+ for (jj = 0; jj < NrChannels; jj++) {
+ /* yn= (A2 * x(n-2)) */
+ yn = pBiquadState->coefs[0] * pBiquadState->pDelays[NrChannels + jj];
+
+ /* yn+= (A1 * x(n-1)) */
+ temp = pBiquadState->coefs[1] * pBiquadState->pDelays[jj];
+ yn += temp;
+
+ /* yn+= (A0 * x(n)) */
+ temp = pBiquadState->coefs[2] * (*pDataIn);
+ yn += temp;
+
+ /* yn+= (-B2 * y(n-2)) */
+ temp = pBiquadState->coefs[3] * pBiquadState->pDelays[NrChannels * 3 + jj];
+ yn += temp;
+
+ /* yn+= (-B1 * y(n-1)) */
+ temp = pBiquadState->coefs[4] * pBiquadState->pDelays[NrChannels * 2 + jj];
+ yn += temp;
+
/**************************************************************************
- PROCESSING CHANNEL-WISE
+ UPDATING THE DELAYS
***************************************************************************/
- for (jj = 0; jj < NrChannels; jj++)
- {
- /* yn= (A2 * x(n-2)) */
- yn = pBiquadState->coefs[0] * pBiquadState->pDelays[NrChannels + jj];
-
- /* yn+= (A1 * x(n-1)) */
- temp = pBiquadState->coefs[1] * pBiquadState->pDelays[jj];
- yn += temp;
-
- /* yn+= (A0 * x(n)) */
- temp = pBiquadState->coefs[2] * (*pDataIn);
- yn += temp;
-
- /* yn+= (-B2 * y(n-2)) */
- temp = pBiquadState->coefs[3] * pBiquadState->pDelays[NrChannels*3 + jj];
- yn += temp;
-
- /* yn+= (-B1 * y(n-1)) */
- temp = pBiquadState->coefs[4] * pBiquadState->pDelays[NrChannels*2 + jj];
- yn += temp;
-
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[NrChannels * 3 + jj] =
+ pBiquadState->pDelays[NrChannels * 3 + jj] =
pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
- pBiquadState->pDelays[NrChannels * 1 + jj] =
- pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
- pBiquadState->pDelays[NrChannels * 2 + jj] = (LVM_FLOAT)yn; /* Update y(n-1)*/
- pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
- pDataIn++;
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = (LVM_FLOAT)yn; /* Write jj Channel output */
- pDataOut++;
- }
+ pBiquadState->pDelays[NrChannels * 1 + jj] =
+ pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
+ pBiquadState->pDelays[NrChannels * 2 + jj] = (LVM_FLOAT)yn; /* Update y(n-1)*/
+ pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
+ pDataIn++;
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = (LVM_FLOAT)yn; /* Write jj Channel output */
+ pDataOut++;
}
-
}
-#endif /*SUPPORT_MC*/
-
+}
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
index 492a9e0..1e27391 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init ( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- BQ_FLOAT_Coefs_t *pCoef)
-{
+void BQ_2I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+ BQ_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A2;
pBiquadState->coefs[0] = temp;
temp = pCoef->A1;
@@ -57,4 +56,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: BQ_2I_D32F32C32_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
index 7eb6474..4a2149d 100644
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[5]; /* filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples \
- (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples
+ (data of 32 bits) */
+ LVM_FLOAT coefs[5]; /* filter coefficients */
+} Filter_State_FLOAT;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /* _BQ_2I_D32F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/CompLim_private.h b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
index 06a21c3..9c7a96b 100644
--- a/media/libeffects/lvm/lib/Common/src/CompLim_private.h
+++ b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
@@ -28,17 +28,16 @@
DEFINITIONS
***********************************************************************************/
-#define FS_48K 48000
+#define FS_48K 48000
-#define INTEGER_16 0xFFFF /* 65535*/
-#define INTEGER_15 0x7FFF /* 32767*/
+#define INTEGER_16 0xFFFF /* 65535*/
+#define INTEGER_15 0x7FFF /* 32767*/
-#define GAIN_6DB 1
-#define GAIN_12DB 2
-#define GAIN_18DB 3
-#define GAIN_24DB 4
+#define GAIN_6DB 1
+#define GAIN_12DB 2
+#define GAIN_18DB 3
+#define GAIN_24DB 4
#endif /* #ifndef _COMP_LIM_PRIVATE_ */
/*** End of file ******************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
index 3a50554..8887890 100644
--- a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
@@ -25,27 +25,19 @@
FUNCTION COPY_16
***********************************************************************************/
-void Copy_16( const LVM_INT16 *src,
- LVM_INT16 *dst,
- LVM_INT16 n )
-{
+void Copy_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
- if (src > dst)
- {
- for (ii = n; ii != 0; ii--)
- {
+ if (src > dst) {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst++;
src++;
}
- }
- else
- {
+ } else {
src += n - 1;
dst += n - 1;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst--;
src--;
@@ -54,27 +46,19 @@
return;
}
-void Copy_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n )
-{
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
- if (src > dst)
- {
- for (ii = n; ii != 0; ii--)
- {
+ if (src > dst) {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst++;
src++;
}
- }
- else
- {
+ } else {
src += n - 1;
dst += n - 1;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst--;
src--;
@@ -83,46 +67,35 @@
return;
}
-#ifdef SUPPORT_MC
// Extract out the stereo channel pair from multichannel source.
-void Copy_Float_Mc_Stereo(const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames, /* Number of frames */
- LVM_INT32 NrChannels)
-{
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 NrFrames, /* Number of frames */
+ LVM_INT32 NrChannels) {
LVM_INT16 ii;
- if (NrChannels >= 2)
- {
- for (ii = NrFrames; ii != 0; ii--)
- {
+ if (NrChannels >= 2) {
+ for (ii = NrFrames; ii != 0; ii--) {
dst[0] = src[0];
dst[1] = src[1];
dst += 2;
src += NrChannels;
}
- }
- else if (NrChannels == 1)
- { // not expected to occur, provided for completeness.
+ } else if (NrChannels == 1) { // not expected to occur, provided for completeness.
src += (NrFrames - 1);
dst += 2 * (NrFrames - 1);
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
dst[0] = src[0];
dst[1] = src[0];
dst -= 2;
- src --;
+ src--;
}
}
}
// Merge a multichannel source with stereo contained in StereoOut, to dst.
-void Copy_Float_Stereo_Mc(const LVM_FLOAT *src,
- LVM_FLOAT *StereoOut,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames, /* Number of frames*/
- LVM_INT32 NrChannels)
-{
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+ LVM_INT16 NrFrames, /* Number of frames*/
+ LVM_INT32 NrChannels) {
LVM_INT16 ii, jj;
// pack dst with stereo information of StereoOut
@@ -130,18 +103,15 @@
StereoOut += 2 * (NrFrames - 1);
dst += NrChannels * (NrFrames - 1);
src += NrChannels * (NrFrames - 1);
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
dst[1] = StereoOut[1];
- dst[0] = StereoOut[0]; // copy 1 before 0 is required for NrChannels == 3.
- for (jj = 2; jj < NrChannels; jj++)
- {
+ dst[0] = StereoOut[0]; // copy 1 before 0 is required for NrChannels == 3.
+ for (jj = 2; jj < NrChannels; jj++) {
dst[jj] = src[jj];
}
- dst -= NrChannels;
- src -= NrChannels;
+ dst -= NrChannels;
+ src -= NrChannels;
StereoOut -= 2;
}
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
index 5e77335..2c2061a 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
@@ -25,13 +25,9 @@
/**********************************************************************************
FUNCTION CORE_MIXHARD_2ST_D32C31_SAT
***********************************************************************************/
-void Core_MixHard_2St_D32C31_SAT( Mix_2St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT Temp1,Temp2,Temp3;
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT Temp1, Temp2, Temp3;
LVM_INT16 ii;
LVM_FLOAT Current1Short;
LVM_FLOAT Current2Short;
@@ -39,7 +35,7 @@
Current1Short = (pInstance->Current1);
Current2Short = (pInstance->Current2);
- for (ii = n; ii != 0; ii--){
+ for (ii = n; ii != 0; ii--) {
Temp1 = *src1++;
Temp3 = Temp1 * Current1Short;
Temp2 = *src2++;
@@ -47,11 +43,11 @@
Temp2 = (Temp1 / 2.0f) + (Temp3 / 2.0f);
if (Temp2 > 0.5f)
Temp2 = 1.0f;
- else if (Temp2 < -0.5f )
+ else if (Temp2 < -0.5f)
Temp2 = -1.0f;
else
Temp2 = (Temp2 * 2);
- *dst++ = Temp2;
+ *dst++ = Temp2;
}
}
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
index 8f5c0ae..be9e49b 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
@@ -26,53 +26,48 @@
FUNCTION CORE_MIXSOFT_1ST_D32C31_WRA
***********************************************************************************/
-void Core_MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT Temp1,Temp2,Temp3;
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_FLOAT TargetTimesOneMinAlpha;
- LVM_FLOAT CurrentTimesAlpha;
- LVM_INT16 ii,jj;
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n) {
+ LVM_FLOAT Temp1, Temp2, Temp3;
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_FLOAT TargetTimesOneMinAlpha;
+ LVM_FLOAT CurrentTimesAlpha;
+ LVM_INT16 ii, jj;
InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
OutLoop = (LVM_INT16)(n - (InLoop << 2));
- TargetTimesOneMinAlpha = ((1.0f -pInstance->Alpha) * pInstance->Target);
- if (pInstance->Target >= pInstance->Current){
- TargetTimesOneMinAlpha +=(LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
+ TargetTimesOneMinAlpha = ((1.0f - pInstance->Alpha) * pInstance->Target);
+ if (pInstance->Target >= pInstance->Current) {
+ TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
}
- if (OutLoop){
-
+ if (OutLoop) {
CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
- for (ii = OutLoop; ii != 0; ii--){
- Temp1 = *src++;
- Temp2 = *dst;
+ for (ii = OutLoop; ii != 0; ii--) {
+ Temp1 = *src++;
+ Temp2 = *dst;
- Temp3 = Temp1 * (pInstance->Current);
- Temp1 = Temp2 + Temp3;
+ Temp3 = Temp1 * (pInstance->Current);
+ Temp1 = Temp2 + Temp3;
- if (Temp1 > 1.0f)
- Temp1 = 1.0f;
- else if (Temp1 < -1.0f)
- Temp1 = -1.0f;
+ if (Temp1 > 1.0f)
+ Temp1 = 1.0f;
+ else if (Temp1 < -1.0f)
+ Temp1 = -1.0f;
- *dst++ = Temp1;
+ *dst++ = Temp1;
}
}
- for (ii = InLoop; ii != 0; ii--){
-
+ for (ii = InLoop; ii != 0; ii--) {
CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
- for (jj = 4; jj!=0 ; jj--){
+ for (jj = 4; jj != 0; jj--) {
Temp1 = *src++;
Temp2 = *dst;
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
index 6ff7853..61a4752 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
@@ -25,12 +25,9 @@
/**********************************************************************************
FUNCTION CORE_MIXSOFT_1ST_D32C31_WRA
***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT Temp1,Temp2;
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT Temp1, Temp2;
LVM_INT16 OutLoop;
LVM_INT16 InLoop;
LVM_FLOAT TargetTimesOneMinAlpha;
@@ -41,19 +38,17 @@
InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
OutLoop = (LVM_INT16)(n - (InLoop << 2));
- TargetTimesOneMinAlpha = (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
- if (pInstance->Target >= pInstance->Current)
- {
+ TargetTimesOneMinAlpha =
+ (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
+ if (pInstance->Target >= pInstance->Current) {
TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
}
- if (OutLoop != 0)
- {
+ if (OutLoop != 0) {
CurrentTimesAlpha = (pInstance->Current * pInstance->Alpha);
pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
- for (ii = OutLoop; ii != 0; ii--)
- {
+ for (ii = OutLoop; ii != 0; ii--) {
Temp1 = *src;
src++;
@@ -63,37 +58,36 @@
}
}
- for (ii = InLoop; ii != 0; ii--)
- {
+ for (ii = InLoop; ii != 0; ii--) {
CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
- Temp1 = *src;
- src++;
+ Temp1 = *src;
+ src++;
- Temp2 = Temp1 * (pInstance->Current);
- *dst = Temp2;
- dst++;
+ Temp2 = Temp1 * (pInstance->Current);
+ *dst = Temp2;
+ dst++;
- Temp1 = *src;
- src++;
+ Temp1 = *src;
+ src++;
- Temp2 = Temp1 * (pInstance->Current);
- *dst = Temp2;
- dst++;
+ Temp2 = Temp1 * (pInstance->Current);
+ *dst = Temp2;
+ dst++;
- Temp1 = *src;
- src++;
+ Temp1 = *src;
+ src++;
- Temp2 = Temp1 * (pInstance->Current);
- *dst = Temp2;
- dst++;
+ Temp2 = Temp1 * (pInstance->Current);
+ *dst = Temp2;
+ dst++;
- Temp1 = *src;
- src++;
- Temp2 = Temp1 * (pInstance->Current);
- *dst = Temp2;
- dst++;
+ Temp1 = *src;
+ src++;
+ Temp2 = Temp1 * (pInstance->Current);
+ *dst = Temp2;
+ dst++;
}
}
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
index a7ce4d3..2861be6 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
@@ -18,50 +18,47 @@
#include "BIQUAD.h"
#include "DC_2I_D16_TRC_WRA_01_Private.h"
#include "LVM_Macros.h"
-void DC_2I_D16_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT LeftDC,RightDC;
- LVM_FLOAT Diff;
- LVM_INT32 j;
- PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State) pInstance;
+void DC_2I_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT LeftDC, RightDC;
+ LVM_FLOAT Diff;
+ LVM_INT32 j;
+ PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State)pInstance;
- LeftDC = pBiquadState->LeftDC;
- RightDC = pBiquadState->RightDC;
- for(j = NrSamples-1; j >= 0; j--)
- {
- /* Subtract DC and saturate */
- Diff =* (pDataIn++) - (LeftDC);
- if (Diff > 1.0f) {
- Diff = 1.0f; }
- else if (Diff < -1.0f) {
- Diff = -1.0f; }
- *(pDataOut++) = (LVM_FLOAT)Diff;
- if (Diff < 0) {
- LeftDC -= DC_FLOAT_STEP; }
- else {
- LeftDC += DC_FLOAT_STEP; }
-
- /* Subtract DC an saturate */
- Diff =* (pDataIn++) - (RightDC);
- if (Diff > 1.0f) {
- Diff = 1.0f; }
- else if (Diff < -1.0f) {
- Diff = -1.0f; }
- *(pDataOut++) = (LVM_FLOAT)Diff;
- if (Diff < 0) {
- RightDC -= DC_FLOAT_STEP; }
- else {
- RightDC += DC_FLOAT_STEP; }
-
+ LeftDC = pBiquadState->LeftDC;
+ RightDC = pBiquadState->RightDC;
+ for (j = NrSamples - 1; j >= 0; j--) {
+ /* Subtract DC and saturate */
+ Diff = *(pDataIn++) - (LeftDC);
+ if (Diff > 1.0f) {
+ Diff = 1.0f;
+ } else if (Diff < -1.0f) {
+ Diff = -1.0f;
}
- pBiquadState->LeftDC = LeftDC;
- pBiquadState->RightDC = RightDC;
+ *(pDataOut++) = (LVM_FLOAT)Diff;
+ if (Diff < 0) {
+ LeftDC -= DC_FLOAT_STEP;
+ } else {
+ LeftDC += DC_FLOAT_STEP;
+ }
+ /* Subtract DC and saturate */
+ Diff = *(pDataIn++) - (RightDC);
+ if (Diff > 1.0f) {
+ Diff = 1.0f;
+ } else if (Diff < -1.0f) {
+ Diff = -1.0f;
+ }
+ *(pDataOut++) = (LVM_FLOAT)Diff;
+ if (Diff < 0) {
+ RightDC -= DC_FLOAT_STEP;
+ } else {
+ RightDC += DC_FLOAT_STEP;
+ }
}
-#ifdef SUPPORT_MC
+ pBiquadState->LeftDC = LeftDC;
+ pBiquadState->RightDC = RightDC;
+}
/*
* FUNCTION: DC_Mc_D16_TRC_WRA_01
*
@@ -79,37 +76,30 @@
* void
*
*/
-void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
- {
- LVM_FLOAT *ChDC;
- LVM_FLOAT Diff;
- LVM_INT32 j;
- LVM_INT32 i;
- PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_FLOAT* ChDC;
+ LVM_FLOAT Diff;
+ LVM_INT32 j;
+ LVM_INT32 i;
+ PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
- ChDC = &pBiquadState->ChDC[0];
- for (j = NrFrames - 1; j >= 0; j--)
- {
- /* Subtract DC and saturate */
- for (i = NrChannels - 1; i >= 0; i--)
- {
- Diff = *(pDataIn++) - (ChDC[i]);
- if (Diff > 1.0f) {
- Diff = 1.0f;
- } else if (Diff < -1.0f) {
- Diff = -1.0f; }
- *(pDataOut++) = (LVM_FLOAT)Diff;
- if (Diff < 0) {
- ChDC[i] -= DC_FLOAT_STEP;
- } else {
- ChDC[i] += DC_FLOAT_STEP; }
+ ChDC = &pBiquadState->ChDC[0];
+ for (j = NrFrames - 1; j >= 0; j--) {
+ /* Subtract DC and saturate */
+ for (i = NrChannels - 1; i >= 0; i--) {
+ Diff = *(pDataIn++) - (ChDC[i]);
+ if (Diff > 1.0f) {
+ Diff = 1.0f;
+ } else if (Diff < -1.0f) {
+ Diff = -1.0f;
}
-
+ *(pDataOut++) = (LVM_FLOAT)Diff;
+ if (Diff < 0) {
+ ChDC[i] -= DC_FLOAT_STEP;
+ } else {
+ ChDC[i] += DC_FLOAT_STEP;
+ }
}
-
}
-#endif
+}
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
index beee112..2828cb3 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
@@ -17,20 +17,15 @@
#include "BIQUAD.h"
#include "DC_2I_D16_TRC_WRA_01_Private.h"
-void DC_2I_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t *pInstance)
-{
- PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State) pInstance;
- pBiquadState->LeftDC = 0.0f;
- pBiquadState->RightDC = 0.0f;
+void DC_2I_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance) {
+ PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State)pInstance;
+ pBiquadState->LeftDC = 0.0f;
+ pBiquadState->RightDC = 0.0f;
}
-#ifdef SUPPORT_MC
-void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t *pInstance)
-{
- PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance) {
+ PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
LVM_INT32 i;
- for (i = 0; i < LVM_MAX_CHANNELS; i++)
- {
+ for (i = 0; i < LVM_MAX_CHANNELS; i++) {
pBiquadState->ChDC[i] = 0.0f;
}
}
-#endif
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
index 4170b3c..8f459d2 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
@@ -18,21 +18,17 @@
#ifndef _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
#define _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
-#define DC_FLOAT_STEP 0.0000002384f
+#define DC_FLOAT_STEP 0.0000002384f
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use.*/
-typedef struct _Filter_FLOAT_State_
-{
- LVM_FLOAT LeftDC; /* LeftDC */
- LVM_FLOAT RightDC; /* RightDC */
-}Filter_FLOAT_State;
-typedef Filter_FLOAT_State * PFilter_FLOAT_State ;
-#ifdef SUPPORT_MC
-typedef struct _Filter_FLOAT_State_Mc_
-{
- LVM_FLOAT ChDC[LVM_MAX_CHANNELS]; /* ChannelDC */
+typedef struct _Filter_FLOAT_State_ {
+ LVM_FLOAT LeftDC; /* LeftDC */
+ LVM_FLOAT RightDC; /* RightDC */
+} Filter_FLOAT_State;
+typedef Filter_FLOAT_State* PFilter_FLOAT_State;
+typedef struct _Filter_FLOAT_State_Mc_ {
+ LVM_FLOAT ChDC[LVM_MAX_CHANNELS]; /* ChannelDC */
} Filter_FLOAT_State_Mc;
-typedef Filter_FLOAT_State_Mc * PFilter_FLOAT_State_Mc ;
-#endif
+typedef Filter_FLOAT_State_Mc* PFilter_FLOAT_State_Mc;
#endif /* _DC_2I_D16_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp b/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
index 771fae2..5daef59 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
@@ -27,54 +27,44 @@
FUNCTION DelayAllPass_32x32
***********************************************************************************/
-void DelayAllPass_Sat_32x16To32( LVM_INT32 *delay, /* Delay buffer */
- LVM_UINT16 size, /* Delay size */
- LVM_INT16 coeff, /* All pass filter coefficient */
- LVM_UINT16 DelayOffset, /* Simple delay offset */
- LVM_UINT16 *pAllPassOffset, /* All pass filter delay offset */
- LVM_INT32 *dst, /* Source/destination */
- LVM_INT16 n) /* Number of samples */
+void DelayAllPass_Sat_32x16To32(LVM_INT32* delay, /* Delay buffer */
+ LVM_UINT16 size, /* Delay size */
+ LVM_INT16 coeff, /* All pass filter coefficient */
+ LVM_UINT16 DelayOffset, /* Simple delay offset */
+ LVM_UINT16* pAllPassOffset, /* All pass filter delay offset */
+ LVM_INT32* dst, /* Source/destination */
+ LVM_INT16 n) /* Number of samples */
{
- LVM_INT16 i;
- LVM_UINT16 AllPassOffset = *pAllPassOffset;
- LVM_INT32 temp;
- LVM_INT32 a,b,c;
+ LVM_INT16 i;
+ LVM_UINT16 AllPassOffset = *pAllPassOffset;
+ LVM_INT32 temp;
+ LVM_INT32 a, b, c;
- for (i = 0; i < n; i++)
- {
-
- MUL32x16INTO32(delay[AllPassOffset], coeff, temp, 15)
- a = temp;
+ for (i = 0; i < n; i++) {
+ MUL32x16INTO32(delay[AllPassOffset], coeff, temp, 15) a = temp;
b = delay[DelayOffset];
DelayOffset++;
c = a + b;
- if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
+ if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
{
- if(a < 0)
- {
+ if (a < 0) {
c = 0x80000000L;
- }
- else
- {
+ } else {
c = 0x7FFFFFFFL;
}
}
*dst = c;
dst++;
- MUL32x16INTO32(c, -coeff, temp, 15)
- a = temp;
+ MUL32x16INTO32(c, -coeff, temp, 15) a = temp;
b = delay[AllPassOffset];
c = a + b;
- if ((((c ^ a) & (c ^ b)) >> 31)!=0) /* overflow / underflow */
+ if ((((c ^ a) & (c ^ b)) >> 31) != 0) /* overflow / underflow */
{
- if(a < 0)
- {
+ if (a < 0) {
c = 0x80000000L;
- }
- else
- {
+ } else {
c = 0x7FFFFFFFL;
}
}
@@ -82,13 +72,11 @@
AllPassOffset++;
/* Make the delay buffer a circular buffer */
- if (DelayOffset >= size)
- {
+ if (DelayOffset >= size) {
DelayOffset = 0;
}
- if (AllPassOffset >= size)
- {
+ if (AllPassOffset >= size) {
AllPassOffset = 0;
}
}
@@ -100,4 +88,3 @@
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
index 52d263f..da75982 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
@@ -25,19 +25,18 @@
FUNCTION DelayMix_16x16
***********************************************************************************/
-void DelayMix_16x16(const LVM_INT16 *src, /* Source 1, to be delayed */
- LVM_INT16 *delay, /* Delay buffer */
- LVM_INT16 size, /* Delay size */
- LVM_INT16 *dst, /* Source/destination */
- LVM_INT16 *pOffset, /* Delay offset */
- LVM_INT16 n) /* Number of stereo samples */
+void DelayMix_16x16(const LVM_INT16* src, /* Source 1, to be delayed */
+ LVM_INT16* delay, /* Delay buffer */
+ LVM_INT16 size, /* Delay size */
+ LVM_INT16* dst, /* Source/destination */
+ LVM_INT16* pOffset, /* Delay offset */
+ LVM_INT16 n) /* Number of stereo samples */
{
- LVM_INT16 i;
- LVM_INT16 Offset = *pOffset;
- LVM_INT16 temp;
+ LVM_INT16 i;
+ LVM_INT16 Offset = *pOffset;
+ LVM_INT16 temp;
- for (i = 0; i < n; i++)
- {
+ for (i = 0; i < n; i++) {
/* Left channel */
temp = (LVM_INT16)((LVM_UINT32)((LVM_INT32)(*dst) + (LVM_INT32)delay[Offset]) >> 1);
*dst = temp;
@@ -57,8 +56,7 @@
src++;
/* Make the reverb delay buffer a circular buffer */
- if (Offset >= size)
- {
+ if (Offset >= size) {
Offset = 0;
}
}
@@ -68,22 +66,21 @@
return;
}
-void DelayMix_Float(const LVM_FLOAT *src, /* Source 1, to be delayed */
- LVM_FLOAT *delay, /* Delay buffer */
- LVM_INT16 size, /* Delay size */
- LVM_FLOAT *dst, /* Source/destination */
- LVM_INT16 *pOffset, /* Delay offset */
- LVM_INT16 n) /* Number of stereo samples */
+void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
+ LVM_FLOAT* delay, /* Delay buffer */
+ LVM_INT16 size, /* Delay size */
+ LVM_FLOAT* dst, /* Source/destination */
+ LVM_INT16* pOffset, /* Delay offset */
+ LVM_INT16 n) /* Number of stereo samples */
{
- LVM_INT16 i;
- LVM_INT16 Offset = *pOffset;
- LVM_FLOAT temp;
+ LVM_INT16 i;
+ LVM_INT16 Offset = *pOffset;
+ LVM_FLOAT temp;
- for (i=0; i<n; i++)
- {
+ for (i = 0; i < n; i++) {
/* Left channel */
- temp = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
- *dst = temp;
+ temp = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
+ *dst = temp;
dst++;
delay[Offset] = *src;
@@ -91,8 +88,8 @@
src++;
/* Right channel */
- temp = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
- *dst = temp;
+ temp = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
+ *dst = temp;
dst++;
delay[Offset] = *src;
@@ -100,8 +97,7 @@
src++;
/* Make the reverb delay buffer a circular buffer */
- if (Offset >= size)
- {
+ if (Offset >= size) {
Offset = 0;
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp b/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
index 809cddc..47cffbf 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
@@ -25,24 +25,22 @@
FUNCTION DelayMix_16x16
***********************************************************************************/
-void DelayWrite_32(const LVM_INT32 *src, /* Source 1, to be delayed */
- LVM_INT32 *delay, /* Delay buffer */
- LVM_UINT16 size, /* Delay size */
- LVM_UINT16 *pOffset, /* Delay offset */
- LVM_INT16 n) /* Number of samples */
+void DelayWrite_32(const LVM_INT32* src, /* Source 1, to be delayed */
+ LVM_INT32* delay, /* Delay buffer */
+ LVM_UINT16 size, /* Delay size */
+ LVM_UINT16* pOffset, /* Delay offset */
+ LVM_INT16 n) /* Number of samples */
{
- LVM_INT16 i;
- LVM_INT16 Offset = (LVM_INT16)*pOffset;
+ LVM_INT16 i;
+ LVM_INT16 Offset = (LVM_INT16)*pOffset;
- for (i=0; i<n; i++)
- {
+ for (i = 0; i < n; i++) {
delay[Offset] = *src;
Offset++;
src++;
/* Make the delay buffer a circular buffer */
- if (Offset >= size)
- {
+ if (Offset >= size) {
Offset = 0;
}
}
@@ -54,4 +52,3 @@
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
index bef0d62..df8fadc 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
@@ -31,41 +31,34 @@
pBiquadState->pDelays[1] is y(n-1)L in Q0 format
***************************************************************************/
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void FO_1I_D16F16C15_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A1 * x(n-1)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A1 * x(n-1)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
+ // ynL+= (-B1 * y(n-1)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[2] * pBiquadState->pDelays[1];
- // ynL+= (-B1 * y(n-1)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[2] * pBiquadState->pDelays[1];
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
index 161225e..10604bf 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void FO_1I_D16F16Css_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_Coefs_t *pCoef)
-{
+void FO_1I_D16F16Css_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order1_FLOAT_Taps_t* pTaps,
+ FO_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *)pTaps;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A1;
pBiquadState->coefs[0] = temp;
temp = pCoef->A0;
@@ -53,4 +52,3 @@
}
/*------------------------------------------------*/
/* End Of File: FO_1I_D16F16Css_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
index 34f3df9..d1819fc 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
@@ -20,20 +20,18 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT16 coefs[3]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT16 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples \
- (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
+typedef struct _Filter_State_FLOAT {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples \
+ (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /* _FO_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
index e3efad7..4c75e04 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
@@ -30,42 +30,36 @@
pBiquadState->pDelays[0] is x(n-1)L in Q0 format
pBiquadState->pDelays[1] is y(n-1)L in Q0 format
***************************************************************************/
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,templ;
- LVM_INT16 ii;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
+void FO_1I_D32F32C31_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, templ;
+ LVM_INT16 ii;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ // ynL=A1 * x(n-1)L
+ ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[0];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- // ynL=A1 * x(n-1)L
- ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+ // ynL+=A0 * x(n)L
+ templ = pBiquadState->coefs[1] * (*pDataIn);
+ ynL += templ;
- // ynL+=A0 * x(n)L
- templ = pBiquadState->coefs[1] * (*pDataIn);
- ynL += templ;
+ // ynL+= (-B1 * y(n-1)L
+ templ = pBiquadState->coefs[2] * pBiquadState->pDelays[1];
+ ynL += templ;
- // ynL+= (-B1 * y(n-1)L
- templ = pBiquadState->coefs[2] * pBiquadState->pDelays[1];
- ynL += templ;
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
}
+}
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
index bb5295c..bf2e5e1 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
@@ -36,13 +36,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t *pInstance,
- Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_Coefs_t *pCoef)
-{
+void FO_1I_D32F32Cll_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_1I_Order1_FLOAT_Taps_t* pTaps,
+ FO_FLOAT_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps;
+ PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A1;
pBiquadState->coefs[0] = temp;
@@ -53,4 +52,3 @@
}
/*------------------------------------------------*/
/* End Of File: FO_1I_D32F32Cll_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
index 67d1384..8645593 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
@@ -20,19 +20,17 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[3]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
-typedef struct _Filter_State_FLOAT_
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
+typedef struct _Filter_State_FLOAT_ {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
+typedef Filter_State_FLOAT* PFilter_State_FLOAT;
#endif /* _FO_1I_D32F32CLL_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
index 6ca819a..dad070b 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
@@ -32,88 +32,73 @@
pBiquadState->pDelays[2] is x(n-1)R in Q15 format
pBiquadState->pDelays[3] is y(n-1)R in Q30 format
***************************************************************************/
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR;
- LVM_FLOAT Temp;
- LVM_FLOAT NegSatValue;
- LVM_INT16 ii;
+void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR;
+ LVM_FLOAT Temp;
+ LVM_FLOAT NegSatValue;
+ LVM_INT16 ii;
- PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
+ PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
- NegSatValue = -1.0f;
+ NegSatValue = -1.0f;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
+ // ynL =A1 * x(n-1)L
+ ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
+ // ynR =A1 * x(n-1)R
+ ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
- // ynL =A1 * x(n-1)L
- ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
- // ynR =A1 * x(n-1)R
- ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
+ // ynL+=A0 * x(n)L
+ ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
+ // ynR+=A0 * x(n)L
+ ynR += (LVM_FLOAT)pBiquadState->coefs[1] * (*(pDataIn + 1));
- // ynL+=A0 * x(n)L
- ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
- // ynR+=A0 * x(n)L
- ynR += (LVM_FLOAT)pBiquadState->coefs[1] * (*(pDataIn+1));
+ // ynL += (-B1 * y(n-1)L )
+ Temp = pBiquadState->pDelays[1] * pBiquadState->coefs[2];
+ ynL += Temp;
+ // ynR += (-B1 * y(n-1)R ) )
+ Temp = pBiquadState->pDelays[3] * pBiquadState->coefs[2];
+ ynR += Temp;
- // ynL += (-B1 * y(n-1)L )
- Temp = pBiquadState->pDelays[1] * pBiquadState->coefs[2];
- ynL += Temp;
- // ynR += (-B1 * y(n-1)R ) )
- Temp = pBiquadState->pDelays[3] * pBiquadState->coefs[2];
- ynR += Temp;
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
+ pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
- pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
+ pBiquadState->pDelays[3] = ynR; // Update y(n-1)R
+ pBiquadState->pDelays[2] = (*pDataIn++); // Update x(n-1)R
- pBiquadState->pDelays[3] = ynR; // Update y(n-1)R
- pBiquadState->pDelays[2] = (*pDataIn++); // Update x(n-1)R
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
-
- /*Saturate results*/
- if(ynL > 1.0f)
- {
- ynL = 1.0f;
+ /*Saturate results*/
+ if (ynL > 1.0f) {
+ ynL = 1.0f;
+ } else {
+ if (ynL < NegSatValue) {
+ ynL = NegSatValue;
}
- else
- {
- if(ynL < NegSatValue)
- {
- ynL = NegSatValue;
- }
- }
-
- if(ynR > 1.0f)
- {
- ynR = 1.0f;
- }
- else
- {
- if(ynR < NegSatValue)
- {
- ynR = NegSatValue;
- }
- }
-
- *pDataOut++ = (LVM_FLOAT)ynL;
- *pDataOut++ = (LVM_FLOAT)ynR;
}
+ if (ynR > 1.0f) {
+ ynR = 1.0f;
+ } else {
+ if (ynR < NegSatValue) {
+ ynR = NegSatValue;
+ }
+ }
+
+ *pDataOut++ = (LVM_FLOAT)ynL;
+ *pDataOut++ = (LVM_FLOAT)ynR;
}
-#ifdef SUPPORT_MC
+}
/**************************************************************************
ASSUMPTIONS:
COEFS-
@@ -135,64 +120,56 @@
RETURNS:
void
***************************************************************************/
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
- {
- LVM_FLOAT yn;
- LVM_FLOAT Temp;
- LVM_INT16 ii;
- LVM_INT16 ch;
- PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
+void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames,
+ LVM_INT16 NrChannels) {
+ LVM_FLOAT yn;
+ LVM_FLOAT Temp;
+ LVM_INT16 ii;
+ LVM_INT16 ch;
+ PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
- LVM_FLOAT *pDelays = pBiquadState->pDelays;
- LVM_FLOAT *pCoefs = &pBiquadState->coefs[0];
- LVM_FLOAT A0 = pCoefs[1];
- LVM_FLOAT A1 = pCoefs[0];
- LVM_FLOAT B1 = pCoefs[2];
+ LVM_FLOAT* pDelays = pBiquadState->pDelays;
+ LVM_FLOAT* pCoefs = &pBiquadState->coefs[0];
+ LVM_FLOAT A0 = pCoefs[1];
+ LVM_FLOAT A1 = pCoefs[0];
+ LVM_FLOAT B1 = pCoefs[2];
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE CHANNELS
+ ***************************************************************************/
+ for (ch = 0; ch < NrChannels; ch++) {
+ // yn =A1 * x(n-1)
+ yn = (LVM_FLOAT)A1 * pDelays[0];
+
+ // yn+=A0 * x(n)
+ yn += (LVM_FLOAT)A0 * (*pDataIn);
+
+ // yn += (-B1 * y(n-1))
+ Temp = B1 * pDelays[1];
+ yn += Temp;
/**************************************************************************
- PROCESSING OF THE CHANNELS
+ UPDATING THE DELAYS
***************************************************************************/
- for (ch = 0; ch < NrChannels; ch++)
- {
- // yn =A1 * x(n-1)
- yn = (LVM_FLOAT)A1 * pDelays[0];
+ pDelays[1] = yn; // Update y(n-1)
+ pDelays[0] = (*pDataIn++); // Update x(n-1)
- // yn+=A0 * x(n)
- yn += (LVM_FLOAT)A0 * (*pDataIn);
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
- // yn += (-B1 * y(n-1))
- Temp = B1 * pDelays[1];
- yn += Temp;
-
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pDelays[1] = yn; // Update y(n-1)
- pDelays[0] = (*pDataIn++); // Update x(n-1)
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
-
- /*Saturate results*/
- if (yn > 1.0f)
- {
- yn = 1.0f;
- } else if (yn < -1.0f) {
- yn = -1.0f;
- }
-
- *pDataOut++ = (LVM_FLOAT)yn;
- pDelays += 2;
+ /*Saturate results*/
+ if (yn > 1.0f) {
+ yn = 1.0f;
+ } else if (yn < -1.0f) {
+ yn = -1.0f;
}
- pDelays -= NrChannels * 2;
+
+ *pDataOut++ = (LVM_FLOAT)yn;
+ pDelays += 2;
}
+ pDelays -= NrChannels * 2;
}
-#endif
+}
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
index b81b976..552aeda 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
@@ -37,13 +37,12 @@
/* RETURNS: */
/* void return code */
/*-------------------------------------------------------------------------*/
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order1_FLOAT_Taps_t *pTaps,
- FO_FLOAT_LShx_Coefs_t *pCoef)
-{
+void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order1_FLOAT_Taps_t* pTaps,
+ FO_FLOAT_LShx_Coefs_t* pCoef) {
LVM_FLOAT temp;
- PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps ;
+ PFilter_Float_State pBiquadState = (PFilter_Float_State)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
temp = pCoef->A1;
pBiquadState->coefs[0] = temp;
@@ -54,4 +53,3 @@
}
/*-------------------------------------------------------------------------*/
/* End Of File: FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
index 5022500..0103328 100644
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
@@ -20,11 +20,10 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_FLOAT *pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
-}Filter_Float_State;
+typedef struct _Filter_State_ {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[3]; /* pointer to the filter coefficients */
+} Filter_Float_State;
-typedef Filter_Float_State * PFilter_Float_State ;
+typedef Filter_Float_State* PFilter_Float_State;
#endif /* _FO_2I_D16F32CSS_LSHX_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Filters.h b/media/libeffects/lvm/lib/Common/src/Filters.h
index b5db8f4..8eb3e76 100644
--- a/media/libeffects/lvm/lib/Common/src/Filters.h
+++ b/media/libeffects/lvm/lib/Common/src/Filters.h
@@ -30,26 +30,23 @@
* Biquad with coefficients A0, A1, A2, B1 and B2 coefficients
*/
/* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
- LVM_FLOAT A0;
- LVM_FLOAT A1;
- LVM_FLOAT A2;
- LVM_FLOAT B1;
- LVM_FLOAT B2;
- LVM_UINT16 Scale;
+typedef struct {
+ LVM_FLOAT A0;
+ LVM_FLOAT A1;
+ LVM_FLOAT A2;
+ LVM_FLOAT B1;
+ LVM_FLOAT B2;
+ LVM_UINT16 Scale;
} BiquadA012B12CoefsSP_t;
/*
* Biquad with coefficients A0, A1 and B1 coefficients
*/
/* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
- LVM_FLOAT A0;
- LVM_FLOAT A1;
- LVM_FLOAT B1;
- LVM_UINT16 Scale;
+typedef struct {
+ LVM_FLOAT A0;
+ LVM_FLOAT A1;
+ LVM_FLOAT B1;
+ LVM_UINT16 Scale;
} BiquadA01B1CoefsSP_t;
-#endif /* FILTERS_H */
-
+#endif /* FILTERS_H */
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
index c3f6648..b050267 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
@@ -25,15 +25,10 @@
FUNCTION From2iToMS_16x16
***********************************************************************************/
-void From2iToMS_16x16( const LVM_INT16 *src,
- LVM_INT16 *dstM,
- LVM_INT16 *dstS,
- LVM_INT16 n )
-{
- LVM_INT32 temp1,left,right;
+void From2iToMS_16x16(const LVM_INT16* src, LVM_INT16* dstM, LVM_INT16* dstS, LVM_INT16 n) {
+ LVM_INT32 temp1, left, right;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
left = (LVM_INT32)*src;
src++;
@@ -41,27 +36,22 @@
src++;
/* Compute M signal*/
- temp1 = (left+right)>>1;
+ temp1 = (left + right) >> 1;
*dstM = (LVM_INT16)temp1;
dstM++;
/* Compute S signal*/
- temp1 = (left-right)>>1;
+ temp1 = (left - right) >> 1;
*dstS = (LVM_INT16)temp1;
dstS++;
}
return;
}
-void From2iToMS_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dstM,
- LVM_FLOAT *dstS,
- LVM_INT16 n )
-{
- LVM_FLOAT temp1,left,right;
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n) {
+ LVM_FLOAT temp1, left, right;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
left = (LVM_FLOAT)*src;
src++;
@@ -69,12 +59,12 @@
src++;
/* Compute M signal*/
- temp1 = (left + right) / 2.0f;
+ temp1 = (left + right) / 2.0f;
*dstM = (LVM_FLOAT)temp1;
dstM++;
/* Compute S signal*/
- temp1 = (left - right) / 2.0f;
+ temp1 = (left - right) / 2.0f;
*dstS = (LVM_FLOAT)temp1;
dstS++;
}
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
index b758ee7..9a54ee4 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
@@ -25,21 +25,17 @@
FUNCTION From2iToMono_16
***********************************************************************************/
-void From2iToMono_16( const LVM_INT16 *src,
- LVM_INT16 *dst,
- LVM_INT16 n)
-{
+void From2iToMono_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_INT32 Temp;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
Temp = (LVM_INT32)*src;
src++;
Temp += (LVM_INT32)*src;
src++;
- *dst = (LVM_INT16)(Temp >>1);
+ *dst = (LVM_INT16)(Temp >> 1);
dst++;
}
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
index a8688b4..6ede958 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
@@ -25,19 +25,15 @@
FUNCTION From2iToMono_32
***********************************************************************************/
-void From2iToMono_32( const LVM_INT32 *src,
- LVM_INT32 *dst,
- LVM_INT16 n)
-{
+void From2iToMono_32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_INT32 Temp;
- for (ii = n; ii != 0; ii--)
- {
- Temp = (*src>>1);
+ for (ii = n; ii != 0; ii--) {
+ Temp = (*src >> 1);
src++;
- Temp +=(*src>>1);
+ Temp += (*src >> 1);
src++;
*dst = Temp;
@@ -46,15 +42,11 @@
return;
}
-void From2iToMono_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_FLOAT Temp;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
Temp = (*src);
src++;
@@ -67,7 +59,6 @@
return;
}
-#ifdef SUPPORT_MC
/*
* FUNCTION: FromMcToMono_Float
*
@@ -85,19 +76,14 @@
* void
*
*/
-void FromMcToMono_Float(const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+ LVM_INT16 NrChannels) {
LVM_INT16 ii, jj;
LVM_FLOAT Temp;
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
Temp = 0.0f;
- for (jj = NrChannels; jj !=0; jj--)
- {
+ for (jj = NrChannels; jj != 0; jj--) {
Temp += (*src);
src++;
}
@@ -107,6 +93,5 @@
return;
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
index a039bf5..2cfe056 100644
--- a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
+++ b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
@@ -26,9 +26,7 @@
* Remarks :
****************************************************************************************/
-void InstAlloc_Init( INST_ALLOC *pms,
- void *StartAddr )
-{
+void InstAlloc_Init(INST_ALLOC* pms, void* StartAddr) {
pms->TotalSize = 3;
pms->pNextMember = (((uintptr_t)StartAddr + 3) & (uintptr_t)~3);
}
@@ -44,10 +42,8 @@
* Remarks :
****************************************************************************************/
-void* InstAlloc_AddMember( INST_ALLOC *pms,
- LVM_UINT32 Size )
-{
- void *NewMemberAddress; /* Variable to temporarily store the return value */
+void* InstAlloc_AddMember(INST_ALLOC* pms, LVM_UINT32 Size) {
+ void* NewMemberAddress; /* Variable to temporarily store the return value */
NewMemberAddress = (void*)pms->pNextMember;
Size = ((Size + 3) & (LVM_UINT32)~3); /* Ceil the size to a multiple of four */
@@ -55,7 +51,7 @@
pms->TotalSize += Size;
pms->pNextMember += Size;
- return(NewMemberAddress);
+ return (NewMemberAddress);
}
/****************************************************************************************
@@ -66,21 +62,15 @@
* Remarks :
****************************************************************************************/
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms)
-{
- if (pms->TotalSize > 3)
- {
- return(pms->TotalSize);
- }
- else
- {
- return 0; /* No memory added */
+LVM_UINT32 InstAlloc_GetTotal(INST_ALLOC* pms) {
+ if (pms->TotalSize > 3) {
+ return (pms->TotalSize);
+ } else {
+ return 0; /* No memory added */
}
}
-void InstAlloc_InitAll( INST_ALLOC *pms,
- LVM_MemoryTable_st *pMemoryTable)
-{
+void InstAlloc_InitAll(INST_ALLOC* pms, LVM_MemoryTable_st* pMemoryTable) {
uintptr_t StartAddr;
StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress;
@@ -102,7 +92,6 @@
pms[3].TotalSize = 3;
pms[3].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
}
/****************************************************************************************
@@ -114,8 +103,7 @@
* Remarks :
****************************************************************************************/
-void InstAlloc_InitAll_NULL( INST_ALLOC *pms)
-{
+void InstAlloc_InitAll_NULL(INST_ALLOC* pms) {
pms[0].TotalSize = 3;
pms[0].pNextMember = 0;
@@ -127,47 +115,46 @@
pms[3].TotalSize = 3;
pms[3].pNextMember = 0;
-
}
-void* InstAlloc_AddMemberAll( INST_ALLOC *pms,
- LVM_UINT32 Size[],
- LVM_MemoryTable_st *pMemoryTable)
-{
- void *NewMemberAddress; /* Variable to temporarily store the return value */
+void* InstAlloc_AddMemberAll(INST_ALLOC* pms, LVM_UINT32 Size[], LVM_MemoryTable_st* pMemoryTable) {
+ void* NewMemberAddress; /* Variable to temporarily store the return value */
/* coverity[returned_pointer] Ignore coverity warning that ptr is not used */
- NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
+ NewMemberAddress =
+ InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
- pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
- pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
+ pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size =
+ InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
+ pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
- NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
+ NewMemberAddress =
+ InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size =
+ InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
- NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
+ NewMemberAddress =
+ InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size =
+ InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
NewMemberAddress = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
- pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
- pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
- pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
+ pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
+ pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
+ pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
- return(NewMemberAddress);
+ return (NewMemberAddress);
}
-void* InstAlloc_AddMemberAllRet( INST_ALLOC *pms,
- LVM_UINT32 Size[],
- void **ptr)
-{
+void* InstAlloc_AddMemberAllRet(INST_ALLOC* pms, LVM_UINT32 Size[], void** ptr) {
ptr[0] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
ptr[1] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
ptr[2] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
diff --git a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp b/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
index 9f09e4d..9ddcbe4 100644
--- a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
@@ -25,19 +25,14 @@
FUNCTION INT16LSHIFTTOINT32_16X32
***********************************************************************************/
-void Int16LShiftToInt32_16x32(const LVM_INT16 *src,
- LVM_INT32 *dst,
- LVM_INT16 n,
- LVM_INT16 shift )
-{
+void Int16LShiftToInt32_16x32(const LVM_INT16* src, LVM_INT32* dst, LVM_INT16 n, LVM_INT16 shift) {
LVM_INT16 ii;
- src += n-1;
- dst += n-1;
+ src += n - 1;
+ dst += n - 1;
- for (ii = n; ii != 0; ii--)
- {
- *dst = ( ((LVM_INT32)*src) << shift);
+ for (ii = n; ii != 0; ii--) {
+ *dst = (((LVM_INT32)*src) << shift);
src--;
dst--;
}
diff --git a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
index 8c9980d..2584117 100644
--- a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
@@ -25,29 +25,20 @@
FUNCTION INT32RSHIFTTOINT16_SAT_32X16
***********************************************************************************/
-void Int32RShiftToInt16_Sat_32x16(const LVM_INT32 *src,
- LVM_INT16 *dst,
- LVM_INT16 n,
- LVM_INT16 shift )
-{
+void Int32RShiftToInt16_Sat_32x16(const LVM_INT32* src, LVM_INT16* dst, LVM_INT16 n,
+ LVM_INT16 shift) {
LVM_INT32 temp;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
temp = *src >> shift;
src++;
- if (temp > 0x00007FFF)
- {
+ if (temp > 0x00007FFF) {
*dst = 0x7FFF;
- }
- else if (temp < -0x00008000)
- {
- *dst = - 0x8000;
- }
- else
- {
+ } else if (temp < -0x00008000) {
+ *dst = -0x8000;
+ } else {
*dst = (LVM_INT16)temp;
}
diff --git a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
index 05df656..0721b76 100644
--- a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
@@ -25,19 +25,14 @@
FUNCTION JoinTo2i_32x32
***********************************************************************************/
-void JoinTo2i_32x32( const LVM_INT32 *srcL,
- const LVM_INT32 *srcR,
- LVM_INT32 *dst,
- LVM_INT16 n )
-{
+void JoinTo2i_32x32(const LVM_INT32* srcL, const LVM_INT32* srcR, LVM_INT32* dst, LVM_INT16 n) {
LVM_INT16 ii;
- srcL += n-1;
- srcR += n-1;
- dst += ((2*n)-1);
+ srcL += n - 1;
+ srcR += n - 1;
+ dst += ((2 * n) - 1);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *srcR;
dst--;
srcR--;
@@ -49,19 +44,14 @@
return;
}
-void JoinTo2i_Float( const LVM_FLOAT *srcL,
- const LVM_FLOAT *srcR,
- LVM_FLOAT *dst,
- LVM_INT16 n )
-{
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
srcL += n - 1;
srcR += n - 1;
- dst += ((2 * n) - 1);
+ dst += ((2 * n) - 1);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *srcR;
dst--;
srcR--;
@@ -74,4 +64,3 @@
return;
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
index 14d61bd..8b00925 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
@@ -26,19 +26,15 @@
/**********************************************************************************
FUNCTION LVC_Core_MixHard_1St_2i_D16C31_SAT
***********************************************************************************/
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance1,
- LVMixer3_FLOAT_st *ptrInstance2,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT Temp;
+void LVC_Core_MixHard_1St_2i_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+ LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT Temp;
LVM_INT16 ii;
- Mix_Private_FLOAT_st *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
- for (ii = n; ii != 0; ii--)
- {
- Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
+ Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
+ for (ii = n; ii != 0; ii--) {
+ Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance1->Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -46,7 +42,7 @@
else
*dst++ = (LVM_FLOAT)Temp;
- Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance2->Current);
+ Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance2->Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -54,23 +50,15 @@
else
*dst++ = (LVM_FLOAT)Temp;
}
-
}
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT (Mix_Private_FLOAT_st **ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- LVM_FLOAT Temp;
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_FLOAT Temp;
LVM_INT16 ii, jj;
- for (ii = NrFrames; ii != 0; ii--)
- {
- for (jj = 0; jj < NrChannels; jj++)
- {
- Mix_Private_FLOAT_st *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance[jj]);
- Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
+ for (ii = NrFrames; ii != 0; ii--) {
+ for (jj = 0; jj < NrChannels; jj++) {
+ Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance[jj]);
+ Temp = ((LVM_FLOAT) * (src++) * (LVM_FLOAT)pInstance1->Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -80,5 +68,4 @@
}
}
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
index 841fa1e..31cd805 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
@@ -24,26 +24,22 @@
/**********************************************************************************
FUNCTION LVCore_MIXHARD_2ST_D16C31_SAT
***********************************************************************************/
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance1,
- LVMixer3_FLOAT_st *ptrInstance2,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT Temp;
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+ LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT Temp;
LVM_INT16 ii;
LVM_FLOAT Current1;
LVM_FLOAT Current2;
- Mix_Private_FLOAT_st *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
+ Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
Current1 = (pInstance1->Current);
Current2 = (pInstance2->Current);
- for (ii = n; ii != 0; ii--){
- Temp = (((LVM_FLOAT)*(src1++) * (LVM_FLOAT)Current1)) +
- (((LVM_FLOAT)*(src2++) * (LVM_FLOAT)Current2));
+ for (ii = n; ii != 0; ii--) {
+ Temp = (((LVM_FLOAT) * (src1++) * (LVM_FLOAT)Current1)) +
+ (((LVM_FLOAT) * (src2++) * (LVM_FLOAT)Current2));
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
index 318138d..b7865d9 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
@@ -25,33 +25,28 @@
/**********************************************************************************
FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
***********************************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
-
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_INT32 ii,jj;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
- LVM_FLOAT Delta = pInstance->Delta;
- LVM_FLOAT Current = pInstance->Current;
- LVM_FLOAT Target = pInstance->Target;
- LVM_FLOAT Temp;
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_INT32 ii, jj;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+ LVM_FLOAT Delta = pInstance->Delta;
+ LVM_FLOAT Current = pInstance->Current;
+ LVM_FLOAT Target = pInstance->Target;
+ LVM_FLOAT Temp;
InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
OutLoop = (LVM_INT16)(n - (InLoop << 2));
- if(Current < Target){
- if (OutLoop){
+ if (Current < Target) {
+ if (OutLoop) {
Temp = Current + Delta;
Current = Temp;
- if (Current > Target)
- Current = Target;
+ if (Current > Target) Current = Target;
- for (ii = OutLoop; ii != 0; ii--){
- Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+ for (ii = OutLoop; ii != 0; ii--) {
+ Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -61,14 +56,13 @@
}
}
- for (ii = InLoop; ii != 0; ii--){
+ for (ii = InLoop; ii != 0; ii--) {
Temp = Current + Delta;
Current = Temp;
- if (Current > Target)
- Current = Target;
+ if (Current > Target) Current = Target;
- for (jj = 4; jj != 0 ; jj--){
- Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+ for (jj = 4; jj != 0; jj--) {
+ Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -77,15 +71,13 @@
*dst++ = (LVM_FLOAT)Temp;
}
}
- }
- else{
- if (OutLoop){
+ } else {
+ if (OutLoop) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (ii = OutLoop; ii != 0; ii--){
- Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+ for (ii = OutLoop; ii != 0; ii--) {
+ Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -95,13 +87,12 @@
}
}
- for (ii = InLoop; ii != 0; ii--){
+ for (ii = InLoop; ii != 0; ii--) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (jj = 4; jj != 0 ; jj--){
- Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
+ for (jj = 4; jj != 0; jj--) {
+ Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT) * (src++) * Current));
if (Temp > 1.0f)
*dst++ = 1.0f;
else if (Temp < -1.0f)
@@ -113,7 +104,6 @@
}
pInstance->Current = Current;
}
-#ifdef SUPPORT_MC
/*
* FUNCTION: LVC_Core_MixInSoft_Mc_D16C31_SAT
*
@@ -131,21 +121,16 @@
* void
*
*/
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
-
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_INT32 ii, jj;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
- LVM_FLOAT Delta = pInstance->Delta;
- LVM_FLOAT Current = pInstance->Current;
- LVM_FLOAT Target = pInstance->Target;
- LVM_FLOAT Temp;
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_INT32 ii, jj;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+ LVM_FLOAT Delta = pInstance->Delta;
+ LVM_FLOAT Current = pInstance->Current;
+ LVM_FLOAT Target = pInstance->Target;
+ LVM_FLOAT Temp;
/*
* Same operation is performed on consecutive frames.
@@ -160,10 +145,9 @@
if (OutLoop) {
Temp = Current + Delta;
Current = Temp;
- if (Current > Target)
- Current = Target;
+ if (Current > Target) Current = Target;
- for (ii = OutLoop*NrChannels; ii != 0; ii--) {
+ for (ii = OutLoop * NrChannels; ii != 0; ii--) {
Temp = (*dst) + (*(src++) * Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
@@ -177,10 +161,9 @@
for (ii = InLoop; ii != 0; ii--) {
Temp = Current + Delta;
Current = Temp;
- if (Current > Target)
- Current = Target;
+ if (Current > Target) Current = Target;
- for (jj = NrChannels; jj != 0 ; jj--) {
+ for (jj = NrChannels; jj != 0; jj--) {
Temp = (*dst) + (*(src++) * Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
@@ -196,17 +179,14 @@
*dst++ = -1.0f;
else
*dst++ = Temp;
-
}
}
- }
- else{
+ } else {
if (OutLoop) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (ii = OutLoop*NrChannels; ii != 0; ii--) {
+ for (ii = OutLoop * NrChannels; ii != 0; ii--) {
Temp = (*dst) + (*(src++) * Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
@@ -219,10 +199,9 @@
for (ii = InLoop; ii != 0; ii--) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (jj = NrChannels; jj != 0 ; jj--) {
+ for (jj = NrChannels; jj != 0; jj--) {
Temp = (*dst) + (*(src++) * Current);
if (Temp > 1.0f)
*dst++ = 1.0f;
@@ -238,12 +217,10 @@
*dst++ = -1.0f;
else
*dst++ = Temp;
-
}
}
}
pInstance->Current = Current;
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
index 1f4b08a..d45845a 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
@@ -26,12 +26,9 @@
/**********************************************************************************
FUNCTION LVC_Core_MixSoft_1St_2i_D16C31_WRA
***********************************************************************************/
-static LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a,
- LVM_FLOAT b,
- LVM_FLOAT c)
-{
+static LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a, LVM_FLOAT b, LVM_FLOAT c) {
LVM_FLOAT temp;
- temp = a + b ;
+ temp = a + b;
if (temp < -1.0f)
c = -1.0f;
else if (temp > 1.0f)
@@ -40,154 +37,112 @@
c = temp;
return c;
}
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st *ptrInstance1,
- LVMixer3_FLOAT_st *ptrInstance2,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_INT32 ii;
- Mix_Private_FLOAT_st *pInstanceL = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
- Mix_Private_FLOAT_st *pInstanceR = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
+void LVC_Core_MixSoft_1St_2i_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance1,
+ LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_INT32 ii;
+ Mix_Private_FLOAT_st* pInstanceL = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+ Mix_Private_FLOAT_st* pInstanceR = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
- LVM_FLOAT DeltaL = pInstanceL->Delta;
- LVM_FLOAT CurrentL = pInstanceL->Current;
- LVM_FLOAT TargetL = pInstanceL->Target;
+ LVM_FLOAT DeltaL = pInstanceL->Delta;
+ LVM_FLOAT CurrentL = pInstanceL->Current;
+ LVM_FLOAT TargetL = pInstanceL->Target;
- LVM_FLOAT DeltaR = pInstanceR->Delta;
- LVM_FLOAT CurrentR = pInstanceR->Current;
- LVM_FLOAT TargetR = pInstanceR->Target;
+ LVM_FLOAT DeltaR = pInstanceR->Delta;
+ LVM_FLOAT CurrentR = pInstanceR->Current;
+ LVM_FLOAT TargetR = pInstanceR->Target;
- LVM_FLOAT Temp = 0;
+ LVM_FLOAT Temp = 0;
InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
OutLoop = (LVM_INT16)(n - (InLoop << 2));
- if (OutLoop)
- {
- if(CurrentL < TargetL)
- {
+ if (OutLoop) {
+ if (CurrentL < TargetL) {
ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
CurrentL = Temp;
- if (CurrentL > TargetL)
- CurrentL = TargetL;
- }
- else
- {
+ if (CurrentL > TargetL) CurrentL = TargetL;
+ } else {
CurrentL -= DeltaL;
- if (CurrentL < TargetL)
- CurrentL = TargetL;
+ if (CurrentL < TargetL) CurrentL = TargetL;
}
- if(CurrentR < TargetR)
- {
+ if (CurrentR < TargetR) {
ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
CurrentR = Temp;
- if (CurrentR > TargetR)
- CurrentR = TargetR;
- }
- else
- {
+ if (CurrentR > TargetR) CurrentR = TargetR;
+ } else {
CurrentR -= DeltaR;
- if (CurrentR < TargetR)
- CurrentR = TargetR;
+ if (CurrentR < TargetR) CurrentR = TargetR;
}
- for (ii = OutLoop * 2; ii != 0; ii -= 2)
- {
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
+ for (ii = OutLoop * 2; ii != 0; ii -= 2) {
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
}
}
- for (ii = InLoop * 2; ii != 0; ii-=2)
- {
- if(CurrentL < TargetL)
- {
+ for (ii = InLoop * 2; ii != 0; ii -= 2) {
+ if (CurrentL < TargetL) {
ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
CurrentL = Temp;
- if (CurrentL > TargetL)
- CurrentL = TargetL;
- }
- else
- {
+ if (CurrentL > TargetL) CurrentL = TargetL;
+ } else {
CurrentL -= DeltaL;
- if (CurrentL < TargetL)
- CurrentL = TargetL;
+ if (CurrentL < TargetL) CurrentL = TargetL;
}
- if(CurrentR < TargetR)
- {
+ if (CurrentR < TargetR) {
ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
CurrentR = Temp;
- if (CurrentR > TargetR)
- CurrentR = TargetR;
- }
- else
- {
+ if (CurrentR > TargetR) CurrentR = TargetR;
+ } else {
CurrentR -= DeltaR;
- if (CurrentR < TargetR)
- CurrentR = TargetR;
+ if (CurrentR < TargetR) CurrentR = TargetR;
}
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
- *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentL));
+ *(dst++) = (LVM_FLOAT)(((LVM_FLOAT) * (src++) * (LVM_FLOAT)CurrentR));
}
pInstanceL->Current = CurrentL;
pInstanceR->Current = CurrentR;
-
}
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA (Mix_Private_FLOAT_st **ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- LVM_INT32 ii, ch;
- LVM_FLOAT Temp =0.0f;
- LVM_FLOAT tempCurrent[NrChannels];
- for (ch = 0; ch < NrChannels; ch++)
- {
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_INT32 ii, ch;
+ LVM_FLOAT Temp = 0.0f;
+ LVM_FLOAT tempCurrent[NrChannels];
+ for (ch = 0; ch < NrChannels; ch++) {
tempCurrent[ch] = ptrInstance[ch]->Current;
}
- for (ii = NrFrames; ii > 0; ii--)
- {
- for (ch = 0; ch < NrChannels; ch++)
- {
- Mix_Private_FLOAT_st *pInstance = ptrInstance[ch];
- const LVM_FLOAT Delta = pInstance->Delta;
- LVM_FLOAT Current = tempCurrent[ch];
- const LVM_FLOAT Target = pInstance->Target;
- if (Current < Target)
- {
+ for (ii = NrFrames; ii > 0; ii--) {
+ for (ch = 0; ch < NrChannels; ch++) {
+ Mix_Private_FLOAT_st* pInstance = ptrInstance[ch];
+ const LVM_FLOAT Delta = pInstance->Delta;
+ LVM_FLOAT Current = tempCurrent[ch];
+ const LVM_FLOAT Target = pInstance->Target;
+ if (Current < Target) {
ADD2_SAT_FLOAT(Current, Delta, Temp);
Current = Temp;
- if (Current > Target)
- Current = Target;
- }
- else
- {
+ if (Current > Target) Current = Target;
+ } else {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
}
*dst++ = *src++ * Current;
tempCurrent[ch] = Current;
}
}
- for (ch = 0; ch < NrChannels; ch++)
- {
+ for (ch = 0; ch < NrChannels; ch++) {
ptrInstance[ch]->Current = tempCurrent[ch];
}
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
index 5d8aadc..f8c0a9d 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
@@ -26,43 +26,37 @@
/**********************************************************************************
FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
***********************************************************************************/
-void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_INT32 ii;
- Mix_Private_FLOAT_st *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
- LVM_FLOAT Delta= (LVM_FLOAT)pInstance->Delta;
- LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
- LVM_FLOAT Target= (LVM_FLOAT)pInstance->Target;
- LVM_FLOAT Temp;
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_INT32 ii;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+ LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+ LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+ LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
+ LVM_FLOAT Temp;
InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
OutLoop = (LVM_INT16)(n - (InLoop << 2));
- if(Current<Target){
- if (OutLoop){
-
+ if (Current < Target) {
+ if (OutLoop) {
Temp = Current + Delta;
if (Temp > 1.0f)
Temp = 1.0f;
else if (Temp < -1.0f)
Temp = -1.0f;
- Current=Temp;
- if (Current > Target)
- Current = Target;
+ Current = Temp;
+ if (Current > Target) Current = Target;
- for (ii = OutLoop; ii != 0; ii--){
- *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+ for (ii = OutLoop; ii != 0; ii--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
}
}
- for (ii = InLoop; ii != 0; ii--){
-
+ for (ii = InLoop; ii != 0; ii--) {
Temp = Current + Delta;
if (Temp > 1.0f)
@@ -70,42 +64,37 @@
else if (Temp < -1.0f)
Temp = -1.0f;
- Current=Temp;
- if (Current > Target)
- Current = Target;
+ Current = Temp;
+ if (Current > Target) Current = Target;
- *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
- *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
- *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
- *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
}
- }
- else{
- if (OutLoop){
+ } else {
+ if (OutLoop) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (ii = OutLoop; ii != 0; ii--){
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+ for (ii = OutLoop; ii != 0; ii--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
}
}
- for (ii = InLoop; ii != 0; ii--){
+ for (ii = InLoop; ii != 0; ii--) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
}
}
- pInstance->Current=Current;
+ pInstance->Current = Current;
}
-#ifdef SUPPORT_MC
/*
* FUNCTION: LVC_Core_MixSoft_Mc_D16C31_WRA
*
@@ -123,20 +112,16 @@
* void
*
*/
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- LVM_INT16 OutLoop;
- LVM_INT16 InLoop;
- LVM_INT32 ii, jj;
- Mix_Private_FLOAT_st *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
- LVM_FLOAT Delta= (LVM_FLOAT)pInstance->Delta;
- LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
- LVM_FLOAT Target= (LVM_FLOAT)pInstance->Target;
- LVM_FLOAT Temp;
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_INT16 OutLoop;
+ LVM_INT16 InLoop;
+ LVM_INT32 ii, jj;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+ LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+ LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+ LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
+ LVM_FLOAT Temp;
/*
* Same operation is performed on consecutive frames.
@@ -147,28 +132,25 @@
/* OutLoop is calculated to handle cases where NrFrames value can be odd.*/
OutLoop = (LVM_INT16)(NrFrames - (InLoop << 1));
- if (Current<Target) {
+ if (Current < Target) {
if (OutLoop) {
-
Temp = Current + Delta;
if (Temp > 1.0f)
Temp = 1.0f;
else if (Temp < -1.0f)
Temp = -1.0f;
- Current=Temp;
- if (Current > Target)
- Current = Target;
+ Current = Temp;
+ if (Current > Target) Current = Target;
for (ii = OutLoop; ii != 0; ii--) {
- for (jj = NrChannels; jj !=0; jj--) {
- *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+ for (jj = NrChannels; jj != 0; jj--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
}
}
}
for (ii = InLoop; ii != 0; ii--) {
-
Temp = Current + Delta;
if (Temp > 1.0f)
@@ -176,44 +158,37 @@
else if (Temp < -1.0f)
Temp = -1.0f;
- Current=Temp;
- if (Current > Target)
- Current = Target;
+ Current = Temp;
+ if (Current > Target) Current = Target;
- for (jj = NrChannels; jj != 0 ; jj--)
- {
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+ for (jj = NrChannels; jj != 0; jj--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
}
}
- }
- else{
+ } else {
if (OutLoop) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
for (ii = OutLoop; ii != 0; ii--) {
- for (jj = NrChannels; jj !=0; jj--) {
- *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+ for (jj = NrChannels; jj != 0; jj--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * (LVM_FLOAT)Current));
}
}
}
for (ii = InLoop; ii != 0; ii--) {
Current -= Delta;
- if (Current < Target)
- Current = Target;
+ if (Current < Target) Current = Target;
- for (jj = NrChannels; jj != 0 ; jj--)
- {
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
- *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+ for (jj = NrChannels; jj != 0; jj--) {
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
+ *(dst++) = (((LVM_FLOAT) * (src++) * Current));
}
}
}
- pInstance->Current=Current;
+ pInstance->Current = Current;
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
index 2bec3be..270c7e0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
@@ -27,39 +27,35 @@
DEFINITIONS
***********************************************************************************/
-#define TRUE 1
-#define FALSE 0
+#define TRUE 1
+#define FALSE 0
/**********************************************************************************
FUNCTION MIXINSOFT_D16C31_SAT
***********************************************************************************/
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
- if(pInstance->Delta == 1.0f){
+ if (pInstance->Current != pInstance->Target) {
+ if (pInstance->Delta == 1.0f) {
pInstance->Current = pInstance->Target;
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+ } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else{
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
LVC_Core_MixInSoft_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,111 +66,12 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing){
- if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
- if ((pInstance->Target) == 1.0f){
- Add2_Sat_Float(src, dst, n);
- }
- else{
- Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
- /* In case the LVCore function would have changed the Current value */
- pInstance->Current = pInstance->Target;
- }
- }
- }
-
- /******************************************************************************
- CALL BACK
- *******************************************************************************/
-
- if (ptrInstance->MixerStream[0].CallbackSet){
- if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
- pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
- Make them equal. */
- TargetGain = pInstance->Target;
- LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
- ptrInstance->MixerStream[0].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[0].pCallBack != 0){
- (*ptrInstance->MixerStream[0].pCallBack) ( \
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam );
- }
- }
- }
-
-}
-
-#ifdef SUPPORT_MC
-/*
- * FUNCTION: LVC_MixInSoft_Mc_D16C31_SAT
- *
- * DESCRIPTION:
- * Mixer function with support for processing multichannel input
- *
- * PARAMETERS:
- * ptrInstance Instance pointer
- * src Source
- * dst Destination
- * NrFrames Number of frames
- * NrChannels Number of channels
- *
- * RETURNS:
- * void
- *
- */
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-
- if (NrFrames <= 0) return;
-
- /******************************************************************************
- SOFT MIXING
- *******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
- if (pInstance->Delta == 1.0f) {
- pInstance->Current = pInstance->Target;
- TargetGain = pInstance->Target;
- LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
- pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
- Make them equal. */
- TargetGain = pInstance->Target;
- LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else{
- /* Soft mixing has to be applied */
- HardMixing = FALSE;
- LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]),
- src,
- dst,
- NrFrames,
- NrChannels);
- }
- }
-
- /******************************************************************************
- HARD MIXING
- *******************************************************************************/
-
if (HardMixing) {
if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
if ((pInstance->Target) == 1.0f) {
- Add2_Sat_Float(src, dst, NrFrames*NrChannels);
- }
- else{
- Mac3s_Sat_Float(src,
- (pInstance->Target),
- dst,
- NrFrames * NrChannels);
+ Add2_Sat_Float(src, dst, n);
+ } else {
+ Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
/* In case the LVCore function would have changed the Current value */
pInstance->Current = pInstance->Target;
}
@@ -193,15 +90,97 @@
LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
ptrInstance->MixerStream[0].CallbackSet = FALSE;
if (ptrInstance->MixerStream[0].pCallBack != 0) {
- (*ptrInstance->MixerStream[0].pCallBack) (\
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam);
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
+ }
+ }
+ }
+}
+
+/*
+ * FUNCTION: LVC_MixInSoft_Mc_D16C31_SAT
+ *
+ * DESCRIPTION:
+ * Mixer function with support for processing multichannel input
+ *
+ * PARAMETERS:
+ * ptrInstance Instance pointer
+ * src Source
+ * dst Destination
+ * NrFrames Number of frames
+ * NrChannels Number of channels
+ *
+ * RETURNS:
+ * void
+ *
+ */
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+
+ if (NrFrames <= 0) return;
+
+ /******************************************************************************
+ SOFT MIXING
+ *******************************************************************************/
+ if (pInstance->Current != pInstance->Target) {
+ if (pInstance->Delta == 1.0f) {
+ pInstance->Current = pInstance->Target;
+ TargetGain = pInstance->Target;
+ LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+ } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+ pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+ Make them equal. */
+ TargetGain = pInstance->Target;
+ LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+ } else {
+ /* Soft mixing has to be applied */
+ HardMixing = FALSE;
+ LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
+ NrChannels);
+ }
+ }
+
+ /******************************************************************************
+ HARD MIXING
+ *******************************************************************************/
+
+ if (HardMixing) {
+ if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
+ if ((pInstance->Target) == 1.0f) {
+ Add2_Sat_Float(src, dst, NrFrames * NrChannels);
+ } else {
+ Mac3s_Sat_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
+ /* In case the LVCore function would have changed the Current value */
+ pInstance->Current = pInstance->Target;
}
}
}
+ /******************************************************************************
+ CALL BACK
+ *******************************************************************************/
+
+ if (ptrInstance->MixerStream[0].CallbackSet) {
+ if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+ pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+ Make them equal. */
+ TargetGain = pInstance->Target;
+ LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
+ ptrInstance->MixerStream[0].CallbackSet = FALSE;
+ if (ptrInstance->MixerStream[0].pCallBack != 0) {
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
+ }
+ }
+ }
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
index 3153ada..c74c8c6 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
@@ -29,144 +29,117 @@
DEFINITIONS
***********************************************************************************/
-#define TRUE 1
-#define FALSE 0
+#define TRUE 1
+#define FALSE 0
#define ARRAY_SIZE(a) ((sizeof(a)) / (sizeof(*(a))))
/**********************************************************************************
FUNCTION LVC_MixSoft_1St_2i_D16C31_SAT
***********************************************************************************/
-#ifdef SUPPORT_MC
/* This threshold is used to decide on the processing to be applied on
* front center and back center channels
*/
#define LVM_VOL_BAL_THR (0.000016f)
-void LVC_MixSoft_1St_MC_float_SAT (LVMixer3_2St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT32 NrChannels,
- LVM_INT32 ChMask)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
- Mix_Private_FLOAT_st Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
- Mix_Private_FLOAT_st *pInstance1 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
- Mix_Private_FLOAT_st *pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
- Mix_Private_FLOAT_st *pInstance[NrChannels];
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT32 NrChannels,
+ LVM_INT32 ChMask) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+ Mix_Private_FLOAT_st Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+ Mix_Private_FLOAT_st* pInstance1 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
+ Mix_Private_FLOAT_st* pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
+ Mix_Private_FLOAT_st* pInstance[NrChannels];
- if (audio_channel_mask_get_representation(ChMask)
- == AUDIO_CHANNEL_REPRESENTATION_INDEX)
- {
- for (int i = 0; i < 2; i++)
- {
+ if (audio_channel_mask_get_representation(ChMask) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+ for (int i = 0; i < 2; i++) {
pInstance[i] = pMixPrivInst[i];
}
- for (int i = 2; i < NrChannels; i++)
- {
+ for (int i = 2; i < NrChannels; i++) {
pInstance[i] = pMixPrivInst[2];
}
- }
- else
- {
+ } else {
// TODO: Combine with system/media/audio_utils/Balance.cpp
// Constants in system/media/audio/include/system/audio-base.h
// 'mixInstIdx' is used to map the appropriate mixer instance for each channel.
const int mixInstIdx[] = {
- 0, // AUDIO_CHANNEL_OUT_FRONT_LEFT = 0x1u,
- 1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT = 0x2u,
- 2, // AUDIO_CHANNEL_OUT_FRONT_CENTER = 0x4u,
- 3, // AUDIO_CHANNEL_OUT_LOW_FREQUENCY = 0x8u,
- 0, // AUDIO_CHANNEL_OUT_BACK_LEFT = 0x10u,
- 1, // AUDIO_CHANNEL_OUT_BACK_RIGHT = 0x20u,
- 0, // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x40u,
- 1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
- 2, // AUDIO_CHANNEL_OUT_BACK_CENTER = 0x100u,
- 0, // AUDIO_CHANNEL_OUT_SIDE_LEFT = 0x200u,
- 1, // AUDIO_CHANNEL_OUT_SIDE_RIGHT = 0x400u,
- 2, // AUDIO_CHANNEL_OUT_TOP_CENTER = 0x800u,
- 0, // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT = 0x1000u,
- 2, // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER = 0x2000u,
- 1, // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT = 0x4000u,
- 0, // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT = 0x8000u,
- 2, // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER = 0x10000u,
- 1, // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT = 0x20000u,
- 0, // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT = 0x40000u,
- 1, // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT = 0x80000u
+ 0, // AUDIO_CHANNEL_OUT_FRONT_LEFT = 0x1u,
+ 1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT = 0x2u,
+ 2, // AUDIO_CHANNEL_OUT_FRONT_CENTER = 0x4u,
+ 3, // AUDIO_CHANNEL_OUT_LOW_FREQUENCY = 0x8u,
+ 0, // AUDIO_CHANNEL_OUT_BACK_LEFT = 0x10u,
+ 1, // AUDIO_CHANNEL_OUT_BACK_RIGHT = 0x20u,
+ 0, // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x40u,
+ 1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
+ 2, // AUDIO_CHANNEL_OUT_BACK_CENTER = 0x100u,
+ 0, // AUDIO_CHANNEL_OUT_SIDE_LEFT = 0x200u,
+ 1, // AUDIO_CHANNEL_OUT_SIDE_RIGHT = 0x400u,
+ 2, // AUDIO_CHANNEL_OUT_TOP_CENTER = 0x800u,
+ 0, // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT = 0x1000u,
+ 2, // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER = 0x2000u,
+ 1, // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT = 0x4000u,
+ 0, // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT = 0x8000u,
+ 2, // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER = 0x10000u,
+ 1, // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT = 0x20000u,
+ 0, // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT = 0x40000u,
+ 1, // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT = 0x80000u
};
- if (pInstance1->Target <= LVM_VOL_BAL_THR ||
- pInstance2->Target <= LVM_VOL_BAL_THR)
- {
- Target_ctr.Target = 0.0f;
+ if (pInstance1->Target <= LVM_VOL_BAL_THR || pInstance2->Target <= LVM_VOL_BAL_THR) {
+ Target_ctr.Target = 0.0f;
Target_ctr.Current = 0.0f;
- Target_ctr.Delta = 0.0f;
+ Target_ctr.Delta = 0.0f;
}
const unsigned int idxArrSize = ARRAY_SIZE(mixInstIdx);
- for (unsigned int i = 0, channel = ChMask; channel !=0 ; ++i)
- {
+ for (unsigned int i = 0, channel = ChMask; channel != 0; ++i) {
const unsigned int idx = __builtin_ctz(channel);
- if (idx < idxArrSize)
- {
+ if (idx < idxArrSize) {
pInstance[i] = pMixPrivInst[mixInstIdx[idx]];
- }
- else
- {
+ } else {
pInstance[i] = pMixPrivInst[2];
}
channel &= ~(1 << idx);
}
}
- if (NrFrames <= 0) return;
+ if (NrFrames <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
if ((pInstance1->Current != pInstance1->Target) ||
- (pInstance2->Current != pInstance2->Target))
- {
+ (pInstance2->Current != pInstance2->Target)) {
// TODO: combine similar checks below.
- if (pInstance1->Delta == LVM_MAXFLOAT
- || Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
- {
+ if (pInstance1->Delta == LVM_MAXFLOAT ||
+ Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
/* Difference is not significant anymore. Make them equal. */
pInstance1->Current = pInstance1->Target;
TargetGain = pInstance1->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }
- else
- {
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
}
- if (HardMixing == TRUE)
- {
- if (pInstance2->Delta == LVM_MAXFLOAT
- || Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
- {
+ if (HardMixing == TRUE) {
+ if (pInstance2->Delta == LVM_MAXFLOAT ||
+ Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
/* Difference is not significant anymore. Make them equal. */
pInstance2->Current = pInstance2->Target;
TargetGain = pInstance2->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
- }
- else
- {
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
}
}
- if (HardMixing == FALSE)
- {
- LVC_Core_MixSoft_1St_MC_float_WRA (&pInstance[0],
- src, dst, NrFrames, NrChannels);
+ if (HardMixing == FALSE) {
+ LVC_Core_MixSoft_1St_MC_float_WRA(&pInstance[0], src, dst, NrFrames, NrChannels);
}
}
@@ -174,19 +147,13 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing == TRUE)
- {
- if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT))
- {
- if (src != dst)
- {
- Copy_Float(src, dst, NrFrames*NrChannels);
+ if (HardMixing == TRUE) {
+ if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT)) {
+ if (src != dst) {
+ Copy_Float(src, dst, NrFrames * NrChannels);
}
- }
- else
- {
- LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]),
- src, dst, NrFrames, NrChannels);
+ } else {
+ LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]), src, dst, NrFrames, NrChannels);
}
}
@@ -194,109 +161,86 @@
CALL BACK
*******************************************************************************/
- if (ptrInstance->MixerStream[0].CallbackSet)
- {
- if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
- {
+ if (ptrInstance->MixerStream[0].CallbackSet) {
+ if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance1->Target;
LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
ptrInstance->MixerStream[0].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[0].pCallBack != 0)
- {
- (*ptrInstance->MixerStream[0].pCallBack) (\
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam);
+ if (ptrInstance->MixerStream[0].pCallBack != 0) {
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
}
}
}
- if (ptrInstance->MixerStream[1].CallbackSet)
- {
- if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
- {
+ if (ptrInstance->MixerStream[1].CallbackSet) {
+ if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
Make them equal. */
TargetGain = pInstance2->Target;
LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
ptrInstance->MixerStream[1].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[1].pCallBack != 0)
- {
- (*ptrInstance->MixerStream[1].pCallBack) (\
- ptrInstance->MixerStream[1].pCallbackHandle,
- ptrInstance->MixerStream[1].pGeneralPurpose,
- ptrInstance->MixerStream[1].CallbackParam);
+ if (ptrInstance->MixerStream[1].pCallBack != 0) {
+ (*ptrInstance->MixerStream[1].pCallBack)(
+ ptrInstance->MixerStream[1].pCallbackHandle,
+ ptrInstance->MixerStream[1].pGeneralPurpose,
+ ptrInstance->MixerStream[1].CallbackParam);
}
}
}
}
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT( LVMixer3_2St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance1 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance1 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if ((pInstance1->Current != pInstance1->Target) || (pInstance2->Current != pInstance2->Target))
- {
- if(pInstance1->Delta == 1.0f)
- {
+ if ((pInstance1->Current != pInstance1->Target) ||
+ (pInstance2->Current != pInstance2->Target)) {
+ if (pInstance1->Delta == 1.0f) {
pInstance1->Current = pInstance1->Target;
TargetGain = pInstance1->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }
- else if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
- {
+ } else if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance1->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }
- else
- {
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
}
- if(HardMixing == TRUE)
- {
- if(pInstance2->Delta == 1.0f)
- {
+ if (HardMixing == TRUE) {
+ if (pInstance2->Delta == 1.0f) {
pInstance2->Current = pInstance2->Target;
TargetGain = pInstance2->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
- }
- else if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
- {
- pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore. \
- Make them equal. */
+ } else if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
+                pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
+                                                             Make them equal. */
TargetGain = pInstance2->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
- }
- else
- {
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
}
}
- if(HardMixing == FALSE)
- {
- LVC_Core_MixSoft_1St_2i_D16C31_WRA( &(ptrInstance->MixerStream[0]),
- &(ptrInstance->MixerStream[1]),
- src, dst, n);
+ if (HardMixing == FALSE) {
+ LVC_Core_MixSoft_1St_2i_D16C31_WRA(&(ptrInstance->MixerStream[0]),
+ &(ptrInstance->MixerStream[1]), src, dst, n);
}
}
@@ -304,20 +248,14 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing)
- {
- if ((pInstance1->Target == 1.0f) && (pInstance2->Target == 1.0f))
- {
- if(src != dst)
- {
+ if (HardMixing) {
+ if ((pInstance1->Target == 1.0f) && (pInstance2->Target == 1.0f)) {
+ if (src != dst) {
Copy_Float(src, dst, n);
}
- }
- else
- {
+ } else {
LVC_Core_MixHard_1St_2i_D16C31_SAT(&(ptrInstance->MixerStream[0]),
- &(ptrInstance->MixerStream[1]),
- src, dst, n);
+ &(ptrInstance->MixerStream[1]), src, dst, n);
}
}
@@ -325,39 +263,33 @@
CALL BACK
*******************************************************************************/
- if (ptrInstance->MixerStream[0].CallbackSet)
- {
- if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
- {
+ if (ptrInstance->MixerStream[0].CallbackSet) {
+ if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance1->Target;
LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
ptrInstance->MixerStream[0].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[0].pCallBack != 0)
- {
- (*ptrInstance->MixerStream[0].pCallBack) ( \
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam );
+ if (ptrInstance->MixerStream[0].pCallBack != 0) {
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
}
}
}
- if (ptrInstance->MixerStream[1].CallbackSet)
- {
- if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
- {
+ if (ptrInstance->MixerStream[1].CallbackSet) {
+ if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
Make them equal. */
TargetGain = pInstance2->Target;
LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
ptrInstance->MixerStream[1].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[1].pCallBack != 0)
- {
- (*ptrInstance->MixerStream[1].pCallBack) (
- ptrInstance->MixerStream[1].pCallbackHandle,
- ptrInstance->MixerStream[1].pGeneralPurpose,
- ptrInstance->MixerStream[1].CallbackParam );
+ if (ptrInstance->MixerStream[1].pCallBack != 0) {
+ (*ptrInstance->MixerStream[1].pCallBack)(
+ ptrInstance->MixerStream[1].pCallbackHandle,
+ ptrInstance->MixerStream[1].pGeneralPurpose,
+ ptrInstance->MixerStream[1].CallbackParam);
}
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index 4d229da..be19fa0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -27,39 +27,35 @@
DEFINITIONS
***********************************************************************************/
-#define TRUE 1
-#define FALSE 0
+#define TRUE 1
+#define FALSE 0
/**********************************************************************************
FUNCTION LVMixer3_MIXSOFT_1ST_D16C31_SAT
***********************************************************************************/
-void LVC_MixSoft_1St_D16C31_SAT( LVMixer3_1St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
- if(pInstance->Delta == 1.0f){
+ if (pInstance->Current != pInstance->Target) {
+ if (pInstance->Delta == 1.0f) {
pInstance->Current = pInstance->Target;
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+ } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else{
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
LVC_Core_MixSoft_1St_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,39 +66,37 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing){
+ if (HardMixing) {
if (pInstance->Target == 0)
LoadConst_Float(0.0, dst, n);
else {
if ((pInstance->Target) != 1.0f)
Mult3s_Float(src, (pInstance->Target), dst, n);
- else if(src != dst)
+ else if (src != dst)
Copy_Float(src, dst, n);
}
-
}
/******************************************************************************
CALL BACK
*******************************************************************************/
- if (ptrInstance->MixerStream[0].CallbackSet){
- if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+ if (ptrInstance->MixerStream[0].CallbackSet) {
+ if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
ptrInstance->MixerStream[0].CallbackSet = FALSE;
- if (ptrInstance->MixerStream[0].pCallBack != 0){
- (*ptrInstance->MixerStream[0].pCallBack) ( \
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam );
+ if (ptrInstance->MixerStream[0].pCallBack != 0) {
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
}
}
}
}
-#ifdef SUPPORT_MC
/*
* FUNCTION: LVC_MixSoft_Mc_D16C31_SAT
*
@@ -120,40 +114,32 @@
* void
*
*/
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- char HardMixing = TRUE;
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ char HardMixing = TRUE;
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
- if (NrFrames <= 0) return;
+ if (NrFrames <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
+ if (pInstance->Current != pInstance->Target) {
if (pInstance->Delta == 1.0f) {
pInstance->Current = pInstance->Target;
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+ } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
TargetGain = pInstance->Target;
LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
- }else{
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
- LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]),
- src,
- dst,
- NrFrames,
+ LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
NrChannels);
}
}
@@ -171,7 +157,6 @@
else if (src != dst)
Copy_Float(src, dst, NrFrames * NrChannels);
}
-
}
/******************************************************************************
@@ -186,15 +171,13 @@
LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
ptrInstance->MixerStream[0].CallbackSet = FALSE;
if (ptrInstance->MixerStream[0].pCallBack != 0) {
- (*ptrInstance->MixerStream[0].pCallBack) (\
- ptrInstance->MixerStream[0].pCallbackHandle,
- ptrInstance->MixerStream[0].pGeneralPurpose,
- ptrInstance->MixerStream[0].CallbackParam);
+ (*ptrInstance->MixerStream[0].pCallBack)(
+ ptrInstance->MixerStream[0].pCallbackHandle,
+ ptrInstance->MixerStream[0].pGeneralPurpose,
+ ptrInstance->MixerStream[0].CallbackParam);
}
}
}
}
-#endif
-
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
index 54ab79d..882a8ce 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
@@ -25,49 +25,39 @@
/**********************************************************************************
FUNCTION LVC_MixSoft_2St_D16C31_SAT.c
***********************************************************************************/
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- Mix_Private_FLOAT_st *pInstance1 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+ Mix_Private_FLOAT_st* pInstance1 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)){
- LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
- src2, dst, n);
- }
- else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)){
- LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
- src1, dst, n);
- }
- else if ((pInstance1->Current != pInstance1->Target) || \
- (pInstance2->Current != pInstance2->Target))
- {
- LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
- src1, dst, n);
- LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
- src2, dst, n);
- }
- else{
+ if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
+ LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+ dst, n);
+ } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+ LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+ dst, n);
+ } else if ((pInstance1->Current != pInstance1->Target) ||
+ (pInstance2->Current != pInstance2->Target)) {
+ LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+ dst, n);
+ LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+ n);
+ } else {
/******************************************************************************
HARD MIXING
*******************************************************************************/
- LVC_Core_MixHard_2St_D16C31_SAT( &ptrInstance->MixerStream[0],
- &ptrInstance->MixerStream[1],
- src1, src2, dst, n);
+ LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
+ src1, src2, dst, n);
}
}
-#ifdef SUPPORT_MC
/*
* FUNCTION: LVC_MixSoft_2Mc_D16C31_SAT
*
@@ -86,48 +76,38 @@
* void
*
*/
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
-{
- Mix_Private_FLOAT_st *pInstance1 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
- Mix_Private_FLOAT_st *pInstance2 = \
- (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+ LVM_INT16 NrChannels) {
+ Mix_Private_FLOAT_st* pInstance1 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+ Mix_Private_FLOAT_st* pInstance2 =
+ (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
- if (NrFrames <= 0) return;
+ if (NrFrames <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
- LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
- src2, dst, NrFrames, NrChannels);
- }
- else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
- LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
- src1, dst, NrFrames, NrChannels);
- }
- else if ((pInstance1->Current != pInstance1->Target) || \
- (pInstance2->Current != pInstance2->Target))
- {
- LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
- src1, dst, NrFrames, NrChannels);
- LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
- src2, dst, NrFrames, NrChannels);
- }
- else{
+ LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+ NrFrames, NrChannels);
+ } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+ LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+ NrFrames, NrChannels);
+ } else if ((pInstance1->Current != pInstance1->Target) ||
+ (pInstance2->Current != pInstance2->Target)) {
+ LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+ NrFrames, NrChannels);
+ LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+ dst, NrFrames, NrChannels);
+ } else {
/******************************************************************************
HARD MIXING
*******************************************************************************/
- LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0],
- &ptrInstance->MixerStream[1],
+ LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
src1, src2, dst, NrFrames * NrChannels);
}
}
-#endif
/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
index ce42d2e..55255a6 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
@@ -25,34 +25,31 @@
***********************************************************************************/
/* LVMixer3_st structure stores Instance parameters for one audio stream */
-typedef struct
-{
- LVM_FLOAT PrivateParams[3]; /* Private Instance params for \
- Audio Stream shift parameter */
- LVM_INT16 CallbackSet; /* Boolean. Should be set by calling application \
- each time the target value is updated */
- LVM_INT16 CallbackParam; /* Parameter that will be used in the calback function */
- void *pCallbackHandle; /* Pointer to the instance of the callback function */
- void *pGeneralPurpose; /* Pointer for general purpose usage */
- LVM_Callback pCallBack; /* Pointer to the callback function */
+typedef struct {
+ LVM_FLOAT PrivateParams[3]; /* Private Instance params for \
+ Audio Stream shift parameter */
+ LVM_INT16 CallbackSet; /* Boolean. Should be set by calling application \
+ each time the target value is updated */
+ LVM_INT16 CallbackParam; /* Parameter that will be used in the callback function */
+ void* pCallbackHandle; /* Pointer to the instance of the callback function */
+ void* pGeneralPurpose; /* Pointer for general purpose usage */
+ LVM_Callback pCallBack; /* Pointer to the callback function */
} LVMixer3_FLOAT_st;
-typedef struct
-{
- LVMixer3_FLOAT_st MixerStream[1]; /* Instance Params for one Audio Stream */
+typedef struct {
+ LVMixer3_FLOAT_st MixerStream[1]; /* Instance Params for one Audio Stream */
} LVMixer3_1St_FLOAT_st;
-typedef struct
-{
- LVMixer3_FLOAT_st MixerStream[2]; /* Instance Params for two Audio Streams */
+typedef struct {
+ LVMixer3_FLOAT_st MixerStream[2]; /* Instance Params for two Audio Streams */
} LVMixer3_2St_FLOAT_st;
/**********************************************************************************
FUNCTION PROTOTYPES (HIGH LEVEL FUNCTIONS)
***********************************************************************************/
/* Function names should be unique within first 16 characters */
-#define LVMixer3_MixSoft_1St_D16C31_SAT LVMixer3_1St_D16C31_SAT_MixSoft
-#define LVMixer3_MixInSoft_D16C31_SAT LVMixer3_D16C31_SAT_MixInSoft
-#define LVMixer3_MixSoft_2St_D16C31_SAT LVMixer3_2St_D16C31_SAT_MixSoft
-#define LVMixer3_MixSoft_3St_D16C31_SAT LVMixer3_3St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_1St_D16C31_SAT LVMixer3_1St_D16C31_SAT_MixSoft
+#define LVMixer3_MixInSoft_D16C31_SAT LVMixer3_D16C31_SAT_MixInSoft
+#define LVMixer3_MixSoft_2St_D16C31_SAT LVMixer3_2St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_3St_D16C31_SAT LVMixer3_3St_D16C31_SAT_MixSoft
/*** General functions ************************************************************/
@@ -62,85 +59,52 @@
/* then the calculation will give an incorrect value for alpha, see the mixer */
/* documentation for further details. */
/* ********************************************************************************/
-void LVC_Mixer_SetTarget( LVMixer3_FLOAT_st *pStream,
- LVM_FLOAT TargetGain);
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream);
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain);
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream);
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream);
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream);
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
- LVM_FLOAT TargetGain,
- LVM_FLOAT CurrentGain);
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain);
-void LVC_Mixer_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
- LVM_INT32 Tc_millisec,
- LVM_Fs_en Fs,
- LVM_INT16 NumChannels);
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+ LVM_INT16 NumChannels);
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
- LVM_INT32 Tc_millisec,
- LVM_Fs_en Fs,
- LVM_INT16 NumChannels);
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+ LVM_Fs_en Fs, LVM_INT16 NumChannels);
/*** 16 bit functions *************************************************************/
-void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst, /* dst cannot be equal to src2 */
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst, /* dst cannot be equal to src2 */
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2,
+ LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+ LVM_INT16 n);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2,
+ LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+ LVM_INT16 NrFrames, LVM_INT16 NrChannels);
/**********************************************************************************/
/* For applying different gains to Left and right chennals */
/* MixerStream[0] applies to Left channel */
/* MixerStream[1] applies to Right channel */
/* Gain values should not be more that 1.0 */
/**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst, /* dst can be equal to src */
- LVM_INT16 NrFrames,
- LVM_INT32 NrChannels,
- LVM_INT32 ChMask);
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst, /* dst can be equal to src */
- LVM_INT16 n); /* Number of stereo samples */
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, /* dst can be equal to src */
+ LVM_INT16 NrFrames, LVM_INT32 NrChannels, LVM_INT32 ChMask);
+void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, /* dst can be equal to src */
+ LVM_INT16 n); /* Number of stereo samples */
/**********************************************************************************/
-#endif //#ifndef __LVC_MIXER_H__
-
+#endif //#ifndef __LVC_MIXER_H__
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
index d0b50e6..03de8b0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
@@ -30,10 +30,9 @@
/* CurrentGain - CurrentGain value in Q 16.15 format */
/* */
/************************************************************************/
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream)
-{
- LVM_FLOAT CurrentGain;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream) {
+ LVM_FLOAT CurrentGain;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
CurrentGain = pInstance->Current; // CurrentGain
return CurrentGain;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
index 3ae5ba4..21ebac1f 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
@@ -30,10 +30,9 @@
/* TargetGain - TargetGain value in Q 16.15 format */
/* */
/************************************************************************/
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream)
-{
- LVM_FLOAT TargetGain;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream) {
+ LVM_FLOAT TargetGain;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
TargetGain = pInstance->Target; // TargetGain
return TargetGain;
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
index c9fd344..e37f635 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
@@ -44,14 +44,10 @@
/* void */
/* */
/************************************************************************/
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
- LVM_FLOAT TargetGain,
- LVM_FLOAT CurrentGain)
-{
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain) {
LVM_FLOAT MaxGain = TargetGain;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
- if(CurrentGain > MaxGain)
- MaxGain = CurrentGain;
- pInstance->Target = TargetGain; // Update fractional gain Target
- pInstance->Current = CurrentGain; // Update fractional gain Current
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+ if (CurrentGain > MaxGain) MaxGain = CurrentGain;
+ pInstance->Target = TargetGain; // Update fractional gain Target
+ pInstance->Current = CurrentGain; // Update fractional gain Current
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
index 123d22b..5f22d77 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
@@ -26,54 +26,36 @@
#include "VectorArithmetic.h"
/* Instance parameter structure */
-typedef struct
-{
+typedef struct {
/* General */
- LVM_FLOAT Target; /*number specifying value of Target Gain */
- LVM_FLOAT Current; /*number specifying value of Current Gain */
- LVM_FLOAT Delta; /*number specifying value of Delta Gain */
+ LVM_FLOAT Target; /*number specifying value of Target Gain */
+ LVM_FLOAT Current; /*number specifying value of Current Gain */
+ LVM_FLOAT Delta; /*number specifying value of Delta Gain */
} Mix_Private_FLOAT_st;
/**********************************************************************************
DEFINITIONS
***********************************************************************************/
-#define LVCore_MixInSoft_D32C31_SAT LVCore_InSoft_D32C31_SAT
-#define LVCore_MixSoft_1St_D32C31_WRA LVCore_Soft_1St_D32C31_WRA
-#define LVCore_MixHard_2St_D32C31_SAT LVCore_Hard_2St_D32C31_SAT
+#define LVCore_MixInSoft_D32C31_SAT LVCore_InSoft_D32C31_SAT
+#define LVCore_MixSoft_1St_D32C31_WRA LVCore_Soft_1St_D32C31_WRA
+#define LVCore_MixHard_2St_D32C31_SAT LVCore_Hard_2St_D32C31_SAT
/**********************************************************************************
FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
***********************************************************************************/
/*** 16 bit functions *************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_D16C31_WRA( LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *pInstance1,
- LVMixer3_FLOAT_st *pInstance2,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* pInstance1, LVMixer3_FLOAT_st* pInstance2,
+ const LVM_FLOAT* src1, const LVM_FLOAT* src2, LVM_FLOAT* dst,
+ LVM_INT16 n);
/**********************************************************************************/
/* For applying different gains to Left and right chennals */
@@ -81,18 +63,11 @@
/* ptrInstance2 applies to Right channel */
/* Gain values should not be more that 1.0 */
/**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st **ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st *ptrInstance1,
- LVMixer3_FLOAT_st *ptrInstance2,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixSoft_1St_2i_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance1,
+ LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
/**********************************************************************************/
/* For applying different gains to Left and right chennals */
@@ -100,21 +75,13 @@
/* ptrInstance2 applies to Right channel */
/* Gain values should not be more that 1.0 */
/**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st **ptrInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels);
-#endif
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance1,
- LVMixer3_FLOAT_st *ptrInstance2,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n);
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixHard_1St_2i_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+ LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src,
+ LVM_FLOAT* dst, LVM_INT16 n);
/*** 32 bit functions *************************************************************/
/**********************************************************************************/
-#endif //#ifndef __LVC_MIXER_PRIVATE_H__
-
+#endif //#ifndef __LVC_MIXER_PRIVATE_H__
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
index 47b0cec..d8015c4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
@@ -43,9 +43,7 @@
/* void */
/* */
/************************************************************************/
-void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st *pStream,
- LVM_FLOAT TargetGain)
-{
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
- pInstance->Target = TargetGain; // Update gain Target
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain) {
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+ pInstance->Target = TargetGain; // Update gain Target
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
index 1a8da7a..715b908 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
@@ -44,36 +44,33 @@
/* RETURNS: */
/* void */
/************************************************************************/
-void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st *pStream,
- LVM_INT32 Tc_millisec,
- LVM_Fs_en Fs,
- LVM_INT16 NumChannels)
-{
- LVM_FLOAT DeltaTable[13] = {0.500000f,/*8000*/
- 0.362812f,/*11025*/
- 0.333333f,/*12000*/
- 0.250000f,/*16000*/
- 0.181406f,/*22050*/
- 0.166666f,/*24000*/
- 0.125000f,/*32000*/
- 0.090703f,/*44100*/
- 0.083333f,/*48000*/
- 0.045352f,/*88200*/
- 0.041667f,/*96000*/
- 0.022676f,/*176400*/
- 0.020833f};/*192000*/
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+ LVM_INT16 NumChannels) {
+ LVM_FLOAT DeltaTable[13] = {0.500000f, /*8000*/
+ 0.362812f, /*11025*/
+ 0.333333f, /*12000*/
+ 0.250000f, /*16000*/
+ 0.181406f, /*22050*/
+ 0.166666f, /*24000*/
+ 0.125000f, /*32000*/
+ 0.090703f, /*44100*/
+ 0.083333f, /*48000*/
+ 0.045352f, /*88200*/
+ 0.041667f, /*96000*/
+ 0.022676f, /*176400*/
+ 0.020833f}; /*192000*/
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
LVM_FLOAT Delta = DeltaTable[Fs];
Delta = Delta / (NumChannels);
- if(Tc_millisec == 0)
+ if (Tc_millisec == 0)
Delta = 1.000000f;
else
Delta = Delta / Tc_millisec;
- if(Delta == 0)
- Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
- assign minimum value to Delta */
+ if (Delta == 0)
+ Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
+ assign minimum value to Delta */
pInstance->Delta = Delta; // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
index f335a1e..cf84613 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
@@ -44,57 +44,50 @@
/* RETURNS: */
/* void */
/************************************************************************/
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
- LVM_INT32 Tc_millisec,
- LVM_Fs_en Fs,
- LVM_INT16 NumChannels)
-{
- LVM_FLOAT DeltaTable[13] = {0.500000f,/*8000*/
- 0.362812f,/*11025*/
- 0.333333f,/*12000*/
- 0.250000f,/*16000*/
- 0.181406f,/*22050*/
- 0.166666f,/*24000*/
- 0.125000f,/*32000*/
- 0.090703f,/*44100*/
- 0.083333f,/*48000*/
- 0.045352f,/*88200*/
- 0.041666f,/*96000*/
- 0.022676f,/*176400*/
- 0.020833f};/*192000*/
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+ LVM_Fs_en Fs, LVM_INT16 NumChannels) {
+ LVM_FLOAT DeltaTable[13] = {0.500000f, /*8000*/
+ 0.362812f, /*11025*/
+ 0.333333f, /*12000*/
+ 0.250000f, /*16000*/
+ 0.181406f, /*22050*/
+ 0.166666f, /*24000*/
+ 0.125000f, /*32000*/
+ 0.090703f, /*44100*/
+ 0.083333f, /*48000*/
+ 0.045352f, /*88200*/
+ 0.041666f, /*96000*/
+ 0.022676f, /*176400*/
+ 0.020833f}; /*192000*/
LVM_FLOAT Tc_millisec_float;
- Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+ Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
LVM_FLOAT Delta = DeltaTable[Fs];
- LVM_FLOAT Current;
- LVM_FLOAT Target;
+ LVM_FLOAT Current;
+ LVM_FLOAT Target;
- Delta=Delta / (NumChannels);
+ Delta = Delta / (NumChannels);
/* Get gain values */
Current = pInstance->Current;
Target = pInstance->Target;
- if (Current != Target)
- {
+ if (Current != Target) {
Tc_millisec_float = (LVM_FLOAT)(Tc_millisec) / (Current - Target);
- if (Tc_millisec_float < 0)
- Tc_millisec_float = -Tc_millisec_float;
+ if (Tc_millisec_float < 0) Tc_millisec_float = -Tc_millisec_float;
- if(Tc_millisec == 0)
+ if (Tc_millisec == 0)
Delta = 1.000000f;
else
Delta = Delta / Tc_millisec_float;
- if(Delta == 0)
+ if (Delta == 0)
Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
assign minimum value to Delta */
- }
- else
- {
- Delta = 0.0000000005f; /* Minimum value for proper call-backs \
- (setting it to zero has some problems, to be corrected) */
+ } else {
+ Delta = 0.0000000005f; /* Minimum value for proper call-backs \
+ (setting it to zero has some problems, to be corrected) */
}
- pInstance->Delta = Delta; // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
+ pInstance->Delta = Delta; // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
index 2497d29..59095df 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
@@ -67,31 +67,19 @@
/* RETURNS: */
/* */
/*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_HPF( LVM_FLOAT w,
- FO_FLOAT_Coefs_t *pCoeffs)
-{
- LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
- 0.999801f,
- -0.497824f,
- 0.322937f,
- -0.180880f,
- 0.087658f,
- -0.032102f,
- 0.008163f,
- -0.001252f,
- 0.000089f,
- 0,
- 0,
- 0};
- Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
+ LVM_FLOAT Y, Coefficients[13] = {-0.999996f, 0.999801f, -0.497824f, 0.322937f, -0.180880f,
+ 0.087658f, -0.032102f, 0.008163f, -0.001252f, 0.000089f,
+ 0, 0, 0};
+ Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
- pCoeffs->B1 = -Y; /* Store -B1 in filter structure instead of B1!*/
- /* A0=(1-B1)/2= B1/2 - 0.5*/
- Y = Y / 2.0f; /* A0=Y=B1/2*/
- Y = Y - 0.5f; /* A0=Y=(B1/2 - 0.5)*/
+ pCoeffs->B1 = -Y; /* Store -B1 in filter structure instead of B1!*/
+ /* A0=(1-B1)/2= B1/2 - 0.5*/
+ Y = Y / 2.0f; /* A0=Y=B1/2*/
+ Y = Y - 0.5f; /* A0=Y=(B1/2 - 0.5)*/
- pCoeffs->A0 = Y * FILTER_LOSS_FLOAT; /* Apply loss to avoid overflow*/
- pCoeffs->A1 = -pCoeffs->A0; /* Store A1=-A0*/
+ pCoeffs->A0 = Y * FILTER_LOSS_FLOAT; /* Apply loss to avoid overflow*/
+ pCoeffs->A1 = -pCoeffs->A0; /* Store A1=-A0*/
return 1;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
index 7bc6046..91964fb 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
@@ -67,25 +67,15 @@
/* RETURNS: */
/* */
/*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_LPF( LVM_FLOAT w,
- FO_FLOAT_Coefs_t *pCoeffs)
-{
- LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
- 0.999801f,
- -0.497824f,
- 0.322937f,
- -0.180880f,
- 0.087658f,
- -0.032102f,
- 0.008163f,
- -0.001252f,
- 0.000089f,
- 0};
- Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
- pCoeffs->B1 = -Y; // Store -B1 in filter structure instead of B1!
- // A0=(1+B1)/2= B1/2 + 0.5
- Y = Y / 2.0f; // A0=Y=B1/2
- Y = Y + 0.5f; // A0=Y=(B1/2 + 0.5)
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
+ LVM_FLOAT Y,
+ Coefficients[13] = {-0.999996f, 0.999801f, -0.497824f, 0.322937f, -0.180880f, 0.087658f,
+ -0.032102f, 0.008163f, -0.001252f, 0.000089f, 0};
+ Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+ pCoeffs->B1 = -Y; // Store -B1 in filter structure instead of B1!
+ // A0=(1+B1)/2= B1/2 + 0.5
+ Y = Y / 2.0f; // A0=Y=B1/2
+ Y = Y + 0.5f; // A0=Y=(B1/2 + 0.5)
pCoeffs->A0 = Y * FILTER_LOSS_FLOAT;
pCoeffs->A1 = pCoeffs->A0;
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
index 2a7cca2..5f25677 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
@@ -25,41 +25,32 @@
/* */
/************************************************************************************/
-#define LVVDL_2PiBy_8000 1727108826 /* In Q41 format */
-#define LVVDL_2PiBy_11025 1253230894 /* In Q41 format */
-#define LVVDL_2PiBy_12000 1151405884 /* In Q41 format */
+#define LVVDL_2PiBy_8000 1727108826 /* In Q41 format */
+#define LVVDL_2PiBy_11025 1253230894 /* In Q41 format */
+#define LVVDL_2PiBy_12000 1151405884 /* In Q41 format */
-#define LVVDL_2PiByFs_SHIFT1 12 /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
-#define LVVDL_2PiByFs_SHIFT2 13 /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
-#define LVVDL_2PiByFs_SHIFT3 14 /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
-#define LVVDL_2PiBy_8000_f 0.000785398f
-#define LVVDL_2PiBy_11025_f 0.000569903f
-#define LVVDL_2PiBy_12000_f 0.000523599f
-#define LVVDL_2PiBy_16000_f 0.000392700f
-#define LVVDL_2PiBy_22050_f 0.000284952f
-#define LVVDL_2PiBy_24000_f 0.000261800f
-#define LVVDL_2PiBy_32000_f 0.000196350f
-#define LVVDL_2PiBy_44100_f 0.000142476f
-#define LVVDL_2PiBy_48000_f 0.000130900f
+#define LVVDL_2PiByFs_SHIFT1 12 /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
+#define LVVDL_2PiByFs_SHIFT2 13 /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
+#define LVVDL_2PiByFs_SHIFT3 14 /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
+#define LVVDL_2PiBy_8000_f 0.000785398f
+#define LVVDL_2PiBy_11025_f 0.000569903f
+#define LVVDL_2PiBy_12000_f 0.000523599f
+#define LVVDL_2PiBy_16000_f 0.000392700f
+#define LVVDL_2PiBy_22050_f 0.000284952f
+#define LVVDL_2PiBy_24000_f 0.000261800f
+#define LVVDL_2PiBy_32000_f 0.000196350f
+#define LVVDL_2PiBy_44100_f 0.000142476f
+#define LVVDL_2PiBy_48000_f 0.000130900f
-#define LVVDL_2PiBy_88200_f 0.000071238f
-#define LVVDL_2PiBy_96000_f 0.000065450f
-#define LVVDL_2PiBy_176400_f 0.000035619f
-#define LVVDL_2PiBy_192000_f 0.000032725f
-const LVM_FLOAT LVVDL_2PiOnFsTable[] = {LVVDL_2PiBy_8000_f,
- LVVDL_2PiBy_11025_f,
- LVVDL_2PiBy_12000_f,
- LVVDL_2PiBy_16000_f,
- LVVDL_2PiBy_22050_f,
- LVVDL_2PiBy_24000_f,
- LVVDL_2PiBy_32000_f,
- LVVDL_2PiBy_44100_f,
- LVVDL_2PiBy_48000_f
- ,LVVDL_2PiBy_88200_f
- ,LVVDL_2PiBy_96000_f
- ,LVVDL_2PiBy_176400_f
- ,LVVDL_2PiBy_192000_f
- };
+#define LVVDL_2PiBy_88200_f 0.000071238f
+#define LVVDL_2PiBy_96000_f 0.000065450f
+#define LVVDL_2PiBy_176400_f 0.000035619f
+#define LVVDL_2PiBy_192000_f 0.000032725f
+const LVM_FLOAT LVVDL_2PiOnFsTable[] = {
+ LVVDL_2PiBy_8000_f, LVVDL_2PiBy_11025_f, LVVDL_2PiBy_12000_f, LVVDL_2PiBy_16000_f,
+ LVVDL_2PiBy_22050_f, LVVDL_2PiBy_24000_f, LVVDL_2PiBy_32000_f, LVVDL_2PiBy_44100_f,
+ LVVDL_2PiBy_48000_f, LVVDL_2PiBy_88200_f, LVVDL_2PiBy_96000_f, LVVDL_2PiBy_176400_f,
+ LVVDL_2PiBy_192000_f};
/*-------------------------------------------------------------------------*/
/* FUNCTION: */
/* LVM_GetOmega */
@@ -77,10 +68,8 @@
/* RETURNS: */
/* w=2*pi*Fc/Fs in Q2.29 format */
/*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc,
- LVM_Fs_en Fs)
-{
- LVM_FLOAT w;
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en Fs) {
+ LVM_FLOAT w;
w = (LVM_FLOAT)Fc * LVVDL_2PiOnFsTable[Fs];
return w;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
index 244f09d..31dcaa4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
@@ -33,109 +33,109 @@
/* */
/************************************************************************************/
-#define Alpha_TableSize 50 /* The number of table entires */
-#define ALPHA_0 2147480769 /* Floating point Alpha = 0.999999 */
-#define ALPHA_1 2147479577 /* Floating point Alpha = 0.999998 */
-#define ALPHA_2 2147477892 /* Floating point Alpha = 0.999997 */
-#define ALPHA_3 2147475510 /* Floating point Alpha = 0.999996 */
-#define ALPHA_4 2147472141 /* Floating point Alpha = 0.999995 */
-#define ALPHA_5 2147467377 /* Floating point Alpha = 0.999992 */
-#define ALPHA_6 2147460642 /* Floating point Alpha = 0.999989 */
-#define ALPHA_7 2147451118 /* Floating point Alpha = 0.999985 */
-#define ALPHA_8 2147437651 /* Floating point Alpha = 0.999979 */
-#define ALPHA_9 2147418608 /* Floating point Alpha = 0.999970 */
-#define ALPHA_10 2147391683 /* Floating point Alpha = 0.999957 */
-#define ALPHA_11 2147353611 /* Floating point Alpha = 0.999939 */
-#define ALPHA_12 2147299779 /* Floating point Alpha = 0.999914 */
-#define ALPHA_13 2147223662 /* Floating point Alpha = 0.999879 */
-#define ALPHA_14 2147116037 /* Floating point Alpha = 0.999829 */
-#define ALPHA_15 2146963865 /* Floating point Alpha = 0.999758 */
-#define ALPHA_16 2146748712 /* Floating point Alpha = 0.999658 */
-#define ALPHA_17 2146444522 /* Floating point Alpha = 0.999516 */
-#define ALPHA_18 2146014472 /* Floating point Alpha = 0.999316 */
-#define ALPHA_19 2145406527 /* Floating point Alpha = 0.999033 */
-#define ALPHA_20 2144547188 /* Floating point Alpha = 0.998633 */
-#define ALPHA_21 2143332669 /* Floating point Alpha = 0.998067 */
-#define ALPHA_22 2141616514 /* Floating point Alpha = 0.997268 */
-#define ALPHA_23 2139192215 /* Floating point Alpha = 0.996139 */
-#define ALPHA_24 2135768939 /* Floating point Alpha = 0.994545 */
-#define ALPHA_25 2130937774 /* Floating point Alpha = 0.992295 */
-#define ALPHA_26 2124125153 /* Floating point Alpha = 0.989123 */
-#define ALPHA_27 2114529263 /* Floating point Alpha = 0.984654 */
-#define ALPHA_28 2101034612 /* Floating point Alpha = 0.978370 */
-#define ALPHA_29 2082100030 /* Floating point Alpha = 0.969553 */
-#define ALPHA_30 2055617398 /* Floating point Alpha = 0.957221 */
-#define ALPHA_31 2018744824 /* Floating point Alpha = 0.940051 */
-#define ALPHA_32 1967733015 /* Floating point Alpha = 0.916297 */
-#define ALPHA_33 1897794587 /* Floating point Alpha = 0.883729 */
-#define ALPHA_34 1803123234 /* Floating point Alpha = 0.839645 */
-#define ALPHA_35 1677262220 /* Floating point Alpha = 0.781036 */
-#define ALPHA_36 1514142675 /* Floating point Alpha = 0.705078 */
-#define ALPHA_37 1310197875 /* Floating point Alpha = 0.610108 */
-#define ALPHA_38 1067813480 /* Floating point Alpha = 0.497239 */
-#define ALPHA_39 799601371 /* Floating point Alpha = 0.372343 */
-#define ALPHA_40 531183049 /* Floating point Alpha = 0.247351 */
-#define ALPHA_41 297904007 /* Floating point Alpha = 0.138722 */
-#define ALPHA_42 131499768 /* Floating point Alpha = 0.061234 */
-#define ALPHA_43 41375282 /* Floating point Alpha = 0.019267 */
-#define ALPHA_44 8065899 /* Floating point Alpha = 0.003756 */
-#define ALPHA_45 799076 /* Floating point Alpha = 0.000372 */
-#define ALPHA_46 30398 /* Floating point Alpha = 0.000014 */
-#define ALPHA_47 299 /* Floating point Alpha = 0.000000 */
-#define ALPHA_48 0 /* Floating point Alpha = 0.000000 */
-#define ALPHA_49 0 /* Floating point Alpha = 0.000000 */
-#define ALPHA_50 0 /* Floating point Alpha = 0.000000 */
+#define Alpha_TableSize 50 /* The number of table entries */
+#define ALPHA_0 2147480769 /* Floating point Alpha = 0.999999 */
+#define ALPHA_1 2147479577 /* Floating point Alpha = 0.999998 */
+#define ALPHA_2 2147477892 /* Floating point Alpha = 0.999997 */
+#define ALPHA_3 2147475510 /* Floating point Alpha = 0.999996 */
+#define ALPHA_4 2147472141 /* Floating point Alpha = 0.999995 */
+#define ALPHA_5 2147467377 /* Floating point Alpha = 0.999992 */
+#define ALPHA_6 2147460642 /* Floating point Alpha = 0.999989 */
+#define ALPHA_7 2147451118 /* Floating point Alpha = 0.999985 */
+#define ALPHA_8 2147437651 /* Floating point Alpha = 0.999979 */
+#define ALPHA_9 2147418608 /* Floating point Alpha = 0.999970 */
+#define ALPHA_10 2147391683 /* Floating point Alpha = 0.999957 */
+#define ALPHA_11 2147353611 /* Floating point Alpha = 0.999939 */
+#define ALPHA_12 2147299779 /* Floating point Alpha = 0.999914 */
+#define ALPHA_13 2147223662 /* Floating point Alpha = 0.999879 */
+#define ALPHA_14 2147116037 /* Floating point Alpha = 0.999829 */
+#define ALPHA_15 2146963865 /* Floating point Alpha = 0.999758 */
+#define ALPHA_16 2146748712 /* Floating point Alpha = 0.999658 */
+#define ALPHA_17 2146444522 /* Floating point Alpha = 0.999516 */
+#define ALPHA_18 2146014472 /* Floating point Alpha = 0.999316 */
+#define ALPHA_19 2145406527 /* Floating point Alpha = 0.999033 */
+#define ALPHA_20 2144547188 /* Floating point Alpha = 0.998633 */
+#define ALPHA_21 2143332669 /* Floating point Alpha = 0.998067 */
+#define ALPHA_22 2141616514 /* Floating point Alpha = 0.997268 */
+#define ALPHA_23 2139192215 /* Floating point Alpha = 0.996139 */
+#define ALPHA_24 2135768939 /* Floating point Alpha = 0.994545 */
+#define ALPHA_25 2130937774 /* Floating point Alpha = 0.992295 */
+#define ALPHA_26 2124125153 /* Floating point Alpha = 0.989123 */
+#define ALPHA_27 2114529263 /* Floating point Alpha = 0.984654 */
+#define ALPHA_28 2101034612 /* Floating point Alpha = 0.978370 */
+#define ALPHA_29 2082100030 /* Floating point Alpha = 0.969553 */
+#define ALPHA_30 2055617398 /* Floating point Alpha = 0.957221 */
+#define ALPHA_31 2018744824 /* Floating point Alpha = 0.940051 */
+#define ALPHA_32 1967733015 /* Floating point Alpha = 0.916297 */
+#define ALPHA_33 1897794587 /* Floating point Alpha = 0.883729 */
+#define ALPHA_34 1803123234 /* Floating point Alpha = 0.839645 */
+#define ALPHA_35 1677262220 /* Floating point Alpha = 0.781036 */
+#define ALPHA_36 1514142675 /* Floating point Alpha = 0.705078 */
+#define ALPHA_37 1310197875 /* Floating point Alpha = 0.610108 */
+#define ALPHA_38 1067813480 /* Floating point Alpha = 0.497239 */
+#define ALPHA_39 799601371 /* Floating point Alpha = 0.372343 */
+#define ALPHA_40 531183049 /* Floating point Alpha = 0.247351 */
+#define ALPHA_41 297904007 /* Floating point Alpha = 0.138722 */
+#define ALPHA_42 131499768 /* Floating point Alpha = 0.061234 */
+#define ALPHA_43 41375282 /* Floating point Alpha = 0.019267 */
+#define ALPHA_44 8065899 /* Floating point Alpha = 0.003756 */
+#define ALPHA_45 799076 /* Floating point Alpha = 0.000372 */
+#define ALPHA_46 30398 /* Floating point Alpha = 0.000014 */
+#define ALPHA_47 299 /* Floating point Alpha = 0.000000 */
+#define ALPHA_48 0 /* Floating point Alpha = 0.000000 */
+#define ALPHA_49 0 /* Floating point Alpha = 0.000000 */
+#define ALPHA_50 0 /* Floating point Alpha = 0.000000 */
-#define ALPHA_Float_0 0.999999f
-#define ALPHA_Float_1 0.999998f
-#define ALPHA_Float_2 0.999997f
-#define ALPHA_Float_3 0.999996f
-#define ALPHA_Float_4 0.999995f
-#define ALPHA_Float_5 0.999992f
-#define ALPHA_Float_6 0.999989f
-#define ALPHA_Float_7 0.999985f
-#define ALPHA_Float_8 0.999979f
-#define ALPHA_Float_9 0.999970f
-#define ALPHA_Float_10 0.999957f
-#define ALPHA_Float_11 0.999939f
-#define ALPHA_Float_12 0.999914f
-#define ALPHA_Float_13 0.999879f
-#define ALPHA_Float_14 0.999829f
-#define ALPHA_Float_15 0.999758f
-#define ALPHA_Float_16 0.999658f
-#define ALPHA_Float_17 0.999516f
-#define ALPHA_Float_18 0.999316f
-#define ALPHA_Float_19 0.999033f
-#define ALPHA_Float_20 0.998633f
-#define ALPHA_Float_21 0.998067f
-#define ALPHA_Float_22 0.997268f
-#define ALPHA_Float_23 0.996139f
-#define ALPHA_Float_24 0.994545f
-#define ALPHA_Float_25 0.992295f
-#define ALPHA_Float_26 0.989123f
-#define ALPHA_Float_27 0.984654f
-#define ALPHA_Float_28 0.978370f
-#define ALPHA_Float_29 0.969553f
-#define ALPHA_Float_30 0.957221f
-#define ALPHA_Float_31 0.940051f
-#define ALPHA_Float_32 0.916297f
-#define ALPHA_Float_33 0.883729f
-#define ALPHA_Float_34 0.839645f
-#define ALPHA_Float_35 0.781036f
-#define ALPHA_Float_36 0.705078f
-#define ALPHA_Float_37 0.610108f
-#define ALPHA_Float_38 0.497239f
-#define ALPHA_Float_39 0.372343f
-#define ALPHA_Float_40 0.247351f
-#define ALPHA_Float_41 0.138722f
-#define ALPHA_Float_42 0.061234f
-#define ALPHA_Float_43 0.019267f
-#define ALPHA_Float_44 0.003756f
-#define ALPHA_Float_45 0.000372f
-#define ALPHA_Float_46 0.000014f
-#define ALPHA_Float_47 0.000000f
-#define ALPHA_Float_48 0.000000f
-#define ALPHA_Float_49 0.000000f
-#define ALPHA_Float_50 0.000000f
+#define ALPHA_Float_0 0.999999f
+#define ALPHA_Float_1 0.999998f
+#define ALPHA_Float_2 0.999997f
+#define ALPHA_Float_3 0.999996f
+#define ALPHA_Float_4 0.999995f
+#define ALPHA_Float_5 0.999992f
+#define ALPHA_Float_6 0.999989f
+#define ALPHA_Float_7 0.999985f
+#define ALPHA_Float_8 0.999979f
+#define ALPHA_Float_9 0.999970f
+#define ALPHA_Float_10 0.999957f
+#define ALPHA_Float_11 0.999939f
+#define ALPHA_Float_12 0.999914f
+#define ALPHA_Float_13 0.999879f
+#define ALPHA_Float_14 0.999829f
+#define ALPHA_Float_15 0.999758f
+#define ALPHA_Float_16 0.999658f
+#define ALPHA_Float_17 0.999516f
+#define ALPHA_Float_18 0.999316f
+#define ALPHA_Float_19 0.999033f
+#define ALPHA_Float_20 0.998633f
+#define ALPHA_Float_21 0.998067f
+#define ALPHA_Float_22 0.997268f
+#define ALPHA_Float_23 0.996139f
+#define ALPHA_Float_24 0.994545f
+#define ALPHA_Float_25 0.992295f
+#define ALPHA_Float_26 0.989123f
+#define ALPHA_Float_27 0.984654f
+#define ALPHA_Float_28 0.978370f
+#define ALPHA_Float_29 0.969553f
+#define ALPHA_Float_30 0.957221f
+#define ALPHA_Float_31 0.940051f
+#define ALPHA_Float_32 0.916297f
+#define ALPHA_Float_33 0.883729f
+#define ALPHA_Float_34 0.839645f
+#define ALPHA_Float_35 0.781036f
+#define ALPHA_Float_36 0.705078f
+#define ALPHA_Float_37 0.610108f
+#define ALPHA_Float_38 0.497239f
+#define ALPHA_Float_39 0.372343f
+#define ALPHA_Float_40 0.247351f
+#define ALPHA_Float_41 0.138722f
+#define ALPHA_Float_42 0.061234f
+#define ALPHA_Float_43 0.019267f
+#define ALPHA_Float_44 0.003756f
+#define ALPHA_Float_45 0.000372f
+#define ALPHA_Float_46 0.000014f
+#define ALPHA_Float_47 0.000000f
+#define ALPHA_Float_48 0.000000f
+#define ALPHA_Float_49 0.000000f
+#define ALPHA_Float_50 0.000000f
#endif
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
index 73da2cf..3ec103a 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
@@ -56,83 +56,37 @@
/* Alpha - the filter coefficient Q31 format */
/* */
/************************************************************************/
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc,
- LVM_UINT32 Fs,
- LVM_UINT16 NumChannels)
-{
-
- LVM_UINT32 Product;
- LVM_FLOAT ProductFloat;
- LVM_INT16 InterpolateShort;
- LVM_FLOAT Interpolate;
- LVM_UINT16 Shift;
- LVM_FLOAT Diff;
- LVM_FLOAT Table[] = {ALPHA_Float_0, /* Log spaced look-up table */
- ALPHA_Float_1,
- ALPHA_Float_2,
- ALPHA_Float_3,
- ALPHA_Float_4,
- ALPHA_Float_5,
- ALPHA_Float_6,
- ALPHA_Float_7,
- ALPHA_Float_8,
- ALPHA_Float_9,
- ALPHA_Float_10,
- ALPHA_Float_11,
- ALPHA_Float_12,
- ALPHA_Float_13,
- ALPHA_Float_14,
- ALPHA_Float_15,
- ALPHA_Float_16,
- ALPHA_Float_17,
- ALPHA_Float_18,
- ALPHA_Float_19,
- ALPHA_Float_20,
- ALPHA_Float_21,
- ALPHA_Float_22,
- ALPHA_Float_23,
- ALPHA_Float_24,
- ALPHA_Float_25,
- ALPHA_Float_26,
- ALPHA_Float_27,
- ALPHA_Float_28,
- ALPHA_Float_29,
- ALPHA_Float_30,
- ALPHA_Float_31,
- ALPHA_Float_32,
- ALPHA_Float_33,
- ALPHA_Float_34,
- ALPHA_Float_35,
- ALPHA_Float_36,
- ALPHA_Float_37,
- ALPHA_Float_38,
- ALPHA_Float_39,
- ALPHA_Float_40,
- ALPHA_Float_41,
- ALPHA_Float_42,
- ALPHA_Float_43,
- ALPHA_Float_44,
- ALPHA_Float_45,
- ALPHA_Float_46,
- ALPHA_Float_47,
- ALPHA_Float_48,
- ALPHA_Float_49,
- ALPHA_Float_50};
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels) {
+ LVM_UINT32 Product;
+ LVM_FLOAT ProductFloat;
+ LVM_INT16 InterpolateShort;
+ LVM_FLOAT Interpolate;
+ LVM_UINT16 Shift;
+ LVM_FLOAT Diff;
+ LVM_FLOAT Table[] = {
+ ALPHA_Float_0, /* Log spaced look-up table */
+ ALPHA_Float_1, ALPHA_Float_2, ALPHA_Float_3, ALPHA_Float_4, ALPHA_Float_5,
+ ALPHA_Float_6, ALPHA_Float_7, ALPHA_Float_8, ALPHA_Float_9, ALPHA_Float_10,
+ ALPHA_Float_11, ALPHA_Float_12, ALPHA_Float_13, ALPHA_Float_14, ALPHA_Float_15,
+ ALPHA_Float_16, ALPHA_Float_17, ALPHA_Float_18, ALPHA_Float_19, ALPHA_Float_20,
+ ALPHA_Float_21, ALPHA_Float_22, ALPHA_Float_23, ALPHA_Float_24, ALPHA_Float_25,
+ ALPHA_Float_26, ALPHA_Float_27, ALPHA_Float_28, ALPHA_Float_29, ALPHA_Float_30,
+ ALPHA_Float_31, ALPHA_Float_32, ALPHA_Float_33, ALPHA_Float_34, ALPHA_Float_35,
+ ALPHA_Float_36, ALPHA_Float_37, ALPHA_Float_38, ALPHA_Float_39, ALPHA_Float_40,
+ ALPHA_Float_41, ALPHA_Float_42, ALPHA_Float_43, ALPHA_Float_44, ALPHA_Float_45,
+ ALPHA_Float_46, ALPHA_Float_47, ALPHA_Float_48, ALPHA_Float_49, ALPHA_Float_50};
/* Calculate the product of the time constant and the sample rate */
- Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13; /* Stereo value */
+ Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13; /* Stereo value */
Product = Product + (((tc & 0x0000FFFF) * (LVM_UINT32)Fs) >> 3);
- if (NumChannels == 1)
- {
- Product = Product >> 1; /* Mono value */
+ if (NumChannels == 1) {
+ Product = Product >> 1; /* Mono value */
}
/* Normalize to get the table index and interpolation factor */
- for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++)
- {
- if ((Product & 0x80000000) != 0)
- {
+ for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++) {
+ if ((Product & 0x80000000) != 0) {
break;
}
@@ -140,8 +94,7 @@
}
Shift = (LVM_UINT16)((Shift << 1));
- if ((Product & 0x40000000)==0)
- {
+ if ((Product & 0x40000000) == 0) {
Shift++;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
index 2c3e9ec..8382529 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
@@ -40,33 +40,25 @@
/* RETURNS: */
/* The result of the polynomial expansion in Q1.31 format */
/*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Polynomial(LVM_UINT16 N,
- LVM_FLOAT *pCoefficients,
- LVM_FLOAT X)
-{
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X) {
LVM_INT32 i;
- LVM_FLOAT Y,A,XTemp,Temp,sign;
+ LVM_FLOAT Y, A, XTemp, Temp, sign;
Y = *pCoefficients; /* Y=A0*/
pCoefficients++;
- if(X == -1.0f)
- {
+ if (X == -1.0f) {
Temp = -1;
sign = Temp;
- for(i = 1; i <= N; i++)
- {
+ for (i = 1; i <= N; i++) {
Y += ((*pCoefficients) * sign);
pCoefficients++;
sign *= Temp;
}
- }
- else
- {
+ } else {
XTemp = X;
- for(i = N-1; i >= 0; i--)
- {
+ for (i = N - 1; i >= 0; i--) {
A = *pCoefficients;
pCoefficients++;
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
index ae8e9d1..85596aa 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
@@ -53,23 +53,10 @@
/* RETURNS: */
/* The result of the 10x expansion in Q8.24 format */
/*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Power10(LVM_FLOAT X)
-{
- LVM_FLOAT Y,Coefficients[13]={0.999906f,
- 2.302475f,
- 2.652765f,
- 2.035494f,
- 1.165667f,
- 0.537676f,
- 0.213192f,
- 0.069603f,
- 0.016553f,
- 0.004373f,
- 0.001817f,
- 0.000367f,
- 0};
- Y=LVM_Polynomial((LVM_UINT16)11,
- Coefficients,
- X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X) {
+ LVM_FLOAT Y, Coefficients[13] = {0.999906f, 2.302475f, 2.652765f, 2.035494f, 1.165667f,
+ 0.537676f, 0.213192f, 0.069603f, 0.016553f, 0.004373f,
+ 0.001817f, 0.000367f, 0};
+ Y = LVM_Polynomial((LVM_UINT16)11, Coefficients, X);
return Y;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
index 5995f54..be7c8e4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
@@ -26,19 +26,16 @@
/* TIMER FUNCTION */
/****************************************************************************************/
-void LVM_Timer ( LVM_Timer_Instance_t *pInstance,
- LVM_INT16 BlockSize ){
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize) {
+ LVM_Timer_Instance_Private_t* pInstancePr;
+ pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
- LVM_Timer_Instance_Private_t *pInstancePr;
- pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
- if (pInstancePr->TimerArmed){
+ if (pInstancePr->TimerArmed) {
pInstancePr->RemainingTimeInSamples -= BlockSize;
- if (pInstancePr->RemainingTimeInSamples <= 0){
+ if (pInstancePr->RemainingTimeInSamples <= 0) {
pInstancePr->TimerArmed = 0;
- (*pInstancePr->pCallBack) ( pInstancePr->pCallbackInstance,
- pInstancePr->pCallBackParams,
- pInstancePr->CallBackParam );
+ (*pInstancePr->pCallBack)(pInstancePr->pCallbackInstance, pInstancePr->pCallBackParams,
+ pInstancePr->CallBackParam);
}
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
index 3015057..bfd6bcf 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
@@ -33,20 +33,20 @@
/* INIT FUNCTION */
/****************************************************************************************/
-void LVM_Timer_Init ( LVM_Timer_Instance_t *pInstance,
- LVM_Timer_Params_t *pParams ){
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams) {
+ LVM_Timer_Instance_Private_t* pInstancePr;
+ pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
- LVM_Timer_Instance_Private_t *pInstancePr;
- pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
- pInstancePr->CallBackParam = pParams->CallBackParam;
- pInstancePr->pCallBackParams = (LVM_INT32 *)pParams->pCallBackParams;
+ pInstancePr->CallBackParam = pParams->CallBackParam;
+ pInstancePr->pCallBackParams = (LVM_INT32*)pParams->pCallBackParams;
pInstancePr->pCallbackInstance = pParams->pCallbackInstance;
- pInstancePr->pCallBack = pParams->pCallBack;
- pInstancePr->TimerArmed = 1;
+ pInstancePr->pCallBack = pParams->pCallBack;
+ pInstancePr->TimerArmed = 1;
- MUL32x16INTO32(pParams->SamplingRate,OneOverThousandInQ24,pInstancePr->RemainingTimeInSamples,16); /* (Q0 * Q24) >>16 into Q8*/
- MUL32x16INTO32(pInstancePr->RemainingTimeInSamples,pParams->TimeInMs,pInstancePr->RemainingTimeInSamples,8); /* (Q8 * Q0) >>8 into Q0*/
+ MUL32x16INTO32(pParams->SamplingRate, OneOverThousandInQ24, pInstancePr->RemainingTimeInSamples,
+ 16); /* (Q0 * Q24) >>16 into Q8*/
+ MUL32x16INTO32(pInstancePr->RemainingTimeInSamples, pParams->TimeInMs,
+ pInstancePr->RemainingTimeInSamples, 8); /* (Q8 * Q0) >>8 into Q0*/
}
/****************************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
index a372b82..3e8aba8 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
@@ -24,14 +24,13 @@
/* TYPE DEFINITIONS */
/****************************************************************************************/
-typedef struct
-{
- LVM_INT32 RemainingTimeInSamples;
- LVM_INT32 CallBackParam;
- LVM_INT32 *pCallBackParams;
- void *pCallbackInstance;
- void (*pCallBack)(void*,void*,LVM_INT32);
- LVM_INT16 TimerArmed; /* Boolean, true between init and callback */
+typedef struct {
+ LVM_INT32 RemainingTimeInSamples;
+ LVM_INT32 CallBackParam;
+ LVM_INT32* pCallBackParams;
+ void* pCallbackInstance;
+ void (*pCallBack)(void*, void*, LVM_INT32);
+ LVM_INT16 TimerArmed; /* Boolean, true between init and callback */
} LVM_Timer_Instance_Private_t;
@@ -39,4 +38,4 @@
/* END OF HEADER */
/****************************************************************************************/
-#endif /* LVM_TIMER_PRIVATE_H */
+#endif /* LVM_TIMER_PRIVATE_H */
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
index f88ca0e..a39fa2f 100644
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
@@ -25,14 +25,10 @@
FUNCTION LoadConst_16
***********************************************************************************/
-void LoadConst_16(const LVM_INT16 val,
- LVM_INT16 *dst,
- LVM_INT16 n )
-{
+void LoadConst_16(const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = val;
dst++;
}
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
index c789756..df7a558 100644
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
@@ -24,14 +24,10 @@
/**********************************************************************************
FUNCTION LoadConst_32
***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n )
-{
+void LoadConst_Float(const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = val;
dst++;
}
diff --git a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
index 1ea765a..a19e66f 100644
--- a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
@@ -25,50 +25,35 @@
FUNCTION MSTO2I_SAT_16X16
***********************************************************************************/
-void MSTo2i_Sat_16x16(const LVM_INT16 *srcM,
- const LVM_INT16 *srcS,
- LVM_INT16 *dst,
- LVM_INT16 n )
-{
- LVM_INT32 temp,mVal,sVal;
+void MSTo2i_Sat_16x16(const LVM_INT16* srcM, const LVM_INT16* srcS, LVM_INT16* dst, LVM_INT16 n) {
+ LVM_INT32 temp, mVal, sVal;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
- mVal=(LVM_INT32)*srcM;
+ for (ii = n; ii != 0; ii--) {
+ mVal = (LVM_INT32)*srcM;
srcM++;
- sVal=(LVM_INT32)*srcS;
+ sVal = (LVM_INT32)*srcS;
srcS++;
temp = mVal + sVal;
- if (temp > 0x00007FFF)
- {
+ if (temp > 0x00007FFF) {
*dst = 0x7FFF;
- }
- else if (temp < -0x00008000)
- {
- *dst = - 0x8000;
- }
- else
- {
+ } else if (temp < -0x00008000) {
+ *dst = -0x8000;
+ } else {
*dst = (LVM_INT16)temp;
}
dst++;
temp = mVal - sVal;
- if (temp > 0x00007FFF)
- {
+ if (temp > 0x00007FFF) {
*dst = 0x7FFF;
- }
- else if (temp < -0x00008000)
- {
- *dst = - 0x8000;
- }
- else
- {
+ } else if (temp < -0x00008000) {
+ *dst = -0x8000;
+ } else {
*dst = (LVM_INT16)temp;
}
dst++;
@@ -76,16 +61,11 @@
return;
}
-void MSTo2i_Sat_Float(const LVM_FLOAT *srcM,
- const LVM_FLOAT *srcS,
- LVM_FLOAT *dst,
- LVM_INT16 n )
-{
- LVM_FLOAT temp,mVal,sVal;
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT temp, mVal, sVal;
LVM_INT16 ii;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
mVal = (LVM_FLOAT)*srcM;
srcM++;
@@ -94,32 +74,22 @@
temp = mVal + sVal;
- if (temp > 1.0f)
- {
+ if (temp > 1.0f) {
*dst = 1.0f;
- }
- else if (temp < -1.0f)
- {
+ } else if (temp < -1.0f) {
*dst = -1.0f;
- }
- else
- {
+ } else {
*dst = (LVM_FLOAT)temp;
}
dst++;
temp = mVal - sVal;
- if (temp > 1.0f)
- {
+ if (temp > 1.0f) {
*dst = 1.0f;
- }
- else if (temp < -1.0f)
- {
- *dst = - 1.0f;
- }
- else
- {
+ } else if (temp < -1.0f) {
+ *dst = -1.0f;
+ } else {
*dst = (LVM_FLOAT)temp;
}
dst++;
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
index 6584251..1d450b0 100644
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
@@ -35,36 +35,26 @@
FUNCTION Mac3S_16X16
***********************************************************************************/
-void Mac3s_Sat_16x16( const LVM_INT16 *src,
- const LVM_INT16 val,
- LVM_INT16 *dst,
- LVM_INT16 n)
-{
+void Mac3s_Sat_16x16(const LVM_INT16* src, const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_INT16 srcval;
- LVM_INT32 Temp,dInVal;
+ LVM_INT32 Temp, dInVal;
- for (ii = n; ii != 0; ii--)
- {
- srcval=*src;
+ for (ii = n; ii != 0; ii--) {
+ srcval = *src;
src++;
- Temp = (srcval *val)>>15;
+ Temp = (srcval * val) >> 15;
- dInVal = (LVM_INT32)*dst;
+ dInVal = (LVM_INT32)*dst;
Temp = Temp + dInVal;
- if (Temp > 0x00007FFF)
- {
+ if (Temp > 0x00007FFF) {
*dst = 0x7FFF;
- }
- else if (Temp < -0x00008000)
- {
- *dst = - 0x8000;
- }
- else
- {
+ } else if (Temp < -0x00008000) {
+ *dst = -0x8000;
+ } else {
*dst = (LVM_INT16)Temp;
}
@@ -75,4 +65,3 @@
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
index 5d5564f..0fe9fef 100644
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
@@ -26,33 +26,25 @@
FUNCTION MAC3S_16X16
***********************************************************************************/
-void Mac3s_Sat_32x16( const LVM_INT32 *src,
- const LVM_INT16 val,
- LVM_INT32 *dst,
- LVM_INT16 n)
-{
+void Mac3s_Sat_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n) {
LVM_INT16 ii;
- LVM_INT32 srcval,temp, dInVal, dOutVal;
+ LVM_INT32 srcval, temp, dInVal, dOutVal;
- for (ii = n; ii != 0; ii--)
- {
- srcval=*src;
+ for (ii = n; ii != 0; ii--) {
+ srcval = *src;
src++;
- MUL32x16INTO32(srcval,val,temp,15)
+ MUL32x16INTO32(srcval, val, temp, 15)
- dInVal = *dst;
+ dInVal = *dst;
dOutVal = temp + dInVal;
- if ((((dOutVal ^ temp) & (dOutVal ^ dInVal)) >> 31)!=0) /* overflow / underflow */
+ if ((((dOutVal ^ temp) & (dOutVal ^ dInVal)) >> 31) != 0) /* overflow / underflow */
{
- if(temp<0)
- {
- dOutVal=0x80000000L;
- }
- else
- {
- dOutVal=0x7FFFFFFFL;
+ if (temp < 0) {
+ dOutVal = 0x80000000L;
+ } else {
+ dOutVal = 0x7FFFFFFFL;
}
}
@@ -62,35 +54,25 @@
return;
}
-void Mac3s_Sat_Float(const LVM_FLOAT *src,
- const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_FLOAT srcval;
- LVM_FLOAT Temp,dInVal;
+ LVM_FLOAT Temp, dInVal;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
srcval = *src;
src++;
Temp = srcval * val;
- dInVal = (LVM_FLOAT)*dst;
+ dInVal = (LVM_FLOAT)*dst;
Temp = Temp + dInVal;
- if (Temp > 1.000000f)
- {
+ if (Temp > 1.000000f) {
*dst = 1.000000f;
- }
- else if (Temp < -1.000000f)
- {
+ } else if (Temp < -1.000000f) {
*dst = -1.000000f;
- }
- else
- {
+ } else {
*dst = Temp;
}
dst++;
@@ -99,4 +81,3 @@
return;
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
index 7c7b36f..9663998 100644
--- a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
@@ -26,33 +26,29 @@
DEFINITIONS
***********************************************************************************/
-#define TRUE 1
-#define FALSE 0
+#define TRUE 1
+#define FALSE 0
/**********************************************************************************
FUNCTION MIXINSOFT_D32C31_SAT
***********************************************************************************/
-void MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n) {
char HardMixing = TRUE;
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
- if(pInstance->Alpha == 0){
+ if (pInstance->Current != pInstance->Target) {
+ if (pInstance->Alpha == 0) {
pInstance->Current = pInstance->Target;
- }else if ((pInstance->Current-pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
- (pInstance->Current-pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+ } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+ (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
- }else{
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
@@ -63,11 +59,11 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing){
- if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
+ if (HardMixing) {
+ if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
if ((pInstance->Target) == 1.0f)
Add2_Sat_Float(src, dst, n);
- else{
+ else {
Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
pInstance->Current = pInstance->Target; /* In case the core function would \
have changed the Current value */
@@ -81,16 +77,15 @@
/* Call back before the hard mixing, because in this case, hard mixing makes
use of the core soft mix function which can change the Current value! */
- if (pInstance->CallbackSet){
+ if (pInstance->CallbackSet) {
if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
- (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+ (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
pInstance->CallbackSet = FALSE;
- if (pInstance->pCallBack != 0){
- (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
- pInstance->pGeneralPurpose,
- pInstance->CallbackParam );
+ if (pInstance->pCallBack != 0) {
+ (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+ pInstance->CallbackParam);
}
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index d3325ec..8408962 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -26,33 +26,29 @@
DEFINITIONS
***********************************************************************************/
-#define TRUE 1
-#define FALSE 0
+#define TRUE 1
+#define FALSE 0
/**********************************************************************************
FUNCTION MIXSOFT_1ST_D32C31_WRA
***********************************************************************************/
-void MixSoft_1St_D32C31_WRA( Mix_1St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+ LVM_INT16 n) {
char HardMixing = TRUE;
- if(n <= 0) return;
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if (pInstance->Current != pInstance->Target)
- {
- if(pInstance->Alpha == 0){
+ if (pInstance->Current != pInstance->Target) {
+ if (pInstance->Alpha == 0) {
pInstance->Current = pInstance->Target;
- }else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
- (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+ } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+ (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
- }else{
+ } else {
/* Soft mixing has to be applied */
HardMixing = FALSE;
Core_MixSoft_1St_D32C31_WRA(pInstance, src, dst, n);
@@ -63,14 +59,12 @@
HARD MIXING
*******************************************************************************/
- if (HardMixing){
+ if (HardMixing) {
if (pInstance->Target == 0)
LoadConst_Float(0, dst, n);
- else if ((pInstance->Target) == 1.0f){
- if (src != dst)
- Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
- }
- else
+ else if ((pInstance->Target) == 1.0f) {
+ if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
+ } else
Mult3s_Float(src, pInstance->Current, dst, n);
}
@@ -78,16 +72,15 @@
CALL BACK
*******************************************************************************/
- if (pInstance->CallbackSet){
+ if (pInstance->CallbackSet) {
if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
- (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+ (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
Make them equal. */
pInstance->CallbackSet = FALSE;
- if (pInstance->pCallBack != 0){
- (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
- pInstance->pGeneralPurpose,
- pInstance->CallbackParam );
+ if (pInstance->pCallBack != 0) {
+ (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+ pInstance->CallbackParam);
}
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
index b002738..aba8537 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
@@ -25,42 +25,35 @@
/**********************************************************************************
FUNCTION MIXSOFT_2ST_D32C31_SAT
***********************************************************************************/
-void MixSoft_2St_D32C31_SAT( Mix_2St_Cll_FLOAT_t *pInstance,
- const LVM_FLOAT *src1,
- const LVM_FLOAT *src2,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
-
- if(n <= 0) return;
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+ const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+ if (n <= 0) return;
/******************************************************************************
SOFT MIXING
*******************************************************************************/
- if ((pInstance->Current1 != pInstance->Target1) || (pInstance->Current2 != pInstance->Target2))
- {
+ if ((pInstance->Current1 != pInstance->Target1) ||
+ (pInstance->Current2 != pInstance->Target2)) {
MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
- MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t *)&pInstance->Alpha2, /* Cast to void: \
- no dereferencing in function*/
- src2, dst, n);
+ MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: \
+ no dereferencing in function*/
+ src2, dst, n);
}
/******************************************************************************
HARD MIXING
*******************************************************************************/
- else
- {
+ else {
if (pInstance->Current1 == 0)
MixSoft_1St_D32C31_WRA(
- (Mix_1St_Cll_FLOAT_t *) &pInstance->Alpha2, /* Cast to void: no \
- dereferencing in function*/
- src2, dst, n);
+ (Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: no \
+ dereferencing in function*/
+ src2, dst, n);
else if (pInstance->Current2 == 0)
- MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*) pInstance, src1, dst, n);
+ MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
else
Core_MixHard_2St_D32C31_SAT(pInstance, src1, src2, dst, n);
}
}
/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mixer_private.h b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
index 1d653bb..e1e62c5 100644
--- a/media/libeffects/lvm/lib/Common/src/Mixer_private.h
+++ b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
@@ -26,13 +26,13 @@
#define POINT_ZERO_ONE_DB 2473805 /* 0.01 dB on a full scale signal = (10^(0.01/20) -1) * 2^31 */
-#define POINT_ZERO_ONE_DB_FLOAT 0.001152 /* 0.01 dB on a full scale \
- signal = (10^(0.01/20) -1) * 2^31 */
+#define POINT_ZERO_ONE_DB_FLOAT \
+ 0.001152 /* 0.01 dB on a full scale \
+ signal = (10^(0.01/20) -1) * 2^31 */
/**********************************************************************************
DEFINITIONS
***********************************************************************************/
/**********************************************************************************/
-#endif //#ifndef __MIXER_PRIVATE_H__
-
+#endif //#ifndef __MIXER_PRIVATE_H__
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
index ead798d..7ab5d49 100644
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
@@ -25,16 +25,12 @@
FUNCTION MonoTo2I_16
***********************************************************************************/
-void MonoTo2I_16( const LVM_INT16 *src,
- LVM_INT16 *dst,
- LVM_INT16 n)
-{
+void MonoTo2I_16(const LVM_INT16* src, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
- src += (n-1);
- dst += ((n*2)-1);
+ src += (n - 1);
+ dst += ((n * 2) - 1);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst--;
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
index 603d1fc..1ba669f 100644
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
@@ -25,16 +25,12 @@
FUNCTION MonoTo2I_32
***********************************************************************************/
-void MonoTo2I_32( const LVM_INT32 *src,
- LVM_INT32 *dst,
- LVM_INT16 n)
-{
+void MonoTo2I_32(const LVM_INT32* src, LVM_INT32* dst, LVM_INT16 n) {
LVM_INT16 ii;
- src += (n-1);
- dst += ((n*2)-1);
+ src += (n - 1);
+ dst += ((n * 2) - 1);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst--;
@@ -45,16 +41,12 @@
return;
}
-void MonoTo2I_Float( const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
src += (n - 1);
dst += ((n * 2) - 1);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
*dst = *src;
dst--;
diff --git a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
index 370c39a..4589703 100644
--- a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
@@ -26,37 +26,27 @@
FUNCTION MULT3S_16X16
***********************************************************************************/
-void Mult3s_32x16( const LVM_INT32 *src,
- const LVM_INT16 val,
- LVM_INT32 *dst,
- LVM_INT16 n)
-{
+void Mult3s_32x16(const LVM_INT32* src, const LVM_INT16 val, LVM_INT32* dst, LVM_INT16 n) {
LVM_INT16 ii;
- LVM_INT32 srcval,temp;
+ LVM_INT32 srcval, temp;
- for (ii = n; ii != 0; ii--)
- {
- srcval=*src;
+ for (ii = n; ii != 0; ii--) {
+ srcval = *src;
src++;
- MUL32x16INTO32(srcval,val,temp,15)
+ MUL32x16INTO32(srcval, val, temp, 15)
- *dst = temp;
+ * dst = temp;
dst++;
}
return;
}
-void Mult3s_Float( const LVM_FLOAT *src,
- const LVM_FLOAT val,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_FLOAT temp;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
temp = (*src) * val;
src++;
*dst = temp;
diff --git a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
index 36d1149..fba0666 100644
--- a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
@@ -61,22 +61,16 @@
/* */
/****************************************************************************************/
-void NonLinComp_D16(LVM_INT16 Gain,
- LVM_INT16 *pDataIn,
- LVM_INT16 *pDataOut,
- LVM_INT32 BlockLength)
-{
-
- LVM_INT16 Sample; /* Input samples */
- LVM_INT32 SampleNo; /* Sample index */
- LVM_INT16 Temp;
+void NonLinComp_D16(LVM_INT16 Gain, LVM_INT16* pDataIn, LVM_INT16* pDataOut,
+ LVM_INT32 BlockLength) {
+ LVM_INT16 Sample; /* Input samples */
+ LVM_INT32 SampleNo; /* Sample index */
+ LVM_INT16 Temp;
/*
* Process a block of samples
*/
- for(SampleNo = 0; SampleNo<BlockLength; SampleNo++)
- {
-
+ for (SampleNo = 0; SampleNo < BlockLength; SampleNo++) {
/*
* Read the input
*/
@@ -88,15 +82,11 @@
* harmonic distortion. The amount of compression is control by the
* gain factor
*/
- if ((LVM_INT32)Sample != -32768)
- {
+ if ((LVM_INT32)Sample != -32768) {
Temp = (LVM_INT16)((Sample * Sample) >> 15);
- if(Sample >0)
- {
+ if (Sample > 0) {
Sample = (LVM_INT16)(Sample + ((Gain * (Sample - Temp)) >> 15));
- }
- else
- {
+ } else {
Sample = (LVM_INT16)(Sample + ((Gain * (Sample + Temp)) >> 15));
}
}
@@ -106,25 +96,18 @@
*/
*pDataOut = Sample;
pDataOut++;
-
}
-
}
-void NonLinComp_Float(LVM_FLOAT Gain,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT32 BlockLength)
-{
-
- LVM_FLOAT Sample; /* Input samples */
- LVM_INT32 SampleNo; /* Sample index */
- LVM_FLOAT Temp;
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+ LVM_INT32 BlockLength) {
+ LVM_FLOAT Sample; /* Input samples */
+ LVM_INT32 SampleNo; /* Sample index */
+ LVM_FLOAT Temp;
/*
* Process a block of samples
*/
- for(SampleNo = 0; SampleNo < BlockLength; SampleNo++)
- {
+ for (SampleNo = 0; SampleNo < BlockLength; SampleNo++) {
/*
* Read the input
*/
@@ -136,16 +119,12 @@
* harmonic distortion. The amount of compression is control by the
* gain factor
*/
- if (Sample != -1.0f)
- {
+ if (Sample != -1.0f) {
Temp = ((Sample * Sample));
- if(Sample > 0)
- {
- Sample = (Sample + ((Gain * (Sample - Temp)) ));
- }
- else
- {
- Sample = (Sample + ((Gain * (Sample + Temp)) ));
+ if (Sample > 0) {
+ Sample = (Sample + ((Gain * (Sample - Temp))));
+ } else {
+ Sample = (Sample + ((Gain * (Sample + Temp))));
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
index 3f62f99..0afaad2 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
@@ -37,87 +37,79 @@
pBiquadState->pDelays[6] is y(n-2)L in Q0 format
pBiquadState->pDelays[7] is y(n-2)R in Q0 format
***************************************************************************/
-void PK_2I_D32F32C14G11_TRC_WRA_01 ( Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrSamples)
- {
- LVM_FLOAT ynL,ynR,ynLO,ynRO,templ;
- LVM_INT16 ii;
- PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
+void PK_2I_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrSamples) {
+ LVM_FLOAT ynL, ynR, ynLO, ynRO, templ;
+ LVM_INT16 ii;
+ PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
- for (ii = NrSamples; ii != 0; ii--)
- {
+ for (ii = NrSamples; ii != 0; ii--) {
+ /**************************************************************************
+ PROCESSING OF THE LEFT CHANNEL
+ ***************************************************************************/
+ /* ynL= (A0 * (x(n)L - x(n-2)L ) )*/
+ templ = (*pDataIn) - pBiquadState->pDelays[2];
+ ynL = templ * pBiquadState->coefs[0];
- /**************************************************************************
- PROCESSING OF THE LEFT CHANNEL
- ***************************************************************************/
- /* ynL= (A0 * (x(n)L - x(n-2)L ) )*/
- templ = (*pDataIn) - pBiquadState->pDelays[2];
- ynL = templ * pBiquadState->coefs[0];
+ /* ynL+= ((-B2 * y(n-2)L )) */
+ templ = pBiquadState->pDelays[6] * pBiquadState->coefs[1];
+ ynL += templ;
- /* ynL+= ((-B2 * y(n-2)L )) */
- templ = pBiquadState->pDelays[6] * pBiquadState->coefs[1];
- ynL += templ;
+ /* ynL+= ((-B1 * y(n-1)L ) ) */
+ templ = pBiquadState->pDelays[4] * pBiquadState->coefs[2];
+ ynL += templ;
- /* ynL+= ((-B1 * y(n-1)L ) ) */
- templ = pBiquadState->pDelays[4] * pBiquadState->coefs[2];
- ynL += templ;
+ /* ynLO= ((Gain * ynL )) */
+ ynLO = ynL * pBiquadState->coefs[3];
- /* ynLO= ((Gain * ynL )) */
- ynLO = ynL * pBiquadState->coefs[3];
+ /* ynLO=( ynLO + x(n)L )*/
+ ynLO += (*pDataIn);
- /* ynLO=( ynLO + x(n)L )*/
- ynLO += (*pDataIn);
+ /**************************************************************************
+ PROCESSING OF THE RIGHT CHANNEL
+ ***************************************************************************/
+ /* ynR= (A0 * (x(n)R - x(n-2)R ) ) */
+ templ = (*(pDataIn + 1)) - pBiquadState->pDelays[3];
+ ynR = templ * pBiquadState->coefs[0];
- /**************************************************************************
- PROCESSING OF THE RIGHT CHANNEL
- ***************************************************************************/
- /* ynR= (A0 * (x(n)R - x(n-2)R ) ) */
- templ = (*(pDataIn + 1)) - pBiquadState->pDelays[3];
- ynR = templ * pBiquadState->coefs[0];
+ /* ynR+= ((-B2 * y(n-2)R ) ) */
+ templ = pBiquadState->pDelays[7] * pBiquadState->coefs[1];
+ ynR += templ;
- /* ynR+= ((-B2 * y(n-2)R ) ) */
- templ = pBiquadState->pDelays[7] * pBiquadState->coefs[1];
- ynR += templ;
+ /* ynR+= ((-B1 * y(n-1)R ) ) */
+ templ = pBiquadState->pDelays[5] * pBiquadState->coefs[2];
+ ynR += templ;
- /* ynR+= ((-B1 * y(n-1)R ) ) */
- templ = pBiquadState->pDelays[5] * pBiquadState->coefs[2];
- ynR += templ;
+ /* ynRO= ((Gain * ynR )) */
+ ynRO = ynR * pBiquadState->coefs[3];
- /* ynRO= ((Gain * ynR )) */
- ynRO = ynR * pBiquadState->coefs[3];
+ /* ynRO=( ynRO + x(n)R )*/
+ ynRO += (*(pDataIn + 1));
- /* ynRO=( ynRO + x(n)R )*/
- ynRO += (*(pDataIn+1));
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
+ pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
+ pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
+ pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
+ pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
+ pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
+ pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
+ pDataIn++;
+ pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
+ pDataIn++;
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
- pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
- pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
- pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
- pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
- pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
- pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
- pDataIn++;
- pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
- pDataIn++;
-
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = ynLO; /* Write Left output*/
- pDataOut++;
- *pDataOut = ynRO; /* Write Right ouput*/
- pDataOut++;
-
- }
-
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = ynLO; /* Write Left output*/
+ pDataOut++;
+ *pDataOut = ynRO; /* Write Right output*/
+ pDataOut++;
}
+}
-#ifdef SUPPORT_MC
/**************************************************************************
DELAYS-
pBiquadState->pDelays[0] to
@@ -133,60 +125,51 @@
pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
***************************************************************************/
-void PK_Mc_D32F32C14G11_TRC_WRA_01 (Biquad_FLOAT_Instance_t *pInstance,
- LVM_FLOAT *pDataIn,
- LVM_FLOAT *pDataOut,
- LVM_INT16 NrFrames,
- LVM_INT16 NrChannels)
- {
- LVM_FLOAT yn, ynO, temp;
- LVM_INT16 ii, jj;
- PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
+void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+ LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+ LVM_FLOAT yn, ynO, temp;
+ LVM_INT16 ii, jj;
+ PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
- for (ii = NrFrames; ii != 0; ii--)
- {
+ for (ii = NrFrames; ii != 0; ii--) {
+ for (jj = 0; jj < NrChannels; jj++) {
+ /**************************************************************************
+ PROCESSING OF THE jj CHANNEL
+ ***************************************************************************/
+ /* yn= (A0 * (x(n) - x(n-2)))*/
+ temp = (*pDataIn) - pBiquadState->pDelays[NrChannels + jj];
+ yn = temp * pBiquadState->coefs[0];
- for (jj = 0; jj < NrChannels; jj++)
- {
- /**************************************************************************
- PROCESSING OF THE jj CHANNEL
- ***************************************************************************/
- /* yn= (A0 * (x(n) - x(n-2)))*/
- temp = (*pDataIn) - pBiquadState->pDelays[NrChannels + jj];
- yn = temp * pBiquadState->coefs[0];
+ /* yn+= ((-B2 * y(n-2))) */
+ temp = pBiquadState->pDelays[NrChannels * 3 + jj] * pBiquadState->coefs[1];
+ yn += temp;
- /* yn+= ((-B2 * y(n-2))) */
- temp = pBiquadState->pDelays[NrChannels*3 + jj] * pBiquadState->coefs[1];
- yn += temp;
+ /* yn+= ((-B1 * y(n-1))) */
+ temp = pBiquadState->pDelays[NrChannels * 2 + jj] * pBiquadState->coefs[2];
+ yn += temp;
- /* yn+= ((-B1 * y(n-1))) */
- temp = pBiquadState->pDelays[NrChannels*2 + jj] * pBiquadState->coefs[2];
- yn += temp;
+ /* ynO= ((Gain * yn)) */
+ ynO = yn * pBiquadState->coefs[3];
- /* ynO= ((Gain * yn)) */
- ynO = yn * pBiquadState->coefs[3];
+ /* ynO=(ynO + x(n))*/
+ ynO += (*pDataIn);
- /* ynO=(ynO + x(n))*/
- ynO += (*pDataIn);
-
- /**************************************************************************
- UPDATING THE DELAYS
- ***************************************************************************/
- pBiquadState->pDelays[NrChannels * 3 + jj] =
+ /**************************************************************************
+ UPDATING THE DELAYS
+ ***************************************************************************/
+ pBiquadState->pDelays[NrChannels * 3 + jj] =
pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
- pBiquadState->pDelays[NrChannels * 1 + jj] =
- pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
- pBiquadState->pDelays[NrChannels * 2 + jj] = yn; /* Update y(n-1) */
- pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
- pDataIn++;
+ pBiquadState->pDelays[NrChannels * 1 + jj] =
+ pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
+ pBiquadState->pDelays[NrChannels * 2 + jj] = yn; /* Update y(n-1) */
+ pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
+ pDataIn++;
- /**************************************************************************
- WRITING THE OUTPUT
- ***************************************************************************/
- *pDataOut = ynO; /* Write output*/
- pDataOut++;
- }
+ /**************************************************************************
+ WRITING THE OUTPUT
+ ***************************************************************************/
+ *pDataOut = ynO; /* Write output*/
+ pDataOut++;
}
-
}
-#endif
+}
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
index 714aa52..1e08a55 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
@@ -17,4 +17,3 @@
#include "BIQUAD.h"
#include "PK_2I_D32F32CllGss_TRC_WRA_01_Private.h"
-
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
index c5f9c7c..3f5d332 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
@@ -20,12 +20,11 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
#endif /* _PK_2I_D32F32CLLGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
index f6c05da..178d766 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
@@ -17,12 +17,11 @@
#include "BIQUAD.h"
#include "PK_2I_D32F32CssGss_TRC_WRA_01_Private.h"
-void PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t *pInstance,
- Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
- PK_FLOAT_Coefs_t *pCoef)
-{
- PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
- pBiquadState->pDelays = (LVM_FLOAT *) pTaps;
+void PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance,
+ Biquad_2I_Order2_FLOAT_Taps_t* pTaps,
+ PK_FLOAT_Coefs_t* pCoef) {
+ PFilter_State_Float pBiquadState = (PFilter_State_Float)pInstance;
+ pBiquadState->pDelays = (LVM_FLOAT*)pTaps;
pBiquadState->coefs[0] = pCoef->A0;
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
index cc924c4..57a1c16 100644
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
@@ -21,19 +21,17 @@
/* The internal state variables are implemented in a (for the user) hidden structure */
/* In this (private) file, the internal structure is declared fro private use. */
-typedef struct _Filter_State_Float_
-{
- LVM_FLOAT * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
-}Filter_State_Float;
+typedef struct _Filter_State_Float_ {
+ LVM_FLOAT* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_FLOAT coefs[5]; /* pointer to the filter coefficients */
+} Filter_State_Float;
-typedef Filter_State_Float * PFilter_State_Float ;
-typedef struct _Filter_State_
-{
- LVM_INT32 * pDelays; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
-}Filter_State;
+typedef Filter_State_Float* PFilter_State_Float;
+typedef struct _Filter_State_ {
+ LVM_INT32* pDelays; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 coefs[5]; /* pointer to the filter coefficients */
+} Filter_State;
-typedef Filter_State * PFilter_State ;
+typedef Filter_State* PFilter_State;
#endif /* _PK_2I_D32F32CSSGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
index 97a04c1..f54ba90 100644
--- a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
@@ -24,55 +24,39 @@
/**********************************************************************************
FUNCTION Shift_Sat_v32xv32
***********************************************************************************/
-void Shift_Sat_Float (const LVM_INT16 val,
- const LVM_FLOAT *src,
- LVM_FLOAT *dst,
- LVM_INT16 n)
-{
- LVM_FLOAT temp;
- LVM_INT32 ii,ij;
- LVM_INT16 RShift;
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
+ LVM_FLOAT temp;
+ LVM_INT32 ii, ij;
+ LVM_INT16 RShift;
- if(val > 0)
- {
- for (ii = n; ii != 0; ii--)
- {
+ if (val > 0) {
+ for (ii = n; ii != 0; ii--) {
temp = (LVM_FLOAT)*src;
src++;
- for(ij = 0; ij < val; ij++)
- {
+ for (ij = 0; ij < val; ij++) {
temp = temp * 2;
}
- if(temp > 1.0)
- temp = 1.0;
- if(temp < -1.0)
- temp = -1.0;
+ if (temp > 1.0) temp = 1.0;
+ if (temp < -1.0) temp = -1.0;
*dst = (LVM_FLOAT)temp;
dst++;
}
- }
- else if(val < 0)
- {
- RShift=(LVM_INT16)(-val);
+ } else if (val < 0) {
+ RShift = (LVM_INT16)(-val);
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
temp = (LVM_FLOAT)*src;
src++;
- for(ij = 0; ij < RShift; ij++)
- {
+ for (ij = 0; ij < RShift; ij++) {
temp = temp / 2;
}
*dst = (LVM_FLOAT)temp;
dst++;
}
- }
- else
- {
- if(src != dst)
- {
+ } else {
+ if (src != dst) {
Copy_Float(src, dst, n);
}
}
diff --git a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
index 4da2013..2143465 100644
--- a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
@@ -57,17 +57,16 @@
*
****************************************************************************************/
-#define FOUR_OVER_SIX 21846 /* (4 / 6) * 2^15 */
-#define SIX_DB 96 /* 6 * 16 or 6dB in Q11.4 format */
-#define FIRST_COEF_NEG 14884305
-#define FIRST_COEF_POS 7442152 /* FIRST_COEF_NEG / 2 */
-#define SECOND_COEF 38836
-#define MAX_VALUE 1536 /* 96 * 16 */
+#define FOUR_OVER_SIX 21846 /* (4 / 6) * 2^15 */
+#define SIX_DB 96 /* 6 * 16 or 6dB in Q11.4 format */
+#define FIRST_COEF_NEG 14884305
+#define FIRST_COEF_POS 7442152 /* FIRST_COEF_NEG / 2 */
+#define SECOND_COEF 38836
+#define MAX_VALUE 1536 /* 96 * 16 */
-LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix)
-{
- LVM_FLOAT dB_Float;
- LVM_FLOAT LinFloat;
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix) {
+ LVM_FLOAT dB_Float;
+ LVM_FLOAT LinFloat;
dB_Float = (LVM_FLOAT)((LVM_FLOAT)db_fix / 16.0f);
LinFloat = pow(10, dB_Float / 20.0);
diff --git a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp b/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
index 4092560..66f9132 100644
--- a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
@@ -25,16 +25,11 @@
FUNCTION MULT3S_16X16
***********************************************************************************/
-void Mult3s_16x16( const LVM_INT16 *src,
- const LVM_INT16 val,
- LVM_INT16 *dst,
- LVM_INT16 n)
-{
+void Mult3s_16x16(const LVM_INT16* src, const LVM_INT16 val, LVM_INT16* dst, LVM_INT16 n) {
LVM_INT16 ii;
LVM_INT32 temp;
- for (ii = n; ii != 0; ii--)
- {
+ for (ii = n; ii != 0; ii--) {
temp = (LVM_INT32)(*src) * (LVM_INT32)val;
src++;
diff --git a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
index c5ddf77..f1afcd6 100644
--- a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
+++ b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
@@ -86,16 +86,9 @@
/* */
/****************************************************************************************/
-/* Memory table */
-#define LVEQNB_MEMREGION_INSTANCE 0 /* Offset to the instance memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_DATA 1 /* Offset to persistent data memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_COEF 2 /* Offset to persistent coefficient region */
-#define LVEQNB_MEMREGION_SCRATCH 3 /* Offset to data scratch memory region */
-#define LVEQNB_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
/* Callback events */
-#define LVEQNB_EVENT_NONE 0x0000 /* Not a valid event */
-#define LVEQNB_EVENT_ALGOFF 0x0001 /* EQNB has completed switch off */
+#define LVEQNB_EVENT_NONE 0x0000 /* Not a valid event */
+#define LVEQNB_EVENT_ALGOFF 0x0001 /* EQNB has completed switch off */
/****************************************************************************************/
/* */
@@ -104,42 +97,25 @@
/****************************************************************************************/
/* Instance handle */
-typedef void *LVEQNB_Handle_t;
+typedef void* LVEQNB_Handle_t;
/* Operating modes */
-typedef enum
-{
- LVEQNB_BYPASS = 0,
- LVEQNB_ON = 1,
- LVEQNB_MODE_MAX = LVM_MAXINT_32
-} LVEQNB_Mode_en;
+typedef enum { LVEQNB_BYPASS = 0, LVEQNB_ON = 1, LVEQNB_MODE_MAX = LVM_MAXINT_32 } LVEQNB_Mode_en;
/* Filter mode control */
-typedef enum
-{
- LVEQNB_FILTER_OFF = 0,
- LVEQNB_FILTER_ON = 1,
+typedef enum {
+ LVEQNB_FILTER_OFF = 0,
+ LVEQNB_FILTER_ON = 1,
LVEQNB_FILTER_DUMMY = LVM_MAXINT_32
} LVEQNB_FilterMode_en;
-/* Memory Types */
-typedef enum
-{
- LVEQNB_PERSISTENT = 0,
- LVEQNB_PERSISTENT_DATA = 1,
- LVEQNB_PERSISTENT_COEF = 2,
- LVEQNB_SCRATCH = 3,
- LVEQNB_MEMORY_MAX = LVM_MAXINT_32
-} LVEQNB_MemoryTypes_en;
-
/* Function return status */
-typedef enum
-{
- LVEQNB_SUCCESS = 0, /* Successful return from a routine */
- LVEQNB_ALIGNMENTERROR = 1, /* Memory alignment error */
- LVEQNB_NULLADDRESS = 2, /* NULL allocation address */
- LVEQNB_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
- LVEQNB_STATUS_MAX = LVM_MAXINT_32
+typedef enum {
+ LVEQNB_SUCCESS = 0, /* Successful return from a routine */
+ LVEQNB_ALIGNMENTERROR = 1, /* Memory alignment error */
+ LVEQNB_NULLADDRESS = 2, /* NULL allocation address */
+ LVEQNB_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
+ LVEQNB_STATUS_MAX = LVM_MAXINT_32
} LVEQNB_ReturnStatus_en;
/****************************************************************************************/
@@ -166,39 +142,35 @@
/*
* Supported source data formats
*/
-#define LVEQNB_CAP_STEREO 1
-#define LVEQNB_CAP_MONOINSTEREO 2
+#define LVEQNB_CAP_STEREO 1
+#define LVEQNB_CAP_MONOINSTEREO 2
-typedef enum
-{
- LVEQNB_STEREO = 0,
+typedef enum {
+ LVEQNB_STEREO = 0,
LVEQNB_MONOINSTEREO = 1,
-#ifdef SUPPORT_MC
LVEQNB_MULTICHANNEL = 2,
-#endif
- LVEQNB_SOURCE_MAX = LVM_MAXINT_32
+ LVEQNB_SOURCE_MAX = LVM_MAXINT_32
} LVEQNB_SourceFormat_en;
/*
* Supported sample rates in samples per second
*/
-#define LVEQNB_CAP_FS_8000 1
-#define LVEQNB_CAP_FS_11025 2
-#define LVEQNB_CAP_FS_12000 4
-#define LVEQNB_CAP_FS_16000 8
-#define LVEQNB_CAP_FS_22050 16
-#define LVEQNB_CAP_FS_24000 32
-#define LVEQNB_CAP_FS_32000 64
-#define LVEQNB_CAP_FS_44100 128
-#define LVEQNB_CAP_FS_48000 256
-#define LVEQNB_CAP_FS_88200 512
-#define LVEQNB_CAP_FS_96000 1024
-#define LVEQNB_CAP_FS_176400 2048
-#define LVEQNB_CAP_FS_192000 4096
+#define LVEQNB_CAP_FS_8000 1
+#define LVEQNB_CAP_FS_11025 2
+#define LVEQNB_CAP_FS_12000 4
+#define LVEQNB_CAP_FS_16000 8
+#define LVEQNB_CAP_FS_22050 16
+#define LVEQNB_CAP_FS_24000 32
+#define LVEQNB_CAP_FS_32000 64
+#define LVEQNB_CAP_FS_44100 128
+#define LVEQNB_CAP_FS_48000 256
+#define LVEQNB_CAP_FS_88200 512
+#define LVEQNB_CAP_FS_96000 1024
+#define LVEQNB_CAP_FS_176400 2048
+#define LVEQNB_CAP_FS_192000 4096
-typedef enum
-{
- LVEQNB_FS_8000 = 0,
+typedef enum {
+ LVEQNB_FS_8000 = 0,
LVEQNB_FS_11025 = 1,
LVEQNB_FS_12000 = 2,
LVEQNB_FS_16000 = 3,
@@ -211,7 +183,7 @@
LVEQNB_FS_96000 = 10,
LVEQNB_FS_176400 = 11,
LVEQNB_FS_192000 = 12,
- LVEQNB_FS_MAX = LVM_MAXINT_32
+ LVEQNB_FS_MAX = LVM_MAXINT_32
} LVEQNB_Fs_en;
/****************************************************************************************/
@@ -220,58 +192,38 @@
/* */
/****************************************************************************************/
-/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVM_UINT16 Alignment; /* Region alignment in bytes */
- LVEQNB_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
-} LVEQNB_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVEQNB_MemoryRegion_t Region[LVEQNB_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVEQNB_MemTab_t;
-
/* Equaliser band definition */
-typedef struct
-{
- LVM_INT16 Gain; /* Band gain in dB */
- LVM_UINT16 Frequency; /* Band centre frequency in Hz */
- LVM_UINT16 QFactor; /* Band quality factor */
+typedef struct {
+ LVM_INT16 Gain; /* Band gain in dB */
+ LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+ LVM_UINT16 QFactor; /* Band quality factor */
} LVEQNB_BandDef_t;
/* Parameter structure */
-typedef struct
-{
+typedef struct {
/* General parameters */
- LVEQNB_Mode_en OperatingMode;
- LVEQNB_Fs_en SampleRate;
- LVEQNB_SourceFormat_en SourceFormat;
+ LVEQNB_Mode_en OperatingMode;
+ LVEQNB_Fs_en SampleRate;
+ LVEQNB_SourceFormat_en SourceFormat;
/* Equaliser parameters */
- LVM_UINT16 NBands; /* Number of bands */
- LVEQNB_BandDef_t *pBandDefinition; /* Pointer to equaliser definitions */
-#ifdef SUPPORT_MC
- LVM_INT16 NrChannels;
-#endif
+ LVM_UINT16 NBands; /* Number of bands */
+ LVEQNB_BandDef_t* pBandDefinition; /* Pointer to equaliser definitions */
+ LVM_INT16 NrChannels;
} LVEQNB_Params_t;
/* Capability structure */
-typedef struct
-{
+typedef struct {
/* General parameters */
- LVM_UINT16 SampleRate;
+ LVM_UINT16 SampleRate;
- LVM_UINT16 SourceFormat;
- LVM_UINT16 MaxBlockSize;
- LVM_UINT16 MaxBands;
+ LVM_UINT16 SourceFormat;
+ LVM_UINT16 MaxBlockSize;
+ LVM_UINT16 MaxBands;
/* Callback parameters */
- LVM_Callback CallBack; /* Bundle callback */
- void *pBundleInstance; /* Bundle instance handle */
+ LVM_Callback CallBack; /* Bundle callback */
+ void* pBundleInstance; /* Bundle instance handle */
} LVEQNB_Capabilities_t;
@@ -283,78 +235,43 @@
/****************************************************************************************/
/* */
-/* FUNCTION: LVEQNB_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the default capabilities */
-/* */
-/* RETURNS: */
-/* LVEQNB_SUCCESS Succeeded */
-/* LVEQNB_NULLADDRESS When any of pMemoryTable and pCapabilities is NULL address */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVEQNB_Process function */
-/* */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t hInstance,
- LVEQNB_MemTab_t *pMemoryTable,
- LVEQNB_Capabilities_t *pCapabilities);
-
-/****************************************************************************************/
-/* */
/* FUNCTION: LVEQNB_Init */
/* */
/* DESCRIPTION: */
-/* Create and initialisation function for the N-Band equalliser module */
-/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* LVEQNB_Memory before calling this function. */
+/* Create and initialisation function for the N-Band equaliser module. */
/* */
/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
+/* phInstance Pointer to instance handle */
/* pCapabilities Pointer to the initialisation capabilities */
+/* pScratch Pointer to bundle scratch buffer */
/* */
/* RETURNS: */
/* LVEQNB_SUCCESS Initialisation succeeded */
-/* LVEQNB_NULLADDRESS When pCapabilities or pMemoryTableis or phInstance are NULL */
-/* LVEQNB_NULLADDRESS One or more of the memory regions has a NULL base address */
-/* pointer for a memory region with a non-zero size. */
-/* */
+/* LVEQNB_NULLADDRESS When pCapabilities or phInstance are NULL */
+/* LVEQNB_NULLADDRESS When allocated memory has a NULL base address */
/* */
/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVEQNB_Process function */
+/* 1. This function must not be interrupted by the LVEQNB_Process function */
/* */
/****************************************************************************************/
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+ LVEQNB_Capabilities_t* pCapabilities, void* pScratch);
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t *phInstance,
- LVEQNB_MemTab_t *pMemoryTable,
- LVEQNB_Capabilities_t *pCapabilities);
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVEQNB_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created during LVEQNB_Init including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVEQNB_Process function */
+/* */
+/****************************************************************************************/
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance);
/****************************************************************************************/
/* */
@@ -377,8 +294,7 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance,
- LVEQNB_Params_t *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -401,8 +317,8 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
- LVEQNB_Capabilities_t *pCapabilities);
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+ LVEQNB_Capabilities_t* pCapabilities);
/****************************************************************************************/
/* */
@@ -426,8 +342,7 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance,
- LVEQNB_Params_t *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -451,10 +366,7 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples);
+LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
-#endif /* __LVEQNB__ */
-
+#endif /* __LVEQNB__ */
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
index c3c0fad..f8a5f2a 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
@@ -111,43 +111,37 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs,
- LVEQNB_BandDef_t *pFilterDefinition,
- PK_FLOAT_Coefs_t *pCoefficients)
-{
-
- extern LVM_FLOAT LVEQNB_GainTable[];
- extern LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
- extern LVM_FLOAT LVEQNB_DTable[];
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+ PK_FLOAT_Coefs_t* pCoefficients) {
+ extern LVM_FLOAT LVEQNB_GainTable[];
+ extern LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
+ extern LVM_FLOAT LVEQNB_DTable[];
/*
* Get the filter definition
*/
- LVM_INT16 Gain = pFilterDefinition->Gain;
- LVM_UINT16 Frequency = pFilterDefinition->Frequency;
+ LVM_INT16 Gain = pFilterDefinition->Gain;
+ LVM_UINT16 Frequency = pFilterDefinition->Frequency;
/* As mentioned in effectbundle.h */
- LVM_FLOAT QFactor = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
+ LVM_FLOAT QFactor = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
/*
* Intermediate variables and temporary values
*/
- LVM_FLOAT T0;
- LVM_FLOAT D;
- LVM_FLOAT A0;
- LVM_FLOAT B1;
- LVM_FLOAT B2;
+ LVM_FLOAT T0;
+ LVM_FLOAT D;
+ LVM_FLOAT A0;
+ LVM_FLOAT B1;
+ LVM_FLOAT B2;
/*
* Calculating the intermediate values
*/
- T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
- if (Gain >= 0)
- {
- D = LVEQNB_DTable[15]; /* D = 1 if GaindB >= 0 */
- }
- else
- {
- D = LVEQNB_DTable[Gain + 15]; /* D = 1 / (1 + G) if GaindB < 0 */
+ T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+ if (Gain >= 0) {
+ D = LVEQNB_DTable[15]; /* D = 1 if GaindB >= 0 */
+ } else {
+ D = LVEQNB_DTable[Gain + 15]; /* D = 1 / (1 + G) if GaindB < 0 */
}
/*
@@ -164,7 +158,7 @@
pCoefficients->A0 = 2 * A0;
pCoefficients->B1 = 2 * B1;
pCoefficients->B2 = 2 * B2;
- pCoefficients->G = LVEQNB_GainTable[Gain + 15];
+ pCoefficients->G = LVEQNB_GainTable[Gain + 15];
- return(LVEQNB_SUCCESS);
+ return (LVEQNB_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
index 6329181..c44a9be 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
@@ -23,78 +23,78 @@
/* Gain table for (10^(Gain/20) - 1) */
/* */
/************************************************************************************/
-#define LVEQNB_Gain_Neg15_dB (-0.822172f)
-#define LVEQNB_Gain_Neg14_dB (-0.800474f)
-#define LVEQNB_Gain_Neg13_dB (-0.776128f)
-#define LVEQNB_Gain_Neg12_dB (-0.748811f)
-#define LVEQNB_Gain_Neg11_dB (-0.718162f)
-#define LVEQNB_Gain_Neg10_dB (-0.683772f)
-#define LVEQNB_Gain_Neg9_dB (-0.645187f)
-#define LVEQNB_Gain_Neg8_dB (-0.601893f)
-#define LVEQNB_Gain_Neg7_dB (-0.553316f)
-#define LVEQNB_Gain_Neg6_dB (-0.498813f)
-#define LVEQNB_Gain_Neg5_dB (-0.437659f)
-#define LVEQNB_Gain_Neg4_dB (-0.369043f)
-#define LVEQNB_Gain_Neg3_dB (-0.292054f)
-#define LVEQNB_Gain_Neg2_dB (-0.205672f)
-#define LVEQNB_Gain_Neg1_dB (-0.108749f)
-#define LVEQNB_Gain_0_dB 0.000000f
-#define LVEQNB_Gain_1_dB 0.122018f
-#define LVEQNB_Gain_2_dB 0.258925f
-#define LVEQNB_Gain_3_dB 0.412538f
-#define LVEQNB_Gain_4_dB 0.584893f
-#define LVEQNB_Gain_5_dB 0.778279f
-#define LVEQNB_Gain_6_dB 0.995262f
-#define LVEQNB_Gain_7_dB 1.238721f
-#define LVEQNB_Gain_8_dB 1.511886f
-#define LVEQNB_Gain_9_dB 1.818383f
-#define LVEQNB_Gain_10_dB 2.162278f
-#define LVEQNB_Gain_11_dB 2.548134f
-#define LVEQNB_Gain_12_dB 2.981072f
-#define LVEQNB_Gain_13_dB 3.466836f
-#define LVEQNB_Gain_14_dB 4.011872f
-#define LVEQNB_Gain_15_dB 4.623413f
+#define LVEQNB_Gain_Neg15_dB (-0.822172f)
+#define LVEQNB_Gain_Neg14_dB (-0.800474f)
+#define LVEQNB_Gain_Neg13_dB (-0.776128f)
+#define LVEQNB_Gain_Neg12_dB (-0.748811f)
+#define LVEQNB_Gain_Neg11_dB (-0.718162f)
+#define LVEQNB_Gain_Neg10_dB (-0.683772f)
+#define LVEQNB_Gain_Neg9_dB (-0.645187f)
+#define LVEQNB_Gain_Neg8_dB (-0.601893f)
+#define LVEQNB_Gain_Neg7_dB (-0.553316f)
+#define LVEQNB_Gain_Neg6_dB (-0.498813f)
+#define LVEQNB_Gain_Neg5_dB (-0.437659f)
+#define LVEQNB_Gain_Neg4_dB (-0.369043f)
+#define LVEQNB_Gain_Neg3_dB (-0.292054f)
+#define LVEQNB_Gain_Neg2_dB (-0.205672f)
+#define LVEQNB_Gain_Neg1_dB (-0.108749f)
+#define LVEQNB_Gain_0_dB 0.000000f
+#define LVEQNB_Gain_1_dB 0.122018f
+#define LVEQNB_Gain_2_dB 0.258925f
+#define LVEQNB_Gain_3_dB 0.412538f
+#define LVEQNB_Gain_4_dB 0.584893f
+#define LVEQNB_Gain_5_dB 0.778279f
+#define LVEQNB_Gain_6_dB 0.995262f
+#define LVEQNB_Gain_7_dB 1.238721f
+#define LVEQNB_Gain_8_dB 1.511886f
+#define LVEQNB_Gain_9_dB 1.818383f
+#define LVEQNB_Gain_10_dB 2.162278f
+#define LVEQNB_Gain_11_dB 2.548134f
+#define LVEQNB_Gain_12_dB 2.981072f
+#define LVEQNB_Gain_13_dB 3.466836f
+#define LVEQNB_Gain_14_dB 4.011872f
+#define LVEQNB_Gain_15_dB 4.623413f
/************************************************************************************/
/* */
/* Frequency table for 2*Pi/Fs */
/* */
/************************************************************************************/
-#define LVEQNB_2PiOn_8000 0.000785f
-#define LVEQNB_2PiOn_11025 0.000570f
-#define LVEQNB_2PiOn_12000 0.000524f
-#define LVEQNB_2PiOn_16000 0.000393f
-#define LVEQNB_2PiOn_22050 0.000285f
-#define LVEQNB_2PiOn_24000 0.000262f
-#define LVEQNB_2PiOn_32000 0.000196f
-#define LVEQNB_2PiOn_44100 0.000142f
-#define LVEQNB_2PiOn_48000 0.000131f
+#define LVEQNB_2PiOn_8000 0.000785f
+#define LVEQNB_2PiOn_11025 0.000570f
+#define LVEQNB_2PiOn_12000 0.000524f
+#define LVEQNB_2PiOn_16000 0.000393f
+#define LVEQNB_2PiOn_22050 0.000285f
+#define LVEQNB_2PiOn_24000 0.000262f
+#define LVEQNB_2PiOn_32000 0.000196f
+#define LVEQNB_2PiOn_44100 0.000142f
+#define LVEQNB_2PiOn_48000 0.000131f
-#define LVEQNB_2PiOn_88200 0.000071f
-#define LVEQNB_2PiOn_96000 0.000065f
-#define LVEQNB_2PiOn_176400 0.000036f
-#define LVEQNB_2PiOn_192000 0.000033f
+#define LVEQNB_2PiOn_88200 0.000071f
+#define LVEQNB_2PiOn_96000 0.000065f
+#define LVEQNB_2PiOn_176400 0.000036f
+#define LVEQNB_2PiOn_192000 0.000033f
/************************************************************************************/
/* */
/* 50D table for 50 / ( 1 + Gain ) */
/* */
/************************************************************************************/
-#define LVEQNB_100D_Neg15_dB 5.623413f
-#define LVEQNB_100D_Neg14_dB 5.011872f
-#define LVEQNB_100D_Neg13_dB 4.466836f
-#define LVEQNB_100D_Neg12_dB 3.981072f
-#define LVEQNB_100D_Neg11_dB 3.548134f
-#define LVEQNB_100D_Neg10_dB 3.162278f
-#define LVEQNB_100D_Neg9_dB 2.818383f
-#define LVEQNB_100D_Neg8_dB 2.511886f
-#define LVEQNB_100D_Neg7_dB 2.238721f
-#define LVEQNB_100D_Neg6_dB 1.995262f
-#define LVEQNB_100D_Neg5_dB 1.778279f
-#define LVEQNB_100D_Neg4_dB 1.584893f
-#define LVEQNB_100D_Neg3_dB 1.412538f
-#define LVEQNB_100D_Neg2_dB 1.258925f
-#define LVEQNB_100D_Neg1_dB 1.122018f
-#define LVEQNB_100D_0_dB 1.000000f
+#define LVEQNB_100D_Neg15_dB 5.623413f
+#define LVEQNB_100D_Neg14_dB 5.011872f
+#define LVEQNB_100D_Neg13_dB 4.466836f
+#define LVEQNB_100D_Neg12_dB 3.981072f
+#define LVEQNB_100D_Neg11_dB 3.548134f
+#define LVEQNB_100D_Neg10_dB 3.162278f
+#define LVEQNB_100D_Neg9_dB 2.818383f
+#define LVEQNB_100D_Neg8_dB 2.511886f
+#define LVEQNB_100D_Neg7_dB 2.238721f
+#define LVEQNB_100D_Neg6_dB 1.995262f
+#define LVEQNB_100D_Neg5_dB 1.778279f
+#define LVEQNB_100D_Neg4_dB 1.584893f
+#define LVEQNB_100D_Neg3_dB 1.412538f
+#define LVEQNB_100D_Neg2_dB 1.258925f
+#define LVEQNB_100D_Neg1_dB 1.122018f
+#define LVEQNB_100D_0_dB 1.000000f
#endif
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 6bb4a7e..bccbe86 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -32,8 +32,8 @@
/* */
/****************************************************************************************/
-#define LOW_FREQ 298 /* 32768/110 for low test frequency */
-#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
+#define LOW_FREQ 298 /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
/****************************************************************************************/
/* */
@@ -56,23 +56,19 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance,
- LVEQNB_Params_t *pParams)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+ LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
- LVEQNB_Instance_t *pInstance =(LVEQNB_Instance_t *)hInstance;
-
- /*
+ /*
* Check for error conditions
*/
- if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
return LVEQNB_NULLADDRESS;
}
*pParams = pInstance->Params;
- return(LVEQNB_SUCCESS);
+ return (LVEQNB_SUCCESS);
}
/************************************************************************************/
@@ -96,20 +92,17 @@
/* */
/************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
- LVEQNB_Capabilities_t *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+ LVEQNB_Capabilities_t* pCapabilities) {
+ LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
- LVEQNB_Instance_t *pInstance =(LVEQNB_Instance_t *)hInstance;
-
- if((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL)) {
return LVEQNB_NULLADDRESS;
}
*pCapabilities = pInstance->Capabilities;
- return(LVEQNB_SUCCESS);
+ return (LVEQNB_SUCCESS);
}
/************************************************************************************/
@@ -134,33 +127,30 @@
/* */
/************************************************************************************/
-void LVEQNB_SetFilters(LVEQNB_Instance_t *pInstance,
- LVEQNB_Params_t *pParams)
-{
- extern const LVM_UINT32 LVEQNB_SampleRateTab[]; /* Sample rate table */
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams) {
+ extern const LVM_UINT32 LVEQNB_SampleRateTab[]; /* Sample rate table */
- LVM_UINT16 i; /* Filter band index */
- LVM_UINT32 fs = (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
- LVM_UINT32 fc; /* Filter centre frequency */
- LVM_INT16 QFactor; /* Filter Q factor */
+ LVM_UINT16 i; /* Filter band index */
+ LVM_UINT32 fs =
+ (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
+ LVM_UINT32 fc; /* Filter centre frequency */
+ LVM_INT16 QFactor; /* Filter Q factor */
pInstance->NBands = pParams->NBands;
- for (i=0; i<pParams->NBands; i++)
- {
+ for (i = 0; i < pParams->NBands; i++) {
/*
* Get the filter settings
*/
- fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
- QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
+ fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
+ QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
/*
* Check for out of range frequencies
*/
- if (fc > (fs >> 1))
- {
+ if (fc > (fs >> 1)) {
pInstance->pBiquadType[i] = LVEQNB_OutOfRange;
}
@@ -168,7 +158,6 @@
* Copy the filter definition to persistant memory
*/
pInstance->pBandDefinitions[i] = pParams->pBandDefinition[i];
-
}
}
@@ -186,46 +175,37 @@
/* */
/************************************************************************************/
-void LVEQNB_SetCoefficients(LVEQNB_Instance_t *pInstance)
-{
-
- LVM_UINT16 i; /* Filter band index */
- LVEQNB_BiquadType_en BiquadType; /* Filter biquad type */
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance) {
+ LVM_UINT16 i; /* Filter band index */
+ LVEQNB_BiquadType_en BiquadType; /* Filter biquad type */
/*
* Set the coefficients for each band by the init function
*/
- for (i=0; i<pInstance->Params.NBands; i++)
- {
-
+ for (i = 0; i < pInstance->Params.NBands; i++) {
/*
* Check band type for correct initialisation method and recalculate the coefficients
*/
BiquadType = pInstance->pBiquadType[i];
- switch (BiquadType)
- {
- case LVEQNB_SinglePrecision_Float:
- {
- PK_FLOAT_Coefs_t Coefficients;
+ switch (BiquadType) {
+ case LVEQNB_SinglePrecision_Float: {
+ PK_FLOAT_Coefs_t Coefficients;
/*
* Calculate the single precision coefficients
*/
LVEQNB_SinglePrecCoefs((LVM_UINT16)pInstance->Params.SampleRate,
- &pInstance->pBandDefinitions[i],
- &Coefficients);
+ &pInstance->pBandDefinitions[i], &Coefficients);
/*
* Set the coefficients
*/
PK_2I_D32F32CssGss_TRC_WRA_01_Init(&pInstance->pEQNB_FilterState_Float[i],
- &pInstance->pEQNB_Taps_Float[i],
- &Coefficients);
+ &pInstance->pEQNB_Taps_Float[i], &Coefficients);
break;
}
default:
break;
}
}
-
}
/************************************************************************************/
@@ -239,20 +219,19 @@
/* pInstance Pointer to the instance */
/* */
/************************************************************************************/
-void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t *pInstance)
-{
- LVM_FLOAT *pTapAddress;
- LVM_INT16 NumTaps;
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance) {
+ LVM_FLOAT* pTapAddress;
+ LVM_INT16 NumTaps;
- pTapAddress = (LVM_FLOAT *)pInstance->pEQNB_Taps_Float;
- NumTaps = (LVM_INT16)((pInstance->Capabilities.MaxBands * \
- sizeof(Biquad_2I_Order2_FLOAT_Taps_t)) / sizeof(LVM_FLOAT));
+ pTapAddress = (LVM_FLOAT*)pInstance->pEQNB_Taps_Float;
+ NumTaps =
+ (LVM_INT16)((pInstance->Capabilities.MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t)) /
+ sizeof(LVM_FLOAT));
- if (NumTaps != 0)
- {
- LoadConst_Float(0, /* Clear the history, value 0 */
- pTapAddress, /* Destination */
- NumTaps); /* Number of words */
+ if (NumTaps != 0) {
+ LoadConst_Float(0, /* Clear the history, value 0 */
+ pTapAddress, /* Destination */
+ NumTaps); /* Number of words */
}
}
/****************************************************************************************/
@@ -277,56 +256,47 @@
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance,
- LVEQNB_Params_t *pParams)
-{
-
- LVEQNB_Instance_t *pInstance = (LVEQNB_Instance_t *)hInstance;
- LVM_INT16 bChange = LVM_FALSE;
- LVM_INT16 i = 0;
- LVEQNB_Mode_en OperatingModeSave ;
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+ LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+ LVM_INT16 bChange = LVM_FALSE;
+ LVM_INT16 i = 0;
+ LVEQNB_Mode_en OperatingModeSave;
/*
* Check for error conditions
*/
- if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
return LVEQNB_NULLADDRESS;
}
- if((pParams->NBands !=0) && (pParams->pBandDefinition==LVM_NULL))
- {
+ if ((pParams->NBands != 0) && (pParams->pBandDefinition == LVM_NULL)) {
return LVEQNB_NULLADDRESS;
}
OperatingModeSave = pInstance->Params.OperatingMode;
/* Set the alpha factor of the mixer */
- if (pParams->SampleRate != pInstance->Params.SampleRate)
- {
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+ if (pParams->SampleRate != pInstance->Params.SampleRate) {
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+ LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+ 2);
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+ LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+ 2);
}
- if( (pInstance->Params.NBands != pParams->NBands ) ||
- (pInstance->Params.OperatingMode != pParams->OperatingMode ) ||
- (pInstance->Params.pBandDefinition != pParams->pBandDefinition ) ||
- (pInstance->Params.SampleRate != pParams->SampleRate ) ||
- (pInstance->Params.SourceFormat != pParams->SourceFormat ))
- {
-
+ if ((pInstance->Params.NBands != pParams->NBands) ||
+ (pInstance->Params.OperatingMode != pParams->OperatingMode) ||
+ (pInstance->Params.pBandDefinition != pParams->pBandDefinition) ||
+ (pInstance->Params.SampleRate != pParams->SampleRate) ||
+ (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
bChange = LVM_TRUE;
- }
- else
- {
- for(i = 0; i < pParams->NBands; i++)
- {
-
- if((pInstance->pBandDefinitions[i].Frequency != pParams->pBandDefinition[i].Frequency )||
- (pInstance->pBandDefinitions[i].Gain != pParams->pBandDefinition[i].Gain )||
- (pInstance->pBandDefinitions[i].QFactor != pParams->pBandDefinition[i].QFactor ))
- {
-
+ } else {
+ for (i = 0; i < pParams->NBands; i++) {
+ if ((pInstance->pBandDefinitions[i].Frequency !=
+ pParams->pBandDefinition[i].Frequency) ||
+ (pInstance->pBandDefinitions[i].Gain != pParams->pBandDefinition[i].Gain) ||
+ (pInstance->pBandDefinitions[i].QFactor != pParams->pBandDefinition[i].QFactor)) {
bChange = LVM_TRUE;
}
}
@@ -335,19 +305,17 @@
// During operating mode transition, there is a race condition where the mode
// is still LVEQNB_ON, but the effect is considered disabled in the upper layers.
// modeChange handles this special race condition.
- const int /* bool */ modeChange = pParams->OperatingMode != OperatingModeSave
- || (OperatingModeSave == LVEQNB_ON
- && pInstance->bInOperatingModeTransition
- && LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
+ const int /* bool */ modeChange =
+ pParams->OperatingMode != OperatingModeSave ||
+ (OperatingModeSave == LVEQNB_ON && pInstance->bInOperatingModeTransition &&
+ LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
if (bChange || modeChange) {
-
/*
* If the sample rate has changed clear the history
*/
- if (pInstance->Params.SampleRate != pParams->SampleRate)
- {
- LVEQNB_ClearFilterHistory(pInstance); /* Clear the history */
+ if (pInstance->Params.SampleRate != pParams->SampleRate) {
+ LVEQNB_ClearFilterHistory(pInstance); /* Clear the history */
}
/*
@@ -358,45 +326,45 @@
/*
* Reset the filters except if the algo is switched off
*/
- if(pParams->OperatingMode != LVEQNB_BYPASS){
+ if (pParams->OperatingMode != LVEQNB_BYPASS) {
/*
* Reset the filters as all parameters could have changed
*/
- LVEQNB_SetFilters(pInstance, /* Instance pointer */
- pParams); /* New parameters */
+ LVEQNB_SetFilters(pInstance, /* Instance pointer */
+ pParams); /* New parameters */
/*
* Update the filters
*/
- LVEQNB_SetCoefficients(pInstance); /* Instance pointer */
+ LVEQNB_SetCoefficients(pInstance); /* Instance pointer */
}
if (modeChange) {
- if(pParams->OperatingMode == LVEQNB_ON)
- {
+ if (pParams->OperatingMode == LVEQNB_ON) {
LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 1.0f);
LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 0.0f);
- pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
- pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
- }
- else
- {
+ pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+ pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+ } else {
/* Stay on the ON operating mode until the transition is done */
// This may introduce a state race condition if the effect is enabled again
// while in transition. This is fixed in the modeChange logic.
pInstance->Params.OperatingMode = LVEQNB_ON;
LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 0.0f);
LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 1.0f);
- pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
- pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+ pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+ pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
}
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
- LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+ LVEQNB_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pParams->SampleRate, 2);
+ LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+ LVEQNB_BYPASS_MIXER_TC,
+ (LVM_Fs_en)pParams->SampleRate, 2);
pInstance->bInOperatingModeTransition = LVM_TRUE;
}
-
}
- return(LVEQNB_SUCCESS);
+ return (LVEQNB_SUCCESS);
}
/****************************************************************************************/
@@ -408,23 +376,22 @@
/* transition */
/* */
/****************************************************************************************/
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance,
- void *pGeneralPurpose,
- LVM_INT16 CallbackParam)
-{
- LVEQNB_Instance_t *pInstance =(LVEQNB_Instance_t *)hInstance;
- LVM_Callback CallBack = pInstance->Capabilities.CallBack;
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+ LVM_INT16 CallbackParam) {
+ LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+ LVM_Callback CallBack = pInstance->Capabilities.CallBack;
- (void) pGeneralPurpose;
+ (void)pGeneralPurpose;
- /*
- * Send an ALGOFF event if the ON->OFF switch transition is finished
- */
- if((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
- (CallbackParam == 0)){
+ /*
+ * Send an ALGOFF event if the ON->OFF switch transition is finished
+ */
+ if ((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
+ (CallbackParam == 0)) {
pInstance->Params.OperatingMode = LVEQNB_BYPASS;
- if (CallBack != LVM_NULL){
- CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL, ALGORITHM_EQNB_ID|LVEQNB_EVENT_ALGOFF);
+ if (CallBack != LVM_NULL) {
+ CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL,
+ ALGORITHM_EQNB_ID | LVEQNB_EVENT_ALGOFF);
}
}
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 271a914..1d2a5f5 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -21,6 +21,7 @@
/* */
/****************************************************************************************/
+#include <stdlib.h>
#include "LVEQNB.h"
#include "LVEQNB_Private.h"
#include "InstAlloc.h"
@@ -28,295 +29,148 @@
/****************************************************************************************/
/* */
-/* FUNCTION: LVEQNB_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the instance capabilities */
-/* */
-/* RETURNS: */
-/* LVEQNB_SUCCESS Succeeded */
-/* LVEQNB_NULLADDRESS When any of pMemoryTable and pCapabilities is NULL address */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVEQNB_Process function */
-/* */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t hInstance,
- LVEQNB_MemTab_t *pMemoryTable,
- LVEQNB_Capabilities_t *pCapabilities)
-{
-
- INST_ALLOC AllocMem;
- LVEQNB_Instance_t *pInstance = (LVEQNB_Instance_t *)hInstance;
-
- if((pMemoryTable == LVM_NULL)|| (pCapabilities == LVM_NULL))
- {
- return LVEQNB_NULLADDRESS;
- }
-
- /*
- * Fill in the memory table
- */
- if (hInstance == LVM_NULL)
- {
- /*
- * Instance memory
- */
- InstAlloc_Init(&AllocMem,
- LVM_NULL);
- InstAlloc_AddMember(&AllocMem, /* Low pass filter */
- sizeof(LVEQNB_Instance_t));
- pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Size = InstAlloc_GetTotal(&AllocMem);
- pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Alignment = LVEQNB_INSTANCE_ALIGN;
- pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Type = LVEQNB_PERSISTENT;
- pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
- /*
- * Persistant data memory
- */
- InstAlloc_Init(&AllocMem,
- LVM_NULL);
- InstAlloc_AddMember(&AllocMem, /* Low pass filter */
- sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
- InstAlloc_AddMember(&AllocMem, /* High pass filter */
- sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
- /* Equaliser Biquad Taps */
- InstAlloc_AddMember(&AllocMem,
- (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t)));
- /* Filter definitions */
- InstAlloc_AddMember(&AllocMem,
- (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t)));
- /* Biquad types */
- InstAlloc_AddMember(&AllocMem,
- (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en)));
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size = InstAlloc_GetTotal(&AllocMem);
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Alignment = LVEQNB_DATA_ALIGN;
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Type = LVEQNB_PERSISTENT_DATA;
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
- /*
- * Persistant coefficient memory
- */
- InstAlloc_Init(&AllocMem,
- LVM_NULL);
- InstAlloc_AddMember(&AllocMem, /* Low pass filter */
- sizeof(Biquad_FLOAT_Instance_t));
- InstAlloc_AddMember(&AllocMem, /* High pass filter */
- sizeof(Biquad_FLOAT_Instance_t));
- /* Equaliser Biquad Instance */
- InstAlloc_AddMember(&AllocMem,
- pCapabilities->MaxBands * sizeof(Biquad_FLOAT_Instance_t));
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size = InstAlloc_GetTotal(&AllocMem);
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Alignment = LVEQNB_COEF_ALIGN;
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Type = LVEQNB_PERSISTENT_COEF;
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
- /*
- * Scratch memory
- */
- InstAlloc_Init(&AllocMem,
- LVM_NULL);
- InstAlloc_AddMember(&AllocMem, /* Low pass filter */
- LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * \
- pCapabilities->MaxBlockSize);
- pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Size = InstAlloc_GetTotal(&AllocMem);
- pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Alignment = LVEQNB_SCRATCH_ALIGN;
- pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Type = LVEQNB_SCRATCH;
- pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
- }
- else
- {
- /* Read back memory allocation table */
- *pMemoryTable = pInstance->MemoryTable;
- }
-
- return(LVEQNB_SUCCESS);
-}
-
-/****************************************************************************************/
-/* */
/* FUNCTION: LVEQNB_Init */
/* */
/* DESCRIPTION: */
-/* Create and initialisation function for the N-Band equaliser module */
-/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* DBE_Memory before calling this function. */
+/* Create and initialisation function for the N-Band equaliser module. */
/* */
/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
-/* pCapabilities Pointer to the instance capabilities */
+/* phInstance Pointer to instance handle */
+/* pCapabilities Pointer to the initialisation capabilities */
+/* pScratch Pointer to bundle scratch buffer */
/* */
/* RETURNS: */
/* LVEQNB_SUCCESS Initialisation succeeded */
-/* LVEQNB_NULLADDRESS When pCapabilities or pMemoryTableis or phInstance are NULL */
-/* LVEQNB_NULLADDRESS One or more of the memory regions has a NULL base address */
-/* pointer for a memory region with a non-zero size. */
+/* LVEQNB_NULLADDRESS One or more memory has a NULL pointer - malloc failure */
/* */
/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVEQNB_Process function */
+/* 1. This function must not be interrupted by the LVEQNB_Process function */
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t *phInstance,
- LVEQNB_MemTab_t *pMemoryTable,
- LVEQNB_Capabilities_t *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+ LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
+ LVEQNB_Instance_t* pInstance;
- LVEQNB_Instance_t *pInstance;
- LVM_UINT32 MemSize;
- INST_ALLOC AllocMem;
- LVM_INT32 i;
+ *phInstance = calloc(1, sizeof(*pInstance));
+ if (phInstance == LVM_NULL) {
+ return LVEQNB_NULLADDRESS;
+ }
+ pInstance = (LVEQNB_Instance_t*)*phInstance;
- /*
- * Check for NULL pointers
- */
- if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pCapabilities == LVM_NULL))
- {
+ pInstance->Capabilities = *pCapabilities;
+ pInstance->pScratch = pScratch;
+
+ /* Equaliser Biquad Instance */
+ LVM_UINT32 MemSize = pCapabilities->MaxBands * sizeof(*(pInstance->pEQNB_FilterState_Float));
+ pInstance->pEQNB_FilterState_Float = (Biquad_FLOAT_Instance_t*)calloc(1, MemSize);
+ if (pInstance->pEQNB_FilterState_Float == LVM_NULL) {
return LVEQNB_NULLADDRESS;
}
- /*
- * Check the memory table for NULL pointers
- */
- for (i = 0; i < LVEQNB_NR_MEMORY_REGIONS; i++)
- {
- if (pMemoryTable->Region[i].Size!=0)
- {
- if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
- {
- return(LVEQNB_NULLADDRESS);
- }
- }
+ MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pEQNB_Taps_Float)));
+ pInstance->pEQNB_Taps_Float = (Biquad_2I_Order2_FLOAT_Taps_t*)calloc(1, MemSize);
+ if (pInstance->pEQNB_Taps_Float == LVM_NULL) {
+ return LVEQNB_NULLADDRESS;
}
- /*
- * Set the instance handle if not already initialised
- */
-
- InstAlloc_Init(&AllocMem, pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress);
-
- if (*phInstance == LVM_NULL)
- {
- *phInstance = InstAlloc_AddMember(&AllocMem, sizeof(LVEQNB_Instance_t));
+ MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pBandDefinitions)));
+ pInstance->pBandDefinitions = (LVEQNB_BandDef_t*)calloc(1, MemSize);
+ if (pInstance->pBandDefinitions == LVM_NULL) {
+ return LVEQNB_NULLADDRESS;
}
- pInstance =(LVEQNB_Instance_t *)*phInstance;
-
- /*
- * Save the memory table in the instance structure
- */
- pInstance->Capabilities = *pCapabilities;
-
- /*
- * Save the memory table in the instance structure and
- * set the structure pointers
- */
- pInstance->MemoryTable = *pMemoryTable;
-
- /*
- * Allocate coefficient memory
- */
- InstAlloc_Init(&AllocMem,
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress);
-
- /* Equaliser Biquad Instance */
- pInstance->pEQNB_FilterState_Float = (Biquad_FLOAT_Instance_t *)
- InstAlloc_AddMember(&AllocMem, pCapabilities->MaxBands * \
- sizeof(Biquad_FLOAT_Instance_t));
-
- /*
- * Allocate data memory
- */
- InstAlloc_Init(&AllocMem,
- pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress);
-
- MemSize = (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
- pInstance->pEQNB_Taps_Float = (Biquad_2I_Order2_FLOAT_Taps_t *)InstAlloc_AddMember(&AllocMem,
- MemSize);
- MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t));
- pInstance->pBandDefinitions = (LVEQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem,
- MemSize);
// clear all the bands, setting their gain to 0, otherwise when applying new params,
// it will compare against uninitialized values
memset(pInstance->pBandDefinitions, 0, MemSize);
- MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en));
- pInstance->pBiquadType = (LVEQNB_BiquadType_en *)InstAlloc_AddMember(&AllocMem,
- MemSize);
- /*
- * Internally map, structure and allign scratch memory
- */
- InstAlloc_Init(&AllocMem,
- pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress);
+ MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pBiquadType)));
+ pInstance->pBiquadType = (LVEQNB_BiquadType_en*)calloc(1, MemSize);
+ if (pInstance->pBiquadType == LVM_NULL) {
+ return LVEQNB_NULLADDRESS;
+ }
- pInstance->pFastTemporary = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem,
- sizeof(LVM_FLOAT));
+ pInstance->pFastTemporary = (LVM_FLOAT*)pScratch;
/*
* Update the instance parameters
*/
- pInstance->Params.NBands = 0;
- pInstance->Params.OperatingMode = LVEQNB_BYPASS;
+ pInstance->Params.NBands = 0;
+ pInstance->Params.OperatingMode = LVEQNB_BYPASS;
pInstance->Params.pBandDefinition = LVM_NULL;
- pInstance->Params.SampleRate = LVEQNB_FS_8000;
- pInstance->Params.SourceFormat = LVEQNB_STEREO;
+ pInstance->Params.SampleRate = LVEQNB_FS_8000;
+ pInstance->Params.SourceFormat = LVEQNB_STEREO;
/*
* Initialise the filters
*/
- LVEQNB_SetFilters(pInstance, /* Set the filter types */
+ LVEQNB_SetFilters(pInstance, /* Set the filter types */
&pInstance->Params);
- LVEQNB_SetCoefficients(pInstance); /* Set the filter coefficients */
+ LVEQNB_SetCoefficients(pInstance); /* Set the filter coefficients */
- LVEQNB_ClearFilterHistory(pInstance); /* Clear the filter history */
+ LVEQNB_ClearFilterHistory(pInstance); /* Clear the filter history */
/*
* Initialise the bypass variables
*/
- pInstance->BypassMixer.MixerStream[0].CallbackSet = 0;
- pInstance->BypassMixer.MixerStream[0].CallbackParam = 0;
- pInstance->BypassMixer.MixerStream[0].pCallbackHandle = (void*)pInstance;
- pInstance->BypassMixer.MixerStream[0].pCallBack = LVEQNB_BypassMixerCallBack;
+ pInstance->BypassMixer.MixerStream[0].CallbackSet = 0;
+ pInstance->BypassMixer.MixerStream[0].CallbackParam = 0;
+ pInstance->BypassMixer.MixerStream[0].pCallbackHandle = (void*)pInstance;
+ pInstance->BypassMixer.MixerStream[0].pCallBack = LVEQNB_BypassMixerCallBack;
- LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0],0,0);
- LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],0,LVM_FS_8000,2);
+ LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0], 0, 0);
+ LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0], 0, LVM_FS_8000, 2);
- pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
- pInstance->BypassMixer.MixerStream[1].CallbackParam = 0;
- pInstance->BypassMixer.MixerStream[1].pCallbackHandle = LVM_NULL;
- pInstance->BypassMixer.MixerStream[1].pCallBack = LVM_NULL;
+ pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+ pInstance->BypassMixer.MixerStream[1].CallbackParam = 0;
+ pInstance->BypassMixer.MixerStream[1].pCallbackHandle = LVM_NULL;
+ pInstance->BypassMixer.MixerStream[1].pCallBack = LVM_NULL;
LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[1], 0, 1.0f);
LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1], 0, LVM_FS_8000, 2);
- pInstance->bInOperatingModeTransition = LVM_FALSE;
+ pInstance->bInOperatingModeTransition = LVM_FALSE;
- return(LVEQNB_SUCCESS);
+ return (LVEQNB_SUCCESS);
}
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVEQNB_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created during LVEQNB_Init including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVEQNB_Process function */
+/* */
+/****************************************************************************************/
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance) {
+ LVEQNB_Instance_t* pInstance;
+ if (phInstance == LVM_NULL) {
+ return;
+ }
+ pInstance = (LVEQNB_Instance_t*)*phInstance;
+
+ /* Equaliser Biquad Instance */
+ if (pInstance->pEQNB_FilterState_Float != LVM_NULL) {
+ free(pInstance->pEQNB_FilterState_Float);
+ pInstance->pEQNB_FilterState_Float = LVM_NULL;
+ }
+ if (pInstance->pEQNB_Taps_Float != LVM_NULL) {
+ free(pInstance->pEQNB_Taps_Float);
+ pInstance->pEQNB_Taps_Float = LVM_NULL;
+ }
+ if (pInstance->pBandDefinitions != LVM_NULL) {
+ free(pInstance->pBandDefinitions);
+ pInstance->pBandDefinitions = LVM_NULL;
+ }
+ if (pInstance->pBiquadType != LVM_NULL) {
+ free(pInstance->pBiquadType);
+ pInstance->pBiquadType = LVM_NULL;
+ }
+ free(pInstance);
+ *phInstance = LVM_NULL;
+}
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
index 40facfb..83a3449 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
@@ -24,7 +24,7 @@
/* */
/****************************************************************************************/
-#include "LVEQNB.h" /* Calling or Application layer definitions */
+#include "LVEQNB.h" /* Calling or Application layer definitions */
#include "BIQUAD.h"
#include "LVC_Mixer.h"
@@ -35,21 +35,8 @@
/****************************************************************************************/
/* General */
-#define LVEQNB_INVALID 0xFFFF /* Invalid init parameter */
-
-/* Memory */
-#define LVEQNB_INSTANCE_ALIGN 4 /* 32-bit alignment for instance structures */
-#define LVEQNB_DATA_ALIGN 4 /* 32-bit alignment for structures */
-#define LVEQNB_COEF_ALIGN 4 /* 32-bit alignment for long words */
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVEQNB_SCRATCHBUFFERS (LVM_MAX_CHANNELS * 2)
-#else
-#define LVEQNB_SCRATCHBUFFERS 4 /* Number of buffers required for inplace processing */
-#endif
-#define LVEQNB_SCRATCH_ALIGN 4 /* 32-bit alignment for long data */
-
-#define LVEQNB_BYPASS_MIXER_TC 100 /* Bypass Mixer TC */
+#define LVEQNB_INVALID 0xFFFF /* Invalid init parameter */
+#define LVEQNB_BYPASS_MIXER_TC 100 /* Bypass Mixer TC */
/****************************************************************************************/
/* */
@@ -58,13 +45,12 @@
/****************************************************************************************/
/* Filter biquad types */
-typedef enum
-{
+typedef enum {
LVEQNB_SinglePrecision_Float = -1,
LVEQNB_SinglePrecision = 0,
LVEQNB_DoublePrecision = 1,
- LVEQNB_OutOfRange = 2,
- LVEQNB_BIQUADTYPE_MAX = LVM_MAXINT_32
+ LVEQNB_OutOfRange = 2,
+ LVEQNB_BIQUADTYPE_MAX = LVM_MAXINT_32
} LVEQNB_BiquadType_en;
/****************************************************************************************/
@@ -74,28 +60,27 @@
/****************************************************************************************/
/* Instance structure */
-typedef struct
-{
+typedef struct {
/* Public parameters */
- LVEQNB_MemTab_t MemoryTable; /* Instance memory allocation table */
- LVEQNB_Params_t Params; /* Instance parameters */
- LVEQNB_Capabilities_t Capabilities; /* Instance capabilities */
+ void* pScratch; /* Pointer to bundle scratch buffer */
+ LVEQNB_Params_t Params; /* Instance parameters */
+ LVEQNB_Capabilities_t Capabilities; /* Instance capabilities */
/* Aligned memory pointers */
- LVM_FLOAT *pFastTemporary; /* Fast temporary data base address */
+ LVM_FLOAT* pFastTemporary; /* Fast temporary data base address */
- Biquad_2I_Order2_FLOAT_Taps_t *pEQNB_Taps_Float; /* Equaliser Taps */
- Biquad_FLOAT_Instance_t *pEQNB_FilterState_Float; /* State for each filter band */
+ Biquad_2I_Order2_FLOAT_Taps_t* pEQNB_Taps_Float; /* Equaliser Taps */
+ Biquad_FLOAT_Instance_t* pEQNB_FilterState_Float; /* State for each filter band */
/* Filter definitions and call back */
- LVM_UINT16 NBands; /* Number of bands */
- LVEQNB_BandDef_t *pBandDefinitions; /* Filter band definitions */
- LVEQNB_BiquadType_en *pBiquadType; /* Filter biquad types */
+ LVM_UINT16 NBands; /* Number of bands */
+ LVEQNB_BandDef_t* pBandDefinitions; /* Filter band definitions */
+ LVEQNB_BiquadType_en* pBiquadType; /* Filter biquad types */
/* Bypass variable */
- LVMixer3_2St_FLOAT_st BypassMixer;
+ LVMixer3_2St_FLOAT_st BypassMixer;
- LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
+ LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
} LVEQNB_Instance_t;
@@ -105,17 +90,15 @@
/* */
/****************************************************************************************/
-void LVEQNB_SetFilters(LVEQNB_Instance_t *pInstance,
- LVEQNB_Params_t *pParams);
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams);
-void LVEQNB_SetCoefficients(LVEQNB_Instance_t *pInstance);
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance);
-void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t *pInstance);
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs,
- LVEQNB_BandDef_t *pFilterDefinition,
- PK_FLOAT_Coefs_t *pCoefficients);
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance);
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+ PK_FLOAT_Coefs_t* pCoefficients);
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance, void *pGeneralPurpose, LVM_INT16 CallbackParam);
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+ LVM_INT16 CallbackParam);
#endif /* __LVEQNB_PRIVATE_H__ */
-
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
index 65eff53..d2a26db 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
@@ -34,7 +34,7 @@
/* */
/****************************************************************************************/
-#define SHIFT 13
+#define SHIFT 13
/****************************************************************************************/
/* */
@@ -58,89 +58,65 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- const LVM_UINT16 NrFrames)
-{ // updated to use samples = frames * channels.
- LVEQNB_Instance_t *pInstance = (LVEQNB_Instance_t *)hInstance;
+LVEQNB_ReturnStatus_en LVEQNB_Process(
+ LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+ const LVM_UINT16 NrFrames) { // updated to use samples = frames * channels.
+ LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
-#ifdef SUPPORT_MC
// Mono passed in as stereo
- const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
- ? 2 : pInstance->Params.NrChannels;
-#else
- const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
+ const LVM_INT32 NrChannels =
+ pInstance->Params.NrChannels == 1 ? 2 : pInstance->Params.NrChannels;
const LVM_INT32 NrSamples = NrChannels * NrFrames;
- /* Check for NULL pointers */
- if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
- {
+ /* Check for NULL pointers */
+ if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
return LVEQNB_NULLADDRESS;
}
/* Check if the input and output data buffers are 32-bit aligned */
- if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
- {
+ if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
return LVEQNB_ALIGNMENTERROR;
}
- LVM_FLOAT * const pScratch = (LVM_FLOAT *)pInstance->pFastTemporary;
+ LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pFastTemporary;
/*
- * Check the number of frames is not too large
- */
- if (NrFrames > pInstance->Capabilities.MaxBlockSize)
- {
+ * Check the number of frames is not too large
+ */
+ if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
return LVEQNB_TOOMANYSAMPLES;
}
- if (pInstance->Params.OperatingMode == LVEQNB_ON)
- {
+ if (pInstance->Params.OperatingMode == LVEQNB_ON) {
/*
* Copy input data in to scratch buffer
*/
- Copy_Float(pInData, /* Source */
- pScratch, /* Destination */
+ Copy_Float(pInData, /* Source */
+ pScratch, /* Destination */
(LVM_INT16)NrSamples);
/*
* For each section execte the filter unless the gain is 0dB
*/
- if (pInstance->NBands != 0)
- {
- for (LVM_UINT16 i = 0; i < pInstance->NBands; i++)
- {
+ if (pInstance->NBands != 0) {
+ for (LVM_UINT16 i = 0; i < pInstance->NBands; i++) {
/*
* Check if band is non-zero dB gain
*/
- if (pInstance->pBandDefinitions[i].Gain != 0)
- {
+ if (pInstance->pBandDefinitions[i].Gain != 0) {
/*
* Get the address of the biquad instance
*/
- Biquad_FLOAT_Instance_t *pBiquad = &pInstance->pEQNB_FilterState_Float[i];
+ Biquad_FLOAT_Instance_t* pBiquad = &pInstance->pEQNB_FilterState_Float[i];
/*
* Select single or double precision as required
*/
- switch (pInstance->pBiquadType[i])
- {
- case LVEQNB_SinglePrecision_Float:
- {
-#ifdef SUPPORT_MC
- PK_Mc_D32F32C14G11_TRC_WRA_01(pBiquad,
- pScratch,
- pScratch,
+ switch (pInstance->pBiquadType[i]) {
+ case LVEQNB_SinglePrecision_Float: {
+ PK_Mc_D32F32C14G11_TRC_WRA_01(pBiquad, pScratch, pScratch,
(LVM_INT16)NrFrames,
(LVM_INT16)NrChannels);
-#else
- PK_2I_D32F32C14G11_TRC_WRA_01(pBiquad,
- pScratch,
- pScratch,
- (LVM_INT16)NrFrames);
-#endif
break;
}
default:
@@ -150,44 +126,27 @@
}
}
- if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-#ifdef SUPPORT_MC
- LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer,
- pScratch,
- pInData,
- pScratch,
- (LVM_INT16)NrFrames,
- (LVM_INT16)NrChannels);
-#else
- LVC_MixSoft_2St_D16C31_SAT(&pInstance->BypassMixer,
- pScratch,
- pInData,
- pScratch,
- (LVM_INT16)NrSamples);
-#endif
+ if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
+ LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer, pScratch, pInData, pScratch,
+ (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
// duplicate with else clause(s)
- Copy_Float(pScratch, /* Source */
- pOutData, /* Destination */
- (LVM_INT16)NrSamples); /* All channel samples */
- }
- else{
+ Copy_Float(pScratch, /* Source */
+ pOutData, /* Destination */
+ (LVM_INT16)NrSamples); /* All channel samples */
+ } else {
Copy_Float(pScratch, /* Source */
pOutData, /* Destination */
(LVM_INT16)NrSamples); /* All channel samples */
}
- }
- else
- {
+ } else {
/*
* Mode is OFF so copy the data if necessary
*/
- if (pInData != pOutData)
- {
- Copy_Float(pInData, /* Source */
- pOutData, /* Destination */
- (LVM_INT16)NrSamples); /* All channel samples */
+ if (pInData != pOutData) {
+ Copy_Float(pInData, /* Source */
+ pOutData, /* Destination */
+ (LVM_INT16)NrSamples); /* All channel samples */
}
}
return LVEQNB_SUCCESS;
-
}
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
index 0628114..d79d7c9 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
@@ -35,20 +35,9 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-const LVM_UINT32 LVEQNB_SampleRateTab[] = {8000, /* 8kS/s */
- 11025,
- 12000,
- 16000,
- 22050,
- 24000,
- 32000,
- 44100,
- 48000,
- 88200,
- 96000,
- 176400,
- 192000
-};
+const LVM_UINT32 LVEQNB_SampleRateTab[] = {8000, /* 8kS/s */
+ 11025, 12000, 16000, 22050, 24000, 32000,
+ 44100, 48000, 88200, 96000, 176400, 192000};
/************************************************************************************/
/* */
@@ -59,74 +48,34 @@
/*
* Table for 2 * Pi / Fs
*/
-const LVM_FLOAT LVEQNB_TwoPiOnFsTable[] = {LVEQNB_2PiOn_8000, /* 8kS/s */
- LVEQNB_2PiOn_11025,
- LVEQNB_2PiOn_12000,
- LVEQNB_2PiOn_16000,
- LVEQNB_2PiOn_22050,
- LVEQNB_2PiOn_24000,
- LVEQNB_2PiOn_32000,
- LVEQNB_2PiOn_44100,
- LVEQNB_2PiOn_48000
- ,LVEQNB_2PiOn_88200
- ,LVEQNB_2PiOn_96000
- ,LVEQNB_2PiOn_176400
- ,LVEQNB_2PiOn_192000
- };
+const LVM_FLOAT LVEQNB_TwoPiOnFsTable[] = {
+ LVEQNB_2PiOn_8000, /* 8kS/s */
+ LVEQNB_2PiOn_11025, LVEQNB_2PiOn_12000, LVEQNB_2PiOn_16000, LVEQNB_2PiOn_22050,
+ LVEQNB_2PiOn_24000, LVEQNB_2PiOn_32000, LVEQNB_2PiOn_44100, LVEQNB_2PiOn_48000,
+ LVEQNB_2PiOn_88200, LVEQNB_2PiOn_96000, LVEQNB_2PiOn_176400, LVEQNB_2PiOn_192000};
/*
* Gain table
*/
-const LVM_FLOAT LVEQNB_GainTable[] = {LVEQNB_Gain_Neg15_dB, /* -15dB gain */
- LVEQNB_Gain_Neg14_dB,
- LVEQNB_Gain_Neg13_dB,
- LVEQNB_Gain_Neg12_dB,
- LVEQNB_Gain_Neg11_dB,
- LVEQNB_Gain_Neg10_dB,
- LVEQNB_Gain_Neg9_dB,
- LVEQNB_Gain_Neg8_dB,
- LVEQNB_Gain_Neg7_dB,
- LVEQNB_Gain_Neg6_dB,
- LVEQNB_Gain_Neg5_dB,
- LVEQNB_Gain_Neg4_dB,
- LVEQNB_Gain_Neg3_dB,
- LVEQNB_Gain_Neg2_dB,
- LVEQNB_Gain_Neg1_dB,
- LVEQNB_Gain_0_dB, /* 0dB gain */
- LVEQNB_Gain_1_dB,
- LVEQNB_Gain_2_dB,
- LVEQNB_Gain_3_dB,
- LVEQNB_Gain_4_dB,
- LVEQNB_Gain_5_dB,
- LVEQNB_Gain_6_dB,
- LVEQNB_Gain_7_dB,
- LVEQNB_Gain_8_dB,
- LVEQNB_Gain_9_dB,
- LVEQNB_Gain_10_dB,
- LVEQNB_Gain_11_dB,
- LVEQNB_Gain_12_dB,
- LVEQNB_Gain_13_dB,
- LVEQNB_Gain_14_dB,
- LVEQNB_Gain_15_dB}; /* +15dB gain */
+const LVM_FLOAT LVEQNB_GainTable[] = {
+ LVEQNB_Gain_Neg15_dB, /* -15dB gain */
+ LVEQNB_Gain_Neg14_dB, LVEQNB_Gain_Neg13_dB, LVEQNB_Gain_Neg12_dB, LVEQNB_Gain_Neg11_dB,
+ LVEQNB_Gain_Neg10_dB, LVEQNB_Gain_Neg9_dB, LVEQNB_Gain_Neg8_dB, LVEQNB_Gain_Neg7_dB,
+ LVEQNB_Gain_Neg6_dB, LVEQNB_Gain_Neg5_dB, LVEQNB_Gain_Neg4_dB, LVEQNB_Gain_Neg3_dB,
+ LVEQNB_Gain_Neg2_dB, LVEQNB_Gain_Neg1_dB, LVEQNB_Gain_0_dB, /* 0dB gain */
+ LVEQNB_Gain_1_dB, LVEQNB_Gain_2_dB, LVEQNB_Gain_3_dB, LVEQNB_Gain_4_dB,
+ LVEQNB_Gain_5_dB, LVEQNB_Gain_6_dB, LVEQNB_Gain_7_dB, LVEQNB_Gain_8_dB,
+ LVEQNB_Gain_9_dB, LVEQNB_Gain_10_dB, LVEQNB_Gain_11_dB, LVEQNB_Gain_12_dB,
+ LVEQNB_Gain_13_dB, LVEQNB_Gain_14_dB, LVEQNB_Gain_15_dB}; /* +15dB gain */
/*
* D table for 100 / (Gain + 1)
*/
-const LVM_FLOAT LVEQNB_DTable[] = {LVEQNB_100D_Neg15_dB, /* -15dB gain */
- LVEQNB_100D_Neg14_dB,
- LVEQNB_100D_Neg13_dB,
- LVEQNB_100D_Neg12_dB,
- LVEQNB_100D_Neg11_dB,
- LVEQNB_100D_Neg10_dB,
- LVEQNB_100D_Neg9_dB,
- LVEQNB_100D_Neg8_dB,
- LVEQNB_100D_Neg7_dB,
- LVEQNB_100D_Neg6_dB,
- LVEQNB_100D_Neg5_dB,
- LVEQNB_100D_Neg4_dB,
- LVEQNB_100D_Neg3_dB,
- LVEQNB_100D_Neg2_dB,
- LVEQNB_100D_Neg1_dB,
- LVEQNB_100D_0_dB}; /* 0dB gain */
+const LVM_FLOAT LVEQNB_DTable[] = {
+ LVEQNB_100D_Neg15_dB, /* -15dB gain */
+ LVEQNB_100D_Neg14_dB, LVEQNB_100D_Neg13_dB, LVEQNB_100D_Neg12_dB, LVEQNB_100D_Neg11_dB,
+ LVEQNB_100D_Neg10_dB, LVEQNB_100D_Neg9_dB, LVEQNB_100D_Neg8_dB, LVEQNB_100D_Neg7_dB,
+ LVEQNB_100D_Neg6_dB, LVEQNB_100D_Neg5_dB, LVEQNB_100D_Neg4_dB, LVEQNB_100D_Neg3_dB,
+ LVEQNB_100D_Neg2_dB, LVEQNB_100D_Neg1_dB, LVEQNB_100D_0_dB}; /* 0dB gain */
/************************************************************************************/
/* */
/* Filter polynomial coefficients */
@@ -142,13 +91,13 @@
* a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
* +1.0 to -1.0
*/
-const LVM_INT16 LVEQNB_CosCoef[] = {3, /* Shifts */
- 4096, /* a0 */
- -36, /* a1 */
- -19725, /* a2 */
- -2671, /* a3 */
- 23730, /* a4 */
- -9490}; /* a5 */
+const LVM_INT16 LVEQNB_CosCoef[] = {3, /* Shifts */
+ 4096, /* a0 */
+ -36, /* a1 */
+ -19725, /* a2 */
+ -2671, /* a3 */
+ 23730, /* a4 */
+ -9490}; /* a5 */
/*
* Coefficients for calculating the cosine error with the equation:
@@ -164,9 +113,8 @@
*
* Cos(x) = 1.0 - CosErr(x)
*/
-const LVM_INT16 LVEQNB_DPCosCoef[] = {1, /* Shifts */
- 0, /* a0 */
- -6, /* a1 */
- 16586, /* a2 */
- -44}; /* a3 */
-
+const LVM_INT16 LVEQNB_DPCosCoef[] = {1, /* Shifts */
+ 0, /* a0 */
+ -6, /* a1 */
+ 16586, /* a2 */
+ -44}; /* a3 */
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
index a71eeb9..ab51196 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
@@ -27,7 +27,7 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-extern const LVM_UINT32 LVEQNB_SampleRateTab[];
+extern const LVM_UINT32 LVEQNB_SampleRateTab[];
/************************************************************************************/
/* */
@@ -38,17 +38,17 @@
/*
* Table for 2 * Pi / Fs
*/
-extern const LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
/*
* Gain table
*/
-extern const LVM_FLOAT LVEQNB_GainTable[];
+extern const LVM_FLOAT LVEQNB_GainTable[];
/*
* D table for 100 / (Gain + 1)
*/
-extern const LVM_FLOAT LVEQNB_DTable[];
+extern const LVM_FLOAT LVEQNB_DTable[];
/************************************************************************************/
/* */
@@ -65,7 +65,7 @@
* a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
* +1.0 to -1.0
*/
-extern const LVM_INT16 LVEQNB_CosCoef[];
+extern const LVM_INT16 LVEQNB_CosCoef[];
/*
* Coefficients for calculating the cosine error with the equation:
@@ -81,6 +81,6 @@
*
* Cos(x) = 1.0 - CosErr(x)
*/
-extern const LVM_INT16 LVEQNB_DPCosCoef[];
+extern const LVM_INT16 LVEQNB_DPCosCoef[];
#endif /* __LVEQNB_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 8c91ea9..484787a 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -41,11 +41,11 @@
/* */
/****************************************************************************************/
/* General */
-#define LVREV_BLOCKSIZE_MULTIPLE 1 /* Processing block size multiple */
-#define LVREV_MAX_T60 7000 /* Maximum decay time is 7000ms */
+#define LVREV_BLOCKSIZE_MULTIPLE 1 /* Processing block size multiple */
+#define LVREV_MAX_T60 7000 /* Maximum decay time is 7000ms */
/* Memory table*/
-#define LVREV_NR_MEMORY_REGIONS 4 /* Number of memory regions */
+#define LVREV_NR_MEMORY_REGIONS 4 /* Number of memory regions */
/****************************************************************************************/
/* */
@@ -53,24 +53,22 @@
/* */
/****************************************************************************************/
/* Instance handle */
-typedef void *LVREV_Handle_t;
+typedef void* LVREV_Handle_t;
/* Status return values */
-typedef enum
-{
- LVREV_SUCCESS = 0, /* Successful return from a routine */
- LVREV_NULLADDRESS = 1, /* NULL allocation address */
- LVREV_OUTOFRANGE = 2, /* Out of range control parameter */
- LVREV_INVALIDNUMSAMPLES = 3, /* Invalid number of samples */
+typedef enum {
+ LVREV_SUCCESS = 0, /* Successful return from a routine */
+ LVREV_NULLADDRESS = 1, /* NULL allocation address */
+ LVREV_OUTOFRANGE = 2, /* Out of range control parameter */
+ LVREV_INVALIDNUMSAMPLES = 3, /* Invalid number of samples */
LVREV_RETURNSTATUS_DUMMY = LVM_MAXENUM
} LVREV_ReturnStatus_en;
/* Reverb delay lines */
-typedef enum
-{
- LVREV_DELAYLINES_1 = 1, /* One delay line */
- LVREV_DELAYLINES_2 = 2, /* Two delay lines */
- LVREV_DELAYLINES_4 = 4, /* Four delay lines */
+typedef enum {
+ LVREV_DELAYLINES_1 = 1, /* One delay line */
+ LVREV_DELAYLINES_2 = 2, /* Two delay lines */
+ LVREV_DELAYLINES_4 = 4, /* Four delay lines */
LVREV_DELAYLINES_DUMMY = LVM_MAXENUM
} LVREV_NumDelayLines_en;
@@ -81,40 +79,37 @@
/****************************************************************************************/
/* Memory table containing the region definitions */
-typedef struct
-{
- LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */
+typedef struct {
+ LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */
} LVREV_MemoryTable_st;
/* Control Parameter structure */
-typedef struct
-{
+typedef struct {
/* General parameters */
- LVM_Mode_en OperatingMode; /* Operating mode */
- LVM_Fs_en SampleRate; /* Sample rate */
- LVM_Format_en SourceFormat; /* Source data format */
+ LVM_Mode_en OperatingMode; /* Operating mode */
+ LVM_Fs_en SampleRate; /* Sample rate */
+ LVM_Format_en SourceFormat; /* Source data format */
/* Parameters for REV */
- LVM_UINT16 Level; /* Level, 0 to 100 representing percentage of reverb */
- LVM_UINT32 LPF; /* Low pass filter, in Hz */
- LVM_UINT32 HPF; /* High pass filter, in Hz */
+ LVM_UINT16 Level; /* Level, 0 to 100 representing percentage of reverb */
+ LVM_UINT32 LPF; /* Low pass filter, in Hz */
+ LVM_UINT32 HPF; /* High pass filter, in Hz */
- LVM_UINT16 T60; /* Decay time constant, in ms */
- LVM_UINT16 Density; /* Echo density, 0 to 100 for minimum to maximum density */
- LVM_UINT16 Damping; /* Damping */
- LVM_UINT16 RoomSize; /* Simulated room size, 1 to 100 for minimum to maximum size */
+ LVM_UINT16 T60; /* Decay time constant, in ms */
+ LVM_UINT16 Density; /* Echo density, 0 to 100 for minimum to maximum density */
+ LVM_UINT16 Damping; /* Damping */
+ LVM_UINT16 RoomSize; /* Simulated room size, 1 to 100 for minimum to maximum size */
} LVREV_ControlParams_st;
/* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
/* General */
- LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
+ LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
/* Reverb */
- LVM_Format_en SourceFormat; /* Source data formats to support */
- LVREV_NumDelayLines_en NumDelays; /* The number of delay lines, 1, 2 or 4 */
+ LVM_Format_en SourceFormat; /* Source data formats to support */
+ LVREV_NumDelayLines_en NumDelays; /* The number of delay lines, 1, 2 or 4 */
} LVREV_InstanceParams_st;
@@ -160,9 +155,9 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
- LVREV_MemoryTable_st *pMemoryTable,
- LVREV_InstanceParams_st *pInstanceParams);
+LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
+ LVREV_MemoryTable_st* pMemoryTable,
+ LVREV_InstanceParams_st* pInstanceParams);
/****************************************************************************************/
/* */
@@ -190,9 +185,9 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t *phInstance,
- LVREV_MemoryTable_st *pMemoryTable,
- LVREV_InstanceParams_st *pInstanceParams);
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+ LVREV_MemoryTable_st* pMemoryTable,
+ LVREV_InstanceParams_st* pInstanceParams);
/****************************************************************************************/
/* */
@@ -214,8 +209,8 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
- LVREV_ControlParams_st *pControlParams);
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+ LVREV_ControlParams_st* pControlParams);
/****************************************************************************************/
/* */
@@ -236,8 +231,8 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
- LVREV_ControlParams_st *pNewParams);
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+ LVREV_ControlParams_st* pNewParams);
/****************************************************************************************/
/* */
@@ -257,7 +252,7 @@
/* 1. This function must not be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance);
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance);
/****************************************************************************************/
/* */
@@ -280,11 +275,9 @@
/* 1. The input and output buffers must be 32-bit aligned */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- const LVM_UINT16 NumSamples);
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples);
-#endif /* __LVREV_H__ */
+#endif /* __LVREV_H__ */
/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
index 1f0d39b..737ef01 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
@@ -41,160 +41,134 @@
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ApplyNewSettings (LVREV_Instance_st *pPrivate)
-{
-
- LVM_Mode_en OperatingMode;
- LVM_INT32 NumberOfDelayLines;
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate) {
+ LVM_Mode_en OperatingMode;
+ LVM_INT32 NumberOfDelayLines;
/* Check for NULL pointer */
- if(pPrivate == LVM_NULL)
- {
+ if (pPrivate == LVM_NULL) {
return LVREV_NULLADDRESS;
}
OperatingMode = pPrivate->NewParams.OperatingMode;
- if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
- {
+ if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
NumberOfDelayLines = 4;
- }
- else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
- {
+ } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
NumberOfDelayLines = 2;
- }
- else
- {
+ } else {
NumberOfDelayLines = 1;
}
/*
* Update the high pass filter coefficients
*/
- if((pPrivate->NewParams.HPF != pPrivate->CurrentParams.HPF) ||
- (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
- LVM_FLOAT Omega;
- FO_FLOAT_Coefs_t Coeffs;
+ if ((pPrivate->NewParams.HPF != pPrivate->CurrentParams.HPF) ||
+ (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_FLOAT Omega;
+ FO_FLOAT_Coefs_t Coeffs;
Omega = LVM_GetOmega(pPrivate->NewParams.HPF, pPrivate->NewParams.SampleRate);
LVM_FO_HPF(Omega, &Coeffs);
- FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->HPCoefs,
- &pPrivate->pFastData->HPTaps, &Coeffs);
- LoadConst_Float(0,
- (LVM_FLOAT *)&pPrivate->pFastData->HPTaps,
+ FO_1I_D32F32Cll_TRC_WRA_01_Init(&pPrivate->pFastCoef->HPCoefs, &pPrivate->pFastData->HPTaps,
+ &Coeffs);
+ LoadConst_Float(0, (LVM_FLOAT*)&pPrivate->pFastData->HPTaps,
sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
}
/*
* Update the low pass filter coefficients
*/
- if((pPrivate->NewParams.LPF != pPrivate->CurrentParams.LPF) ||
- (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
- LVM_FLOAT Omega;
- FO_FLOAT_Coefs_t Coeffs;
+ if ((pPrivate->NewParams.LPF != pPrivate->CurrentParams.LPF) ||
+ (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_FLOAT Omega;
+ FO_FLOAT_Coefs_t Coeffs;
Coeffs.A0 = 1;
Coeffs.A1 = 0;
Coeffs.B1 = 0;
- if(pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
- {
+ if (pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
Omega = LVM_GetOmega(pPrivate->NewParams.LPF, pPrivate->NewParams.SampleRate);
/*
* Do not apply filter if w =2*pi*fc/fs >= 2.9
*/
- if(Omega <= (LVM_FLOAT)LVREV_2_9_INQ29)
- {
+ if (Omega <= (LVM_FLOAT)LVREV_2_9_INQ29) {
LVM_FO_LPF(Omega, &Coeffs);
}
}
- FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->LPCoefs,
- &pPrivate->pFastData->LPTaps, &Coeffs);
- LoadConst_Float(0,
- (LVM_FLOAT *)&pPrivate->pFastData->LPTaps,
+ FO_1I_D32F32Cll_TRC_WRA_01_Init(&pPrivate->pFastCoef->LPCoefs, &pPrivate->pFastData->LPTaps,
+ &Coeffs);
+ LoadConst_Float(0, (LVM_FLOAT*)&pPrivate->pFastData->LPTaps,
sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
}
/*
* Calculate the room size parameter
*/
- if( pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize)
- {
+ if (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) {
/* Room size range is 10ms to 200ms
* 0% -- 10ms
* 50% -- 65ms
* 100% -- 120ms
*/
- pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize*11) + 5) / 10);
+ pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize * 11) + 5) / 10);
}
/*
* Update the T delay number of samples and the all pass delay number of samples
*/
- if( (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+ if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
(pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
-
- LVM_UINT32 Temp;
- LVM_INT32 APDelaySize;
- LVM_INT32 Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
- LVM_UINT32 DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
- LVM_INT16 i;
- LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4, \
- LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
- LVM_INT16 MaxT_Delay[] = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, \
- LVREV_MAX_T2_DELAY, LVREV_MAX_T3_DELAY};
- LVM_INT16 MaxAP_Delay[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY, \
- LVREV_MAX_AP2_DELAY, LVREV_MAX_AP3_DELAY};
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_UINT32 Temp;
+ LVM_INT32 APDelaySize;
+ LVM_INT32 Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
+ LVM_UINT32 DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
+ LVM_INT16 i;
+ LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+ LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
+ LVM_INT16 MaxT_Delay[] = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, LVREV_MAX_T2_DELAY,
+ LVREV_MAX_T3_DELAY};
+ LVM_INT16 MaxAP_Delay[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY, LVREV_MAX_AP2_DELAY,
+ LVREV_MAX_AP3_DELAY};
/*
* For each delay line
*/
- for (i = 0; i < NumberOfDelayLines; i++)
- {
- if (i != 0)
- {
- LVM_FLOAT Temp1; /* to avoid QAC warning on type conversion */
+ for (i = 0; i < NumberOfDelayLines; i++) {
+ if (i != 0) {
+ LVM_FLOAT Temp1; /* to avoid QAC warning on type conversion */
- Temp1=(LVM_FLOAT)DelayLengthSamples;
+ Temp1 = (LVM_FLOAT)DelayLengthSamples;
Temp = (LVM_UINT32)(Temp1 * ScaleTable[i]);
+ } else {
+ Temp = DelayLengthSamples;
}
- else
- {
- Temp = DelayLengthSamples;
- }
- APDelaySize = Temp / 1500;
+ APDelaySize = Temp / 1500;
/*
* Set the fixed delay
*/
- Temp = (MaxT_Delay[i] - MaxAP_Delay[i]) * Fs / 192000;
+ Temp = (MaxT_Delay[i] - MaxAP_Delay[i]) * Fs / 192000;
pPrivate->Delay_AP[i] = pPrivate->T[i] - Temp;
/*
* Set the tap selection
*/
- if (pPrivate->AB_Selection)
- {
+ if (pPrivate->AB_Selection) {
/* Smooth from tap A to tap B */
- pPrivate->pOffsetB[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
- Temp - APDelaySize];
- pPrivate->B_DelaySize[i] = APDelaySize;
+ pPrivate->pOffsetB[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+ pPrivate->B_DelaySize[i] = APDelaySize;
pPrivate->Mixer_APTaps[i].Target1 = 0;
pPrivate->Mixer_APTaps[i].Target2 = 1.0f;
- }
- else
- {
+ } else {
/* Smooth from tap B to tap A */
- pPrivate->pOffsetA[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
- Temp - APDelaySize];
- pPrivate->A_DelaySize[i] = APDelaySize;
+ pPrivate->pOffsetA[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+ pPrivate->A_DelaySize[i] = APDelaySize;
pPrivate->Mixer_APTaps[i].Target2 = 0;
pPrivate->Mixer_APTaps[i].Target1 = 1.0f;
}
@@ -202,22 +176,17 @@
/*
* Set the maximum block size to the smallest delay size
*/
- pPrivate->MaxBlkLen = Temp;
- if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i])
- {
+ pPrivate->MaxBlkLen = Temp;
+ if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i]) {
pPrivate->MaxBlkLen = pPrivate->A_DelaySize[i];
}
- if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i])
- {
+ if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i]) {
pPrivate->MaxBlkLen = pPrivate->B_DelaySize[i];
}
}
- if (pPrivate->AB_Selection)
- {
+ if (pPrivate->AB_Selection) {
pPrivate->AB_Selection = 0;
- }
- else
- {
+ } else {
pPrivate->AB_Selection = 1;
}
@@ -226,8 +195,7 @@
*/
/* Just as a precausion, but no problem if we remove this line */
pPrivate->MaxBlkLen = pPrivate->MaxBlkLen - 2;
- if(pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize)
- {
+ if (pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize) {
pPrivate->MaxBlkLen = (LVM_INT32)pPrivate->InstanceParams.MaxBlockSize;
}
}
@@ -235,39 +203,30 @@
/*
* Update the low pass filter coefficient
*/
- if( (pPrivate->NewParams.Damping != pPrivate->CurrentParams.Damping) ||
+ if ((pPrivate->NewParams.Damping != pPrivate->CurrentParams.Damping) ||
(pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
-
- LVM_INT32 Temp;
- LVM_FLOAT Omega;
- FO_FLOAT_Coefs_t Coeffs;
- LVM_INT16 i;
- LVM_INT16 Damping = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
- LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
- LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_INT32 Temp;
+ LVM_FLOAT Omega;
+ FO_FLOAT_Coefs_t Coeffs;
+ LVM_INT16 i;
+ LVM_INT16 Damping = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
+ LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
+ LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
/*
* For each filter
*/
- for (i = 0; i < NumberOfDelayLines; i++)
- {
- if (i != 0)
- {
+ for (i = 0; i < NumberOfDelayLines; i++) {
+ if (i != 0) {
Temp = (LVM_INT32)(ScaleTable[i] * Damping);
- }
- else
- {
+ } else {
Temp = Damping;
}
- if(Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
- {
+ if (Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
Omega = LVM_GetOmega(Temp, pPrivate->NewParams.SampleRate);
LVM_FO_LPF(Omega, &Coeffs);
- }
- else
- {
+ } else {
Coeffs.A0 = 1;
Coeffs.A1 = 0;
Coeffs.B1 = 0;
@@ -280,27 +239,23 @@
/*
* Update All-pass filter mixer time constants
*/
- if( (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+ if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
(pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density))
- {
- LVM_INT16 i;
- LVM_FLOAT Alpha;
- LVM_FLOAT AlphaTap;
+ (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density)) {
+ LVM_INT16 i;
+ LVM_FLOAT Alpha;
+ LVM_FLOAT AlphaTap;
Alpha = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TC,
- LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
- 1);
+ LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
AlphaTap = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TAP_TC,
- LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
- 1);
+ LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
- for (i = 0; i < 4; i++)
- {
- pPrivate->Mixer_APTaps[i].Alpha1 = AlphaTap;
- pPrivate->Mixer_APTaps[i].Alpha2 = AlphaTap;
- pPrivate->Mixer_SGFeedback[i].Alpha = Alpha;
+ for (i = 0; i < 4; i++) {
+ pPrivate->Mixer_APTaps[i].Alpha1 = AlphaTap;
+ pPrivate->Mixer_APTaps[i].Alpha2 = AlphaTap;
+ pPrivate->Mixer_SGFeedback[i].Alpha = Alpha;
pPrivate->Mixer_SGFeedforward[i].Alpha = Alpha;
}
}
@@ -308,150 +263,121 @@
/*
* Update the feed back gain
*/
- if( (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+ if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
(pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
+ (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60) ||
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_FLOAT G[4]; /* Feedback gain (Q7.24) */
- LVM_FLOAT G[4]; /* Feedback gain (Q7.24) */
-
- if(pPrivate->NewParams.T60 == 0)
- {
+ if (pPrivate->NewParams.T60 == 0) {
G[3] = 0;
G[2] = 0;
G[1] = 0;
G[0] = 0;
- }
- else
- {
- LVM_FLOAT Temp1;
- LVM_FLOAT Temp2;
- LVM_INT16 i;
- LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
- LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
+ } else {
+ LVM_FLOAT Temp1;
+ LVM_FLOAT Temp2;
+ LVM_INT16 i;
+ LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+ LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
/*
* For each delay line
*/
- for (i = 0; i < NumberOfDelayLines; i++)
- {
+ for (i = 0; i < NumberOfDelayLines; i++) {
Temp1 = (3 * pPrivate->RoomSizeInms * ScaleTable[i]) / pPrivate->NewParams.T60;
- if(Temp1 >= (4))
- {
+ if (Temp1 >= (4)) {
G[i] = 0;
- }
- else if((Temp1 >= (2)))
- {
+ } else if ((Temp1 >= (2))) {
Temp2 = LVM_Power10(-(Temp1 / 2.0f));
Temp1 = LVM_Power10(-(Temp1 / 2.0f));
Temp1 = Temp1 * Temp2;
- }
- else
- {
+ } else {
Temp1 = LVM_Power10(-(Temp1));
}
- if (NumberOfDelayLines == 1)
- {
+ if (NumberOfDelayLines == 1) {
G[i] = Temp1;
- }
- else
- {
- LVM_FLOAT TempG;
+ } else {
+ LVM_FLOAT TempG;
TempG = Temp1 * ONE_OVER_SQRT_TWO;
- G[i]=TempG;
+ G[i] = TempG;
}
}
}
/* Set up the feedback mixers for four delay lines */
- pPrivate->FeedbackMixer[0].Target=G[0];
- pPrivate->FeedbackMixer[1].Target=G[1];
- pPrivate->FeedbackMixer[2].Target=G[2];
- pPrivate->FeedbackMixer[3].Target=G[3];
+ pPrivate->FeedbackMixer[0].Target = G[0];
+ pPrivate->FeedbackMixer[1].Target = G[1];
+ pPrivate->FeedbackMixer[2].Target = G[2];
+ pPrivate->FeedbackMixer[3].Target = G[3];
}
/*
* Calculate the gain correction
*/
- if((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
- (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) ||
- (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60) )
- {
- LVM_INT32 Index=0;
+ if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+ (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) ||
+ (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60)) {
+ LVM_INT32 Index = 0;
LVM_FLOAT Index_FLOAT;
- LVM_INT32 i=0;
- LVM_FLOAT Gain=0;
- LVM_INT32 RoomSize=0;
+ LVM_INT32 i = 0;
+ LVM_FLOAT Gain = 0;
+ LVM_INT32 RoomSize = 0;
LVM_FLOAT T60;
LVM_FLOAT Coefs[5];
- if(pPrivate->NewParams.RoomSize == 0)
- {
+ if (pPrivate->NewParams.RoomSize == 0) {
RoomSize = 1;
- }
- else
- {
+ } else {
RoomSize = (LVM_INT32)pPrivate->NewParams.RoomSize;
}
- if(pPrivate->NewParams.T60 < 100)
- {
+ if (pPrivate->NewParams.T60 < 100) {
T60 = 100 * LVREV_T60_SCALE;
- }
- else
- {
+ } else {
T60 = pPrivate->NewParams.T60 * LVREV_T60_SCALE;
}
/* Find the nearest room size in table */
- for(i = 0; i < 24; i++)
- {
- if(RoomSize <= LVREV_GainPolyTable[i][0])
- {
+ for (i = 0; i < 24; i++) {
+ if (RoomSize <= LVREV_GainPolyTable[i][0]) {
Index = i;
break;
}
}
- if(RoomSize == LVREV_GainPolyTable[Index][0])
- {
+ if (RoomSize == LVREV_GainPolyTable[Index][0]) {
/* Take table values if the room size is in table */
- for(i = 1; i < 5; i++)
- {
- Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+ for (i = 1; i < 5; i++) {
+ Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
}
Coefs[4] = 0;
- Gain = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
- }
- else
- {
+ Gain = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
+ } else {
/* Interpolate the gain between nearest room sizes */
- LVM_FLOAT Gain1,Gain2;
- LVM_INT32 Tot_Dist,Dist;
+ LVM_FLOAT Gain1, Gain2;
+ LVM_INT32 Tot_Dist, Dist;
- Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] - \
- (LVM_UINT32)LVREV_GainPolyTable[Index-1][0];
+ Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] -
+ (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
Dist = RoomSize - (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
/* Get gain for first */
- for(i = 1; i < 5; i++)
- {
- Coefs[i-1] = LVREV_GainPolyTable[Index-1][i];
+ for (i = 1; i < 5; i++) {
+ Coefs[i - 1] = LVREV_GainPolyTable[Index - 1][i];
}
Coefs[4] = 0;
- Gain1 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
+ Gain1 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
/* Get gain for second */
- for(i = 1; i < 5; i++)
- {
- Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+ for (i = 1; i < 5; i++) {
+ Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
}
Coefs[4] = 0;
- Gain2 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
+ Gain2 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
/* Linear Interpolate the gain */
Gain = Gain1 + (((Gain2 - Gain1) * Dist) / (Tot_Dist));
@@ -461,32 +387,27 @@
* Get the inverse of gain: Q.15
* Gain is mostly above one except few cases, take only gains above 1
*/
- if(Gain < 1)
- {
+ if (Gain < 1) {
pPrivate->Gain = 1;
- }
- else
- {
+ } else {
pPrivate->Gain = 1 / Gain;
}
Index_FLOAT = 100.0f / (LVM_FLOAT)(100 + pPrivate->NewParams.Level);
pPrivate->Gain = pPrivate->Gain * Index_FLOAT;
- pPrivate->GainMixer.Target = (pPrivate->Gain*Index_FLOAT) / 2;
+ pPrivate->GainMixer.Target = (pPrivate->Gain * Index_FLOAT) / 2;
}
/*
* Update the all pass comb filter coefficient
*/
- if( (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
- LVM_INT16 i;
- LVM_FLOAT b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
+ if ((pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_INT16 i;
+ LVM_FLOAT b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
- for (i = 0; i < 4; i++)
- {
- pPrivate->Mixer_SGFeedback[i].Target = b;
+ for (i = 0; i < 4; i++) {
+ pPrivate->Mixer_SGFeedback[i].Target = b;
pPrivate->Mixer_SGFeedforward[i].Target = b;
}
}
@@ -494,11 +415,10 @@
/*
* Update the bypass mixer time constant
*/
- if((pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
- (pPrivate->bFirstControl == LVM_TRUE))
- {
- LVM_UINT16 NumChannels = 1; /* Assume MONO format */
- LVM_FLOAT Alpha;
+ if ((pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+ (pPrivate->bFirstControl == LVM_TRUE)) {
+ LVM_UINT16 NumChannels = 1; /* Assume MONO format */
+ LVM_FLOAT Alpha;
Alpha = LVM_Mixer_TimeConstant(LVREV_FEEDBACKMIXER_TC,
LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
@@ -508,67 +428,55 @@
pPrivate->FeedbackMixer[2].Alpha = Alpha;
pPrivate->FeedbackMixer[3].Alpha = Alpha;
- NumChannels = 2; /* Always stereo output */
- pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(LVREV_BYPASSMIXER_TC,
- LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), NumChannels);
+ NumChannels = 2; /* Always stereo output */
+ pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(
+ LVREV_BYPASSMIXER_TC, LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
+ NumChannels);
pPrivate->BypassMixer.Alpha2 = pPrivate->BypassMixer.Alpha1;
- pPrivate->GainMixer.Alpha = pPrivate->BypassMixer.Alpha1;
+ pPrivate->GainMixer.Alpha = pPrivate->BypassMixer.Alpha1;
}
/*
* Update the bypass mixer targets
*/
- if( (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
- (pPrivate->NewParams.OperatingMode == LVM_MODE_ON))
- {
- pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+ if ((pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
+ (pPrivate->NewParams.OperatingMode == LVM_MODE_ON)) {
+ pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
pPrivate->BypassMixer.Target1 = 0x00000000;
- if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE))
- {
+ if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE)) {
pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
}
- if (pPrivate->NewParams.Level != 0)
- {
+ if (pPrivate->NewParams.Level != 0) {
pPrivate->bDisableReverb = LVM_FALSE;
}
}
- if(pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode)
- {
- if(pPrivate->NewParams.OperatingMode == LVM_MODE_ON)
- {
- pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+ if (pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode) {
+ if (pPrivate->NewParams.OperatingMode == LVM_MODE_ON) {
+ pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
pPrivate->BypassMixer.Target1 = 0x00000000;
pPrivate->BypassMixer.CallbackSet2 = LVM_FALSE;
- OperatingMode = LVM_MODE_ON;
- if (pPrivate->NewParams.Level == 0)
- {
+ OperatingMode = LVM_MODE_ON;
+ if (pPrivate->NewParams.Level == 0) {
pPrivate->bDisableReverb = LVM_TRUE;
- }
- else
- {
+ } else {
pPrivate->bDisableReverb = LVM_FALSE;
}
- }
- else if (pPrivate->bFirstControl == LVM_FALSE)
- {
+ } else if (pPrivate->bFirstControl == LVM_FALSE) {
pPrivate->BypassMixer.Target2 = 0x00000000;
pPrivate->BypassMixer.Target1 = 0x00000000;
pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
- pPrivate->GainMixer.Target = 0.03125f;
+ pPrivate->GainMixer.Target = 0.03125f;
OperatingMode = LVM_MODE_ON;
- }
- else
- {
+ } else {
OperatingMode = LVM_MODE_OFF;
}
}
/* If it is the first call to ApplyNew settings force the current to the target \
to begin immediate playback of the effect */
- if(pPrivate->bFirstControl == LVM_TRUE)
- {
+ if (pPrivate->bFirstControl == LVM_TRUE) {
pPrivate->BypassMixer.Current1 = pPrivate->BypassMixer.Target1;
pPrivate->BypassMixer.Current2 = pPrivate->BypassMixer.Target2;
}
@@ -582,8 +490,7 @@
/*
* Update flag
*/
- if(pPrivate->bFirstControl == LVM_TRUE)
- {
+ if (pPrivate->bFirstControl == LVM_TRUE) {
pPrivate->bFirstControl = LVM_FALSE;
}
@@ -606,12 +513,9 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVM_INT32 BypassMixer_Callback (void *pCallbackData,
- void *pGeneralPurpose,
- LVM_INT16 GeneralPurpose )
-{
-
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)pCallbackData;
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+ LVM_INT16 GeneralPurpose) {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)pCallbackData;
/*
* Avoid build warnings
@@ -623,11 +527,10 @@
* Turn off
*/
pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_OFF;
- pLVREV_Private->bDisableReverb = LVM_TRUE;
+ pLVREV_Private->bDisableReverb = LVM_TRUE;
LVREV_ClearAudioBuffers((LVREV_Handle_t)pCallbackData);
return 0;
}
/* End of file */
-
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index 586539f..5c83ce5 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -41,17 +41,14 @@
/* 1. This function must not be interrupted by the LVM_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance)
-{
-
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance) {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
/*
* Check for error conditions
*/
/* Check for NULL pointers */
- if(hInstance == LVM_NULL)
- {
+ if (hInstance == LVM_NULL) {
return LVREV_NULLADDRESS;
}
@@ -59,18 +56,13 @@
* Clear all filter tap data, delay-lines and other signal related data
*/
- LoadConst_Float(0,
- (LVM_FLOAT *)&pLVREV_Private->pFastData->HPTaps,
- 2);
- LoadConst_Float(0,
- (LVM_FLOAT *)&pLVREV_Private->pFastData->LPTaps,
- 2);
- if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
- {
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[3], 2);
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[2], 2);
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->HPTaps, 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->LPTaps, 2);
+ if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[3], 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[2], 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
LoadConst_Float(0, pLVREV_Private->pDelay_T[2], LVREV_MAX_T2_DELAY);
@@ -78,18 +70,16 @@
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
}
- if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_2)
- {
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+ if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_2) {
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
LoadConst_Float(0, pLVREV_Private->pDelay_T[1], LVREV_MAX_T1_DELAY);
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
}
- if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_1)
- {
- LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
+ if ((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_1) {
+ LoadConst_Float(0, (LVM_FLOAT*)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
}
return LVREV_SUCCESS;
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
index e0b0142..f858b74 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
@@ -42,17 +42,14 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
- LVREV_ControlParams_st *pControlParams)
-{
-
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+ LVREV_ControlParams_st* pControlParams) {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
/*
* Check for error conditions
*/
- if((hInstance == LVM_NULL) || (pControlParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pControlParams == LVM_NULL)) {
return LVREV_NULLADDRESS;
}
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 68f883a..b5db23b 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -45,35 +45,29 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t *phInstance,
- LVREV_MemoryTable_st *pMemoryTable,
- LVREV_InstanceParams_st *pInstanceParams)
-{
-
- INST_ALLOC SlowData;
- INST_ALLOC FastData;
- INST_ALLOC FastCoef;
- INST_ALLOC Temporary;
- LVREV_Instance_st *pLVREV_Private;
- LVM_INT16 i;
- LVM_UINT16 MaxBlockSize;
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+ LVREV_MemoryTable_st* pMemoryTable,
+ LVREV_InstanceParams_st* pInstanceParams) {
+ INST_ALLOC SlowData;
+ INST_ALLOC FastData;
+ INST_ALLOC FastCoef;
+ INST_ALLOC Temporary;
+ LVREV_Instance_st* pLVREV_Private;
+ LVM_INT16 i;
+ LVM_UINT16 MaxBlockSize;
/*
* Check for error conditions
*/
/* Check for NULL pointers */
- if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL))
- {
+ if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
return LVREV_NULLADDRESS;
}
/* Check the memory table for NULL pointers */
- for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++)
- {
- if (pMemoryTable->Region[i].Size!=0)
- {
- if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
- {
- return(LVREV_NULLADDRESS);
+ for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
+ if (pMemoryTable->Region[i].Size != 0) {
+ if (pMemoryTable->Region[i].pBaseAddress == LVM_NULL) {
+ return (LVREV_NULLADDRESS);
}
}
}
@@ -82,101 +76,81 @@
* Check all instance parameters are in range
*/
/* Check for a non-zero block size */
- if (pInstanceParams->MaxBlockSize == 0)
- {
+ if (pInstanceParams->MaxBlockSize == 0) {
return LVREV_OUTOFRANGE;
}
/* Check for a valid number of delay lines */
- if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1)&&
- (pInstanceParams->NumDelays != LVREV_DELAYLINES_2)&&
- (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
- {
+ if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
+ (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
+ (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
return LVREV_OUTOFRANGE;
}
/*
* Initialise the InstAlloc instances
*/
- InstAlloc_Init(&SlowData, pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
- InstAlloc_Init(&FastData, pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
- InstAlloc_Init(&FastCoef, pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
+ InstAlloc_Init(&SlowData, pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
+ InstAlloc_Init(&FastData, pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
+ InstAlloc_Init(&FastCoef, pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
InstAlloc_Init(&Temporary, pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress);
/*
* Zero all memory regions
*/
- LoadConst_Float(0,
- (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
- (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / \
- sizeof(LVM_FLOAT)));
- LoadConst_Float(0,
- (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
- (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / \
- sizeof(LVM_FLOAT)));
- LoadConst_Float(0,
- (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
- (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / \
- sizeof(LVM_FLOAT)));
- LoadConst_Float(0,
- (LVM_FLOAT *)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
- (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / \
- sizeof(LVM_FLOAT)));
+ LoadConst_Float(
+ 0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
+ (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / sizeof(LVM_FLOAT)));
+ LoadConst_Float(
+ 0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
+ (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / sizeof(LVM_FLOAT)));
+ LoadConst_Float(
+ 0, (LVM_FLOAT*)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
+ (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / sizeof(LVM_FLOAT)));
+ LoadConst_Float(
+ 0, (LVM_FLOAT*)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
+ (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / sizeof(LVM_FLOAT)));
/*
* Set the instance handle if not already initialised
*/
- if (*phInstance == LVM_NULL)
- {
+ if (*phInstance == LVM_NULL) {
*phInstance = InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
}
- pLVREV_Private =(LVREV_Instance_st *)*phInstance;
+ pLVREV_Private = (LVREV_Instance_st*)*phInstance;
pLVREV_Private->MemoryTable = *pMemoryTable;
- if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
MaxBlockSize = LVREV_MAX_AP3_DELAY;
- }
- else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
- {
+ } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
MaxBlockSize = LVREV_MAX_AP1_DELAY;
- }
- else
- {
+ } else {
MaxBlockSize = LVREV_MAX_AP0_DELAY;
}
- if(MaxBlockSize>pInstanceParams->MaxBlockSize)
- {
- MaxBlockSize=pInstanceParams->MaxBlockSize;
+ if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
+ MaxBlockSize = pInstanceParams->MaxBlockSize;
}
/*
* Set the data, coefficient and temporary memory pointers
*/
/* Fast data memory base address */
- pLVREV_Private->pFastData = (LVREV_FastData_st *)
- InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
- {
- pLVREV_Private->pDelay_T[3] =
- (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * \
- sizeof(LVM_FLOAT));
- pLVREV_Private->pDelay_T[2] =
- (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * \
- sizeof(LVM_FLOAT));
- pLVREV_Private->pDelay_T[1] =
- (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
- sizeof(LVM_FLOAT));
- pLVREV_Private->pDelay_T[0] =
- (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
- sizeof(LVM_FLOAT));
+ pLVREV_Private->pFastData =
+ (LVREV_FastData_st*)InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
+ pLVREV_Private->pDelay_T[3] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * sizeof(LVM_FLOAT));
+ pLVREV_Private->pDelay_T[2] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * sizeof(LVM_FLOAT));
+ pLVREV_Private->pDelay_T[1] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
+ pLVREV_Private->pDelay_T[0] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
- for(i = 0; i < 4; i++)
- {
+ for (i = 0; i < 4; i++) {
/* Scratch for each delay line output */
- pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
- sizeof(LVM_FLOAT) * \
- MaxBlockSize);
+ pLVREV_Private->pScratchDelayLine[i] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
@@ -185,60 +159,50 @@
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
- {
- pLVREV_Private->pDelay_T[1] = (LVM_FLOAT *)
- InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
- sizeof(LVM_FLOAT));
- pLVREV_Private->pDelay_T[0] = (LVM_FLOAT *)
- InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
- sizeof(LVM_FLOAT));
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
+ pLVREV_Private->pDelay_T[1] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
+ pLVREV_Private->pDelay_T[0] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
- for(i = 0; i < 2; i++)
- {
+ for (i = 0; i < 2; i++) {
/* Scratch for each delay line output */
- pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
- sizeof(LVM_FLOAT) * \
- MaxBlockSize);
+ pLVREV_Private->pScratchDelayLine[i] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
LoadConst_Float(0, pLVREV_Private->pDelay_T[1], (LVM_INT16)LVREV_MAX_T1_DELAY);
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
- {
- pLVREV_Private->pDelay_T[0] = (LVM_FLOAT *)InstAlloc_AddMember(&FastData,
- LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
+ pLVREV_Private->pDelay_T[0] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
- for(i = 0; i < 1; i++)
- {
+ for (i = 0; i < 1; i++) {
/* Scratch for each delay line output */
- pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
- sizeof(LVM_FLOAT) * \
- MaxBlockSize);
+ pLVREV_Private->pScratchDelayLine[i] =
+ (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
}
/* All-pass delay buffer addresses and sizes */
- pLVREV_Private->T[0] = LVREV_MAX_T0_DELAY;
- pLVREV_Private->T[1] = LVREV_MAX_T1_DELAY;
- pLVREV_Private->T[2] = LVREV_MAX_T2_DELAY;
- pLVREV_Private->T[3] = LVREV_MAX_T3_DELAY;
- pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
+ pLVREV_Private->T[0] = LVREV_MAX_T0_DELAY;
+ pLVREV_Private->T[1] = LVREV_MAX_T1_DELAY;
+ pLVREV_Private->T[2] = LVREV_MAX_T2_DELAY;
+ pLVREV_Private->T[3] = LVREV_MAX_T3_DELAY;
+ pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
/* Fast coefficient memory base address */
- pLVREV_Private->pFastCoef =
- (LVREV_FastCoef_st *)InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
+ pLVREV_Private->pFastCoef =
+ (LVREV_FastCoef_st*)InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
/* General purpose scratch */
- pLVREV_Private->pScratch =
- (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * \
- MaxBlockSize);
+ pLVREV_Private->pScratch =
+ (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
/* Mono->stereo input save for end mix */
- pLVREV_Private->pInputSave =
- (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * \
- MaxBlockSize);
+ pLVREV_Private->pInputSave =
+ (LVM_FLOAT*)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
LoadConst_Float(0, pLVREV_Private->pInputSave, (LVM_INT16)(MaxBlockSize * 2));
/*
@@ -249,91 +213,90 @@
/*
* Set the parameters to invalid
*/
- pLVREV_Private->CurrentParams.SampleRate = LVM_FS_INVALID;
+ pLVREV_Private->CurrentParams.SampleRate = LVM_FS_INVALID;
pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_DUMMY;
- pLVREV_Private->CurrentParams.SourceFormat = LVM_SOURCE_DUMMY;
+ pLVREV_Private->CurrentParams.SourceFormat = LVM_SOURCE_DUMMY;
- pLVREV_Private->bControlPending = LVM_FALSE;
- pLVREV_Private->bFirstControl = LVM_TRUE;
- pLVREV_Private->bDisableReverb = LVM_FALSE;
+ pLVREV_Private->bControlPending = LVM_FALSE;
+ pLVREV_Private->bFirstControl = LVM_TRUE;
+ pLVREV_Private->bDisableReverb = LVM_FALSE;
/*
* Set mixer parameters
*/
- pLVREV_Private->BypassMixer.CallbackParam2 = 0;
- pLVREV_Private->BypassMixer.pCallbackHandle2 = pLVREV_Private;
- pLVREV_Private->BypassMixer.pGeneralPurpose2 = LVM_NULL;
- pLVREV_Private->BypassMixer.pCallBack2 = BypassMixer_Callback;
- pLVREV_Private->BypassMixer.CallbackSet2 = LVM_FALSE;
- pLVREV_Private->BypassMixer.Current2 = 0;
- pLVREV_Private->BypassMixer.Target2 = 0;
- pLVREV_Private->BypassMixer.CallbackParam1 = 0;
- pLVREV_Private->BypassMixer.pCallbackHandle1 = LVM_NULL;
- pLVREV_Private->BypassMixer.pGeneralPurpose1 = LVM_NULL;
- pLVREV_Private->BypassMixer.pCallBack1 = LVM_NULL;
- pLVREV_Private->BypassMixer.CallbackSet1 = LVM_FALSE;
- pLVREV_Private->BypassMixer.Current1 = 0x00000000;
- pLVREV_Private->BypassMixer.Target1 = 0x00000000;
+ pLVREV_Private->BypassMixer.CallbackParam2 = 0;
+ pLVREV_Private->BypassMixer.pCallbackHandle2 = pLVREV_Private;
+ pLVREV_Private->BypassMixer.pGeneralPurpose2 = LVM_NULL;
+ pLVREV_Private->BypassMixer.pCallBack2 = BypassMixer_Callback;
+ pLVREV_Private->BypassMixer.CallbackSet2 = LVM_FALSE;
+ pLVREV_Private->BypassMixer.Current2 = 0;
+ pLVREV_Private->BypassMixer.Target2 = 0;
+ pLVREV_Private->BypassMixer.CallbackParam1 = 0;
+ pLVREV_Private->BypassMixer.pCallbackHandle1 = LVM_NULL;
+ pLVREV_Private->BypassMixer.pGeneralPurpose1 = LVM_NULL;
+ pLVREV_Private->BypassMixer.pCallBack1 = LVM_NULL;
+ pLVREV_Private->BypassMixer.CallbackSet1 = LVM_FALSE;
+ pLVREV_Private->BypassMixer.Current1 = 0x00000000;
+ pLVREV_Private->BypassMixer.Target1 = 0x00000000;
- pLVREV_Private->RoomSizeInms = 100; // 100 msec
+ pLVREV_Private->RoomSizeInms = 100; // 100 msec
/*
* Set the output gain mixer parameters
*/
- pLVREV_Private->GainMixer.CallbackParam = 0;
- pLVREV_Private->GainMixer.pCallbackHandle = LVM_NULL;
- pLVREV_Private->GainMixer.pGeneralPurpose = LVM_NULL;
- pLVREV_Private->GainMixer.pCallBack = LVM_NULL;
- pLVREV_Private->GainMixer.CallbackSet = LVM_FALSE;
- pLVREV_Private->GainMixer.Current = 0.03125f;//0x03ffffff;
- pLVREV_Private->GainMixer.Target = 0.03125f;//0x03ffffff;
+ pLVREV_Private->GainMixer.CallbackParam = 0;
+ pLVREV_Private->GainMixer.pCallbackHandle = LVM_NULL;
+ pLVREV_Private->GainMixer.pGeneralPurpose = LVM_NULL;
+ pLVREV_Private->GainMixer.pCallBack = LVM_NULL;
+ pLVREV_Private->GainMixer.CallbackSet = LVM_FALSE;
+ pLVREV_Private->GainMixer.Current = 0.03125f; // 0x03ffffff;
+ pLVREV_Private->GainMixer.Target = 0.03125f; // 0x03ffffff;
/*
* Set the All-Pass Filter mixers
*/
- for (i=0; i<4; i++)
- {
+ for (i = 0; i < 4; i++) {
pLVREV_Private->pOffsetA[i] = pLVREV_Private->pDelay_T[i];
pLVREV_Private->pOffsetB[i] = pLVREV_Private->pDelay_T[i];
/* Delay tap selection mixer */
- pLVREV_Private->Mixer_APTaps[i].CallbackParam2 = 0;
+ pLVREV_Private->Mixer_APTaps[i].CallbackParam2 = 0;
pLVREV_Private->Mixer_APTaps[i].pCallbackHandle2 = LVM_NULL;
pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose2 = LVM_NULL;
- pLVREV_Private->Mixer_APTaps[i].pCallBack2 = LVM_NULL;
- pLVREV_Private->Mixer_APTaps[i].CallbackSet2 = LVM_FALSE;
- pLVREV_Private->Mixer_APTaps[i].Current2 = 0;
- pLVREV_Private->Mixer_APTaps[i].Target2 = 0;
- pLVREV_Private->Mixer_APTaps[i].CallbackParam1 = 0;
+ pLVREV_Private->Mixer_APTaps[i].pCallBack2 = LVM_NULL;
+ pLVREV_Private->Mixer_APTaps[i].CallbackSet2 = LVM_FALSE;
+ pLVREV_Private->Mixer_APTaps[i].Current2 = 0;
+ pLVREV_Private->Mixer_APTaps[i].Target2 = 0;
+ pLVREV_Private->Mixer_APTaps[i].CallbackParam1 = 0;
pLVREV_Private->Mixer_APTaps[i].pCallbackHandle1 = LVM_NULL;
pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose1 = LVM_NULL;
- pLVREV_Private->Mixer_APTaps[i].pCallBack1 = LVM_NULL;
- pLVREV_Private->Mixer_APTaps[i].CallbackSet1 = LVM_FALSE;
- pLVREV_Private->Mixer_APTaps[i].Current1 = 0;
- pLVREV_Private->Mixer_APTaps[i].Target1 = 1;
+ pLVREV_Private->Mixer_APTaps[i].pCallBack1 = LVM_NULL;
+ pLVREV_Private->Mixer_APTaps[i].CallbackSet1 = LVM_FALSE;
+ pLVREV_Private->Mixer_APTaps[i].Current1 = 0;
+ pLVREV_Private->Mixer_APTaps[i].Target1 = 1;
/* Feedforward mixer */
- pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam = 0;
+ pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam = 0;
pLVREV_Private->Mixer_SGFeedforward[i].pCallbackHandle = LVM_NULL;
pLVREV_Private->Mixer_SGFeedforward[i].pGeneralPurpose = LVM_NULL;
- pLVREV_Private->Mixer_SGFeedforward[i].pCallBack = LVM_NULL;
- pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet = LVM_FALSE;
- pLVREV_Private->Mixer_SGFeedforward[i].Current = 0;
- pLVREV_Private->Mixer_SGFeedforward[i].Target = 0;
+ pLVREV_Private->Mixer_SGFeedforward[i].pCallBack = LVM_NULL;
+ pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet = LVM_FALSE;
+ pLVREV_Private->Mixer_SGFeedforward[i].Current = 0;
+ pLVREV_Private->Mixer_SGFeedforward[i].Target = 0;
/* Feedback mixer */
- pLVREV_Private->Mixer_SGFeedback[i].CallbackParam = 0;
+ pLVREV_Private->Mixer_SGFeedback[i].CallbackParam = 0;
pLVREV_Private->Mixer_SGFeedback[i].pCallbackHandle = LVM_NULL;
pLVREV_Private->Mixer_SGFeedback[i].pGeneralPurpose = LVM_NULL;
- pLVREV_Private->Mixer_SGFeedback[i].pCallBack = LVM_NULL;
- pLVREV_Private->Mixer_SGFeedback[i].CallbackSet = LVM_FALSE;
- pLVREV_Private->Mixer_SGFeedback[i].Current = 0;
- pLVREV_Private->Mixer_SGFeedback[i].Target = 0;
+ pLVREV_Private->Mixer_SGFeedback[i].pCallBack = LVM_NULL;
+ pLVREV_Private->Mixer_SGFeedback[i].CallbackSet = LVM_FALSE;
+ pLVREV_Private->Mixer_SGFeedback[i].Current = 0;
+ pLVREV_Private->Mixer_SGFeedback[i].Target = 0;
/* Feedback gain mixer */
- pLVREV_Private->FeedbackMixer[i].CallbackParam = 0;
- pLVREV_Private->FeedbackMixer[i].pCallbackHandle = LVM_NULL;
- pLVREV_Private->FeedbackMixer[i].pGeneralPurpose = LVM_NULL;
- pLVREV_Private->FeedbackMixer[i].pCallBack = LVM_NULL;
- pLVREV_Private->FeedbackMixer[i].CallbackSet = LVM_FALSE;
- pLVREV_Private->FeedbackMixer[i].Current = 0;
- pLVREV_Private->FeedbackMixer[i].Target = 0;
+ pLVREV_Private->FeedbackMixer[i].CallbackParam = 0;
+ pLVREV_Private->FeedbackMixer[i].pCallbackHandle = LVM_NULL;
+ pLVREV_Private->FeedbackMixer[i].pGeneralPurpose = LVM_NULL;
+ pLVREV_Private->FeedbackMixer[i].pCallBack = LVM_NULL;
+ pLVREV_Private->FeedbackMixer[i].CallbackSet = LVM_FALSE;
+ pLVREV_Private->FeedbackMixer[i].Current = 0;
+ pLVREV_Private->FeedbackMixer[i].Target = 0;
}
/* Delay tap index */
pLVREV_Private->A_DelaySize[0] = LVREV_MAX_AP0_DELAY;
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
index f59933c..2c1e04d 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
@@ -56,46 +56,40 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
- LVREV_MemoryTable_st *pMemoryTable,
- LVREV_InstanceParams_st *pInstanceParams)
-{
-
- INST_ALLOC SlowData;
- INST_ALLOC FastData;
- INST_ALLOC FastCoef;
- INST_ALLOC Temporary;
- LVM_INT16 i;
- LVM_UINT16 MaxBlockSize;
+LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance,
+ LVREV_MemoryTable_st* pMemoryTable,
+ LVREV_InstanceParams_st* pInstanceParams) {
+ INST_ALLOC SlowData;
+ INST_ALLOC FastData;
+ INST_ALLOC FastCoef;
+ INST_ALLOC Temporary;
+ LVM_INT16 i;
+ LVM_UINT16 MaxBlockSize;
/*
* Check for error conditions
*/
/* Check for NULL pointer */
- if (pMemoryTable == LVM_NULL)
- {
- return(LVREV_NULLADDRESS);
+ if (pMemoryTable == LVM_NULL) {
+ return (LVREV_NULLADDRESS);
}
/*
* Check all instance parameters are in range
*/
- if (pInstanceParams != LVM_NULL)
- {
+ if (pInstanceParams != LVM_NULL) {
/*
* Call for memory allocation, so check the parameters
*/
/* Check for a non-zero block size */
- if (pInstanceParams->MaxBlockSize == 0)
- {
+ if (pInstanceParams->MaxBlockSize == 0) {
return LVREV_OUTOFRANGE;
}
/* Check for a valid number of delay lines */
if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
(pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
- (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
- {
+ (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
return LVREV_OUTOFRANGE;
}
}
@@ -103,86 +97,75 @@
/*
* Initialise the InstAlloc instances
*/
- InstAlloc_Init(&SlowData, (void *)LVM_NULL);
- InstAlloc_Init(&FastData, (void *)LVM_NULL);
- InstAlloc_Init(&FastCoef, (void *)LVM_NULL);
- InstAlloc_Init(&Temporary, (void *)LVM_NULL);
+ InstAlloc_Init(&SlowData, (void*)LVM_NULL);
+ InstAlloc_Init(&FastData, (void*)LVM_NULL);
+ InstAlloc_Init(&FastCoef, (void*)LVM_NULL);
+ InstAlloc_Init(&Temporary, (void*)LVM_NULL);
/*
* Fill in the memory table
*/
- if (hInstance == LVM_NULL)
- {
+ if (hInstance == LVM_NULL) {
/*
* Check for null pointers
*/
- if (pInstanceParams == LVM_NULL)
- {
- return(LVREV_NULLADDRESS);
+ if (pInstanceParams == LVM_NULL) {
+ return (LVREV_NULLADDRESS);
}
/*
* Select the maximum internal block size
*/
- if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
MaxBlockSize = LVREV_MAX_AP3_DELAY;
- }
- else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
- {
+ } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
MaxBlockSize = LVREV_MAX_AP1_DELAY;
- }
- else
- {
+ } else {
MaxBlockSize = LVREV_MAX_AP0_DELAY;
}
- if(MaxBlockSize>pInstanceParams->MaxBlockSize)
- {
- MaxBlockSize=pInstanceParams->MaxBlockSize;
+ if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
+ MaxBlockSize = pInstanceParams->MaxBlockSize;
}
/*
* Slow data memory
*/
InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
- pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&SlowData);
- pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
+ pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size = InstAlloc_GetTotal(&SlowData);
+ pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type = LVM_PERSISTENT_SLOW_DATA;
pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
/*
* Persistent fast data memory
*/
InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * sizeof(LVM_FLOAT));
InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * sizeof(LVM_FLOAT));
InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
}
- pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&FastData);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size = InstAlloc_GetTotal(&FastData);
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type = LVM_PERSISTENT_FAST_DATA;
pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
/*
* Persistent fast coefficient memory
*/
InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
- pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&FastCoef);
- pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size = InstAlloc_GetTotal(&FastCoef);
+ pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type = LVM_PERSISTENT_FAST_COEF;
pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
/*
@@ -192,41 +175,33 @@
InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
/* Mono->stereo input saved for end mix */
InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
- {
- for(i=0; i<4; i++)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
+ for (i = 0; i < 4; i++) {
/* A Scratch buffer for each delay line */
InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
- {
- for(i=0; i<2; i++)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
+ for (i = 0; i < 2; i++) {
/* A Scratch buffer for each delay line */
InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
}
- if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
- {
- for(i=0; i<1; i++)
- {
+ if (pInstanceParams->NumDelays == LVREV_DELAYLINES_1) {
+ for (i = 0; i < 1; i++) {
/* A Scratch buffer for each delay line */
InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
}
}
- pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&Temporary);
- pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
+ pMemoryTable->Region[LVM_TEMPORARY_FAST].Size = InstAlloc_GetTotal(&Temporary);
+ pMemoryTable->Region[LVM_TEMPORARY_FAST].Type = LVM_TEMPORARY_FAST;
pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
- }
- else
- {
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+ } else {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
/*
* Read back memory allocation table
@@ -234,7 +209,7 @@
*pMemoryTable = pLVREV_Private->MemoryTable;
}
- return(LVREV_SUCCESS);
+ return (LVREV_SUCCESS);
}
/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
index 2c27c6e..b6edb03 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
@@ -37,63 +37,63 @@
/* */
/****************************************************************************************/
/* General */
-#define ONE_OVER_SQRT_TWO 0.707107f /* 1/sqrt(2) * 2^15 */
-#define LVREV_B_8_on_1000 0.008f /* 0.8 * 2^31 */
-#define LVREV_HEADROOM 0.25f /* -12dB * 2^15 */
-#define LVREV_2_9_INQ29 2.9f /* 2.9 in Q29 format */
-#define LVREV_MIN3DB 0.7079457f /* -3dB in Q15 format */
+#define ONE_OVER_SQRT_TWO 0.707107f /* 1/sqrt(2) * 2^15 */
+#define LVREV_B_8_on_1000 0.008f /* 0.8 * 2^31 */
+#define LVREV_HEADROOM 0.25f /* -12dB * 2^15 */
+#define LVREV_2_9_INQ29 2.9f /* 2.9 in Q29 format */
+#define LVREV_MIN3DB 0.7079457f /* -3dB in Q15 format */
/* Intenal constants */
-#define LVREV_LP_Poly_Order 4
-#define LVREV_LP_Poly_Shift 5
+#define LVREV_LP_Poly_Order 4
+#define LVREV_LP_Poly_Shift 5
-#define LVREV_T60_SCALE 0.000142f /*(1/7000) */
+#define LVREV_T60_SCALE 0.000142f /*(1/7000) */
-#define LVREV_T_3_Power_0_on_4 1.0f
-#define LVREV_T_3_Power_1_on_4 1.316074f
-#define LVREV_T_3_Power_2_on_4 1.732051f
-#define LVREV_T_3_Power_3_on_4 2.279507f
-#define LVREV_T_3_Power_minus0_on_4 1.0f /* 3^(-0/4) * 2^15 */
-#define LVREV_T_3_Power_minus1_on_4 0.759836f /* 3^(-1/4) * 2^15 */
-#define LVREV_T_3_Power_minus2_on_4 0.577350f /* 3^(-2/4) * 2^15 */
-#define LVREV_T_3_Power_minus3_on_4 0.438691f /* 3^(-3/4) * 2^15 */
+#define LVREV_T_3_Power_0_on_4 1.0f
+#define LVREV_T_3_Power_1_on_4 1.316074f
+#define LVREV_T_3_Power_2_on_4 1.732051f
+#define LVREV_T_3_Power_3_on_4 2.279507f
+#define LVREV_T_3_Power_minus0_on_4 1.0f /* 3^(-0/4) * 2^15 */
+#define LVREV_T_3_Power_minus1_on_4 0.759836f /* 3^(-1/4) * 2^15 */
+#define LVREV_T_3_Power_minus2_on_4 0.577350f /* 3^(-2/4) * 2^15 */
+#define LVREV_T_3_Power_minus3_on_4 0.438691f /* 3^(-3/4) * 2^15 */
- /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T3_DELAY 10108
- /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T2_DELAY 13304
- /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T1_DELAY 17508
- /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T0_DELAY 23040
- /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP3_DELAY 6740
- /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP2_DELAY 8872
- /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP1_DELAY 11672
- /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP0_DELAY 15360
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T3_DELAY 10108
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T2_DELAY 13304
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T1_DELAY 17508
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T0_DELAY 23040
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP3_DELAY 6740
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP2_DELAY 8872
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP1_DELAY 11672
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP0_DELAY 15360
-#define LVREV_BYPASSMIXER_TC 1000 /* Bypass mixer time constant*/
-#define LVREV_ALLPASS_TC 1000 /* All-pass filter time constant */
-#define LVREV_ALLPASS_TAP_TC 10000 /* All-pass filter dely tap change */
-#define LVREV_FEEDBACKMIXER_TC 100 /* Feedback mixer time constant*/
-#define LVREV_OUTPUTGAIN_SHIFT 5 /* Bits shift for output gain correction */
+#define LVREV_BYPASSMIXER_TC 1000 /* Bypass mixer time constant*/
+#define LVREV_ALLPASS_TC 1000 /* All-pass filter time constant */
+#define LVREV_ALLPASS_TAP_TC 10000 /* All-pass filter delay tap change */
+#define LVREV_FEEDBACKMIXER_TC 100 /* Feedback mixer time constant*/
+#define LVREV_OUTPUTGAIN_SHIFT 5 /* Bits shift for output gain correction */
/* Parameter limits */
-#define LVREV_NUM_FS 13 /* Number of supported sample rates */
+#define LVREV_NUM_FS 13 /* Number of supported sample rates */
-#define LVREV_MAXBLKSIZE_LIMIT 64 /* Maximum block size low limit */
-#define LVREV_MAX_LEVEL 100 /* Maximum level, 100% */
-#define LVREV_MIN_LPF_CORNER 50 /* Low pass filter limits */
-#define LVREV_MAX_LPF_CORNER 23999
-#define LVREV_MIN_HPF_CORNER 20 /* High pass filrer limits */
-#define LVREV_MAX_HPF_CORNER 1000
-#define LVREV_MAX_T60 7000 /* Maximum T60 time in ms */
-#define LVREV_MAX_DENSITY 100 /* Maximum density, 100% */
-#define LVREV_MAX_DAMPING 100 /* Maximum damping, 100% */
-#define LVREV_MAX_ROOMSIZE 100 /* Maximum room size, 100% */
+#define LVREV_MAXBLKSIZE_LIMIT 64 /* Maximum block size low limit */
+#define LVREV_MAX_LEVEL 100 /* Maximum level, 100% */
+#define LVREV_MIN_LPF_CORNER 50 /* Low pass filter limits */
+#define LVREV_MAX_LPF_CORNER 23999
+#define LVREV_MIN_HPF_CORNER 20 /* High pass filter limits */
+#define LVREV_MAX_HPF_CORNER 1000
+#define LVREV_MAX_T60 7000 /* Maximum T60 time in ms */
+#define LVREV_MAX_DENSITY 100 /* Maximum density, 100% */
+#define LVREV_MAX_DAMPING 100 /* Maximum damping, 100% */
+#define LVREV_MAX_ROOMSIZE 100 /* Maximum room size, 100% */
/****************************************************************************************/
/* */
@@ -102,72 +102,68 @@
/****************************************************************************************/
/* Fast data structure */
-typedef struct
-{
- Biquad_1I_Order1_FLOAT_Taps_t HPTaps; /* High pass filter taps */
- Biquad_1I_Order1_FLOAT_Taps_t LPTaps; /* Low pass filter taps */
- Biquad_1I_Order1_FLOAT_Taps_t RevLPTaps[4]; /* Reverb low pass filters taps */
+typedef struct {
+ Biquad_1I_Order1_FLOAT_Taps_t HPTaps; /* High pass filter taps */
+ Biquad_1I_Order1_FLOAT_Taps_t LPTaps; /* Low pass filter taps */
+ Biquad_1I_Order1_FLOAT_Taps_t RevLPTaps[4]; /* Reverb low pass filters taps */
} LVREV_FastData_st;
/* Fast coefficient structure */
-typedef struct
-{
-
- Biquad_FLOAT_Instance_t HPCoefs; /* High pass filter coefficients */
- Biquad_FLOAT_Instance_t LPCoefs; /* Low pass filter coefficients */
- Biquad_FLOAT_Instance_t RevLPCoefs[4]; /* Reverb low pass filters coefficients */
+typedef struct {
+ Biquad_FLOAT_Instance_t HPCoefs; /* High pass filter coefficients */
+ Biquad_FLOAT_Instance_t LPCoefs; /* Low pass filter coefficients */
+ Biquad_FLOAT_Instance_t RevLPCoefs[4]; /* Reverb low pass filters coefficients */
} LVREV_FastCoef_st;
-typedef struct
-{
+typedef struct {
/* General */
- LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
- LVREV_MemoryTable_st MemoryTable; /* Memory table */
- LVREV_ControlParams_st CurrentParams; /* Parameters being used */
- LVREV_ControlParams_st NewParams; /* New parameters from the \
- calling application */
- LVM_CHAR bControlPending; /* Flag to indicate new parameters \
- are available */
- LVM_CHAR bFirstControl; /* Flag to indicate that the control \
- function is called for the first time */
- LVM_CHAR bDisableReverb; /* Flag to indicate that the mix level is
- 0% and the reverb can be disabled */
- LVM_INT32 RoomSizeInms; /* Room size in msec */
- LVM_INT32 MaxBlkLen; /* Maximum block size for internal
- processing */
+ LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
+ LVREV_MemoryTable_st MemoryTable; /* Memory table */
+ LVREV_ControlParams_st CurrentParams; /* Parameters being used */
+ LVREV_ControlParams_st NewParams; /* New parameters from the \
+ calling application */
+ LVM_CHAR bControlPending; /* Flag to indicate new parameters \
+ are available */
+ LVM_CHAR bFirstControl; /* Flag to indicate that the control \
+ function is called for the first time */
+ LVM_CHAR bDisableReverb; /* Flag to indicate that the mix level is
+ 0% and the reverb can be disabled */
+ LVM_INT32 RoomSizeInms; /* Room size in msec */
+ LVM_INT32 MaxBlkLen; /* Maximum block size for internal
+ processing */
/* Aligned memory pointers */
- LVREV_FastData_st *pFastData; /* Fast data memory base address */
- LVREV_FastCoef_st *pFastCoef; /* Fast coefficient memory base address */
- LVM_FLOAT *pScratchDelayLine[4]; /* Delay line scratch memory */
- LVM_FLOAT *pScratch; /* Multi ussge scratch */
- LVM_FLOAT *pInputSave; /* Reverb block input save for dry/wet
- mixing*/
+ LVREV_FastData_st* pFastData; /* Fast data memory base address */
+ LVREV_FastCoef_st* pFastCoef; /* Fast coefficient memory base address */
+ LVM_FLOAT* pScratchDelayLine[4]; /* Delay line scratch memory */
+ LVM_FLOAT* pScratch; /* Multi usage scratch */
+ LVM_FLOAT* pInputSave; /* Reverb block input save for dry/wet
+ mixing*/
/* Feedback matrix */
- Mix_1St_Cll_FLOAT_t FeedbackMixer[4]; /* Mixer for Pop and Click Supression \
- caused by feedback Gain */
+ Mix_1St_Cll_FLOAT_t FeedbackMixer[4]; /* Mixer for Pop and Click Suppression \
+ caused by feedback Gain */
/* All-Pass Filter */
- LVM_INT32 T[4]; /* Maximum delay size of buffer */
- LVM_FLOAT *pDelay_T[4]; /* Pointer to delay buffers */
- LVM_INT32 Delay_AP[4]; /* Offset to AP delay buffer start */
- LVM_INT16 AB_Selection; /* Smooth from tap A to B when 1 \
- otherwise B to A */
- LVM_INT32 A_DelaySize[4]; /* A delay length in samples */
- LVM_INT32 B_DelaySize[4]; /* B delay length in samples */
- LVM_FLOAT *pOffsetA[4]; /* Offset for the A delay tap */
- LVM_FLOAT *pOffsetB[4]; /* Offset for the B delay tap */
- Mix_2St_Cll_FLOAT_t Mixer_APTaps[4]; /* Smoothed AP delay mixer */
- Mix_1St_Cll_FLOAT_t Mixer_SGFeedback[4]; /* Smoothed SAfeedback gain */
- Mix_1St_Cll_FLOAT_t Mixer_SGFeedforward[4]; /* Smoothed AP feedforward gain */
+ LVM_INT32 T[4]; /* Maximum delay size of buffer */
+ LVM_FLOAT* pDelay_T[4]; /* Pointer to delay buffers */
+ LVM_INT32 Delay_AP[4]; /* Offset to AP delay buffer start */
+ LVM_INT16 AB_Selection; /* Smooth from tap A to B when 1 \
+ otherwise B to A */
+ LVM_INT32 A_DelaySize[4]; /* A delay length in samples */
+ LVM_INT32 B_DelaySize[4]; /* B delay length in samples */
+ LVM_FLOAT* pOffsetA[4]; /* Offset for the A delay tap */
+ LVM_FLOAT* pOffsetB[4]; /* Offset for the B delay tap */
+ Mix_2St_Cll_FLOAT_t Mixer_APTaps[4]; /* Smoothed AP delay mixer */
+ Mix_1St_Cll_FLOAT_t Mixer_SGFeedback[4]; /* Smoothed AP feedback gain */
+ Mix_1St_Cll_FLOAT_t Mixer_SGFeedforward[4]; /* Smoothed AP feedforward gain */
/* Output gain */
- Mix_2St_Cll_FLOAT_t BypassMixer; /* Dry/wet mixer */
- LVM_FLOAT Gain; /* Gain applied to output to maintain
- average signal power */
- Mix_1St_Cll_FLOAT_t GainMixer; /* Gain smoothing */
+ Mix_2St_Cll_FLOAT_t BypassMixer; /* Dry/wet mixer */
+ LVM_FLOAT Gain; /* Gain applied to output to maintain
+ average signal power */
+ Mix_1St_Cll_FLOAT_t GainMixer; /* Gain smoothing */
} LVREV_Instance_st;
@@ -177,15 +173,12 @@
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st *pPrivate);
-void ReverbBlock(LVM_FLOAT *pInput,
- LVM_FLOAT *pOutput,
- LVREV_Instance_st *pPrivate,
- LVM_UINT16 NumSamples);
-LVM_INT32 BypassMixer_Callback(void *pCallbackData,
- void *pGeneralPurpose,
- LVM_INT16 GeneralPurpose );
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate);
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+ LVM_UINT16 NumSamples);
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+ LVM_INT16 GeneralPurpose);
-#endif /** __LVREV_PRIVATE_H__ **/
+#endif /** __LVREV_PRIVATE_H__ **/
/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
index 35f9ad8..ed3b89c 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
@@ -45,43 +45,37 @@
/* 1. The input and output buffers must be 32-bit aligned */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- const LVM_UINT16 NumSamples)
-{
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)hInstance;
- LVM_FLOAT *pInput = (LVM_FLOAT *)pInData;
- LVM_FLOAT *pOutput = pOutData;
- LVM_INT32 SamplesToProcess, RemainingSamples;
- LVM_INT32 format = 1;
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples) {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+ LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+ LVM_FLOAT* pOutput = pOutData;
+ LVM_INT32 SamplesToProcess, RemainingSamples;
+ LVM_INT32 format = 1;
/*
* Check for error conditions
*/
/* Check for NULL pointers */
- if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
return LVREV_NULLADDRESS;
}
/*
* Apply the new controls settings if required
*/
- if(pLVREV_Private->bControlPending == LVM_TRUE)
- {
- LVREV_ReturnStatus_en errorCode;
+ if (pLVREV_Private->bControlPending == LVM_TRUE) {
+ LVREV_ReturnStatus_en errorCode;
/*
* Clear the pending flag and update the control settings
*/
pLVREV_Private->bControlPending = LVM_FALSE;
- errorCode = LVREV_ApplyNewSettings (pLVREV_Private);
+ errorCode = LVREV_ApplyNewSettings(pLVREV_Private);
- if(errorCode != LVREV_SUCCESS)
- {
+ if (errorCode != LVREV_SUCCESS) {
return errorCode;
}
}
@@ -89,27 +83,23 @@
/*
* Trap the case where the number of samples is zero.
*/
- if (NumSamples == 0)
- {
+ if (NumSamples == 0) {
return LVREV_SUCCESS;
}
/*
* If OFF copy and reformat the data as necessary
*/
- if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF)
- {
- if(pInput != pOutput)
- {
+ if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF) {
+ if (pInput != pOutput) {
/*
* Copy the data to the output buffer, convert to stereo is required
*/
- if(pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO){
+ if (pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO) {
MonoTo2I_Float(pInput, pOutput, NumSamples);
} else {
- Copy_Float(pInput,
- pOutput,
- (LVM_INT16)(NumSamples << 1)); // 32 bit data, stereo
+ Copy_Float(pInput, pOutput,
+ (LVM_INT16)(NumSamples << 1)); // 32 bit data, stereo
}
}
@@ -118,31 +108,26 @@
RemainingSamples = (LVM_INT32)NumSamples;
- if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO)
- {
+ if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO) {
format = 2;
}
- while (RemainingSamples!=0)
- {
+ while (RemainingSamples != 0) {
/*
* Process the data
*/
- if(RemainingSamples > pLVREV_Private->MaxBlkLen)
- {
- SamplesToProcess = pLVREV_Private->MaxBlkLen;
+ if (RemainingSamples > pLVREV_Private->MaxBlkLen) {
+ SamplesToProcess = pLVREV_Private->MaxBlkLen;
RemainingSamples = (LVM_INT16)(RemainingSamples - SamplesToProcess);
- }
- else
- {
+ } else {
SamplesToProcess = RemainingSamples;
RemainingSamples = 0;
}
ReverbBlock(pInput, pOutput, pLVREV_Private, (LVM_UINT16)SamplesToProcess);
- pInput = (LVM_FLOAT *)(pInput + (SamplesToProcess * format));
- pOutput = (LVM_FLOAT *)(pOutput + (SamplesToProcess * 2)); // Always stereo output
+ pInput = (LVM_FLOAT*)(pInput + (SamplesToProcess * format));
+ pOutput = (LVM_FLOAT*)(pOutput + (SamplesToProcess * 2)); // Always stereo output
}
return LVREV_SUCCESS;
@@ -170,16 +155,15 @@
/* 1. The input and output buffers must be 32-bit aligned */
/* */
/****************************************************************************************/
-void ReverbBlock(LVM_FLOAT *pInput, LVM_FLOAT *pOutput,
- LVREV_Instance_st *pPrivate, LVM_UINT16 NumSamples)
-{
- LVM_INT16 j, size;
- LVM_FLOAT *pDelayLine;
- LVM_FLOAT *pDelayLineInput = pPrivate->pScratch;
- LVM_FLOAT *pScratch = pPrivate->pScratch;
- LVM_FLOAT *pIn;
- LVM_FLOAT *pTemp = pPrivate->pInputSave;
- LVM_INT32 NumberOfDelayLines;
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+ LVM_UINT16 NumSamples) {
+ LVM_INT16 j, size;
+ LVM_FLOAT* pDelayLine;
+ LVM_FLOAT* pDelayLineInput = pPrivate->pScratch;
+ LVM_FLOAT* pScratch = pPrivate->pScratch;
+ LVM_FLOAT* pIn;
+ LVM_FLOAT* pTemp = pPrivate->pInputSave;
+ LVM_INT32 NumberOfDelayLines;
/******************************************************************************
* All calculations will go into the buffer pointed to by pTemp, this will *
@@ -196,85 +180,60 @@
* and the final output is converted to STEREO after the mixer *
******************************************************************************/
- if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
- {
+ if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
NumberOfDelayLines = 4;
- }
- else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
- {
+ } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
NumberOfDelayLines = 2;
- }
- else
- {
+ } else {
NumberOfDelayLines = 1;
}
- if(pPrivate->CurrentParams.SourceFormat == LVM_MONO)
- {
+ if (pPrivate->CurrentParams.SourceFormat == LVM_MONO) {
pIn = pInput;
- }
- else
- {
+ } else {
/*
* Stereo to mono conversion
*/
- From2iToMono_Float(pInput,
- pTemp,
- (LVM_INT16)NumSamples);
+ From2iToMono_Float(pInput, pTemp, (LVM_INT16)NumSamples);
pIn = pTemp;
}
- Mult3s_Float(pIn,
- (LVM_FLOAT)LVREV_HEADROOM,
- pTemp,
- (LVM_INT16)NumSamples);
+ Mult3s_Float(pIn, (LVM_FLOAT)LVREV_HEADROOM, pTemp, (LVM_INT16)NumSamples);
/*
* High pass filter
*/
- FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->HPCoefs,
- pTemp,
- pTemp,
- (LVM_INT16)NumSamples);
+ FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->HPCoefs, pTemp, pTemp, (LVM_INT16)NumSamples);
/*
* Low pass filter
*/
- FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->LPCoefs,
- pTemp,
- pTemp,
- (LVM_INT16)NumSamples);
+ FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->LPCoefs, pTemp, pTemp, (LVM_INT16)NumSamples);
/*
* Process all delay lines
*/
- for(j = 0; j < NumberOfDelayLines; j++)
- {
+ for (j = 0; j < NumberOfDelayLines; j++) {
pDelayLine = pPrivate->pScratchDelayLine[j];
/*
* All-pass filter with pop and click suppression
*/
/* Get the smoothed, delayed output. Put it in the output buffer */
- MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j],
- pPrivate->pOffsetA[j],
- pPrivate->pOffsetB[j],
- pDelayLine,
- (LVM_INT16)NumSamples);
+ MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j], pPrivate->pOffsetA[j],
+ pPrivate->pOffsetB[j], pDelayLine, (LVM_INT16)NumSamples);
/* Re-align the all pass filter delay buffer and copying the fixed delay data \
to the AP delay in the process */
- Copy_Float(&pPrivate->pDelay_T[j][NumSamples],
- pPrivate->pDelay_T[j],
- (LVM_INT16)(pPrivate->T[j] - NumSamples)); /* 32-bit data */
+ Copy_Float(&pPrivate->pDelay_T[j][NumSamples], pPrivate->pDelay_T[j],
+ (LVM_INT16)(pPrivate->T[j] - NumSamples)); /* 32-bit data */
/* Apply the smoothed feedback and save to fixed delay input (currently empty) */
- MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j],
- pDelayLine,
+ MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j], pDelayLine,
&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
(LVM_INT16)NumSamples);
/* Sum into the AP delay line */
Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
- -1.0f, /* Invert since the feedback coefficient is negative */
+ -1.0f, /* Invert since the feedback coefficient is negative */
&pPrivate->pDelay_T[j][pPrivate->Delay_AP[j] - NumSamples],
(LVM_INT16)NumSamples);
/* Apply smoothed feedforward sand save to fixed delay input (currently empty) */
@@ -283,9 +242,7 @@
&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
(LVM_INT16)NumSamples);
/* Sum into the AP output */
- Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
- 1.0f,
- pDelayLine,
+ Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples], 1.0f, pDelayLine,
(LVM_INT16)NumSamples);
/*
@@ -296,34 +253,27 @@
/*
* Low pass filter
*/
- FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->RevLPCoefs[j],
- pDelayLine,
- pDelayLine,
+ FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->RevLPCoefs[j], pDelayLine, pDelayLine,
(LVM_INT16)NumSamples);
}
/*
* Apply rotation matrix and delay samples
*/
- for(j = 0; j < NumberOfDelayLines; j++)
- {
-
- Copy_Float(pTemp,
- pDelayLineInput,
- (LVM_INT16)(NumSamples));
+ for (j = 0; j < NumberOfDelayLines; j++) {
+ Copy_Float(pTemp, pDelayLineInput, (LVM_INT16)(NumSamples));
/*
* Rotation matrix mix
*/
- switch(j)
- {
+ switch (j) {
case 3:
/*
* Add delay line 1 and 2 contribution
*/
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
break;
case 2:
@@ -331,61 +281,52 @@
/*
* Add delay line 0 and 3 contribution
*/
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
break;
case 1:
- if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
- {
+ if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
/*
* Add delay line 0 and 3 contribution
*/
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pDelayLineInput,
(LVM_INT16)NumSamples);
- }
- else
- {
+ } else {
/*
* Add delay line 0 and 1 contribution
*/
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
-
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
}
break;
case 0:
- if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
- {
+ if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
/*
* Add delay line 1 and 2 contribution
*/
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pDelayLineInput,
(LVM_INT16)NumSamples);
- }
- else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
- {
+ } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
/*
* Add delay line 0 and 1 contribution
*/
Add2_Sat_Float(pPrivate->pScratchDelayLine[0], pDelayLineInput,
(LVM_INT16)NumSamples);
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
- pDelayLineInput, (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+ (LVM_INT16)NumSamples);
- }
- else
- {
+ } else {
/*
* Add delay line 0 contribution
*/
@@ -402,54 +343,37 @@
/*
* Delay samples
*/
- Copy_Float(pDelayLineInput,
- &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
- (LVM_INT16)(NumSamples)); /* 32-bit data */
+ Copy_Float(pDelayLineInput, &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
+ (LVM_INT16)(NumSamples)); /* 32-bit data */
}
/*
* Create stereo output
*/
- switch(pPrivate->InstanceParams.NumDelays)
- {
+ switch (pPrivate->InstanceParams.NumDelays) {
case LVREV_DELAYLINES_4:
- Add2_Sat_Float(pPrivate->pScratchDelayLine[3],
- pPrivate->pScratchDelayLine[0],
- (LVM_INT16)NumSamples);
- Add2_Sat_Float(pPrivate->pScratchDelayLine[2],
- pPrivate->pScratchDelayLine[1],
- (LVM_INT16)NumSamples);
+ Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pPrivate->pScratchDelayLine[0],
+ (LVM_INT16)NumSamples);
+ Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pPrivate->pScratchDelayLine[1],
+ (LVM_INT16)NumSamples);
- JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
- pPrivate->pScratchDelayLine[1],
- pTemp,
+ JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pPrivate->pScratchDelayLine[1], pTemp,
(LVM_INT16)NumSamples);
break;
case LVREV_DELAYLINES_2:
- Copy_Float(pPrivate->pScratchDelayLine[1],
- pScratch,
- (LVM_INT16)(NumSamples));
+ Copy_Float(pPrivate->pScratchDelayLine[1], pScratch, (LVM_INT16)(NumSamples));
- Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0],
- -1.0f,
- pScratch,
- (LVM_INT16)NumSamples);
+ Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pScratch, (LVM_INT16)NumSamples);
- Add2_Sat_Float(pPrivate->pScratchDelayLine[1],
- pPrivate->pScratchDelayLine[0],
- (LVM_INT16)NumSamples);
+ Add2_Sat_Float(pPrivate->pScratchDelayLine[1], pPrivate->pScratchDelayLine[0],
+ (LVM_INT16)NumSamples);
- JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
- pScratch,
- pTemp,
- (LVM_INT16)NumSamples);
+ JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pScratch, pTemp, (LVM_INT16)NumSamples);
break;
case LVREV_DELAYLINES_1:
- MonoTo2I_Float(pPrivate->pScratchDelayLine[0],
- pTemp,
- (LVM_INT16)NumSamples);
+ MonoTo2I_Float(pPrivate->pScratchDelayLine[0], pTemp, (LVM_INT16)NumSamples);
break;
default:
break;
@@ -460,25 +384,14 @@
*/
size = (LVM_INT16)(NumSamples << 1);
- MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer,
- pTemp,
- pTemp,
- pOutput,
- size);
+ MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer, pTemp, pTemp, pOutput, size);
/* Apply Gain*/
- Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT,
- pOutput,
- pOutput,
- size);
+ Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT, pOutput, pOutput, size);
- MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer,
- pOutput,
- pOutput,
- size);
+ MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer, pOutput, pOutput, size);
return;
}
/* End of file */
-
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
index 2a75559..e5a0bc8 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
@@ -42,84 +42,67 @@
/* 1. This function may be interrupted by the LVREV_Process function */
/* */
/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
- LVREV_ControlParams_st *pNewParams)
-{
-
- LVREV_Instance_st *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+ LVREV_ControlParams_st* pNewParams) {
+ LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
/*
* Check for error conditions
*/
- if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
- {
+ if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
return LVREV_NULLADDRESS;
}
/*
* Check all new control parameters are in range
*/
- if( ((pNewParams->OperatingMode != LVM_MODE_OFF) && (pNewParams->OperatingMode != LVM_MODE_ON)) ||
- (
- (pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) && (pNewParams->SampleRate != LVM_FS_12000) &&
- (pNewParams->SampleRate != LVM_FS_16000) && (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000) &&
- (pNewParams->SampleRate != LVM_FS_32000) &&
- (pNewParams->SampleRate != LVM_FS_44100) &&
- (pNewParams->SampleRate != LVM_FS_48000)
- && (pNewParams->SampleRate != LVM_FS_88200) && (pNewParams->SampleRate != LVM_FS_96000)
- && (pNewParams->SampleRate != LVM_FS_176400) && (pNewParams->SampleRate != LVM_FS_192000)
- )
-#ifdef SUPPORT_MC
- || ((pNewParams->SourceFormat != LVM_STEREO) &&
- (pNewParams->SourceFormat != LVM_MONOINSTEREO) &&
- (pNewParams->SourceFormat != LVM_MONO) &&
- (pNewParams->SourceFormat != LVM_MULTICHANNEL)))
-#else
- || ((pNewParams->SourceFormat != LVM_STEREO) && (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO)) )
-#endif
- {
+ if (((pNewParams->OperatingMode != LVM_MODE_OFF) &&
+ (pNewParams->OperatingMode != LVM_MODE_ON)) ||
+ ((pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) &&
+ (pNewParams->SampleRate != LVM_FS_12000) && (pNewParams->SampleRate != LVM_FS_16000) &&
+ (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000) &&
+ (pNewParams->SampleRate != LVM_FS_32000) && (pNewParams->SampleRate != LVM_FS_44100) &&
+ (pNewParams->SampleRate != LVM_FS_48000) && (pNewParams->SampleRate != LVM_FS_88200) &&
+ (pNewParams->SampleRate != LVM_FS_96000) && (pNewParams->SampleRate != LVM_FS_176400) &&
+ (pNewParams->SampleRate != LVM_FS_192000)) ||
+ ((pNewParams->SourceFormat != LVM_STEREO) &&
+ (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO) &&
+ (pNewParams->SourceFormat != LVM_MULTICHANNEL))) {
return (LVREV_OUTOFRANGE);
}
- if (pNewParams->Level > LVREV_MAX_LEVEL)
- {
+ if (pNewParams->Level > LVREV_MAX_LEVEL) {
return LVREV_OUTOFRANGE;
}
- if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER))
- {
+ if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER)) {
return LVREV_OUTOFRANGE;
}
- if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER))
- {
+ if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER)) {
return LVREV_OUTOFRANGE;
}
- if (pNewParams->T60 > LVREV_MAX_T60)
- {
+ if (pNewParams->T60 > LVREV_MAX_T60) {
return LVREV_OUTOFRANGE;
}
- if (pNewParams->Density > LVREV_MAX_DENSITY)
- {
+ if (pNewParams->Density > LVREV_MAX_DENSITY) {
return LVREV_OUTOFRANGE;
}
- if (pNewParams->Damping > LVREV_MAX_DAMPING)
- {
+ if (pNewParams->Damping > LVREV_MAX_DAMPING) {
return LVREV_OUTOFRANGE;
}
- if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE)
- {
+ if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE) {
return LVREV_OUTOFRANGE;
}
/*
* Copy the new parameters and set the flag to indicate they are available
*/
- pLVREV_Private->NewParams = *pNewParams;
+ pLVREV_Private->NewParams = *pNewParams;
pLVREV_Private->bControlPending = LVM_TRUE;
return LVREV_SUCCESS;
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
index 5cd623e..35a6522 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
@@ -30,25 +30,11 @@
/****************************************************************************************/
/* Table with supported sampling rates. The table can be indexed using LVM_Fs_en */
-const LVM_UINT32 LVM_FsTable[] = {
- 8000 ,
- 11025,
- 12000,
- 16000,
- 22050,
- 24000,
- 32000,
- 44100,
- 48000,
- 88200,
- 96000,
- 176400,
- 192000
-};
+const LVM_UINT32 LVM_FsTable[] = {8000, 11025, 12000, 16000, 22050, 24000, 32000,
+ 44100, 48000, 88200, 96000, 176400, 192000};
/* Table with supported sampling rates. The table can be indexed using LVM_Fs_en */
-LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex){
- if (FsIndex > LVM_FS_192000)
- return 0;
+LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex) {
+ if (FsIndex > LVM_FS_192000) return 0;
return (LVM_FsTable[FsIndex]);
}
@@ -73,30 +59,174 @@
*/
/* Normalizing output including Reverb Level part (only shift up)*/
-const LVM_FLOAT LVREV_GainPolyTable[24][5]={{1,1.045909f,7.681098f,-7.211500f,3.025605f,},
- {2,1.088194f,10.291749f,-11.513787f,5.265817f,},
- {3,0.988919f,8.299956f,-8.920862f,3.979806f,},
- {4,1.035927f,10.182567f,-10.346134f,4.546533f,},
- {5,1.130313f,12.538727f,-13.627023f,6.165208f,},
- {6,1.060743f,8.091713f,-8.588079f,3.834230f,},
- {7,1.040381f,10.406566f,-11.176650f,5.075132f,},
- {8,1.026944f,8.387302f,-8.689796f,3.895863f,},
- {9,1.013312f,9.727236f,-10.534165f,4.742272f,},
- {10,0.996095f,8.492249f,-7.947677f,3.478917f,},
- {13,1.079346f,8.894425f,-9.641768f,4.434442f,},
- {15,0.994327f,7.441335f,-8.003979f,3.581177f,},
- {17,0.991067f,7.208373f,-7.257859f,3.167774f,},
- {20,1.033445f,7.476371f,-7.546960f,3.369703f,},
- {25,0.982830f,5.913867f,-5.638448f,2.420932f,},
- {30,0.928782f,5.035343f,-4.492104f,1.844904f,},
- {40,0.953714f,5.060232f,-4.472204f,1.829642f,},
- {50,0.899258f,4.273357f,-3.537492f,1.387576f,},
- {60,0.943584f,4.093228f,-3.469658f,1.410911f,},
- {70,0.926021f,3.973125f,-3.331985f,1.344690f,},
- {75,0.894853f,2.871747f,-1.438758f,0.311856f,},
- {80,0.935122f,2.991857f,-2.038882f,0.686395f,},
- {90,0.953872f,2.880315f,-2.122365f,0.784032f,},
- {100,0.951005f,2.894294f,-2.009086f,0.698316f,},
+const LVM_FLOAT LVREV_GainPolyTable[24][5] = {
+ {
+ 1,
+ 1.045909f,
+ 7.681098f,
+ -7.211500f,
+ 3.025605f,
+ },
+ {
+ 2,
+ 1.088194f,
+ 10.291749f,
+ -11.513787f,
+ 5.265817f,
+ },
+ {
+ 3,
+ 0.988919f,
+ 8.299956f,
+ -8.920862f,
+ 3.979806f,
+ },
+ {
+ 4,
+ 1.035927f,
+ 10.182567f,
+ -10.346134f,
+ 4.546533f,
+ },
+ {
+ 5,
+ 1.130313f,
+ 12.538727f,
+ -13.627023f,
+ 6.165208f,
+ },
+ {
+ 6,
+ 1.060743f,
+ 8.091713f,
+ -8.588079f,
+ 3.834230f,
+ },
+ {
+ 7,
+ 1.040381f,
+ 10.406566f,
+ -11.176650f,
+ 5.075132f,
+ },
+ {
+ 8,
+ 1.026944f,
+ 8.387302f,
+ -8.689796f,
+ 3.895863f,
+ },
+ {
+ 9,
+ 1.013312f,
+ 9.727236f,
+ -10.534165f,
+ 4.742272f,
+ },
+ {
+ 10,
+ 0.996095f,
+ 8.492249f,
+ -7.947677f,
+ 3.478917f,
+ },
+ {
+ 13,
+ 1.079346f,
+ 8.894425f,
+ -9.641768f,
+ 4.434442f,
+ },
+ {
+ 15,
+ 0.994327f,
+ 7.441335f,
+ -8.003979f,
+ 3.581177f,
+ },
+ {
+ 17,
+ 0.991067f,
+ 7.208373f,
+ -7.257859f,
+ 3.167774f,
+ },
+ {
+ 20,
+ 1.033445f,
+ 7.476371f,
+ -7.546960f,
+ 3.369703f,
+ },
+ {
+ 25,
+ 0.982830f,
+ 5.913867f,
+ -5.638448f,
+ 2.420932f,
+ },
+ {
+ 30,
+ 0.928782f,
+ 5.035343f,
+ -4.492104f,
+ 1.844904f,
+ },
+ {
+ 40,
+ 0.953714f,
+ 5.060232f,
+ -4.472204f,
+ 1.829642f,
+ },
+ {
+ 50,
+ 0.899258f,
+ 4.273357f,
+ -3.537492f,
+ 1.387576f,
+ },
+ {
+ 60,
+ 0.943584f,
+ 4.093228f,
+ -3.469658f,
+ 1.410911f,
+ },
+ {
+ 70,
+ 0.926021f,
+ 3.973125f,
+ -3.331985f,
+ 1.344690f,
+ },
+ {
+ 75,
+ 0.894853f,
+ 2.871747f,
+ -1.438758f,
+ 0.311856f,
+ },
+ {
+ 80,
+ 0.935122f,
+ 2.991857f,
+ -2.038882f,
+ 0.686395f,
+ },
+ {
+ 90,
+ 0.953872f,
+ 2.880315f,
+ -2.122365f,
+ 0.784032f,
+ },
+ {
+ 100,
+ 0.951005f,
+ 2.894294f,
+ -2.009086f,
+ 0.698316f,
+ },
};
/* End of file */
-
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
index e100d8a..4b0dcca 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
@@ -31,11 +31,11 @@
/* */
/****************************************************************************************/
-extern const LVM_UINT32 LVM_FsTable[];
-extern LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex);
+extern const LVM_UINT32 LVM_FsTable[];
+extern LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex);
-extern const LVM_FLOAT LVREV_GainPolyTable[24][5];
+extern const LVM_FLOAT LVREV_GainPolyTable[24][5];
-#endif /** _LVREV_TABLES_H_ **/
+#endif /** _LVREV_TABLES_H_ **/
/* End of file */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
index c9fa7ad..85e3ab9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
@@ -22,255 +22,191 @@
/****************************************************************************************/
/* */
-/* CONSTANTS DEFINITIONS */
-/* */
-/****************************************************************************************/
-
-/* Memory table*/
-#define LVPSA_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
-/****************************************************************************************/
-/* */
/* TYPES DEFINITIONS */
/* */
/****************************************************************************************/
-/* Memory Types */
-typedef enum
-{
- LVPSA_PERSISTENT = LVM_PERSISTENT,
- LVPSA_PERSISTENT_DATA = LVM_PERSISTENT_DATA,
- LVPSA_PERSISTENT_COEF = LVM_PERSISTENT_COEF,
- LVPSA_SCRATCH = LVM_SCRATCH,
- LVPSA_MEMORY_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
-} LVPSA_MemoryTypes_en;
-
/* Level detection speed control parameters */
-typedef enum
-{
- LVPSA_SPEED_LOW, /* Low speed level detection */
- LVPSA_SPEED_MEDIUM, /* Medium speed level detection */
- LVPSA_SPEED_HIGH, /* High speed level detection */
- LVPSA_SPEED_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
+typedef enum {
+ LVPSA_SPEED_LOW, /* Low speed level detection */
+ LVPSA_SPEED_MEDIUM, /* Medium speed level detection */
+ LVPSA_SPEED_HIGH, /* High speed level detection */
+ LVPSA_SPEED_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
} LVPSA_LevelDetectSpeed_en;
/* Filter control parameters */
-typedef struct
-{
- LVM_UINT16 CenterFrequency; /* Center frequency of the band-pass filter (in Hz) */
- LVM_UINT16 QFactor; /* Quality factor of the filter (in 1/100) */
- LVM_INT16 PostGain; /* Postgain to apply after the filtering (in dB Q16.0) */
+typedef struct {
+ LVM_UINT16 CenterFrequency; /* Center frequency of the band-pass filter (in Hz) */
+ LVM_UINT16 QFactor; /* Quality factor of the filter (in 1/100) */
+ LVM_INT16 PostGain; /* Postgain to apply after the filtering (in dB Q16.0) */
} LVPSA_FilterParam_t;
/* LVPSA initialization parameters */
-typedef struct
-{
- LVM_UINT16 SpectralDataBufferDuration; /* Spectral data buffer duration in time (ms in Q16.0) */
- LVM_UINT16 MaxInputBlockSize; /* Maximum expected input block size (in samples) */
- LVM_UINT16 nBands; /* Number of bands of the SA */
- LVPSA_FilterParam_t *pFiltersParams; /* Points to nBands filter param structures for filters settings */
+typedef struct {
+ LVM_UINT16
+ SpectralDataBufferDuration; /* Spectral data buffer duration in time (ms in Q16.0) */
+ LVM_UINT16 MaxInputBlockSize; /* Maximum expected input block size (in samples) */
+ LVM_UINT16 nBands; /* Number of bands of the SA */
+ LVPSA_FilterParam_t*
+ pFiltersParams; /* Points to nBands filter param structures for filters settings */
} LVPSA_InitParams_t, *pLVPSA_InitParams_t;
/* LVPSA control parameters */
-typedef struct
-{
- LVM_Fs_en Fs; /* Input sampling rate */
- LVPSA_LevelDetectSpeed_en LevelDetectionSpeed; /* Level detection speed */
+typedef struct {
+ LVM_Fs_en Fs; /* Input sampling rate */
+ LVPSA_LevelDetectSpeed_en LevelDetectionSpeed; /* Level detection speed */
} LVPSA_ControlParams_t, *pLVPSA_ControlParams_t;
-/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVPSA_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
-} LVPSA_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVPSA_MemoryRegion_t Region[LVPSA_NR_MEMORY_REGIONS];/* One definition for each region */
-} LVPSA_MemTab_t;
-
/* Audio time type */
typedef LVM_INT32 LVPSA_Time;
/* Module instance Handle */
-typedef void *pLVPSA_Handle_t;
+typedef void* pLVPSA_Handle_t;
/* LVPSA return codes */
-typedef enum
-{
- LVPSA_OK, /* The function ran without any problem */
- LVPSA_ERROR_INVALIDPARAM, /* A parameter is incorrect */
- LVPSA_ERROR_WRONGTIME, /* An incorrect AudioTime is used */
- LVPSA_ERROR_NULLADDRESS, /* A pointer has a NULL value */
- LVPSA_RETURN_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
+typedef enum {
+ LVPSA_OK, /* The function ran without any problem */
+ LVPSA_ERROR_INVALIDPARAM, /* A parameter is incorrect */
+ LVPSA_ERROR_WRONGTIME, /* An incorrect AudioTime is used */
+ LVPSA_ERROR_NULLADDRESS, /* A pointer has a NULL value */
+ LVPSA_RETURN_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
} LVPSA_RETURN;
/*********************************************************************************************************************************
FUNCTIONS PROTOTYPE
**********************************************************************************************************************************/
-/*********************************************************************************************************************************/
-/* */
-/* FUNCTION: LVPSA_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pInitParams Pointer to the instance init parameters */
-/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
-/* */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Memory ( pLVPSA_Handle_t hInstance,
- LVPSA_MemTab_t *pMemoryTable,
- LVPSA_InitParams_t *pInitParams );
+/************************************************************************************/
+/* */
+/* FUNCTION: LVPSA_Init */
+/* */
+/* DESCRIPTION: */
+/* Create and Initialize the LVPSA module including instance handle */
+/* */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to the instance handle */
+/* InitParams Init parameters structure */
+/* ControlParams Control parameters structure */
+/* pScratch Pointer to bundle scratch memory area */
+/* */
+/* */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
+/* */
+/************************************************************************************/
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+ LVPSA_ControlParams_t* pControlParams, void* pScratch);
+
+/************************************************************************************/
+/* */
+/* FUNCTION: LVPSA_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created in LVPSA_Init call including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to the instance handle */
+/* */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance);
/*********************************************************************************************************************************/
/* */
-/* FUNCTION: LVPSA_Init */
+/* FUNCTION: LVPSA_Control */
/* */
-/* DESCRIPTION: */
-/* Initializes the LVPSA module. */
+/* DESCRIPTION: */
+/* Controls the LVPSA module. */
/* */
+/* PARAMETERS: */
+/* hInstance Instance Handle */
+/* pNewParams Pointer to the instance new control parameters */
/* */
-/* PARAMETERS: */
-/* phInstance Pointer to the instance Handle */
-/* pInitParams Pointer to the instance init parameters */
-/* pControlParams Pointer to the instance control parameters */
-/* pMemoryTable Pointer to the memory definition table */
-/* */
-/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
/* */
/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Init ( pLVPSA_Handle_t *phInstance,
- LVPSA_InitParams_t *pInitParams,
- LVPSA_ControlParams_t *pControlParams,
- LVPSA_MemTab_t *pMemoryTable );
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams);
/*********************************************************************************************************************************/
/* */
-/* FUNCTION: LVPSA_Control */
+/* FUNCTION: LVPSA_Process */
/* */
-/* DESCRIPTION: */
-/* Controls the LVPSA module. */
+/* DESCRIPTION: */
+/* The process calculates the levels of the frequency bands. */
/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pNewParams Pointer to the instance new control parameters */
+/* PARAMETERS: */
+/* hInstance Instance Handle */
+/* pLVPSA_InputSamples Pointer to the input samples buffer */
+/* InputBlockSize Number of mono samples to process */
+/* AudioTime Playback time of the first input sample */
/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
+/* */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
/* */
/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Control ( pLVPSA_Handle_t hInstance,
- LVPSA_ControlParams_t *pNewParams );
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+ LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime);
+/*********************************************************************************************************************************/
+/* */
+/* FUNCTION: LVPSA_GetSpectrum */
+/* */
+/* DESCRIPTION: */
+/* This function is used for memory allocation and free. */
+/* */
+/* */
+/* PARAMETERS: */
+/* hInstance Instance Handle */
+/* GetSpectrumAudioTime Time to retrieve the values at */
+/* pCurrentValues Pointer to an empty buffer : Current level values output */
+/* pPeakValues Pointer to an empty buffer : Peak level values output */
+/* */
+/* */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
+/* */
+/*********************************************************************************************************************************/
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+ LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues);
/*********************************************************************************************************************************/
/* */
-/* FUNCTION: LVPSA_Process */
+/* FUNCTION: LVPSA_GetControlParams */
/* */
-/* DESCRIPTION: */
-/* The process calculates the levels of the frequency bands. */
+/* DESCRIPTION: */
+/* Get the current control parameters of the LVPSA module. */
/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pLVPSA_InputSamples Pointer to the input samples buffer */
-/* InputBlockSize Number of mono samples to process */
-/* AudioTime Playback time of the first input sample */
-/* */
-/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
+/* PARAMETERS: */
+/* hInstance Instance Handle */
+/* pParams Pointer to an empty control parameters structure */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
/* */
/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Process ( pLVPSA_Handle_t hInstance,
- LVM_FLOAT *pLVPSA_InputSamples,
- LVM_UINT16 InputBlockSize,
- LVPSA_Time AudioTime );
-/*********************************************************************************************************************************/
-/* */
-/* FUNCTION: LVPSA_GetSpectrum */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. */
-/* */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* GetSpectrumAudioTime Time to retrieve the values at */
-/* pCurrentValues Pointer to an empty buffer : Current level values output */
-/* pPeakValues Pointer to an empty buffer : Peak level values output */
-/* */
-/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
-/* */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum ( pLVPSA_Handle_t hInstance,
- LVPSA_Time GetSpectrumAudioTime,
- LVM_UINT8 *pCurrentValues,
- LVM_UINT8 *pPeakValues );
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams);
/*********************************************************************************************************************************/
/* */
-/* FUNCTION: LVPSA_GetControlParams */
+/* FUNCTION: LVPSA_GetInitParams */
/* */
-/* DESCRIPTION: */
-/* Get the current control parameters of the LVPSA module. */
+/* DESCRIPTION: */
+/* Get the initialization parameters of the LVPSA module. */
/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pParams Pointer to an empty control parameters structure */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
+/* PARAMETERS: */
+/* hInstance Instance Handle */
+/* pParams Pointer to an empty init parameters structure */
+/* RETURNS: */
+/* LVPSA_OK Succeeds */
+/* otherwise Error due to bad parameters */
/* */
/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams ( pLVPSA_Handle_t hInstance,
- LVPSA_ControlParams_t *pParams );
-
-/*********************************************************************************************************************************/
-/* */
-/* FUNCTION: LVPSA_GetInitParams */
-/* */
-/* DESCRIPTION: */
-/* Get the initialization parameters of the LVPSA module. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pParams Pointer to an empty init parameters structure */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
-/* */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams ( pLVPSA_Handle_t hInstance,
- LVPSA_InitParams_t *pParams );
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams);
#endif /* _LVPSA_H */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index deafaa7..4e90a42 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -15,30 +15,25 @@
* limitations under the License.
*/
-#include "LVPSA.h"
-#include "LVPSA_Private.h"
-#include "VectorArithmetic.h"
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "VectorArithmetic.h"
-#define LOW_FREQ 298 /* 32768/110 for low test frequency */
-#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
+#define LOW_FREQ 298 /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
-LVPSA_RETURN LVPSA_SetBPFiltersType ( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams );
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
-LVPSA_RETURN LVPSA_SetQPFCoefficients( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams );
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs( LVM_UINT16 Fs,
- LVPSA_FilterParam_t *pFilterParams,
- BP_FLOAT_Coefs_t *pCoefficients);
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+ BP_FLOAT_Coefs_t* pCoefficients);
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs( LVM_UINT16 Fs,
- LVPSA_FilterParam_t *pFilterParams,
- BP_FLOAT_Coefs_t *pCoefficients);
-LVPSA_RETURN LVPSA_SetBPFCoefficients( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams );
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+ BP_FLOAT_Coefs_t* pCoefficients);
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
-LVPSA_RETURN LVPSA_ClearFilterHistory( LVPSA_InstancePr_t *pInst);
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst);
/************************************************************************************/
/* */
@@ -56,29 +51,23 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_Control ( pLVPSA_Handle_t hInstance,
- LVPSA_ControlParams_t *pNewParams )
-{
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams) {
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-
- if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
- if(pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE)
- {
- return(LVPSA_ERROR_INVALIDPARAM);
+ if (pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE) {
+ return (LVPSA_ERROR_INVALIDPARAM);
}
- if(pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED)
- {
- return(LVPSA_ERROR_INVALIDPARAM);
+ if (pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED) {
+ return (LVPSA_ERROR_INVALIDPARAM);
}
pLVPSA_Inst->NewParams = *pNewParams;
pLVPSA_Inst->bControlPending = LVM_TRUE;
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -96,20 +85,17 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams ( pLVPSA_Handle_t hInstance,
- LVPSA_ControlParams_t *pParams )
-{
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams) {
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
- pParams->Fs = pLVPSA_Inst->CurrentParams.Fs;
- pParams->LevelDetectionSpeed = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
+ pParams->Fs = pLVPSA_Inst->CurrentParams.Fs;
+ pParams->LevelDetectionSpeed = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -127,22 +113,19 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams ( pLVPSA_Handle_t hInstance,
- LVPSA_InitParams_t *pParams )
-{
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams) {
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
- pParams->SpectralDataBufferDuration = pLVPSA_Inst->SpectralDataBufferDuration;
- pParams->MaxInputBlockSize = pLVPSA_Inst->MaxInputBlockSize;
- pParams->nBands = pLVPSA_Inst->nBands;
- pParams->pFiltersParams = pLVPSA_Inst->pFiltersParams;
+ pParams->SpectralDataBufferDuration = pLVPSA_Inst->SpectralDataBufferDuration;
+ pParams->MaxInputBlockSize = pLVPSA_Inst->MaxInputBlockSize;
+ pParams->nBands = pLVPSA_Inst->nBands;
+ pParams->pFiltersParams = pLVPSA_Inst->pFiltersParams;
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -163,42 +146,38 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t *pInst)
-{
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst) {
LVM_UINT16 ii;
LVM_UINT16 Freq;
- LVPSA_ControlParams_t Params;
- extern LVM_INT16 LVPSA_nSamplesBufferUpdate[];
- extern LVM_UINT32 LVPSA_SampleRateTab[];
- extern LVM_UINT16 LVPSA_DownSamplingFactor[];
+ LVPSA_ControlParams_t Params;
+ extern LVM_INT16 LVPSA_nSamplesBufferUpdate[];
+ extern LVM_UINT32 LVPSA_SampleRateTab[];
+ extern LVM_UINT16 LVPSA_DownSamplingFactor[];
- if(pInst == 0)
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if (pInst == 0) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
Params = pInst->NewParams;
/* Modifies filters types and coefficients, clear the taps and
re-initializes parameters if sample frequency has changed */
- if(Params.Fs != pInst->CurrentParams.Fs)
- {
+ if (Params.Fs != pInst->CurrentParams.Fs) {
pInst->CurrentParams.Fs = Params.Fs;
/* Initialize the center freqeuncies as a function of the sample rate */
- Freq = (LVM_UINT16) ((LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1) / (pInst->nBands + 1));
- for(ii = pInst->nBands; ii > 0; ii--)
- {
- pInst->pFiltersParams[ii-1].CenterFrequency = (LVM_UINT16) (Freq * ii);
+ Freq = (LVM_UINT16)((LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1) /
+ (pInst->nBands + 1));
+ for (ii = pInst->nBands; ii > 0; ii--) {
+ pInst->pFiltersParams[ii - 1].CenterFrequency = (LVM_UINT16)(Freq * ii);
}
/* Count the number of relevant filters. If the center frequency of the filter is
bigger than the nyquist frequency, then the filter is not relevant and doesn't
need to be used */
- for(ii = pInst->nBands; ii > 0; ii--)
- {
- if(pInst->pFiltersParams[ii-1].CenterFrequency < (LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1))
- {
+ for (ii = pInst->nBands; ii > 0; ii--) {
+ if (pInst->pFiltersParams[ii - 1].CenterFrequency <
+ (LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1)) {
pInst->nRelevantFilters = ii;
break;
}
@@ -211,19 +190,14 @@
pInst->BufferUpdateSamplesCount = 0;
pInst->DownSamplingFactor = LVPSA_DownSamplingFactor[Params.Fs];
pInst->DownSamplingCount = 0;
- for(ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++)
- {
+ for (ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++) {
pInst->pSpectralDataBufferStart[ii] = 0;
}
- for(ii = 0; ii < pInst->nBands; ii++)
- {
+ for (ii = 0; ii < pInst->nBands; ii++) {
pInst->pPreviousPeaks[ii] = 0;
}
- }
- else
- {
- if(Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed)
- {
+ } else {
+ if (Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed) {
LVPSA_SetQPFCoefficients(pInst, &Params);
}
}
@@ -253,47 +227,43 @@
/* Single precision otherwise */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFiltersType ( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams )
-{
- extern LVM_UINT32 LVPSA_SampleRateTab[]; /* Sample rate table */
- LVM_UINT16 ii; /* Filter band index */
- LVM_UINT32 fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs]; /* Sample rate */
- LVM_UINT32 fc; /* Filter centre frequency */
- LVM_INT16 QFactor; /* Filter Q factor */
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+ extern LVM_UINT32 LVPSA_SampleRateTab[]; /* Sample rate table */
+ LVM_UINT16 ii; /* Filter band index */
+ LVM_UINT32 fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs]; /* Sample rate */
+ LVM_UINT32 fc; /* Filter centre frequency */
+ LVM_INT16 QFactor; /* Filter Q factor */
- for (ii = 0; ii < pInst->nRelevantFilters; ii++)
- {
+ for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
/*
* Get the filter settings
*/
- fc = (LVM_UINT32)pInst->pFiltersParams[ii].CenterFrequency; /* Get the band centre frequency */
- QFactor =(LVM_INT16) pInst->pFiltersParams[ii].QFactor; /* Get the band Q factor */
+ fc = (LVM_UINT32)pInst->pFiltersParams[ii]
+ .CenterFrequency; /* Get the band centre frequency */
+ QFactor = (LVM_INT16)pInst->pFiltersParams[ii].QFactor; /* Get the band Q factor */
/*
* For each filter set the type of biquad required
*/
- pInst->pBPFiltersPrecision[ii] = LVPSA_SimplePrecisionFilter; /* Default to single precision */
- if ((LOW_FREQ * fs) >= (fc << 15))
- {
+ pInst->pBPFiltersPrecision[ii] =
+ LVPSA_SimplePrecisionFilter; /* Default to single precision */
+ if ((LOW_FREQ * fs) >= (fc << 15)) {
/*
* fc <= fs/110
*/
pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
- }
- else
- {
- if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) && (QFactor > 300))
- {
+ } else {
+ if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) &&
+ (QFactor > 300)) {
/*
- * (fs/110 < fc < fs/85) & (Q>3)
- */
+ * (fs/110 < fc < fs/85) & (Q>3)
+ */
pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
}
}
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -314,60 +284,49 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFCoefficients( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams)
-{
-
- LVM_UINT16 ii;
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+ LVM_UINT16 ii;
/*
* Set the coefficients for each band by the init function
*/
- for (ii = 0; ii < pInst->nRelevantFilters; ii++)
- {
- switch (pInst->pBPFiltersPrecision[ii])
- {
- case LVPSA_DoublePrecisionFilter:
- {
- BP_FLOAT_Coefs_t Coefficients;
+ for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+ switch (pInst->pBPFiltersPrecision[ii]) {
+ case LVPSA_DoublePrecisionFilter: {
+ BP_FLOAT_Coefs_t Coefficients;
/*
* Calculate the double precision coefficients
*/
- LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs,
- &pInst->pFiltersParams[ii],
+ LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
&Coefficients);
/*
* Set the coefficients
*/
- BP_1I_D16F32Cll_TRC_WRA_01_Init ( &pInst->pBP_Instances[ii],
- &pInst->pBP_Taps[ii],
- &Coefficients);
+ BP_1I_D16F32Cll_TRC_WRA_01_Init(&pInst->pBP_Instances[ii], &pInst->pBP_Taps[ii],
+ &Coefficients);
break;
}
- case LVPSA_SimplePrecisionFilter:
- {
- BP_FLOAT_Coefs_t Coefficients;
+ case LVPSA_SimplePrecisionFilter: {
+ BP_FLOAT_Coefs_t Coefficients;
/*
* Calculate the single precision coefficients
*/
- LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs,
- &pInst->pFiltersParams[ii],
+ LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
&Coefficients);
/*
* Set the coefficients
*/
- BP_1I_D16F16Css_TRC_WRA_01_Init (&pInst->pBP_Instances[ii],
- &pInst->pBP_Taps[ii],
- &Coefficients);
+ BP_1I_D16F16Css_TRC_WRA_01_Init(&pInst->pBP_Instances[ii], &pInst->pBP_Taps[ii],
+ &Coefficients);
break;
}
}
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -388,26 +347,20 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_SetQPFCoefficients( LVPSA_InstancePr_t *pInst,
- LVPSA_ControlParams_t *pParams )
-{
- LVM_UINT16 ii;
- LVM_Fs_en Fs = pParams->Fs;
- QPD_FLOAT_Coefs *pCoefficients;
- extern QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+ LVM_UINT16 ii;
+ LVM_Fs_en Fs = pParams->Fs;
+ QPD_FLOAT_Coefs* pCoefficients;
+ extern QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
- pCoefficients = &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * \
- LVPSA_NR_SUPPORTED_RATE) + Fs];
+ pCoefficients =
+ &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * LVPSA_NR_SUPPORTED_RATE) + Fs];
- for (ii = 0; ii < pInst->nRelevantFilters; ii++)
- {
- LVPSA_QPD_Init_Float (&pInst->pQPD_States[ii],
- &pInst->pQPD_Taps[ii],
- pCoefficients );
+ for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+ LVPSA_QPD_Init_Float(&pInst->pQPD_States[ii], &pInst->pQPD_Taps[ii], pCoefficients);
}
- return(LVPSA_OK);
-
+ return (LVPSA_OK);
}
/****************************************************************************************/
@@ -443,49 +396,46 @@
/* of the n bands equalizer (LVEQNB */
/* */
/****************************************************************************************/
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs( LVM_UINT16 Fs,
- LVPSA_FilterParam_t *pFilterParams,
- BP_FLOAT_Coefs_t *pCoefficients)
-{
-
- extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
- extern LVM_FLOAT LVPSA_Float_CosCoef[];
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+ BP_FLOAT_Coefs_t* pCoefficients) {
+ extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+ extern LVM_FLOAT LVPSA_Float_CosCoef[];
/*
* Intermediate variables and temporary values
*/
- LVM_FLOAT T0;
- LVM_FLOAT D;
- LVM_FLOAT A0;
- LVM_FLOAT B1;
- LVM_FLOAT B2;
- LVM_FLOAT Dt0;
- LVM_FLOAT B2_Den;
- LVM_FLOAT B2_Num;
- LVM_FLOAT COS_T0;
- LVM_FLOAT coef;
- LVM_FLOAT factor;
- LVM_FLOAT t0;
- LVM_INT16 i;
+ LVM_FLOAT T0;
+ LVM_FLOAT D;
+ LVM_FLOAT A0;
+ LVM_FLOAT B1;
+ LVM_FLOAT B2;
+ LVM_FLOAT Dt0;
+ LVM_FLOAT B2_Den;
+ LVM_FLOAT B2_Num;
+ LVM_FLOAT COS_T0;
+ LVM_FLOAT coef;
+ LVM_FLOAT factor;
+ LVM_FLOAT t0;
+ LVM_INT16 i;
/*
* Get the filter definition
*/
- LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
- LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+ LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+ LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
/*
* Calculating the intermediate values
*/
- T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
- D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
- /* Force D = 1 : the function was originally used for a peaking filter.
- The D parameter do not exist for a BandPass filter coefficients */
+ T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+ D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
+ /* Force D = 1 : the function was originally used for a peaking filter.
+ The D parameter do not exist for a BandPass filter coefficients */
/*
* Calculate the B2 coefficient
*/
- Dt0 = T0 / 2048 ;
+ Dt0 = T0 / 2048;
B2_Den = QFactor + Dt0;
B2_Num = Dt0 - QFactor;
B2 = B2_Num / (2 * B2_Den);
@@ -495,20 +445,19 @@
*
* Cos += coef(n) * t0^n For n = 0 to 6
*/
- T0 = (T0 / 2048) * 0.63658558f; /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
- t0 = T0 ;
- factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
- COS_T0 = 0.0f; /* Initialise the error to zero */
- for (i = 1; i < 7; i++)
- {
- coef = LVPSA_Float_CosCoef[i]; /* Get the nth coefficient */
- COS_T0 += (factor * coef); /* The nth partial sum */
- factor = (factor * t0) ; /* Calculate t0^n */
+ T0 = (T0 / 2048) * 0.63658558f; /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
+ t0 = T0;
+ factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+ COS_T0 = 0.0f; /* Initialise the error to zero */
+ for (i = 1; i < 7; i++) {
+ coef = LVPSA_Float_CosCoef[i]; /* Get the nth coefficient */
+ COS_T0 += (factor * coef); /* The nth partial sum */
+ factor = (factor * t0); /* Calculate t0^n */
}
- COS_T0 = COS_T0 * 8; /*LVPSA_CosCoef_float[0]*/ /* Correct the scaling */
+ COS_T0 = COS_T0 * 8; /*LVPSA_CosCoef_float[0]*/ /* Correct the scaling */
- B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0); /* B1 = (0.5 - b2) * cos(t0) */
- A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
+ B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0); /* B1 = (0.5 - b2) * cos(t0) */
+ A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
/*
* Write coeff into the data structure
@@ -517,7 +466,7 @@
pCoefficients->B1 = B1 * 2;
pCoefficients->B2 = B2 * 2;
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/****************************************************************************************/
/* */
@@ -561,49 +510,46 @@
/* of the n bands equalizer (LVEQNB */
/* */
/****************************************************************************************/
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs( LVM_UINT16 Fs,
- LVPSA_FilterParam_t *pFilterParams,
- BP_FLOAT_Coefs_t *pCoefficients)
-{
-
- extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
- extern LVM_FLOAT LVPSA_Float_DPCosCoef[];
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+ BP_FLOAT_Coefs_t* pCoefficients) {
+ extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+ extern LVM_FLOAT LVPSA_Float_DPCosCoef[];
/*
* Intermediate variables and temporary values
*/
- LVM_FLOAT T0;
- LVM_FLOAT D;
- LVM_FLOAT A0;
- LVM_FLOAT B1;
- LVM_FLOAT B2;
- LVM_FLOAT Dt0;
- LVM_FLOAT B2_Den;
- LVM_FLOAT B2_Num;
- LVM_FLOAT CosErr;
- LVM_FLOAT coef;
- LVM_FLOAT factor;
- LVM_FLOAT t0;
- LVM_INT16 i;
+ LVM_FLOAT T0;
+ LVM_FLOAT D;
+ LVM_FLOAT A0;
+ LVM_FLOAT B1;
+ LVM_FLOAT B2;
+ LVM_FLOAT Dt0;
+ LVM_FLOAT B2_Den;
+ LVM_FLOAT B2_Num;
+ LVM_FLOAT CosErr;
+ LVM_FLOAT coef;
+ LVM_FLOAT factor;
+ LVM_FLOAT t0;
+ LVM_INT16 i;
/*
* Get the filter definition
*/
- LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
- LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+ LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+ LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
/*
* Calculating the intermediate values
*/
- T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
- D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
- /* Force D = 1 : the function was originally used for a peaking filter.
- The D parameter do not exist for a BandPass filter coefficients */
+ T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+ D = 3200; /* Floating point value 1.000000 (1*100*2^5) */
+ /* Force D = 1 : the function was originally used for a peaking filter.
+ The D parameter do not exist for a BandPass filter coefficients */
/*
* Calculate the B2 coefficient
*/
- Dt0 = T0 / 2048 ;
+ Dt0 = T0 / 2048;
B2_Den = QFactor + Dt0;
B2_Num = Dt0 - QFactor;
B2 = B2_Num / (2 * B2_Den);
@@ -613,25 +559,24 @@
*
* CosErr += coef(n) * t0^n For n = 0 to 4
*/
- T0 = T0 * 0.994750f; /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
+ T0 = T0 * 0.994750f; /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
t0 = T0;
- factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
- CosErr = 0.0f; /* Initialise the error to zero */
- for (i = 1; i < 5; i++)
- {
- coef = LVPSA_Float_DPCosCoef[i]; /* Get the nth coefficient */
+ factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+ CosErr = 0.0f; /* Initialise the error to zero */
+ for (i = 1; i < 5; i++) {
+ coef = LVPSA_Float_DPCosCoef[i]; /* Get the nth coefficient */
CosErr += factor * coef; /* The nth partial sum */
- factor = factor * t0; /* Calculate t0^n */
+ factor = factor * t0; /* Calculate t0^n */
}
- CosErr = CosErr * 2; /* Correct the scaling */
+ CosErr = CosErr * 2; /* Correct the scaling */
/*
* Calculate the B1 and A0 coefficients
*/
- B1 = ((LVM_FLOAT)0.5 - B2); /* B1 = (0.5 - b2) */
- A0 = B1 * CosErr ; /* Temporary storage for (0.5 - b2) * coserr(t0) */
- B1 -= A0; /* B1 = (0.5 - b2) * (1 - coserr(t0)) */
- A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
+ B1 = ((LVM_FLOAT)0.5 - B2); /* B1 = (0.5 - b2) */
+ A0 = B1 * CosErr; /* Temporary storage for (0.5 - b2) * coserr(t0) */
+ B1 -= A0; /* B1 = (0.5 - b2) * (1 - coserr(t0)) */
+ A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
/*
* Write coeff into the data structure
@@ -640,7 +585,7 @@
pCoefficients->B1 = B1;
pCoefficients->B2 = B2;
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
/* */
@@ -658,24 +603,20 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t *pInst)
-{
- LVM_INT8 *pTapAddress;
- LVM_UINT32 i;
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst) {
+ LVM_INT8* pTapAddress;
+ LVM_UINT32 i;
/* Band Pass filters taps */
- pTapAddress = (LVM_INT8 *)pInst->pBP_Taps;
- for(i = 0; i < pInst->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t); i++)
- {
+ pTapAddress = (LVM_INT8*)pInst->pBP_Taps;
+ for (i = 0; i < pInst->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t); i++) {
pTapAddress[i] = 0;
}
/* Quasi-peak filters taps */
- pTapAddress = (LVM_INT8 *)pInst->pQPD_Taps;
- for(i = 0; i < pInst->nBands * sizeof(QPD_Taps_t); i++)
- {
+ pTapAddress = (LVM_INT8*)pInst->pQPD_Taps;
+ for (i = 0; i < pInst->nBands * sizeof(QPD_Taps_t); i++) {
pTapAddress[i] = 0;
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
-
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 9fcd82f..9a2b29f 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -15,23 +15,24 @@
* limitations under the License.
*/
-#include "LVPSA.h"
-#include "LVPSA_Private.h"
-#include "InstAlloc.h"
+#include <stdlib.h>
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "InstAlloc.h"
/************************************************************************************/
/* */
/* FUNCTION: LVPSA_Init */
/* */
/* DESCRIPTION: */
-/* Initialize the LVPSA module */
+/* Create and Initialize the LVPSA module including instance handle */
/* */
/* */
/* PARAMETERS: */
-/* phInstance Pointer to pointer to the instance */
+/* phInstance Pointer to the instance handle */
/* InitParams Init parameters structure */
/* ControlParams Control parameters structure */
-/* pMemoryTable Memory table that contains memory areas definition */
+/* pScratch Pointer to bundle scratch memory area */
/* */
/* */
/* RETURNS: */
@@ -39,75 +40,22 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_Init ( pLVPSA_Handle_t *phInstance,
- LVPSA_InitParams_t *pInitParams,
- LVPSA_ControlParams_t *pControlParams,
- LVPSA_MemTab_t *pMemoryTable )
-{
- LVPSA_InstancePr_t *pLVPSA_Inst;
- LVPSA_RETURN errorCode = LVPSA_OK;
- LVM_UINT32 ii;
- extern LVM_FLOAT LVPSA_Float_GainTable[];
- LVM_UINT32 BufferLength = 0;
-
- /* Ints_Alloc instances, needed for memory alignment management */
- INST_ALLOC Instance;
- INST_ALLOC Scratch;
- INST_ALLOC Data;
- INST_ALLOC Coef;
-
- /* Check parameters */
- if((phInstance == LVM_NULL) || (pInitParams == LVM_NULL) || (pControlParams == LVM_NULL) || (pMemoryTable == LVM_NULL))
- {
- return(LVPSA_ERROR_NULLADDRESS);
- }
- if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION) ||
- (pInitParams->SpectralDataBufferDuration == 0) ||
- (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE) ||
- (pInitParams->MaxInputBlockSize == 0) ||
- (pInitParams->nBands < LVPSA_NBANDSMIN) ||
- (pInitParams->nBands > LVPSA_NBANDSMAX) ||
- (pInitParams->pFiltersParams == 0))
- {
- return(LVPSA_ERROR_INVALIDPARAM);
- }
- for(ii = 0; ii < pInitParams->nBands; ii++)
- {
- if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
- (pInitParams->pFiltersParams[ii].PostGain > LVPSA_MAXPOSTGAIN) ||
- (pInitParams->pFiltersParams[ii].PostGain < LVPSA_MINPOSTGAIN) ||
- (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR) ||
- (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
- {
- return(LVPSA_ERROR_INVALIDPARAM);
- }
- }
-
- /*Inst_Alloc instances initialization */
- InstAlloc_Init( &Instance , pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress);
- InstAlloc_Init( &Scratch , pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress);
- InstAlloc_Init( &Data , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress);
- InstAlloc_Init( &Coef , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress);
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+ LVPSA_ControlParams_t* pControlParams, void* pScratch) {
+ LVPSA_InstancePr_t* pLVPSA_Inst;
+ LVPSA_RETURN errorCode = LVPSA_OK;
+ LVM_UINT32 ii;
+ extern LVM_FLOAT LVPSA_Float_GainTable[];
+ LVM_UINT32 BufferLength = 0;
/* Set the instance handle if not already initialised */
- if (*phInstance == LVM_NULL)
- {
- *phInstance = InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
+ *phInstance = calloc(1, sizeof(*pLVPSA_Inst));
+ if (*phInstance == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
}
- pLVPSA_Inst =(LVPSA_InstancePr_t*)*phInstance;
+ pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
- /* Check the memory table for NULL pointers */
- for (ii = 0; ii < LVPSA_NR_MEMORY_REGIONS; ii++)
- {
- if (pMemoryTable->Region[ii].Size!=0)
- {
- if (pMemoryTable->Region[ii].pBaseAddress==LVM_NULL)
- {
- return(LVPSA_ERROR_NULLADDRESS);
- }
- pLVPSA_Inst->MemoryTable.Region[ii] = pMemoryTable->Region[ii];
- }
- }
+ pLVPSA_Inst->pScratch = pScratch;
/* Initialize module's internal parameters */
pLVPSA_Inst->bControlPending = LVM_FALSE;
@@ -117,81 +65,150 @@
pLVPSA_Inst->CurrentParams.Fs = LVM_FS_DUMMY;
pLVPSA_Inst->CurrentParams.LevelDetectionSpeed = LVPSA_SPEED_DUMMY;
- { /* for avoiding QAC warnings */
- LVM_INT32 SDBD=(LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
- LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
+ { /* for avoiding QAC warnings */
+ LVM_INT32 SDBD = (LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
+ LVM_INT32 IRTI = (LVM_INT32)LVPSA_InternalRefreshTimeInv;
LVM_INT32 BL;
- MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
+ MUL32x32INTO32(SDBD, IRTI, BL, LVPSA_InternalRefreshTimeShift)
- BufferLength=(LVM_UINT32)BL;
+ BufferLength = (LVM_UINT32)BL;
}
- if((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration)
- {
+ if ((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration) {
pLVPSA_Inst->SpectralDataBufferLength = BufferLength + 1;
- }
- else
- {
+ } else {
pLVPSA_Inst->SpectralDataBufferLength = BufferLength;
}
/* Assign the pointers */
- pLVPSA_Inst->pPostGains =
- (LVM_FLOAT *)InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_FLOAT));
- pLVPSA_Inst->pFiltersParams = (LVPSA_FilterParam_t *)
- InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t));
- pLVPSA_Inst->pSpectralDataBufferStart = (LVM_UINT8 *)
- InstAlloc_AddMember(&Instance, pInitParams->nBands * \
- pLVPSA_Inst->SpectralDataBufferLength * sizeof(LVM_UINT8));
- pLVPSA_Inst->pPreviousPeaks = (LVM_UINT8 *)
- InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_UINT8));
- pLVPSA_Inst->pBPFiltersPrecision = (LVPSA_BPFilterPrecision_en *)
- InstAlloc_AddMember(&Instance, pInitParams->nBands * \
- sizeof(LVPSA_BPFilterPrecision_en));
- pLVPSA_Inst->pBP_Instances = (Biquad_FLOAT_Instance_t *)
- InstAlloc_AddMember(&Coef, pInitParams->nBands * \
- sizeof(Biquad_FLOAT_Instance_t));
- pLVPSA_Inst->pQPD_States = (QPD_FLOAT_State_t *)
- InstAlloc_AddMember(&Coef, pInitParams->nBands * \
- sizeof(QPD_FLOAT_State_t));
-
- pLVPSA_Inst->pBP_Taps = (Biquad_1I_Order2_FLOAT_Taps_t *)
- InstAlloc_AddMember(&Data, pInitParams->nBands * \
- sizeof(Biquad_1I_Order2_FLOAT_Taps_t));
- pLVPSA_Inst->pQPD_Taps = (QPD_FLOAT_Taps_t *)
- InstAlloc_AddMember(&Data, pInitParams->nBands * \
- sizeof(QPD_FLOAT_Taps_t));
+ pLVPSA_Inst->pPostGains =
+ (LVM_FLOAT*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPostGains)));
+ if (pLVPSA_Inst->pPostGains == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pFiltersParams = (LVPSA_FilterParam_t*)calloc(
+ pInitParams->nBands, sizeof(*(pLVPSA_Inst->pFiltersParams)));
+ if (pLVPSA_Inst->pFiltersParams == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pSpectralDataBufferStart = (LVM_UINT8*)calloc(
+ pInitParams->nBands, pLVPSA_Inst->SpectralDataBufferLength *
+ sizeof(*(pLVPSA_Inst->pSpectralDataBufferStart)));
+ if (pLVPSA_Inst->pSpectralDataBufferStart == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pPreviousPeaks =
+ (LVM_UINT8*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPreviousPeaks)));
+ if (pLVPSA_Inst->pPreviousPeaks == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pBPFiltersPrecision = (LVPSA_BPFilterPrecision_en*)calloc(
+ pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBPFiltersPrecision)));
+ if (pLVPSA_Inst->pBPFiltersPrecision == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pBP_Instances = (Biquad_FLOAT_Instance_t*)calloc(
+ pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBP_Instances)));
+ if (pLVPSA_Inst->pBP_Instances == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pQPD_States =
+ (QPD_FLOAT_State_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_States)));
+ if (pLVPSA_Inst->pQPD_States == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pBP_Taps = (Biquad_1I_Order2_FLOAT_Taps_t*)calloc(
+ pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBP_Taps)));
+ if (pLVPSA_Inst->pBP_Taps == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
+ pLVPSA_Inst->pQPD_Taps =
+ (QPD_FLOAT_Taps_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_Taps)));
+ if (pLVPSA_Inst->pQPD_Taps == LVM_NULL) {
+ return LVPSA_ERROR_NULLADDRESS;
+ }
/* Copy filters parameters in the private instance */
- for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
- {
+ for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
pLVPSA_Inst->pFiltersParams[ii] = pInitParams->pFiltersParams[ii];
}
/* Set Post filters gains*/
- for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
- {
- pLVPSA_Inst->pPostGains[ii] = LVPSA_Float_GainTable[15 + \
- pInitParams->pFiltersParams[ii].PostGain];
+ for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+ pLVPSA_Inst->pPostGains[ii] =
+ LVPSA_Float_GainTable[15 + pInitParams->pFiltersParams[ii].PostGain];
}
pLVPSA_Inst->pSpectralDataBufferWritePointer = pLVPSA_Inst->pSpectralDataBufferStart;
/* Initialize control dependant internal parameters */
- errorCode = LVPSA_Control (*phInstance, pControlParams);
+ errorCode = LVPSA_Control(*phInstance, pControlParams);
- if(errorCode!=0)
- {
+ if (errorCode != 0) {
return errorCode;
}
- errorCode = LVPSA_ApplyNewSettings (pLVPSA_Inst);
+ errorCode = LVPSA_ApplyNewSettings(pLVPSA_Inst);
- if(errorCode!=0)
- {
+ if (errorCode != 0) {
return errorCode;
}
- return(errorCode);
+ return (errorCode);
}
+/************************************************************************************/
+/* */
+/* FUNCTION: LVPSA_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free the memories created in LVPSA_Init call including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to the instance handle */
+/* */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance) {
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
+ if (pLVPSA_Inst == LVM_NULL) {
+ return;
+ }
+ if (pLVPSA_Inst->pPostGains != LVM_NULL) {
+ free(pLVPSA_Inst->pPostGains);
+ pLVPSA_Inst->pPostGains = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pFiltersParams != LVM_NULL) {
+ free(pLVPSA_Inst->pFiltersParams);
+ pLVPSA_Inst->pFiltersParams = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pSpectralDataBufferStart != LVM_NULL) {
+ free(pLVPSA_Inst->pSpectralDataBufferStart);
+ pLVPSA_Inst->pSpectralDataBufferStart = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pPreviousPeaks != LVM_NULL) {
+ free(pLVPSA_Inst->pPreviousPeaks);
+ pLVPSA_Inst->pPreviousPeaks = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pBPFiltersPrecision != LVM_NULL) {
+ free(pLVPSA_Inst->pBPFiltersPrecision);
+ pLVPSA_Inst->pBPFiltersPrecision = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pBP_Instances != LVM_NULL) {
+ free(pLVPSA_Inst->pBP_Instances);
+ pLVPSA_Inst->pBP_Instances = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pQPD_States != LVM_NULL) {
+ free(pLVPSA_Inst->pQPD_States);
+ pLVPSA_Inst->pQPD_States = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pBP_Taps != LVM_NULL) {
+ free(pLVPSA_Inst->pBP_Taps);
+ pLVPSA_Inst->pBP_Taps = LVM_NULL;
+ }
+ if (pLVPSA_Inst->pQPD_Taps != LVM_NULL) {
+ free(pLVPSA_Inst->pQPD_Taps);
+ pLVPSA_Inst->pQPD_Taps = LVM_NULL;
+ }
+ free(pLVPSA_Inst);
+ *phInstance = LVM_NULL;
+}
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
deleted file mode 100644
index eafcbe6..0000000
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "LVPSA.h"
-#include "LVPSA_Private.h"
-#include "InstAlloc.h"
-
-/****************************************************************************************/
-/* */
-/* FUNCTION: LVEQNB_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) the memory */
-/* base address pointers are NULL on return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the memory */
-/* table returns the allocated memory and base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* InitParams Pointer to the instance init parameters */
-/* */
-/* RETURNS: */
-/* LVPSA_OK Succeeds */
-/* otherwise Error due to bad parameters */
-/* */
-/****************************************************************************************/
-LVPSA_RETURN LVPSA_Memory ( pLVPSA_Handle_t hInstance,
- LVPSA_MemTab_t *pMemoryTable,
- LVPSA_InitParams_t *pInitParams )
-{
- LVM_UINT32 ii;
- LVM_UINT32 BufferLength;
- INST_ALLOC Instance;
- INST_ALLOC Scratch;
- INST_ALLOC Data;
- INST_ALLOC Coef;
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-
- InstAlloc_Init( &Instance , LVM_NULL);
- InstAlloc_Init( &Scratch , LVM_NULL);
- InstAlloc_Init( &Data , LVM_NULL);
- InstAlloc_Init( &Coef , LVM_NULL);
-
- if((pMemoryTable == LVM_NULL) || (pInitParams == LVM_NULL))
- {
- return(LVPSA_ERROR_NULLADDRESS);
- }
-
- /*
- * Fill in the memory table
- */
- if (hInstance == LVM_NULL)
- {
-
- /* Check init parameter */
- if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION) ||
- (pInitParams->SpectralDataBufferDuration == 0) ||
- (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE) ||
- (pInitParams->MaxInputBlockSize == 0) ||
- (pInitParams->nBands < LVPSA_NBANDSMIN) ||
- (pInitParams->nBands > LVPSA_NBANDSMAX) ||
- (pInitParams->pFiltersParams == 0))
- {
- return(LVPSA_ERROR_INVALIDPARAM);
- }
- for(ii = 0; ii < pInitParams->nBands; ii++)
- {
- if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
- (pInitParams->pFiltersParams[ii].PostGain > LVPSA_MAXPOSTGAIN) ||
- (pInitParams->pFiltersParams[ii].PostGain < LVPSA_MINPOSTGAIN) ||
- (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR) ||
- (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
- {
- return(LVPSA_ERROR_INVALIDPARAM);
- }
- }
-
- /*
- * Instance memory
- */
-
- InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
- InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_FLOAT) );
- InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t) );
-
- {
- /* for avoiding QAC warnings as MUL32x32INTO32 works on LVM_INT32 only*/
- LVM_INT32 SDBD=(LVM_INT32)pInitParams->SpectralDataBufferDuration;
- LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
- LVM_INT32 BL;
-
- MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
- BufferLength=(LVM_UINT32)BL;
- }
-
- if((BufferLength * LVPSA_InternalRefreshTime) != pInitParams->SpectralDataBufferDuration)
- {
- BufferLength++;
- }
- InstAlloc_AddMember( &Instance, pInitParams->nBands * BufferLength * sizeof(LVM_UINT8) );
- InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_UINT8) );
- InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_BPFilterPrecision_en) );
- pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Size = InstAlloc_GetTotal(&Instance);
- pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Type = LVPSA_PERSISTENT;
- pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
- /*
- * Scratch memory
- */
- InstAlloc_AddMember( &Scratch, 2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT) );
- pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Size = InstAlloc_GetTotal(&Scratch);
- pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Type = LVPSA_SCRATCH;
- pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
-
- /*
- * Persistent coefficients memory
- */
- InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(Biquad_FLOAT_Instance_t) );
- InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(QPD_FLOAT_State_t) );
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Size = InstAlloc_GetTotal(&Coef);
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Type = LVPSA_PERSISTENT_COEF;
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
- /*
- * Persistent data memory
- */
- InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t) );
- InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(QPD_FLOAT_Taps_t) );
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Size = InstAlloc_GetTotal(&Data);
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Type = LVPSA_PERSISTENT_DATA;
- pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
- }
- else
- {
- /* Read back memory allocation table */
- *pMemoryTable = pLVPSA_Inst->MemoryTable;
- }
-
- return(LVPSA_OK);
-}
-
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
index 61987b5..e00c11c 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
@@ -27,95 +27,100 @@
CONSTANT DEFINITIONS
***********************************************************************************/
-/* Memory */
-#define LVPSA_INSTANCE_ALIGN 4 /* 32-bit alignment for structures */
-#define LVPSA_SCRATCH_ALIGN 4 /* 32-bit alignment for long data */
-#define LVPSA_COEF_ALIGN 4 /* 32-bit alignment for long words */
-#define LVPSA_DATA_ALIGN 4 /* 32-bit alignment for long data */
+#define LVPSA_NR_SUPPORTED_RATE 13 /* From 8000Hz to 192000Hz*/
+#define LVPSA_NR_SUPPORTED_SPEED \
+ 3 /* LOW, MEDIUM, HIGH */
-#define LVPSA_MEMREGION_INSTANCE 0 /* Offset to instance memory region in memory table */
-#define LVPSA_MEMREGION_PERSISTENT_COEF 1 /* Offset to persistent coefficients memory region in memory table */
-#define LVPSA_MEMREGION_PERSISTENT_DATA 2 /* Offset to persistent taps memory region in memory table */
-#define LVPSA_MEMREGION_SCRATCH 3 /* Offset to scratch memory region in memory table */
-#define LVPSA_NR_SUPPORTED_RATE 13 /* From 8000Hz to 192000Hz*/
-#define LVPSA_NR_SUPPORTED_SPEED 3 /* LOW, MEDIUM, HIGH */
+#define LVPSA_MAXBUFFERDURATION \
+ 4000 /* Maximum length in ms of the levels buffer */
+#define LVPSA_MAXINPUTBLOCKSIZE \
+ 5000 /* Maximum length in mono samples of the block to process */
+#define LVPSA_NBANDSMIN 1 /* Minimum number of frequency band */
+#define LVPSA_NBANDSMAX 30 /* Maximum number of frequency band */
+#define LVPSA_MAXCENTERFREQ \
+ 20000 /* Maximum possible center frequency */
+#define LVPSA_MINPOSTGAIN \
+ (-15) /* Minimum possible post gain */
+#define LVPSA_MAXPOSTGAIN 15 /* Maximum possible post gain */
+#define LVPSA_MINQFACTOR 25 /* Minimum possible Q factor */
+#define LVPSA_MAXQFACTOR 1200 /* Maximum possible Q factor */
-#define LVPSA_MAXBUFFERDURATION 4000 /* Maximum length in ms of the levels buffer */
-#define LVPSA_MAXINPUTBLOCKSIZE 5000 /* Maximum length in mono samples of the block to process */
-#define LVPSA_NBANDSMIN 1 /* Minimum number of frequency band */
-#define LVPSA_NBANDSMAX 30 /* Maximum number of frequency band */
-#define LVPSA_MAXCENTERFREQ 20000 /* Maximum possible center frequency */
-#define LVPSA_MINPOSTGAIN (-15) /* Minimum possible post gain */
-#define LVPSA_MAXPOSTGAIN 15 /* Maximum possible post gain */
-#define LVPSA_MINQFACTOR 25 /* Minimum possible Q factor */
-#define LVPSA_MAXQFACTOR 1200 /* Maximum possible Q factor */
+#define LVPSA_MAXLEVELDECAYFACTOR \
+ 0x4111 /* Decay factor for the maximum values calculation */
+#define LVPSA_MAXLEVELDECAYSHIFT \
+ 14 /* Decay shift for the maximum values calculation */
-#define LVPSA_MAXLEVELDECAYFACTOR 0x4111 /* Decay factor for the maximum values calculation */
-#define LVPSA_MAXLEVELDECAYSHIFT 14 /* Decay shift for the maximum values calculation */
+#define LVPSA_MAXUNSIGNEDCHAR 0xFF
-#define LVPSA_MAXUNSIGNEDCHAR 0xFF
-
-#define LVPSA_FsInvertShift 31
-#define LVPSA_GAINSHIFT 11
-#define LVPSA_FREQSHIFT 25
+#define LVPSA_FsInvertShift 31
+#define LVPSA_GAINSHIFT 11
+#define LVPSA_FREQSHIFT 25
/**********************************************************************************
TYPES DEFINITIONS
***********************************************************************************/
-#define LVPSA_InternalRefreshTime 0x0014 /* 20 ms (50Hz) in Q16.0 */
-#define LVPSA_InternalRefreshTimeInv 0x0666 /* 1/20ms left shifted by 15 */
-#define LVPSA_InternalRefreshTimeShift 15
+#define LVPSA_InternalRefreshTime 0x0014 /* 20 ms (50Hz) in Q16.0 */
+#define LVPSA_InternalRefreshTimeInv 0x0666 /* 1/20ms left shifted by 15 */
+#define LVPSA_InternalRefreshTimeShift 15
/* Precision of the filter */
-typedef enum
-{
- LVPSA_SimplePrecisionFilter, /* Simple precision */
- LVPSA_DoublePrecisionFilter /* Double precision */
+typedef enum {
+ LVPSA_SimplePrecisionFilter, /* Simple precision */
+ LVPSA_DoublePrecisionFilter /* Double precision */
} LVPSA_BPFilterPrecision_en;
-typedef struct
-{
- LVM_CHAR bControlPending; /* Flag incating a change of the control parameters */
- LVM_UINT16 nBands; /* Number of bands of the spectrum analyzer */
- LVM_UINT16 MaxInputBlockSize; /* Maximum input data buffer size */
+typedef struct {
+ LVM_CHAR bControlPending; /* Flag incating a change of the control parameters */
+ LVM_UINT16 nBands; /* Number of bands of the spectrum analyzer */
+ LVM_UINT16 MaxInputBlockSize; /* Maximum input data buffer size */
- LVPSA_ControlParams_t CurrentParams; /* Current control parameters of the module */
- LVPSA_ControlParams_t NewParams; /* New control parameters given by the user */
- LVPSA_MemTab_t MemoryTable;
+ LVPSA_ControlParams_t CurrentParams; /* Current control parameters of the module */
+ LVPSA_ControlParams_t NewParams; /* New control parameters given by the user */
+ void* pScratch;
+ /* Pointer to bundle scratch buffer */
- LVPSA_BPFilterPrecision_en *pBPFiltersPrecision; /* Points a nBands elements array that contains the filter precision for each band */
- Biquad_FLOAT_Instance_t *pBP_Instances;
+ LVPSA_BPFilterPrecision_en* pBPFiltersPrecision; /* Points a nBands elements array that contains
+ the filter precision for each band */
+ Biquad_FLOAT_Instance_t* pBP_Instances;
/* Points a nBands elements array that contains the band pass filter taps for each band */
- Biquad_1I_Order2_FLOAT_Taps_t *pBP_Taps;
+ Biquad_1I_Order2_FLOAT_Taps_t* pBP_Taps;
/* Points a nBands elements array that contains the QPD filter instance for each band */
- QPD_FLOAT_State_t *pQPD_States;
+ QPD_FLOAT_State_t* pQPD_States;
/* Points a nBands elements array that contains the QPD filter taps for each band */
- QPD_FLOAT_Taps_t *pQPD_Taps;
+ QPD_FLOAT_Taps_t* pQPD_Taps;
/* Points a nBands elements array that contains the post-filter gains for each band */
- LVM_FLOAT *pPostGains;
- LVPSA_FilterParam_t *pFiltersParams; /* Copy of the filters parameters from the input parameters */
+ LVM_FLOAT* pPostGains;
+ LVPSA_FilterParam_t*
+ pFiltersParams; /* Copy of the filters parameters from the input parameters */
- LVM_UINT16 nSamplesBufferUpdate; /* Number of samples to make 20ms */
- LVM_INT32 BufferUpdateSamplesCount; /* Counter used to know when to put a new value in the buffer */
- LVM_UINT16 nRelevantFilters; /* Number of relevent filters depending on sampling frequency and bands center frequency */
- LVM_UINT16 LocalSamplesCount; /* Counter used to update the SpectralDataBufferAudioTime */
+ LVM_UINT16 nSamplesBufferUpdate; /* Number of samples to make 20ms */
+ LVM_INT32 BufferUpdateSamplesCount; /* Counter used to know when to put a new value in the
+ buffer */
+ LVM_UINT16 nRelevantFilters; /* Number of relevant filters depending on sampling frequency and
+ bands center frequency */
+ LVM_UINT16 LocalSamplesCount; /* Counter used to update the SpectralDataBufferAudioTime */
- LVM_UINT16 DownSamplingFactor; /* Down sampling factor depending on the sampling frequency */
- LVM_UINT16 DownSamplingCount; /* Counter used for the downsampling handling */
+ LVM_UINT16 DownSamplingFactor; /* Down sampling factor depending on the sampling frequency */
+ LVM_UINT16 DownSamplingCount; /* Counter used for the downsampling handling */
- LVM_UINT16 SpectralDataBufferDuration; /* Length of the buffer in time (ms) defined by the application */
- LVM_UINT8 *pSpectralDataBufferStart; /* Starting address of the buffer */
- LVM_UINT8 *pSpectralDataBufferWritePointer; /* Current position of the writting pointer of the buffer */
- LVPSA_Time SpectralDataBufferAudioTime; /* AudioTime at which the last value save occured in the buffer */
- LVM_UINT32 SpectralDataBufferLength; /* Number of spectrum data value that the buffer can contain (per band)
- = SpectralDataBufferDuration/20ms */
+ LVM_UINT16 SpectralDataBufferDuration; /* Length of the buffer in time (ms) defined by the
+ application */
+ LVM_UINT8* pSpectralDataBufferStart; /* Starting address of the buffer */
+ LVM_UINT8* pSpectralDataBufferWritePointer; /* Current position of the writing pointer of the
+ buffer */
+ LVPSA_Time SpectralDataBufferAudioTime; /* AudioTime at which the last value save occurred in
+ the buffer */
+ LVM_UINT32
+ SpectralDataBufferLength; /* Number of spectrum data value that the buffer can contain
+ (per band) = SpectralDataBufferDuration/20ms */
- LVM_UINT8 *pPreviousPeaks; /* Points to a nBands elements array that contains the previous peak value of the level
- detection. Those values are decremented after each call to the GetSpectrum function */
+ LVM_UINT8* pPreviousPeaks; /* Points to a nBands elements array that contains the previous peak
+ value of the level detection. Those values are decremented after
+ each call to the GetSpectrum function */
-}LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
+} LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
/**********************************************************************************
FUNCTIONS PROTOTYPE
@@ -135,6 +140,6 @@
/* LVPSA_OK Always succeeds */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t *pInst);
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst);
#endif /* _LVPSA_PRIVATE_H */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
index 81a88c5..299dfd2 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
@@ -15,23 +15,23 @@
* limitations under the License.
*/
-#include "LVPSA.h"
-#include "LVPSA_Private.h"
-#include "LVM_Macros.h"
-#include "VectorArithmetic.h"
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "LVM_Macros.h"
+#include "VectorArithmetic.h"
-#define LVM_MININT_32 0x80000000
+#define LVM_MININT_32 0x80000000
static LVM_INT32 mult32x32in32_shiftr(LVM_INT32 a, LVM_INT32 b, LVM_INT32 c) {
- LVM_INT64 result = ((LVM_INT64)a * b) >> c;
+ LVM_INT64 result = ((LVM_INT64)a * b) >> c;
- if (result >= INT32_MAX) {
- return INT32_MAX;
- } else if (result <= INT32_MIN) {
- return INT32_MIN;
- } else {
- return (LVM_INT32)result;
- }
+ if (result >= INT32_MAX) {
+ return INT32_MAX;
+ } else if (result <= INT32_MIN) {
+ return INT32_MIN;
+ } else {
+ return (LVM_INT32)result;
+ }
}
/************************************************************************************/
@@ -54,42 +54,36 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_Process ( pLVPSA_Handle_t hInstance,
- LVM_FLOAT *pLVPSA_InputSamples,
- LVM_UINT16 InputBlockSize,
- LVPSA_Time AudioTime )
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+ LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime)
{
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- LVM_FLOAT *pScratch;
- LVM_INT16 ii;
- LVM_INT32 AudioTimeInc;
- extern LVM_UINT32 LVPSA_SampleRateInvTab[];
- LVM_UINT8 *pWrite_Save; /* Position of the write pointer
- at the beginning of the process */
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+ LVM_FLOAT* pScratch;
+ LVM_INT16 ii;
+ LVM_INT32 AudioTimeInc;
+ extern LVM_UINT32 LVPSA_SampleRateInvTab[];
+ LVM_UINT8* pWrite_Save; /* Position of the write pointer
+ at the beginning of the process */
/******************************************************************************
CHECK PARAMETERS
*******************************************************************************/
- if(hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL)
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if (hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
- if(InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize)
- {
- return(LVPSA_ERROR_INVALIDPARAM);
+ if (InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize) {
+ return (LVPSA_ERROR_INVALIDPARAM);
}
-
- pScratch = (LVM_FLOAT*)pLVPSA_Inst->MemoryTable.Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress;
+ pScratch = (LVM_FLOAT*)pLVPSA_Inst->pScratch;
pWrite_Save = pLVPSA_Inst->pSpectralDataBufferWritePointer;
/******************************************************************************
APPLY NEW SETTINGS IF NEEDED
*******************************************************************************/
- if (pLVPSA_Inst->bControlPending == LVM_TRUE)
- {
+ if (pLVPSA_Inst->bControlPending == LVM_TRUE) {
pLVPSA_Inst->bControlPending = 0;
- LVPSA_ApplyNewSettings( pLVPSA_Inst);
+ LVPSA_ApplyNewSettings(pLVPSA_Inst);
}
/******************************************************************************
@@ -99,39 +93,30 @@
Copy_Float(pLVPSA_InputSamples, pScratch, (LVM_INT16)InputBlockSize);
Shift_Sat_Float(-1, pScratch, pScratch, (LVM_INT16)InputBlockSize);
- for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++)
- {
- switch(pLVPSA_Inst->pBPFiltersPrecision[ii])
- {
+ for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++) {
+ switch (pLVPSA_Inst->pBPFiltersPrecision[ii]) {
case LVPSA_SimplePrecisionFilter:
- BP_1I_D16F16C14_TRC_WRA_01 ( &pLVPSA_Inst->pBP_Instances[ii],
- pScratch,
- pScratch + InputBlockSize,
- (LVM_INT16)InputBlockSize);
+ BP_1I_D16F16C14_TRC_WRA_01(&pLVPSA_Inst->pBP_Instances[ii], pScratch,
+ pScratch + InputBlockSize, (LVM_INT16)InputBlockSize);
break;
case LVPSA_DoublePrecisionFilter:
- BP_1I_D16F32C30_TRC_WRA_01 ( &pLVPSA_Inst->pBP_Instances[ii],
- pScratch,
- pScratch + InputBlockSize,
- (LVM_INT16)InputBlockSize);
+ BP_1I_D16F32C30_TRC_WRA_01(&pLVPSA_Inst->pBP_Instances[ii], pScratch,
+ pScratch + InputBlockSize, (LVM_INT16)InputBlockSize);
break;
default:
break;
}
- LVPSA_QPD_Process_Float ( pLVPSA_Inst,
- pScratch + InputBlockSize,
- (LVM_INT16)InputBlockSize,
- ii);
+ LVPSA_QPD_Process_Float(pLVPSA_Inst, pScratch + InputBlockSize, (LVM_INT16)InputBlockSize,
+ ii);
}
/******************************************************************************
UPDATE SpectralDataBufferAudioTime
*******************************************************************************/
- if(pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save)
- {
+ if (pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save) {
AudioTimeInc = mult32x32in32_shiftr(
(AudioTime + ((LVM_INT32)pLVPSA_Inst->LocalSamplesCount * 1000)),
(LVM_INT32)LVPSA_SampleRateInvTab[pLVPSA_Inst->CurrentParams.Fs],
@@ -139,7 +124,7 @@
pLVPSA_Inst->SpectralDataBufferAudioTime = AudioTime + AudioTimeInc;
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/************************************************************************************/
@@ -162,99 +147,95 @@
/* otherwise Error due to bad parameters */
/* */
/************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum ( pLVPSA_Handle_t hInstance,
- LVPSA_Time GetSpectrumAudioTime,
- LVM_UINT8 *pCurrentValues,
- LVM_UINT8 *pPeakValues )
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+ LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues)
{
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+ LVM_INT32 StatusDelta, ii;
+ LVM_UINT8* pRead;
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- LVM_INT32 StatusDelta, ii;
- LVM_UINT8 *pRead;
-
- if(hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL)
- {
- return(LVPSA_ERROR_NULLADDRESS);
+ if (hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL) {
+ return (LVPSA_ERROR_NULLADDRESS);
}
/* First find the place where to look in the status buffer */
- if(GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime)
- {
- MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift);
- if((StatusDelta * LVPSA_InternalRefreshTime) != (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime))
- {
+ if (GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime) {
+ MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),
+ LVPSA_InternalRefreshTimeInv, StatusDelta, LVPSA_InternalRefreshTimeShift);
+ if ((StatusDelta * LVPSA_InternalRefreshTime) !=
+ (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime)) {
StatusDelta += 1;
}
- }
- else
- {
+ } else {
/* This part handles the wrap around */
- MUL32x32INTO32(((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift)
- if(((LVM_INT32)(StatusDelta * LVPSA_InternalRefreshTime)) != ((LVM_INT32)((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime))))
- {
+ MUL32x32INTO32(
+ ((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) +
+ ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),
+ LVPSA_InternalRefreshTimeInv, StatusDelta,
+ LVPSA_InternalRefreshTimeShift) if (((LVM_INT32)(StatusDelta *
+ LVPSA_InternalRefreshTime)) !=
+ ((LVM_INT32)(
+ (pLVPSA_Inst
+ ->SpectralDataBufferAudioTime -
+ (LVM_INT32)LVM_MININT_32) +
+ ((LVM_INT32)LVM_MAXINT_32 -
+ GetSpectrumAudioTime)))) {
StatusDelta += 1;
}
}
/* Check whether the desired level is not too "old" (see 2.10 in LVPSA_DesignNotes.doc)*/
- if(
- ((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime)&&
- ((GetSpectrumAudioTime<0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>0))&&
- (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime))>LVM_MAXINT_32))||
+ if (((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+ ((GetSpectrumAudioTime < 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime > 0)) &&
+ (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime)) >
+ LVM_MAXINT_32)) ||
- ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime)&&
- (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>=0))||
- ((GetSpectrumAudioTime<=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))||
- (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))&&
- (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime))<LVM_MAXINT_32))))||
+ ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+ (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime >= 0)) ||
+ ((GetSpectrumAudioTime <= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) ||
+ (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) &&
+ (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime)) <
+ LVM_MAXINT_32)))) ||
- (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) ||
- (!StatusDelta))
- {
- for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
- {
- pCurrentValues[ii] = 0;
- pPeakValues[ii] = 0;
+ (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) || (!StatusDelta)) {
+ for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+ pCurrentValues[ii] = 0;
+ pPeakValues[ii] = 0;
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
/* Set the reading pointer */
- if((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) > (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart))
- {
- pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer + (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) * pLVPSA_Inst->nBands;
- }
- else
- {
- pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer - StatusDelta * pLVPSA_Inst->nBands;
+ if ((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) >
+ (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart)) {
+ pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer +
+ (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) *
+ pLVPSA_Inst->nBands;
+ } else {
+ pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer - StatusDelta * pLVPSA_Inst->nBands;
}
/* Read the status buffer and fill the output buffers */
- for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
- {
+ for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
pCurrentValues[ii] = pRead[ii];
- if(pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii])
- {
+ if (pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii]) {
pLVPSA_Inst->pPreviousPeaks[ii] = pRead[ii];
- }
- else if(pLVPSA_Inst->pPreviousPeaks[ii] != 0)
- {
+ } else if (pLVPSA_Inst->pPreviousPeaks[ii] != 0) {
LVM_INT32 temp;
/*Re-compute max values for decay */
temp = (LVM_INT32)(LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]);
- temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR)>>LVPSA_MAXLEVELDECAYSHIFT);
+ temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR) >> LVPSA_MAXLEVELDECAYSHIFT);
/* If the gain has no effect, "help" the value to increase */
- if(temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]))
- {
+ if (temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii])) {
temp += 1;
}
/* Saturate */
temp = (temp > LVPSA_MAXUNSIGNEDCHAR) ? LVPSA_MAXUNSIGNEDCHAR : temp;
/* Store new max level */
- pLVPSA_Inst->pPreviousPeaks[ii] = (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
+ pLVPSA_Inst->pPreviousPeaks[ii] = (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
}
pPeakValues[ii] = pLVPSA_Inst->pPreviousPeaks[ii];
}
- return(LVPSA_OK);
+ return (LVPSA_OK);
}
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
index 609a485..2f752bf 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
@@ -20,41 +20,35 @@
#include "LVM_Types.h"
-typedef struct
-{
- LVM_INT32 *pDelay; /* pointer to the delayed samples (data of 32 bits) */
- LVM_INT32 Coefs[2]; /* pointer to the filter coefficients */
-}QPD_State_t, *pQPD_State_t;
+typedef struct {
+ LVM_INT32* pDelay; /* pointer to the delayed samples (data of 32 bits) */
+ LVM_INT32 Coefs[2]; /* pointer to the filter coefficients */
+} QPD_State_t, *pQPD_State_t;
-typedef struct
-{
+typedef struct {
/* pointer to the delayed samples (data of 32 bits) */
- LVM_FLOAT *pDelay;
- LVM_FLOAT Coefs[2]; /* pointer to the filter coefficients */
-}QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
+ LVM_FLOAT* pDelay;
+ LVM_FLOAT Coefs[2]; /* pointer to the filter coefficients */
+} QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
-typedef struct
-{
- LVM_INT32 KP; /*should store a0*/
- LVM_INT32 KM; /*should store b2*/
+typedef struct {
+ LVM_INT32 KP; /*should store a0*/
+ LVM_INT32 KM; /*should store b2*/
} QPD_C32_Coefs, *PQPD_C32_Coefs;
-typedef struct
-{
- LVM_FLOAT KP; /*should store a0*/
- LVM_FLOAT KM; /*should store b2*/
+typedef struct {
+ LVM_FLOAT KP; /*should store a0*/
+ LVM_FLOAT KM; /*should store b2*/
} QPD_FLOAT_Coefs, *PQPD_FLOAT_Coefs;
-typedef struct
-{
+typedef struct {
LVM_INT32 Storage[1];
} QPD_Taps_t, *pQPD_Taps_t;
-typedef struct
-{
+typedef struct {
LVM_FLOAT Storage[1];
} QPD_FLOAT_Taps_t, *pQPD_FLOAT_Taps_t;
@@ -72,15 +66,11 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_Process ( void *hInstance,
- LVM_INT16 *pInSamps,
- LVM_INT16 numSamples,
- LVM_INT16 BandIndex);
+void LVPSA_QPD_Process(void* hInstance, LVM_INT16* pInSamps, LVM_INT16 numSamples,
+ LVM_INT16 BandIndex);
-void LVPSA_QPD_Process_Float ( void *hInstance,
- LVM_FLOAT *pInSamps,
- LVM_INT16 numSamples,
- LVM_INT16 BandIndex);
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+ LVM_INT16 BandIndex);
/************************************************************************************/
/* */
/* FUNCTION: LVPSA_QPD_Init */
@@ -96,13 +86,9 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_Init ( QPD_State_t *pInstance,
- QPD_Taps_t *pTaps,
- QPD_C32_Coefs *pCoef );
+void LVPSA_QPD_Init(QPD_State_t* pInstance, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef);
-void LVPSA_QPD_Init_Float ( QPD_FLOAT_State_t *pInstance,
- QPD_FLOAT_Taps_t *pTaps,
- QPD_FLOAT_Coefs *pCoef );
+void LVPSA_QPD_Init_Float(QPD_FLOAT_State_t* pInstance, QPD_FLOAT_Taps_t* pTaps,
+ QPD_FLOAT_Coefs* pCoef);
#endif
-
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
index 2dbf694..c5023c3 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
@@ -32,20 +32,15 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_Init ( pQPD_State_t pQPD_State,
- QPD_Taps_t *pTaps,
- QPD_C32_Coefs *pCoef )
-{
- pQPD_State->pDelay = pTaps->Storage;
- pQPD_State->Coefs[0] = pCoef->KP;
- pQPD_State->Coefs[1] = pCoef->KM;
+void LVPSA_QPD_Init(pQPD_State_t pQPD_State, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef) {
+ pQPD_State->pDelay = pTaps->Storage;
+ pQPD_State->Coefs[0] = pCoef->KP;
+ pQPD_State->Coefs[1] = pCoef->KM;
}
-void LVPSA_QPD_Init_Float ( pQPD_FLOAT_State_t pQPD_State,
- QPD_FLOAT_Taps_t *pTaps,
- QPD_FLOAT_Coefs *pCoef )
-{
- pQPD_State->pDelay = pTaps->Storage;
- pQPD_State->Coefs[0] = ((LVM_FLOAT)pCoef->KP);
- pQPD_State->Coefs[1] = ((LVM_FLOAT)pCoef->KM);
+void LVPSA_QPD_Init_Float(pQPD_FLOAT_State_t pQPD_State, QPD_FLOAT_Taps_t* pTaps,
+ QPD_FLOAT_Coefs* pCoef) {
+ pQPD_State->pDelay = pTaps->Storage;
+ pQPD_State->Coefs[0] = ((LVM_FLOAT)pCoef->KP);
+ pQPD_State->Coefs[1] = ((LVM_FLOAT)pCoef->KM);
}
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
index 8805420..e301cf9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
@@ -34,15 +34,11 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_WritePeak( pLVPSA_InstancePr_t pLVPSA_Inst,
- LVM_UINT8 **ppWrite,
- LVM_INT16 BandIndex,
- LVM_INT16 Value );
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+ LVM_INT16 Value);
-void LVPSA_QPD_WritePeak_Float( pLVPSA_InstancePr_t pLVPSA_Inst,
- LVM_UINT8 **ppWrite,
- LVM_INT16 BandIndex,
- LVM_FLOAT Value );
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+ LVM_INT16 BandIndex, LVM_FLOAT Value);
/************************************************************************************/
/* */
/* FUNCTION: LVPSA_QPD_Process */
@@ -56,38 +52,34 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_Process_Float ( void *hInstance,
- LVM_FLOAT *pInSamps,
- LVM_INT16 numSamples,
- LVM_INT16 BandIndex)
-{
-
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+ LVM_INT16 BandIndex) {
/******************************************************************************
PARAMETERS
*******************************************************************************/
- LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
- QPD_FLOAT_State_t *pQPDState = (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
+ LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+ QPD_FLOAT_State_t* pQPDState = (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
/* Pointer to taps */
- LVM_FLOAT* pDelay = pQPDState->pDelay;
+ LVM_FLOAT* pDelay = pQPDState->pDelay;
/* Parameters needed during quasi peak calculations */
- LVM_FLOAT X0;
- LVM_FLOAT temp,temp2;
- LVM_FLOAT accu;
- LVM_FLOAT Xg0;
- LVM_FLOAT D0;
- LVM_FLOAT V0 = (LVM_FLOAT)(*pDelay);
+ LVM_FLOAT X0;
+ LVM_FLOAT temp, temp2;
+ LVM_FLOAT accu;
+ LVM_FLOAT Xg0;
+ LVM_FLOAT D0;
+ LVM_FLOAT V0 = (LVM_FLOAT)(*pDelay);
/* Filter's coef */
- LVM_FLOAT Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
- LVM_FLOAT Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
+ LVM_FLOAT Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
+ LVM_FLOAT Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
- LVM_INT16 ii = numSamples;
+ LVM_INT16 ii = numSamples;
- LVM_UINT8 *pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
- LVM_INT32 BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
- LVM_UINT16 DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
+ LVM_UINT8* pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
+ LVM_INT32 BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
+ LVM_UINT16 DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
/******************************************************************************
INITIALIZATION
@@ -97,29 +89,27 @@
/* Correct also the number of samples */
ii = (LVM_INT16)(ii - (LVM_INT16)pLVPSA_Inst->DownSamplingCount);
- while (ii > 0)
- {
+ while (ii > 0) {
/* Apply post gain */
/* - 1 to compensate scaling in process function*/
X0 = (*pInSamps) * pLVPSA_Inst->pPostGains[BandIndex];
pInSamps = pInSamps + DownSamplingFactor;
/* Saturate and take absolute value */
- if(X0 < 0.0f)
- X0 = -X0;
+ if (X0 < 0.0f) X0 = -X0;
if (X0 > 1.0f)
Xg0 = 1.0f;
else
- Xg0 =X0;
+ Xg0 = X0;
/* Quasi peak filter calculation */
- D0 = Xg0 - V0;
+ D0 = Xg0 - V0;
temp2 = D0;
accu = temp2 * Kp;
- D0 = D0 / 2.0f;
- if (D0 < 0.0f){
+ D0 = D0 / 2.0f;
+ if (D0 < 0.0f) {
D0 = -D0;
}
@@ -130,17 +120,13 @@
if (accu > 1.0f)
accu = 1.0f;
- else if(accu < 0.0f)
+ else if (accu < 0.0f)
accu = 0.0f;
V0 = accu;
- if(((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor))
- {
- LVPSA_QPD_WritePeak_Float( pLVPSA_Inst,
- &pWrite,
- BandIndex,
- V0);
+ if (((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor)) {
+ LVPSA_QPD_WritePeak_Float(pLVPSA_Inst, &pWrite, BandIndex, V0);
BufferUpdateSamplesCount -= pLVPSA_Inst->nSamplesBufferUpdate;
pLVPSA_Inst->LocalSamplesCount = (LVM_UINT16)(numSamples - ii);
@@ -148,7 +134,6 @@
BufferUpdateSamplesCount += DownSamplingFactor;
ii = (LVM_INT16)(ii - DownSamplingFactor);
-
}
/* Store last taps in memory */
@@ -156,20 +141,15 @@
/* If this is the last call to the function after last band processing,
update the parameters. */
- if(BandIndex == (pLVPSA_Inst->nRelevantFilters - 1))
- {
+ if (BandIndex == (pLVPSA_Inst->nRelevantFilters - 1)) {
pLVPSA_Inst->pSpectralDataBufferWritePointer = pWrite;
/* Adjustment for 11025Hz input, 220,5 is normally
the exact number of samples for 20ms.*/
- if((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite)&&
- (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025))
- {
- if(pLVPSA_Inst->nSamplesBufferUpdate == 220)
- {
+ if ((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite) &&
+ (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025)) {
+ if (pLVPSA_Inst->nSamplesBufferUpdate == 220) {
pLVPSA_Inst->nSamplesBufferUpdate = 221;
- }
- else
- {
+ } else {
pLVPSA_Inst->nSamplesBufferUpdate = 220;
}
}
@@ -194,37 +174,29 @@
/* RETURNS: void */
/* */
/************************************************************************************/
-void LVPSA_QPD_WritePeak( pLVPSA_InstancePr_t pLVPSA_Inst,
- LVM_UINT8 **ppWrite,
- LVM_INT16 BandIndex,
- LVM_INT16 Value )
-{
- LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+ LVM_INT16 Value) {
+ LVM_UINT8* pWrite = *ppWrite;
/* Write the value and update the write pointer */
- *(pWrite + BandIndex) = (LVM_UINT8)(Value>>7);
+ *(pWrite + BandIndex) = (LVM_UINT8)(Value >> 7);
pWrite += pLVPSA_Inst->nBands;
- if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength))
- {
+ if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+ pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
}
*ppWrite = pWrite;
-
}
-void LVPSA_QPD_WritePeak_Float( pLVPSA_InstancePr_t pLVPSA_Inst,
- LVM_UINT8 **ppWrite,
- LVM_INT16 BandIndex,
- LVM_FLOAT Value )
-{
- LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+ LVM_INT16 BandIndex, LVM_FLOAT Value) {
+ LVM_UINT8* pWrite = *ppWrite;
/* Write the value and update the write pointer */
*(pWrite + BandIndex) = (LVM_UINT8)(Value * 256);
pWrite += pLVPSA_Inst->nBands;
- if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * \
- pLVPSA_Inst->SpectralDataBufferLength))
- {
+ if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+ pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
}
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
index 9f0aa02..4fbff6f 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
@@ -34,19 +34,9 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-const LVM_UINT32 LVPSA_SampleRateTab[] = { 8000, /* 8kS/s */
- 11025,
- 12000,
- 16000,
- 22050,
- 24000,
- 32000,
- 44100,
- 48000,
- 88200,
- 96000,
- 176400,
- 192000}; /* 192kS/s */
+const LVM_UINT32 LVPSA_SampleRateTab[] = {8000, /* 8kS/s */
+ 11025, 12000, 16000, 22050, 24000, 32000,
+ 44100, 48000, 88200, 96000, 176400, 192000}; /* 192kS/s */
/************************************************************************************/
/* */
@@ -58,20 +48,11 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-const LVM_UINT32 LVPSA_SampleRateInvTab[] = { 268435, /* 8kS/s */
- 194783,
- 178957,
- 134218,
- 97391,
- 89478,
- 67109,
- 48696,
- 44739
- ,24348
- ,22369
- ,12174
- ,11185 /* 192kS/s */
- };
+const LVM_UINT32 LVPSA_SampleRateInvTab[] = {
+ 268435, /* 8kS/s */
+ 194783, 178957, 134218, 97391, 89478, 67109,
+ 48696, 44739, 24348, 22369, 12174, 11185 /* 192kS/s */
+};
/************************************************************************************/
/* */
@@ -83,20 +64,10 @@
* Table for converting between the enumerated type and the number of samples
* during 20ms
*/
-const LVM_UINT16 LVPSA_nSamplesBufferUpdate[] = { 160, /* 8kS/s */
- 220,
- 240,
- 320,
- 441,
- 480,
- 640,
- 882,
- 960
- ,1764
- ,1920
- ,3528
- ,3840 /* 192kS/s */
- };
+const LVM_UINT16 LVPSA_nSamplesBufferUpdate[] = {
+ 160, /* 8kS/s */
+ 220, 240, 320, 441, 480, 640, 882, 960, 1764, 1920, 3528, 3840 /* 192kS/s */
+};
/************************************************************************************/
/* */
/* Down sampling factors */
@@ -106,20 +77,25 @@
/*
* Table for converting between the enumerated type and the down sampling factor
*/
-const LVM_UINT16 LVPSA_DownSamplingFactor[] = { 5, /* 8000 S/s */
- 7, /* 11025 S/s */
- 8, /* 12000 S/s */
- 10, /* 16000 S/s */
- 15, /* 22050 S/s */
- 16, /* 24000 S/s */
- 21, /* 32000 S/s */
- 30, /* 44100 S/s */
- 32 /* 48000 S/s */
- ,60 /* 88200 S/s */
- ,64 /* 96000 S/s */
- ,120 /* 176400 S/s */
- ,128 /*192000 S/s */
- };
+const LVM_UINT16 LVPSA_DownSamplingFactor[] = {
+ 5, /* 8000 S/s */
+ 7, /* 11025 S/s */
+ 8, /* 12000 S/s */
+ 10, /* 16000 S/s */
+ 15, /* 22050 S/s */
+ 16, /* 24000 S/s */
+ 21, /* 32000 S/s */
+ 30, /* 44100 S/s */
+ 32 /* 48000 S/s */
+ ,
+ 60 /* 88200 S/s */
+ ,
+ 64 /* 96000 S/s */
+ ,
+ 120 /* 176400 S/s */
+ ,
+ 128 /*192000 S/s */
+};
/************************************************************************************/
/* */
@@ -130,102 +106,34 @@
/*
* Table for 2 * Pi / Fs
*/
-const LVM_INT16 LVPSA_TwoPiOnFsTable[] = { 26354, /* 8kS/s */
- 19123,
- 17569,
- 13177,
- 9561,
- 8785,
- 6588,
- 4781,
- 4392
- ,2390
- ,2196
- ,1195
- ,1098 /* 192kS/s */
- };
+const LVM_INT16 LVPSA_TwoPiOnFsTable[] = {
+ 26354, /* 8kS/s */
+ 19123, 17569, 13177, 9561, 8785, 6588, 4781, 4392, 2390, 2196, 1195, 1098 /* 192kS/s */
+};
-const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[] = { 0.8042847f, /* 8kS/s */
- 0.5836054f,
- 0.5361796f,
- 0.4021423f,
- 0.2917874f,
- 0.2681051f,
- 0.2010559f,
- 0.1459089f,
- 0.1340372f
- ,0.0729476f
- ,0.0670186f
- ,0.0364738f
- ,0.0335093f /* 192kS/s */
- };
+const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[] = {
+ 0.8042847f, /* 8kS/s */
+ 0.5836054f, 0.5361796f, 0.4021423f, 0.2917874f, 0.2681051f, 0.2010559f,
+ 0.1459089f, 0.1340372f, 0.0729476f, 0.0670186f, 0.0364738f, 0.0335093f /* 192kS/s */
+};
/*
* Gain table
*/
-const LVM_INT16 LVPSA_GainTable[] = { 364, /* -15dB gain */
- 408,
- 458,
- 514,
- 577,
- 647,
- 726,
- 815,
- 914,
- 1026,
- 1151,
- 1292,
- 1449,
- 1626,
- 1825,
- 2048, /* 0dB gain */
- 2297,
- 2578,
- 2892,
- 3245,
- 3641,
- 4096,
- 4584,
- 5144,
- 5772,
- 6476,
- 7266,
- 8153,
- 9148,
- 10264,
- 11576}; /* +15dB gain */
+const LVM_INT16 LVPSA_GainTable[] = {364, /* -15dB gain */
+ 408, 458, 514, 577, 647, 726, 815, 914,
+ 1026, 1151, 1292, 1449, 1626, 1825, 2048, /* 0dB gain */
+ 2297, 2578, 2892, 3245, 3641, 4096, 4584, 5144,
+ 5772, 6476, 7266, 8153, 9148, 10264, 11576}; /* +15dB gain */
-const LVM_FLOAT LVPSA_Float_GainTable[]={ 0.177734375f, /* -15dB gain */
- 0.199218750f,
- 0.223632812f,
- 0.250976562f,
- 0.281738281f,
- 0.315917968f,
- 0.354492187f,
- 0.397949218f,
- 0.446289062f,
- 0.500976562f,
- 0.562011718f,
- 0.630859375f,
- 0.707519531f,
- 0.793945312f,
- 0.891113281f,
- 1.000000000f, /* 0dB gain */
- 1.121582031f,
- 1.258789062f,
- 1.412109375f,
- 1.584472656f,
- 1.777832031f,
- 2.000000000f,
- 2.238281250f,
- 2.511718750f,
- 2.818359375f,
- 3.162109375f,
- 3.547851562f,
- 3.980957031f,
- 4.466796875f,
- 5.011718750f,
- 5.652343750f}; /* +15dB gain */
+const LVM_FLOAT LVPSA_Float_GainTable[] = {
+ 0.177734375f, /* -15dB gain */
+ 0.199218750f, 0.223632812f, 0.250976562f, 0.281738281f, 0.315917968f,
+ 0.354492187f, 0.397949218f, 0.446289062f, 0.500976562f, 0.562011718f,
+ 0.630859375f, 0.707519531f, 0.793945312f, 0.891113281f, 1.000000000f, /* 0dB gain */
+ 1.121582031f, 1.258789062f, 1.412109375f, 1.584472656f, 1.777832031f,
+ 2.000000000f, 2.238281250f, 2.511718750f, 2.818359375f, 3.162109375f,
+ 3.547851562f, 3.980957031f, 4.466796875f, 5.011718750f, 5.652343750f}; /* +15dB gain */
/************************************************************************************/
/* */
/* Cosone polynomial coefficients */
@@ -241,20 +149,20 @@
* a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
* +1.0 to -1.0
*/
-const LVM_INT16 LVPSA_CosCoef[] = { 3, /* Shifts */
- 4096, /* a0 */
- -36, /* a1 */
- -19725, /* a2 */
- -2671, /* a3 */
- 23730, /* a4 */
- -9490}; /* a5 */
-const LVM_FLOAT LVPSA_Float_CosCoef[] = { 3, /* Shifts */
- 0.1250038f, /* a0 */
- -0.0010986f, /* a1 */
- -0.6019775f, /* a2 */
- -0.0815149f, /* a3 */
- 0.7242042f, /* a4 */
- -0.2896206f}; /* a5 */
+const LVM_INT16 LVPSA_CosCoef[] = {3, /* Shifts */
+ 4096, /* a0 */
+ -36, /* a1 */
+ -19725, /* a2 */
+ -2671, /* a3 */
+ 23730, /* a4 */
+ -9490}; /* a5 */
+const LVM_FLOAT LVPSA_Float_CosCoef[] = {3, /* Shifts */
+ 0.1250038f, /* a0 */
+ -0.0010986f, /* a1 */
+ -0.6019775f, /* a2 */
+ -0.0815149f, /* a3 */
+ 0.7242042f, /* a4 */
+ -0.2896206f}; /* a5 */
/*
* Coefficients for calculating the cosine error with the equation:
*
@@ -269,101 +177,100 @@
*
* Cos(x) = 1.0 - CosErr(x)
*/
-const LVM_INT16 LVPSA_DPCosCoef[] = { 1, /* Shifts */
- 0, /* a0 */
- -6, /* a1 */
- 16586, /* a2 */
- -44}; /* a3 */
-const LVM_FLOAT LVPSA_Float_DPCosCoef[] = {1.0f, /* Shifts */
- 0.0f, /* a0 */
- -0.00008311f, /* a1 */
- 0.50617999f, /* a2 */
- -0.00134281f}; /* a3 */
+const LVM_INT16 LVPSA_DPCosCoef[] = {1, /* Shifts */
+ 0, /* a0 */
+ -6, /* a1 */
+ 16586, /* a2 */
+ -44}; /* a3 */
+const LVM_FLOAT LVPSA_Float_DPCosCoef[] = {1.0f, /* Shifts */
+ 0.0f, /* a0 */
+ -0.00008311f, /* a1 */
+ 0.50617999f, /* a2 */
+ -0.00134281f}; /* a3 */
/************************************************************************************/
/* */
/* Quasi peak filter coefficients table */
/* */
/************************************************************************************/
-const QPD_C32_Coefs LVPSA_QPD_Coefs[] = {
- /* 8kS/s */ /* LVPSA_SPEED_LOW */
- {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
- {(LVM_INT32)0x80D242E7,0x00CED11D},
- {(LVM_INT32)0x80DCBAF5,0x00D91679},
- {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
- {(LVM_INT32)0x80E13739,0x00DD7CD3},
- {(LVM_INT32)0x80DCBAF5,0x00D91679},
- {(LVM_INT32)0x80D94BAF,0x00D5B7E7},
- {(LVM_INT32)0x80E13739,0x00DD7CD3},
- {(LVM_INT32)0x80DCBAF5,0x00D91679}, /* 48kS/s */
+const QPD_C32_Coefs LVPSA_QPD_Coefs[] = {
+ /* 8kS/s */ /* LVPSA_SPEED_LOW */
+ {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+ {(LVM_INT32)0x80D242E7, 0x00CED11D},
+ {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+ {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+ {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+ {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+ {(LVM_INT32)0x80D94BAF, 0x00D5B7E7},
+ {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+ {(LVM_INT32)0x80DCBAF5, 0x00D91679}, /* 48kS/s */
- /* 8kS/s */ /* LVPSA_SPEED_MEDIUM */
- {(LVM_INT32)0x8587513D,0x055C22CF},
- {(LVM_INT32)0x859D2967,0x0570F007},
- {(LVM_INT32)0x85E2EFAC,0x05B34D79},
- {(LVM_INT32)0x8587513D,0x055C22CF},
- {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
- {(LVM_INT32)0x85E2EFAC,0x05B34D79},
- {(LVM_INT32)0x85CC1018,0x059D8F69},
- {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
- {(LVM_INT32)0x85E2EFAC,0x05B34D79}, /* 48kS/s */
+ /* 8kS/s */ /* LVPSA_SPEED_MEDIUM */
+ {(LVM_INT32)0x8587513D, 0x055C22CF},
+ {(LVM_INT32)0x859D2967, 0x0570F007},
+ {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+ {(LVM_INT32)0x8587513D, 0x055C22CF},
+ {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+ {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+ {(LVM_INT32)0x85CC1018, 0x059D8F69},
+ {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+ {(LVM_INT32)0x85E2EFAC, 0x05B34D79}, /* 48kS/s */
- /* 8kS/s */ /* LVPSA_SPEED_HIGH */
- {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
- {(LVM_INT32)0xA18475F0,0x1D2C83A2},
- {(LVM_INT32)0xA2E1E950,0x1E2A532E},
- {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
- {(LVM_INT32)0xA375B2C6,0x1E943BBC},
- {(LVM_INT32)0xA2E1E950,0x1E2A532E},
- {(LVM_INT32)0xA26FF6BD,0x1DD81530},
- {(LVM_INT32)0xA375B2C6,0x1E943BBC},
- {(LVM_INT32)0xA2E1E950,0x1E2A532E}}; /* 48kS/s */
+ /* 8kS/s */ /* LVPSA_SPEED_HIGH */
+ {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+ {(LVM_INT32)0xA18475F0, 0x1D2C83A2},
+ {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+ {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+ {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+ {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+ {(LVM_INT32)0xA26FF6BD, 0x1DD81530},
+ {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+ {(LVM_INT32)0xA2E1E950, 0x1E2A532E}}; /* 48kS/s */
-const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[] = {
+const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[] = {
- /* 8kS/s */ /* LVPSA_SPEED_LOW */
- {-0.9936831989325583f,0.0062135565094650f},
- {-0.9935833332128823f,0.0063115493394434f},
- {-0.9932638457976282f,0.0066249934025109f},
- {-0.9936831989325583f,0.0062135565094650f},
- {-0.9931269618682563f,0.0067592649720609f},
- {-0.9932638457976282f,0.0066249934025109f},
- {-0.9933686633594334f,0.0065221670083702f},
- {-0.9931269618682563f,0.0067592649720609f},
- /* 48kS/s */
- {-0.9932638457976282f,0.0066249934025109f},
- {-0.9931269618682563f,0.0067592649720609f},
- {-0.9932638457976282f,0.0066249934025109f},
- {-0.9931269618682563f,0.0067592649720609f},
- {-0.9932638457976282f,0.0066249934025109f},
- /* 8kS/s */ /* LVPSA_SPEED_MEDIUM */
- {-0.9568079425953329f,0.0418742666952312f},
- {-0.9561413046903908f,0.0425090822391212f},
- {-0.9540119562298059f,0.0445343819446862f},
- {-0.9568079425953329f,0.0418742666952312f},
- {-0.9531011912040412f,0.0453995238058269f},
- {-0.9540119562298059f,0.0445343819446862f},
- {-0.9547099955379963f,0.0438708555884659f},
- //{0x8600C7B9,0x05CFA6CF},
- {-0.9531011912040412f,0.0453995238058269f},
- /* 48kS/s */
- {-0.9540119562298059f,0.0445343819446862f},
- {-0.9531011912040412f,0.0453995238058269f},
- {-0.9540119562298059f,0.0445343819446862f},
- {-0.9531011912040412f,0.0453995238058269f},
- {-0.9540119562298059f,0.0445343819446862f},
- /* 8kS/s */ /* LVPSA_SPEED_HIGH */
- {-0.7415186790749431f,0.2254409026354551f},
- {-0.7381451204419136f,0.2279209652915597f},
- {-0.7274807319045067f,0.2356666540727019f},
- {-0.7415186790749431f,0.2254409026354551f},
- {-0.7229706319049001f,0.2388987224549055f},
- {-0.7274807319045067f,0.2356666540727019f},
- {-0.7309581353329122f,0.2331568226218224f},
- {-0.7229706319049001f,0.2388987224549055f},
- /* 48kS/s */
- {-0.7274807319045067f,0.2356666540727019f}
- ,{-0.7229706319049001f,0.2388987224549055f}
- ,{-0.7274807319045067f,0.2356666540727019f}
- ,{-0.7229706319049001f,0.2388987224549055f}
- ,{-0.7274807319045067f,0.2356666540727019f}
- };
+ /* 8kS/s */ /* LVPSA_SPEED_LOW */
+ {-0.9936831989325583f, 0.0062135565094650f},
+ {-0.9935833332128823f, 0.0063115493394434f},
+ {-0.9932638457976282f, 0.0066249934025109f},
+ {-0.9936831989325583f, 0.0062135565094650f},
+ {-0.9931269618682563f, 0.0067592649720609f},
+ {-0.9932638457976282f, 0.0066249934025109f},
+ {-0.9933686633594334f, 0.0065221670083702f},
+ {-0.9931269618682563f, 0.0067592649720609f},
+ /* 48kS/s */
+ {-0.9932638457976282f, 0.0066249934025109f},
+ {-0.9931269618682563f, 0.0067592649720609f},
+ {-0.9932638457976282f, 0.0066249934025109f},
+ {-0.9931269618682563f, 0.0067592649720609f},
+ {-0.9932638457976282f, 0.0066249934025109f},
+ /* 8kS/s */ /* LVPSA_SPEED_MEDIUM */
+ {-0.9568079425953329f, 0.0418742666952312f},
+ {-0.9561413046903908f, 0.0425090822391212f},
+ {-0.9540119562298059f, 0.0445343819446862f},
+ {-0.9568079425953329f, 0.0418742666952312f},
+ {-0.9531011912040412f, 0.0453995238058269f},
+ {-0.9540119562298059f, 0.0445343819446862f},
+ {-0.9547099955379963f, 0.0438708555884659f},
+ //{0x8600C7B9,0x05CFA6CF},
+ {-0.9531011912040412f, 0.0453995238058269f},
+ /* 48kS/s */
+ {-0.9540119562298059f, 0.0445343819446862f},
+ {-0.9531011912040412f, 0.0453995238058269f},
+ {-0.9540119562298059f, 0.0445343819446862f},
+ {-0.9531011912040412f, 0.0453995238058269f},
+ {-0.9540119562298059f, 0.0445343819446862f},
+ /* 8kS/s */ /* LVPSA_SPEED_HIGH */
+ {-0.7415186790749431f, 0.2254409026354551f},
+ {-0.7381451204419136f, 0.2279209652915597f},
+ {-0.7274807319045067f, 0.2356666540727019f},
+ {-0.7415186790749431f, 0.2254409026354551f},
+ {-0.7229706319049001f, 0.2388987224549055f},
+ {-0.7274807319045067f, 0.2356666540727019f},
+ {-0.7309581353329122f, 0.2331568226218224f},
+ {-0.7229706319049001f, 0.2388987224549055f},
+ /* 48kS/s */
+ {-0.7274807319045067f, 0.2356666540727019f},
+ {-0.7229706319049001f, 0.2388987224549055f},
+ {-0.7274807319045067f, 0.2356666540727019f},
+ {-0.7229706319049001f, 0.2388987224549055f},
+ {-0.7274807319045067f, 0.2356666540727019f}};
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
index 65872fe..c771dad 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
@@ -27,7 +27,7 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-extern const LVM_UINT32 LVPSA_SampleRateTab[];
+extern const LVM_UINT32 LVPSA_SampleRateTab[];
/************************************************************************************/
/* */
@@ -39,7 +39,7 @@
* Sample rate table for converting between the enumerated type and the actual
* frequency
*/
-extern const LVM_UINT32 LVPSA_SampleRateInvTab[];
+extern const LVM_UINT32 LVPSA_SampleRateInvTab[];
/************************************************************************************/
/* */
@@ -51,7 +51,7 @@
* Table for converting between the enumerated type and the number of samples
* during 20ms
*/
-extern const LVM_UINT16 LVPSA_nSamplesBufferUpdate[];
+extern const LVM_UINT16 LVPSA_nSamplesBufferUpdate[];
/************************************************************************************/
/* */
@@ -62,7 +62,7 @@
/*
* Table for converting between the enumerated type and the down sampling factor
*/
-extern const LVM_UINT16 LVPSA_DownSamplingFactor[];
+extern const LVM_UINT16 LVPSA_DownSamplingFactor[];
/************************************************************************************/
/* */
@@ -73,14 +73,14 @@
/*
* Table for 2 * Pi / Fs
*/
-extern const LVM_INT16 LVPSA_TwoPiOnFsTable[];
-extern const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+extern const LVM_INT16 LVPSA_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
/*
* Gain table
*/
-extern const LVM_INT16 LVPSA_GainTable[];
-extern const LVM_FLOAT LVPSA_Float_GainTable[];
+extern const LVM_INT16 LVPSA_GainTable[];
+extern const LVM_FLOAT LVPSA_Float_GainTable[];
/************************************************************************************/
/* */
@@ -97,8 +97,8 @@
* a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
* +1.0 to -1.0
*/
-extern const LVM_INT16 LVPSA_CosCoef[];
-extern const LVM_FLOAT LVPSA_Float_CosCoef[];
+extern const LVM_INT16 LVPSA_CosCoef[];
+extern const LVM_FLOAT LVPSA_Float_CosCoef[];
/*
* Coefficients for calculating the cosine error with the equation:
@@ -114,15 +114,15 @@
*
* Cos(x) = 1.0 - CosErr(x)
*/
-extern const LVM_INT16 LVPSA_DPCosCoef[];
-extern const LVM_FLOAT LVPSA_Float_DPCosCoef[];
+extern const LVM_INT16 LVPSA_DPCosCoef[];
+extern const LVM_FLOAT LVPSA_Float_DPCosCoef[];
/************************************************************************************/
/* */
/* Quasi peak filter coefficients table */
/* */
/************************************************************************************/
-extern const QPD_C32_Coefs LVPSA_QPD_Coefs[];
-extern const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
+extern const QPD_C32_Coefs LVPSA_QPD_Coefs[];
+extern const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
#endif /* __LVPSA_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
index 0adfd1b..ffe7902 100644
--- a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
+++ b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
@@ -71,21 +71,14 @@
/* */
/****************************************************************************************/
-/* Memory table */
-#define LVCS_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_COEF 2 /* Offset to the persistent coefficient memory region */
-#define LVCS_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */
-#define LVCS_NR_MEMORY_REGIONS 4 /* Number of memory regions */
-
/* Effect Level */
-#define LVCS_EFFECT_LOW 16384 /* Effect scaling 50% */
-#define LVCS_EFFECT_MEDIUM 24576 /* Effect scaling 75% */
-#define LVCS_EFFECT_HIGH 32767 /* Effect Scaling 100% */
+#define LVCS_EFFECT_LOW 16384 /* Effect scaling 50% */
+#define LVCS_EFFECT_MEDIUM 24576 /* Effect scaling 75% */
+#define LVCS_EFFECT_HIGH 32767 /* Effect Scaling 100% */
/* Callback events */
-#define LVCS_EVENT_NONE 0x0000 /* Not a valid event */
-#define LVCS_EVENT_ALGOFF 0x0001 /* CS has completed switch off */
+#define LVCS_EVENT_NONE 0x0000 /* Not a valid event */
+#define LVCS_EVENT_ALGOFF 0x0001 /* CS has completed switch off */
/****************************************************************************************/
/* */
@@ -94,70 +87,49 @@
/****************************************************************************************/
/* Instance handle */
-typedef void *LVCS_Handle_t;
+typedef void* LVCS_Handle_t;
/* Operating modes */
-typedef enum
-{
- LVCS_OFF = 0,
- LVCS_ON = 15,
- LVCS_MAX = LVM_MAXENUM
-} LVCS_Modes_en;
-
-/* Memory Types */
-typedef enum
-{
- LVCS_SCRATCH = 0,
- LVCS_DATA = 1,
- LVCS_COEFFICIENT = 2,
- LVCS_PERSISTENT = 3,
- LVCS_MEMORYTYPE_MAX = LVM_MAXENUM
-} LVCS_MemoryTypes_en;
+typedef enum { LVCS_OFF = 0, LVCS_ON = 15, LVCS_MAX = LVM_MAXENUM } LVCS_Modes_en;
/* Function return status */
-typedef enum
-{
- LVCS_SUCCESS = 0, /* Successful return from a routine */
- LVCS_ALIGNMENTERROR = 1, /* Memory alignment error */
- LVCS_NULLADDRESS = 2, /* NULL allocation address */
- LVCS_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
- LVCS_INVALIDBUFFER = 4, /* Invalid buffer processing request */
- LVCS_STATUSMAX = LVM_MAXENUM
+typedef enum {
+ LVCS_SUCCESS = 0, /* Successful return from a routine */
+ LVCS_NULLADDRESS = 1, /* NULL allocation address */
+ LVCS_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+ LVCS_STATUSMAX = LVM_MAXENUM
} LVCS_ReturnStatus_en;
/*
* Source data formats
*/
-typedef enum
-{
- LVCS_STEREO = 0,
+typedef enum {
+ LVCS_STEREO = 0,
LVCS_MONOINSTEREO = 1,
- LVCS_SOURCEMAX = LVM_MAXENUM
+ LVCS_SOURCEMAX = LVM_MAXENUM
} LVCS_SourceFormat_en;
/*
* Supported output devices
*/
-typedef enum
-{
- LVCS_HEADPHONES = 0,
- LVCS_EX_HEADPHONES = 1,
- LVCS_SPEAKERTYPE_MAX = LVM_MAXENUM
+typedef enum {
+ LVCS_HEADPHONES = 0,
+ LVCS_EX_HEADPHONES = 1,
+ LVCS_SPEAKERTYPE_MAX = LVM_MAXENUM
} LVCS_SpeakerType_en;
/*
* Speaker Coefficients Table
*/
-typedef struct
-{
- void *pTable1;
- void *pTable2;
- void *pTable3;
- void *pTable4;
- void *pTable5;
- void *pTable6;
- void *pTable7;
- void *pTable8;
+typedef struct {
+ void* pTable1;
+ void* pTable2;
+ void* pTable3;
+ void* pTable4;
+ void* pTable5;
+ void* pTable6;
+ void* pTable7;
+ void* pTable8;
} LVCS_CSMS_Coef_Tables_t;
/****************************************************************************************/
@@ -166,44 +138,26 @@
/* */
/****************************************************************************************/
-/* Memory region definition */
-typedef struct
-{
- LVM_UINT32 Size; /* Region size in bytes */
- LVCS_MemoryTypes_en Type; /* Region type */
- void *pBaseAddress; /* Pointer to the region base address */
-} LVCS_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
- LVCS_MemoryRegion_t Region[LVCS_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVCS_MemTab_t;
-
/* Concert Sound parameter structure */
-typedef struct
-{
- LVCS_Modes_en OperatingMode; /* Algorithm mode */
- LVCS_SpeakerType_en SpeakerType; /* Output device type */
- LVCS_SourceFormat_en SourceFormat; /* Source data format */
- LVM_Mode_en CompressorMode; /* Non-Linear Compressor Mode */
- LVM_Fs_en SampleRate; /* Sampling rate */
- LVM_INT16 EffectLevel; /* Effect level */
- LVM_UINT16 ReverbLevel; /* Reverb level in % */
-#ifdef SUPPORT_MC
- LVM_INT32 NrChannels;
-#endif
+typedef struct {
+ LVCS_Modes_en OperatingMode; /* Algorithm mode */
+ LVCS_SpeakerType_en SpeakerType; /* Output device type */
+ LVCS_SourceFormat_en SourceFormat; /* Source data format */
+ LVM_Mode_en CompressorMode; /* Non-Linear Compressor Mode */
+ LVM_Fs_en SampleRate; /* Sampling rate */
+ LVM_INT16 EffectLevel; /* Effect level */
+ LVM_UINT16 ReverbLevel; /* Reverb level in % */
+ LVM_INT32 NrChannels;
} LVCS_Params_t;
/* Concert Sound Capability structure */
-typedef struct
-{
+typedef struct {
/* General parameters */
- LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
+ LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
/* Callback parameters */
- LVM_Callback CallBack; /* Bundle callback */
- void *pBundleInstance; /* Bundle instance handle */
+ LVM_Callback CallBack; /* Bundle callback */
+ void* pBundleInstance; /* Bundle instance handle */
} LVCS_Capabilities_t;
@@ -213,82 +167,44 @@
/* */
/****************************************************************************************/
-/****************************************************************************************/
-/* */
-/* FUNCTION: LVCS_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) it is */
-/* passed the default capabilities, of these only the buffer processing setting is */
-/* used. */
-/* */
-/* When called for memory allocation the memory base address pointers are NULL on */
-/* return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the */
-/* capabilities are ignored and the memory table returns the allocated memory and */
-/* base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the default capabilites */
-/* */
-/* RETURNS: */
-/* LVCS_Success Succeeded */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVCS_Process function */
-/* */
-/****************************************************************************************/
+/************************************************************************************/
+/* */
+/* FUNCTION: LVCS_Init */
+/* */
+/* DESCRIPTION: */
+/* Create and initialisation function for the Concert Sound module */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* pCapabilities Pointer to the capabilities structure */
+/* pScratch Pointer to the scratch buffer */
+/* */
+/* RETURNS: */
+/* LVCS_Success Initialisation succeeded */
+/* LVDBE_NULLADDRESS One or more memory has a NULL pointer */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVCS_Process function */
+/* */
+/************************************************************************************/
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+ void* pScratch);
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t hInstance,
- LVCS_MemTab_t *pMemoryTable,
- LVCS_Capabilities_t *pCapabilities);
-
-/****************************************************************************************/
-/* */
-/* FUNCTION: LVCS_Init */
-/* */
-/* DESCRIPTION: */
-/* Create and initialisation function for the Concert Sound module */
-/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* LVCS_Memory before calling this function. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
-/* pCapabilities Pointer to the initialisation capabilities */
-/* */
-/* RETURNS: */
-/* LVCS_Success Initialisation succeeded */
-/* LVCS_AlignmentError Instance or scratch memory on incorrect alignment */
-/* LVCS_NullAddress Instance or scratch memory has a NULL pointer */
-/* */
-/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVCS_Process function */
-/* */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t *phInstance,
- LVCS_MemTab_t *pMemoryTable,
- LVCS_Capabilities_t *pCapabilities);
+/************************************************************************************/
+/* */
+/* FUNCTION: LVCS_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free memories created during the LVCS_Init call including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVCS_Process function */
+/* */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance);
/****************************************************************************************/
/* */
@@ -310,8 +226,7 @@
/* */
/****************************************************************************************/
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -332,8 +247,7 @@
/* */
/****************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
/****************************************************************************************/
/* */
@@ -356,9 +270,7 @@
/* NOTES: */
/* */
/****************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples);
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
-#endif /* LVCS_H */
+#endif /* LVCS_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
index ba152c0..efca27d 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
@@ -32,9 +32,8 @@
/* Function Prototypes */
/* */
/****************************************************************************************/
-LVM_INT32 LVCS_MixerCallback( LVCS_Handle_t hInstance,
- void *pGeneralPurpose,
- LVM_INT16 CallbackParam);
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+ LVM_INT16 CallbackParam);
/************************************************************************************/
/* */
@@ -65,29 +64,22 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
-
- LVM_UINT16 Offset;
- LVM_FLOAT Gain;
- LVM_FLOAT Current;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_BypassMix_t *pConfig = (LVCS_BypassMix_t *)&pInstance->BypassMix;
- const Gain_t *pOutputGainTable;
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVM_UINT16 Offset;
+ LVM_FLOAT Gain;
+ LVM_FLOAT Current;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
+ const Gain_t* pOutputGainTable;
/*
* Set the transition gain
*/
- if ((pParams->OperatingMode == LVCS_ON) &&
- (pInstance->bTimerDone == LVM_TRUE)
- && (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transtion */
- )
- {
+ if ((pParams->OperatingMode == LVCS_ON) && (pInstance->bTimerDone == LVM_TRUE) &&
+ (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transition */
+ ) {
pInstance->TransitionGain = ((LVM_FLOAT)pParams->EffectLevel / 32767);
- }
- else
- {
+ } else {
/* Select no effect level */
pInstance->TransitionGain = 0;
}
@@ -95,18 +87,19 @@
/*
* Calculate the output gain table offset
*/
- Offset = (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat*(1+LVCS_EX_HEADPHONES)));
+ Offset =
+ (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES)));
pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
/*
* Setup the mixer gain for the processed path
*/
- Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
+ Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
pConfig->Mixer_Instance.MixerStream[0].CallbackParam = 0;
pConfig->Mixer_Instance.MixerStream[0].pCallbackHandle = LVM_NULL;
pConfig->Mixer_Instance.MixerStream[0].pCallBack = LVM_NULL;
- pConfig->Mixer_Instance.MixerStream[0].CallbackSet=1;
+ pConfig->Mixer_Instance.MixerStream[0].CallbackSet = 1;
Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[0]);
LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[0], (LVM_FLOAT)(Gain), Current);
@@ -116,8 +109,8 @@
/*
* Setup the mixer gain for the unprocessed path
*/
- Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * (1.0 - \
- (LVM_FLOAT)pInstance->TransitionGain));
+ Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss *
+ (1.0 - (LVM_FLOAT)pInstance->TransitionGain));
Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * Gain;
Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[1]);
LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[1], (LVM_FLOAT)(Gain), Current);
@@ -125,7 +118,7 @@
LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
pConfig->Mixer_Instance.MixerStream[1].CallbackParam = 0;
pConfig->Mixer_Instance.MixerStream[1].pCallbackHandle = hInstance;
- pConfig->Mixer_Instance.MixerStream[1].CallbackSet=1;
+ pConfig->Mixer_Instance.MixerStream[1].CallbackSet = 1;
pConfig->Mixer_Instance.MixerStream[1].pCallBack = LVCS_MixerCallback;
/*
@@ -137,45 +130,42 @@
* Correct gain for the effect level
*/
{
- LVM_FLOAT GainCorrect;
- LVM_FLOAT Gain1;
- LVM_FLOAT Gain2;
+ LVM_FLOAT GainCorrect;
+ LVM_FLOAT Gain1;
+ LVM_FLOAT Gain2;
Gain1 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[0]);
Gain2 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[1]);
/*
* Calculate the gain correction
*/
- if (pInstance->Params.CompressorMode == LVM_MODE_ON)
- {
- GainCorrect = (LVM_FLOAT)( pInstance->VolCorrect.GainMin
- - (((LVM_FLOAT)pInstance->VolCorrect.GainMin * \
- ((LVM_FLOAT)pInstance->TransitionGain)))
- + (((LVM_FLOAT)pInstance->VolCorrect.GainFull * \
- ((LVM_FLOAT)pInstance->TransitionGain))));
+ if (pInstance->Params.CompressorMode == LVM_MODE_ON) {
+ GainCorrect = (LVM_FLOAT)(pInstance->VolCorrect.GainMin -
+ (((LVM_FLOAT)pInstance->VolCorrect.GainMin *
+ ((LVM_FLOAT)pInstance->TransitionGain))) +
+ (((LVM_FLOAT)pInstance->VolCorrect.GainFull *
+ ((LVM_FLOAT)pInstance->TransitionGain))));
- /*
- * Apply the gain correction
- */
- Gain1 = (Gain1 * GainCorrect);
- Gain2 = (Gain2 * GainCorrect);
-
+ /*
+ * Apply the gain correction
+ */
+ Gain1 = (Gain1 * GainCorrect);
+ Gain2 = (Gain2 * GainCorrect);
}
/*
* Set the gain values
*/
pConfig->Output_Shift = pConfig->Output_Shift;
- LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0],Gain1);
+ LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0], Gain1);
LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[0],
LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
- LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1],Gain2);
+ LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1], Gain2);
LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[1],
LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
}
- return(LVCS_SUCCESS);
-
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
@@ -205,39 +195,29 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pProcessed,
- const LVM_FLOAT *pUnprocessed,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples)
-{
-
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_BypassMix_t *pConfig = (LVCS_BypassMix_t *)&pInstance->BypassMix;
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+ const LVM_FLOAT* pUnprocessed, LVM_FLOAT* pOutData,
+ LVM_UINT16 NumSamples) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
/*
* Check if the bypass mixer is enabled
*/
- if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0)
- {
+ if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0) {
/*
* Apply the bypass mix
*/
- LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance,
- pProcessed,
- (LVM_FLOAT *) pUnprocessed,
- pOutData,
- (LVM_INT16)(2 * NumSamples));
+ LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance, pProcessed, (LVM_FLOAT*)pUnprocessed,
+ pOutData, (LVM_INT16)(2 * NumSamples));
/*
* Apply output gain correction shift
*/
- Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift,
- (LVM_FLOAT*)pOutData,
- (LVM_FLOAT*)pOutData,
- (LVM_INT16)(2 * NumSamples)); /* Left and right*/
+ Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift, (LVM_FLOAT*)pOutData,
+ (LVM_FLOAT*)pOutData, (LVM_INT16)(2 * NumSamples)); /* Left and right*/
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
@@ -245,22 +225,18 @@
/* FUNCTION: LVCS_MixerCallback */
/* */
/************************************************************************************/
-LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance,
- void *pGeneralPurpose,
- LVM_INT16 CallbackParam)
-{
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+ LVM_INT16 CallbackParam) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
- (void)pGeneralPurpose;
+ (void)pGeneralPurpose;
/*
* Off transition has completed in Headphone mode
*/
- if ((pInstance->OutputDevice == LVCS_HEADPHONE) &&
- (pInstance->bInOperatingModeTransition) &&
- (pInstance->MSTarget0 == 0x0000)&& /* this indicates an on->off transition */
- (CallbackParam == 0))
- {
+ if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->bInOperatingModeTransition) &&
+ (pInstance->MSTarget0 == 0x0000) && /* this indicates an on->off transition */
+ (CallbackParam == 0)) {
/* Set operating mode to OFF */
pInstance->Params.OperatingMode = LVCS_OFF;
@@ -268,21 +244,17 @@
pInstance->bInOperatingModeTransition = LVM_FALSE;
/* Signal to the bundle */
- if((*pInstance->Capabilities.CallBack) != LVM_NULL){
- (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance,
- LVM_NULL,
+ if ((*pInstance->Capabilities.CallBack) != LVM_NULL) {
+ (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance, LVM_NULL,
(ALGORITHM_CS_ID | LVCS_EVENT_ALGOFF));
}
}
- if ((pInstance->OutputDevice == LVCS_HEADPHONE) &&
- (pInstance->MSTarget0 == 1) &&
- (pInstance->bTimerDone == LVM_TRUE)){
-
+ if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->MSTarget0 == 1) &&
+ (pInstance->bTimerDone == LVM_TRUE)) {
/* Exit transition state */
pInstance->bInOperatingModeTransition = LVM_FALSE;
}
return 1;
}
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
index fcd8ee3..69afcbb 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
@@ -33,20 +33,18 @@
/************************************************************************************/
/* Bypass mixer structure */
-typedef struct
-{
+typedef struct {
/* Mixer settings */
- LVMixer3_2St_FLOAT_st Mixer_Instance; /* Mixer instance */
- LVM_UINT16 Output_Shift; /* Correcting gain output shift */
+ LVMixer3_2St_FLOAT_st Mixer_Instance; /* Mixer instance */
+ LVM_UINT16 Output_Shift; /* Correcting gain output shift */
} LVCS_BypassMix_t;
-typedef struct
-{
+typedef struct {
/* Output gain settings, Gain = (Loss/32768) * 2^Shift */
- LVM_UINT16 Shift; /* Left shifts required */
- LVM_FLOAT Loss; /* Loss required */
- LVM_FLOAT UnprocLoss; /* Unprocessed path loss */
+ LVM_UINT16 Shift; /* Left shifts required */
+ LVM_FLOAT Loss; /* Loss required */
+ LVM_FLOAT UnprocLoss; /* Unprocessed path loss */
} Gain_t;
/************************************************************************************/
/* */
@@ -54,13 +52,10 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pProcessed,
- const LVM_FLOAT *unProcessed,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples);
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+ const LVM_FLOAT* unProcessed, LVM_FLOAT* pOutData,
+ LVM_UINT16 NumSamples);
-#endif /* BYPASSMIX_H */
+#endif /* BYPASSMIX_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
index 50db03d..8f88986 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
@@ -45,15 +45,12 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
-
- LVCS_Instance_t *pInstance =(LVCS_Instance_t *)hInstance;
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
*pParams = pInstance->Params;
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
@@ -75,34 +72,29 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
- LVM_INT16 Offset;
- LVCS_Instance_t *pInstance =(LVCS_Instance_t *)hInstance;
- LVCS_ReturnStatus_en err;
- LVCS_Modes_en OperatingModeSave = pInstance->Params.OperatingMode;
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVM_INT16 Offset;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_ReturnStatus_en err;
+ LVCS_Modes_en OperatingModeSave = pInstance->Params.OperatingMode;
- if (pParams->SampleRate != pInstance->Params.SampleRate)
- {
+ if (pParams->SampleRate != pInstance->Params.SampleRate) {
pInstance->TimerParams.SamplingRate = LVCS_SampleRateTable[pParams->SampleRate];
}
/*
* If the reverb level has changed
*/
- if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
- {
- err=LVCS_ReverbGeneratorInit(hInstance,pParams);
+ if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+ err = LVCS_ReverbGeneratorInit(hInstance, pParams);
}
/*
* If the sample rate or speaker has changed then perform a full re-initialisation
*/
if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
- (pInstance->Params.SpeakerType != pParams->SpeakerType))
- {
- const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+ (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
+ const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
/*
* Output device
@@ -114,15 +106,16 @@
*/
/* Use internal coefficient table */
pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
- Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+ Offset = (LVM_INT16)(pParams->SpeakerType +
+ pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
pInstance->CompressGain = pInstance->VolCorrect.CompMin;
LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
{
- LVM_FLOAT Gain;
- const Gain_t *pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
+ LVM_FLOAT Gain;
+ const Gain_t* pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss);
Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * (Gain);
@@ -133,22 +126,18 @@
LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, Gain);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],
- LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+ LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],
- LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+ LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
}
- err=LVCS_SEnhancerInit(hInstance,
- pParams);
+ err = LVCS_SEnhancerInit(hInstance, pParams);
- err=LVCS_ReverbGeneratorInit(hInstance,
- pParams);
+ err = LVCS_ReverbGeneratorInit(hInstance, pParams);
- err=LVCS_EqualiserInit(hInstance,
- pParams);
+ err = LVCS_EqualiserInit(hInstance, pParams);
- err=LVCS_BypassMixInit(hInstance,
- pParams);
+ err = LVCS_BypassMixInit(hInstance, pParams);
}
@@ -156,30 +145,26 @@
* Check if the effect level or source format has changed
*/
else if ((pInstance->Params.EffectLevel != pParams->EffectLevel) ||
- (pInstance->Params.SourceFormat != pParams->SourceFormat))
- {
- const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+ (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
+ const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
/*
* Get the volume correction parameters
*/
/* Use internal coefficient table */
pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
- Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+ Offset = (LVM_INT16)(pParams->SpeakerType +
+ pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
/* Update the effect level and alpha-mixer gains */
- err=LVCS_BypassMixInit(hInstance,
- pParams);
+ err = LVCS_BypassMixInit(hInstance, pParams);
- if(err != LVCS_SUCCESS)
- {
+ if (err != LVCS_SUCCESS) {
return err;
}
- }
- else
- {
+ } else {
pInstance->Params = *pParams;
}
@@ -189,40 +174,36 @@
pInstance->Params = *pParams;
/* Stay on the current operating mode until the transition is done */
- if((pParams->OperatingMode != OperatingModeSave) ||
- (pInstance->bInOperatingModeTransition == LVM_TRUE)){
-
+ if ((pParams->OperatingMode != OperatingModeSave) ||
+ (pInstance->bInOperatingModeTransition == LVM_TRUE)) {
/* Set the reverb delay timeout */
- if(pInstance->bInOperatingModeTransition != LVM_TRUE){
+ if (pInstance->bInOperatingModeTransition != LVM_TRUE) {
pInstance->bTimerDone = LVM_FALSE;
pInstance->TimerParams.TimeInMs =
- (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2)
- /pInstance->TimerParams.SamplingRate) + 1);
- LVM_Timer_Init ( &pInstance->TimerInstance,
- &pInstance->TimerParams);
+ (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2) /
+ pInstance->TimerParams.SamplingRate) +
+ 1);
+ LVM_Timer_Init(&pInstance->TimerInstance, &pInstance->TimerParams);
}
/* Update the effect level and alpha-mixer gains */
- err=LVCS_BypassMixInit(hInstance,
- pParams);
+ err = LVCS_BypassMixInit(hInstance, pParams);
/* Change transition bypass mixer settings if needed depending on transition type */
- if(pParams->OperatingMode != LVCS_OFF){
- pInstance->MSTarget0=LVM_MAXINT_16;
- pInstance->MSTarget1=0;
- }
- else
- {
+ if (pParams->OperatingMode != LVCS_OFF) {
+ pInstance->MSTarget0 = LVM_MAXINT_16;
+ pInstance->MSTarget1 = 0;
+ } else {
pInstance->Params.OperatingMode = OperatingModeSave;
- pInstance->MSTarget1=LVM_MAXINT_16;
- pInstance->MSTarget0=0;
+ pInstance->MSTarget1 = LVM_MAXINT_16;
+ pInstance->MSTarget0 = 0;
}
/* Set transition flag */
pInstance->bInOperatingModeTransition = LVM_TRUE;
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/****************************************************************************************/
@@ -233,12 +214,11 @@
/* CallBack function of the Timer. */
/* */
/****************************************************************************************/
-void LVCS_TimerCallBack (void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam)
-{
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
/* Avoid warnings because pCallBackParams and CallbackParam are not used*/
- if((pCallBackParams != LVM_NULL) || (CallbackParam != 0)){
+ if ((pCallBackParams != LVM_NULL) || (CallbackParam != 0)) {
pCallBackParams = hInstance;
CallbackParam = 0;
return;
@@ -248,4 +228,3 @@
return;
}
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
index 431b7e3..bad9aef 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
@@ -53,29 +53,22 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVM_UINT16 Offset;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_Equaliser_t* pConfig = (LVCS_Equaliser_t*)&pInstance->Equaliser;
+ LVCS_Data_t* pData;
+ LVCS_Coefficient_t* pCoefficients;
+ BQ_FLOAT_Coefs_t Coeffs;
+ const BiquadA012B12CoefsSP_t* pEqualiserCoefTable;
- LVM_UINT16 Offset;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_Equaliser_t *pConfig = (LVCS_Equaliser_t *)&pInstance->Equaliser;
- LVCS_Data_t *pData;
- LVCS_Coefficient_t *pCoefficients;
- BQ_FLOAT_Coefs_t Coeffs;
- const BiquadA012B12CoefsSP_t *pEqualiserCoefTable;
-
- pData = (LVCS_Data_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
- pCoefficients = (LVCS_Coefficient_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+ pData = (LVCS_Data_t*)pInstance->pData;
+ pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
/*
* If the sample rate changes re-initialise the filters
*/
if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
- (pInstance->Params.SpeakerType != pParams->SpeakerType))
- {
+ (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
/*
* Setup the filter coefficients and clear the history
*/
@@ -84,37 +77,35 @@
/* Left and right filters */
/* Convert incoming coefficients to the required format/ordering */
- Coeffs.A0 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A0;
- Coeffs.A1 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A1;
- Coeffs.A2 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A2;
+ Coeffs.A0 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A0;
+ Coeffs.A1 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A1;
+ Coeffs.A2 = (LVM_FLOAT)pEqualiserCoefTable[Offset].A2;
Coeffs.B1 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B1;
Coeffs.B2 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B2;
- LoadConst_Float((LVM_INT16)0, /* Value */
- (LVM_FLOAT *)&pData->EqualiserBiquadTaps, /* Destination */
+ LoadConst_Float((LVM_INT16)0, /* Value */
+ (LVM_FLOAT*)&pData->EqualiserBiquadTaps, /* Destination */
/* Number of words */
(LVM_UINT16)(sizeof(pData->EqualiserBiquadTaps) / sizeof(LVM_FLOAT)));
BQ_2I_D16F32Css_TRC_WRA_01_Init(&pCoefficients->EqualiserBiquadInstance,
- &pData->EqualiserBiquadTaps,
- &Coeffs);
+ &pData->EqualiserBiquadTaps, &Coeffs);
/* Callbacks */
- switch(pEqualiserCoefTable[Offset].Scale)
- {
+ switch (pEqualiserCoefTable[Offset].Scale) {
case 13:
- pConfig->pBiquadCallBack = BQ_2I_D16F32C13_TRC_WRA_01;
+ pConfig->pBiquadCallBack = BQ_2I_D16F32C13_TRC_WRA_01;
break;
case 14:
- pConfig->pBiquadCallBack = BQ_2I_D16F32C14_TRC_WRA_01;
+ pConfig->pBiquadCallBack = BQ_2I_D16F32C14_TRC_WRA_01;
break;
case 15:
- pConfig->pBiquadCallBack = BQ_2I_D16F32C15_TRC_WRA_01;
+ pConfig->pBiquadCallBack = BQ_2I_D16F32C15_TRC_WRA_01;
break;
}
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
/* */
@@ -135,30 +126,23 @@
/* 1. Always processes in place. */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance,
- LVM_FLOAT *pInputOutput,
- LVM_UINT16 NumSamples)
-{
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+ LVM_UINT16 NumSamples) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_Equaliser_t* pConfig = (LVCS_Equaliser_t*)&pInstance->Equaliser;
+ LVCS_Coefficient_t* pCoefficients;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_Equaliser_t *pConfig = (LVCS_Equaliser_t *)&pInstance->Equaliser;
- LVCS_Coefficient_t *pCoefficients;
-
- pCoefficients = (LVCS_Coefficient_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+ pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
/*
* Check if the equaliser is required
*/
- if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0)
- {
+ if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0) {
/* Apply filter to the left and right channels */
- (pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*) \
- &pCoefficients->EqualiserBiquadInstance,
- (LVM_FLOAT *)pInputOutput,
- (LVM_FLOAT *)pInputOutput,
- (LVM_INT16)NumSamples);
+ (pConfig->pBiquadCallBack)(
+ (Biquad_FLOAT_Instance_t*)&pCoefficients->EqualiserBiquadInstance,
+ (LVM_FLOAT*)pInputOutput, (LVM_FLOAT*)pInputOutput, (LVM_INT16)NumSamples);
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
index 918d931..c0d0950 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
@@ -25,9 +25,8 @@
/************************************************************************************/
/* Equaliser structure */
-typedef struct
-{
- void (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+typedef struct {
+ void (*pBiquadCallBack)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
} LVCS_Equaliser_t;
/************************************************************************************/
@@ -36,10 +35,8 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance,
- LVM_FLOAT *pInputOutput,
- LVM_UINT16 NumSamples);
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+ LVM_UINT16 NumSamples);
-#endif /* EQUALISER_H */
+#endif /* EQUALISER_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index c7ee232..69c46c6 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -24,201 +24,201 @@
/* */
/************************************************************************************/
/* Stereo Enhancer coefficients for 8000 Hz sample rate, scaled with 0.161258 */
-#define CS_MIDDLE_8000_A0 0.227720
-#define CS_MIDDLE_8000_A1 (-0.215125)
-#define CS_MIDDLE_8000_A2 0.000000
-#define CS_MIDDLE_8000_B1 (-0.921899)
-#define CS_MIDDLE_8000_B2 0.000000
-#define CS_MIDDLE_8000_SCALE 15
-#define CS_SIDE_8000_A0 0.611441
-#define CS_SIDE_8000_A1 (-0.380344)
-#define CS_SIDE_8000_A2 (-0.231097)
-#define CS_SIDE_8000_B1 (-0.622470)
-#define CS_SIDE_8000_B2 (-0.130759)
-#define CS_SIDE_8000_SCALE 15
+#define CS_MIDDLE_8000_A0 0.227720
+#define CS_MIDDLE_8000_A1 (-0.215125)
+#define CS_MIDDLE_8000_A2 0.000000
+#define CS_MIDDLE_8000_B1 (-0.921899)
+#define CS_MIDDLE_8000_B2 0.000000
+#define CS_MIDDLE_8000_SCALE 15
+#define CS_SIDE_8000_A0 0.611441
+#define CS_SIDE_8000_A1 (-0.380344)
+#define CS_SIDE_8000_A2 (-0.231097)
+#define CS_SIDE_8000_B1 (-0.622470)
+#define CS_SIDE_8000_B2 (-0.130759)
+#define CS_SIDE_8000_SCALE 15
/* Stereo Enhancer coefficients for 11025Hz sample rate, scaled with 0.162943 */
-#define CS_MIDDLE_11025_A0 0.230838
-#define CS_MIDDLE_11025_A1 (-0.221559)
-#define CS_MIDDLE_11025_A2 0.000000
-#define CS_MIDDLE_11025_B1 (-0.943056)
-#define CS_MIDDLE_11025_B2 0.000000
-#define CS_MIDDLE_11025_SCALE 15
-#define CS_SIDE_11025_A0 0.557372
-#define CS_SIDE_11025_A1 (-0.391490)
-#define CS_SIDE_11025_A2 (-0.165881)
-#define CS_SIDE_11025_B1 (-0.880608)
-#define CS_SIDE_11025_B2 0.032397
-#define CS_SIDE_11025_SCALE 15
+#define CS_MIDDLE_11025_A0 0.230838
+#define CS_MIDDLE_11025_A1 (-0.221559)
+#define CS_MIDDLE_11025_A2 0.000000
+#define CS_MIDDLE_11025_B1 (-0.943056)
+#define CS_MIDDLE_11025_B2 0.000000
+#define CS_MIDDLE_11025_SCALE 15
+#define CS_SIDE_11025_A0 0.557372
+#define CS_SIDE_11025_A1 (-0.391490)
+#define CS_SIDE_11025_A2 (-0.165881)
+#define CS_SIDE_11025_B1 (-0.880608)
+#define CS_SIDE_11025_B2 0.032397
+#define CS_SIDE_11025_SCALE 15
/* Stereo Enhancer coefficients for 12000Hz sample rate, scaled with 0.162191 */
-#define CS_MIDDLE_12000_A0 0.229932
-#define CS_MIDDLE_12000_A1 (-0.221436)
-#define CS_MIDDLE_12000_A2 0.000000
-#define CS_MIDDLE_12000_B1 (-0.947616)
-#define CS_MIDDLE_12000_B2 0.000000
-#define CS_MIDDLE_12000_SCALE 15
-#define CS_SIDE_12000_A0 0.558398
-#define CS_SIDE_12000_A1 (-0.392211)
-#define CS_SIDE_12000_A2 (-0.166187)
-#define CS_SIDE_12000_B1 (-0.892550)
-#define CS_SIDE_12000_B2 0.032856
-#define CS_SIDE_12000_SCALE 15
+#define CS_MIDDLE_12000_A0 0.229932
+#define CS_MIDDLE_12000_A1 (-0.221436)
+#define CS_MIDDLE_12000_A2 0.000000
+#define CS_MIDDLE_12000_B1 (-0.947616)
+#define CS_MIDDLE_12000_B2 0.000000
+#define CS_MIDDLE_12000_SCALE 15
+#define CS_SIDE_12000_A0 0.558398
+#define CS_SIDE_12000_A1 (-0.392211)
+#define CS_SIDE_12000_A2 (-0.166187)
+#define CS_SIDE_12000_B1 (-0.892550)
+#define CS_SIDE_12000_B2 0.032856
+#define CS_SIDE_12000_SCALE 15
/* Stereo Enhancer coefficients for 16000Hz sample rate, scaled with 0.162371 */
-#define CS_MIDDLE_16000_A0 0.230638
-#define CS_MIDDLE_16000_A1 (-0.224232)
-#define CS_MIDDLE_16000_A2 0.000000
-#define CS_MIDDLE_16000_B1 (-0.960550)
-#define CS_MIDDLE_16000_B2 0.000000
-#define CS_MIDDLE_16000_SCALE 15
-#define CS_SIDE_16000_A0 0.499695
-#define CS_SIDE_16000_A1 (-0.355543)
-#define CS_SIDE_16000_A2 (-0.144152)
-#define CS_SIDE_16000_B1 (-1.050788)
-#define CS_SIDE_16000_B2 0.144104
-#define CS_SIDE_16000_SCALE 14
+#define CS_MIDDLE_16000_A0 0.230638
+#define CS_MIDDLE_16000_A1 (-0.224232)
+#define CS_MIDDLE_16000_A2 0.000000
+#define CS_MIDDLE_16000_B1 (-0.960550)
+#define CS_MIDDLE_16000_B2 0.000000
+#define CS_MIDDLE_16000_SCALE 15
+#define CS_SIDE_16000_A0 0.499695
+#define CS_SIDE_16000_A1 (-0.355543)
+#define CS_SIDE_16000_A2 (-0.144152)
+#define CS_SIDE_16000_B1 (-1.050788)
+#define CS_SIDE_16000_B2 0.144104
+#define CS_SIDE_16000_SCALE 14
/* Stereo Enhancer coefficients for 22050Hz sample rate, scaled with 0.160781 */
-#define CS_MIDDLE_22050_A0 0.228749
-#define CS_MIDDLE_22050_A1 (-0.224128)
-#define CS_MIDDLE_22050_A2 0.000000
-#define CS_MIDDLE_22050_B1 (-0.971262)
-#define CS_MIDDLE_22050_B2 0.000000
-#define CS_MIDDLE_22050_SCALE 15
-#define CS_SIDE_22050_A0 0.440112
-#define CS_SIDE_22050_A1 (-0.261096)
-#define CS_SIDE_22050_A2 (-0.179016)
-#define CS_SIDE_22050_B1 (-1.116786)
-#define CS_SIDE_22050_B2 0.182507
-#define CS_SIDE_22050_SCALE 14
+#define CS_MIDDLE_22050_A0 0.228749
+#define CS_MIDDLE_22050_A1 (-0.224128)
+#define CS_MIDDLE_22050_A2 0.000000
+#define CS_MIDDLE_22050_B1 (-0.971262)
+#define CS_MIDDLE_22050_B2 0.000000
+#define CS_MIDDLE_22050_SCALE 15
+#define CS_SIDE_22050_A0 0.440112
+#define CS_SIDE_22050_A1 (-0.261096)
+#define CS_SIDE_22050_A2 (-0.179016)
+#define CS_SIDE_22050_B1 (-1.116786)
+#define CS_SIDE_22050_B2 0.182507
+#define CS_SIDE_22050_SCALE 14
/* Stereo Enhancer coefficients for 24000Hz sample rate, scaled with 0.161882 */
-#define CS_MIDDLE_24000_A0 0.230395
-#define CS_MIDDLE_24000_A1 (-0.226117)
-#define CS_MIDDLE_24000_A2 0.000000
-#define CS_MIDDLE_24000_B1 (-0.973573)
-#define CS_MIDDLE_24000_B2 0.000000
-#define CS_MIDDLE_24000_SCALE 15
-#define CS_SIDE_24000_A0 0.414770
-#define CS_SIDE_24000_A1 (-0.287182)
-#define CS_SIDE_24000_A2 (-0.127588)
-#define CS_SIDE_24000_B1 (-1.229648)
-#define CS_SIDE_24000_B2 0.282177
-#define CS_SIDE_24000_SCALE 14
+#define CS_MIDDLE_24000_A0 0.230395
+#define CS_MIDDLE_24000_A1 (-0.226117)
+#define CS_MIDDLE_24000_A2 0.000000
+#define CS_MIDDLE_24000_B1 (-0.973573)
+#define CS_MIDDLE_24000_B2 0.000000
+#define CS_MIDDLE_24000_SCALE 15
+#define CS_SIDE_24000_A0 0.414770
+#define CS_SIDE_24000_A1 (-0.287182)
+#define CS_SIDE_24000_A2 (-0.127588)
+#define CS_SIDE_24000_B1 (-1.229648)
+#define CS_SIDE_24000_B2 0.282177
+#define CS_SIDE_24000_SCALE 14
/* Stereo Enhancer coefficients for 32000Hz sample rate, scaled with 0.160322 */
-#define CS_MIDDLE_32000_A0 0.228400
-#define CS_MIDDLE_32000_A1 (-0.225214)
-#define CS_MIDDLE_32000_A2 0.000000
-#define CS_MIDDLE_32000_B1 (-0.980126)
-#define CS_MIDDLE_32000_B2 0.000000
-#define CS_MIDDLE_32000_SCALE 15
-#define CS_SIDE_32000_A0 0.364579
-#define CS_SIDE_32000_A1 (-0.207355)
-#define CS_SIDE_32000_A2 (-0.157224)
-#define CS_SIDE_32000_B1 (-1.274231)
-#define CS_SIDE_32000_B2 0.312495
-#define CS_SIDE_32000_SCALE 14
+#define CS_MIDDLE_32000_A0 0.228400
+#define CS_MIDDLE_32000_A1 (-0.225214)
+#define CS_MIDDLE_32000_A2 0.000000
+#define CS_MIDDLE_32000_B1 (-0.980126)
+#define CS_MIDDLE_32000_B2 0.000000
+#define CS_MIDDLE_32000_SCALE 15
+#define CS_SIDE_32000_A0 0.364579
+#define CS_SIDE_32000_A1 (-0.207355)
+#define CS_SIDE_32000_A2 (-0.157224)
+#define CS_SIDE_32000_B1 (-1.274231)
+#define CS_SIDE_32000_B2 0.312495
+#define CS_SIDE_32000_SCALE 14
/* Stereo Enhancer coefficients for 44100Hz sample rate, scaled with 0.163834 */
-#define CS_MIDDLE_44100_A0 0.233593
-#define CS_MIDDLE_44100_A1 (-0.231225)
-#define CS_MIDDLE_44100_A2 0.000000
-#define CS_MIDDLE_44100_B1 (-0.985545)
-#define CS_MIDDLE_44100_B2 0.000000
-#define CS_MIDDLE_44100_SCALE 15
-#define CS_SIDE_44100_A0 0.284573
-#define CS_SIDE_44100_A1 (-0.258910)
-#define CS_SIDE_44100_A2 (-0.025662)
-#define CS_SIDE_44100_B1 (-1.572248)
-#define CS_SIDE_44100_B2 0.588399
-#define CS_SIDE_44100_SCALE 14
+#define CS_MIDDLE_44100_A0 0.233593
+#define CS_MIDDLE_44100_A1 (-0.231225)
+#define CS_MIDDLE_44100_A2 0.000000
+#define CS_MIDDLE_44100_B1 (-0.985545)
+#define CS_MIDDLE_44100_B2 0.000000
+#define CS_MIDDLE_44100_SCALE 15
+#define CS_SIDE_44100_A0 0.284573
+#define CS_SIDE_44100_A1 (-0.258910)
+#define CS_SIDE_44100_A2 (-0.025662)
+#define CS_SIDE_44100_B1 (-1.572248)
+#define CS_SIDE_44100_B2 0.588399
+#define CS_SIDE_44100_SCALE 14
/* Stereo Enhancer coefficients for 48000Hz sample rate, scaled with 0.164402 */
-#define CS_MIDDLE_48000_A0 0.234445
-#define CS_MIDDLE_48000_A1 (-0.232261)
-#define CS_MIDDLE_48000_A2 0.000000
-#define CS_MIDDLE_48000_B1 (-0.986713)
-#define CS_MIDDLE_48000_B2 0.000000
-#define CS_MIDDLE_48000_SCALE 15
-#define CS_SIDE_48000_A0 0.272606
-#define CS_SIDE_48000_A1 (-0.266952)
-#define CS_SIDE_48000_A2 (-0.005654)
-#define CS_SIDE_48000_B1 (-1.617141)
-#define CS_SIDE_48000_B2 0.630405
-#define CS_SIDE_48000_SCALE 14
+#define CS_MIDDLE_48000_A0 0.234445
+#define CS_MIDDLE_48000_A1 (-0.232261)
+#define CS_MIDDLE_48000_A2 0.000000
+#define CS_MIDDLE_48000_B1 (-0.986713)
+#define CS_MIDDLE_48000_B2 0.000000
+#define CS_MIDDLE_48000_SCALE 15
+#define CS_SIDE_48000_A0 0.272606
+#define CS_SIDE_48000_A1 (-0.266952)
+#define CS_SIDE_48000_A2 (-0.005654)
+#define CS_SIDE_48000_B1 (-1.617141)
+#define CS_SIDE_48000_B2 0.630405
+#define CS_SIDE_48000_SCALE 14
/* Coefficients for 88200Hz sample rate.
* The filter coefficients are obtained by carrying out
* state-space analysis using the coefficients available
* for 44100Hz.
*/
-#define CS_MIDDLE_88200_A0 0.233846f
-#define CS_MIDDLE_88200_A1 (-0.232657f)
-#define CS_MIDDLE_88200_A2 0.000000f
-#define CS_MIDDLE_88200_B1 (-0.992747f)
-#define CS_MIDDLE_88200_B2 0.000000f
-#define CS_MIDDLE_88200_SCALE 15
-#define CS_SIDE_88200_A0 0.231541f
-#define CS_SIDE_88200_A1 (-0.289586f)
-#define CS_SIDE_88200_A2 0.058045f
-#define CS_SIDE_88200_B1 (-1.765300f)
-#define CS_SIDE_88200_B2 0.769816f
-#define CS_SIDE_88200_SCALE 14
+#define CS_MIDDLE_88200_A0 0.233846f
+#define CS_MIDDLE_88200_A1 (-0.232657f)
+#define CS_MIDDLE_88200_A2 0.000000f
+#define CS_MIDDLE_88200_B1 (-0.992747f)
+#define CS_MIDDLE_88200_B2 0.000000f
+#define CS_MIDDLE_88200_SCALE 15
+#define CS_SIDE_88200_A0 0.231541f
+#define CS_SIDE_88200_A1 (-0.289586f)
+#define CS_SIDE_88200_A2 0.058045f
+#define CS_SIDE_88200_B1 (-1.765300f)
+#define CS_SIDE_88200_B2 0.769816f
+#define CS_SIDE_88200_SCALE 14
/* Stereo Enhancer coefficients for 96000Hz sample rate, scaled with 0.165*/
/* high pass filter with cutoff frequency 102.18 Hz*/
-#define CS_MIDDLE_96000_A0 0.235532
-#define CS_MIDDLE_96000_A1 (-0.234432)
-#define CS_MIDDLE_96000_A2 0.000000
-#define CS_MIDDLE_96000_B1 (-0.993334)
-#define CS_MIDDLE_96000_B2 0.000000
-#define CS_MIDDLE_96000_SCALE 15
+#define CS_MIDDLE_96000_A0 0.235532
+#define CS_MIDDLE_96000_A1 (-0.234432)
+#define CS_MIDDLE_96000_A2 0.000000
+#define CS_MIDDLE_96000_B1 (-0.993334)
+#define CS_MIDDLE_96000_B2 0.000000
+#define CS_MIDDLE_96000_SCALE 15
/* Coefficients calculated using tf2ss and ss2tf functions based on
* coefficients available for 48000Hz sampling frequency
*/
-#define CS_SIDE_96000_A0 0.224326f
-#define CS_SIDE_96000_A1 (-0.294937f)
-#define CS_SIDE_96000_A2 0.070611f
-#define CS_SIDE_96000_B1 (-1.792166f)
-#define CS_SIDE_96000_B2 0.795830f
-#define CS_SIDE_96000_SCALE 14
+#define CS_SIDE_96000_A0 0.224326f
+#define CS_SIDE_96000_A1 (-0.294937f)
+#define CS_SIDE_96000_A2 0.070611f
+#define CS_SIDE_96000_B1 (-1.792166f)
+#define CS_SIDE_96000_B2 0.795830f
+#define CS_SIDE_96000_SCALE 14
/* Stereo Enhancer coefficients for 176400Hz sample rate.
* The filter coefficients are obtained by carrying out
* state-space analysis using the coefficients available
* for 44100Hz.
*/
-#define CS_MIDDLE_176400_A0 0.233973f
-#define CS_MIDDLE_176400_A1 (-0.233378f)
-#define CS_MIDDLE_176400_A2 0.000000f
-#define CS_MIDDLE_176400_B1 (-0.996367f)
-#define CS_MIDDLE_176400_B2 0.000000f
-#define CS_MIDDLE_176400_SCALE 15
-#define CS_SIDE_176400_A0 0.199836f
-#define CS_SIDE_176400_A1 (-0.307544f)
-#define CS_SIDE_176400_A2 0.107708f
-#define CS_SIDE_176400_B1 (-1.876572f)
-#define CS_SIDE_176400_B2 0.877771f
-#define CS_SIDE_176400_SCALE 14
+#define CS_MIDDLE_176400_A0 0.233973f
+#define CS_MIDDLE_176400_A1 (-0.233378f)
+#define CS_MIDDLE_176400_A2 0.000000f
+#define CS_MIDDLE_176400_B1 (-0.996367f)
+#define CS_MIDDLE_176400_B2 0.000000f
+#define CS_MIDDLE_176400_SCALE 15
+#define CS_SIDE_176400_A0 0.199836f
+#define CS_SIDE_176400_A1 (-0.307544f)
+#define CS_SIDE_176400_A2 0.107708f
+#define CS_SIDE_176400_B1 (-1.876572f)
+#define CS_SIDE_176400_B2 0.877771f
+#define CS_SIDE_176400_SCALE 14
/* Stereo Enhancer coefficients for 192000Hz sample rate, scaled with 0.1689*/
-#define CS_MIDDLE_192000_A0 0.241219
-#define CS_MIDDLE_192000_A1 (-0.240656)
-#define CS_MIDDLE_192000_A2 0.000000
-#define CS_MIDDLE_192000_B1 (-0.996661)
-#define CS_MIDDLE_192000_B2 0.000000
-#define CS_MIDDLE_192000_SCALE 15
+#define CS_MIDDLE_192000_A0 0.241219
+#define CS_MIDDLE_192000_A1 (-0.240656)
+#define CS_MIDDLE_192000_A2 0.000000
+#define CS_MIDDLE_192000_B1 (-0.996661)
+#define CS_MIDDLE_192000_B2 0.000000
+#define CS_MIDDLE_192000_SCALE 15
/* Coefficients calculated using tf2ss and ss2tf functions based on
* coefficients available for 48000Hz sampling frequency
*/
-#define CS_SIDE_192000_A0 0.196039f
-#define CS_SIDE_192000_A1 (-0.311027f)
-#define CS_SIDE_192000_A2 0.114988f
-#define CS_SIDE_192000_B1 (-1.891380f)
-#define CS_SIDE_192000_B2 0.8923460f
-#define CS_SIDE_192000_SCALE 14
+#define CS_SIDE_192000_A0 0.196039f
+#define CS_SIDE_192000_A1 (-0.311027f)
+#define CS_SIDE_192000_A2 0.114988f
+#define CS_SIDE_192000_B1 (-1.891380f)
+#define CS_SIDE_192000_B2 0.8923460f
+#define CS_SIDE_192000_SCALE 14
/************************************************************************************/
/* */
@@ -227,133 +227,133 @@
/************************************************************************************/
/* Reverb delay settings in samples */
-#define LVCS_STEREODELAY_CS_8KHZ 93 /* Sample rate 8kS/s */
-#define LVCS_STEREODELAY_CS_11KHZ 128 /* Sample rate 11kS/s */
-#define LVCS_STEREODELAY_CS_12KHZ 139 /* Sample rate 12kS/s */
-#define LVCS_STEREODELAY_CS_16KHZ 186 /* Sample rate 16kS/s */
-#define LVCS_STEREODELAY_CS_22KHZ 256 /* Sample rate 22kS/s */
-#define LVCS_STEREODELAY_CS_24KHZ 279 /* Sample rate 24kS/s */
-#define LVCS_STEREODELAY_CS_32KHZ 372 /* Sample rate 32kS/s */
-#define LVCS_STEREODELAY_CS_44KHZ 512 /* Sample rate 44kS/s */
-#define LVCS_STEREODELAY_CS_48KHZ 557 /* Sample rate 48kS/s */
-#define LVCS_STEREODELAY_CS_88KHZ 1024 /* Sample rate 88.2kS/s */
-#define LVCS_STEREODELAY_CS_96KHZ 1115 /* Sample rate 96kS/s */
-#define LVCS_STEREODELAY_CS_176KHZ 2048 /* Sample rate 176.4kS/s */
-#define LVCS_STEREODELAY_CS_192KHZ 2229 /* Sample rate 196kS/s */
-#define LVCS_STEREODELAY_CS_MAX_VAL LVCS_STEREODELAY_CS_192KHZ
+#define LVCS_STEREODELAY_CS_8KHZ 93 /* Sample rate 8kS/s */
+#define LVCS_STEREODELAY_CS_11KHZ 128 /* Sample rate 11kS/s */
+#define LVCS_STEREODELAY_CS_12KHZ 139 /* Sample rate 12kS/s */
+#define LVCS_STEREODELAY_CS_16KHZ 186 /* Sample rate 16kS/s */
+#define LVCS_STEREODELAY_CS_22KHZ 256 /* Sample rate 22kS/s */
+#define LVCS_STEREODELAY_CS_24KHZ 279 /* Sample rate 24kS/s */
+#define LVCS_STEREODELAY_CS_32KHZ 372 /* Sample rate 32kS/s */
+#define LVCS_STEREODELAY_CS_44KHZ 512 /* Sample rate 44kS/s */
+#define LVCS_STEREODELAY_CS_48KHZ 557 /* Sample rate 48kS/s */
+#define LVCS_STEREODELAY_CS_88KHZ 1024 /* Sample rate 88.2kS/s */
+#define LVCS_STEREODELAY_CS_96KHZ 1115 /* Sample rate 96kS/s */
+#define LVCS_STEREODELAY_CS_176KHZ 2048 /* Sample rate 176.4kS/s */
+#define LVCS_STEREODELAY_CS_192KHZ 2229 /* Sample rate 196kS/s */
+#define LVCS_STEREODELAY_CS_MAX_VAL LVCS_STEREODELAY_CS_192KHZ
/* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_8000_A0 0.667271
-#define CS_REVERB_8000_A1 (-0.667271)
-#define CS_REVERB_8000_A2 0.000000
-#define CS_REVERB_8000_B1 (-0.668179)
-#define CS_REVERB_8000_B2 0.000000
-#define CS_REVERB_8000_SCALE 15
+#define CS_REVERB_8000_A0 0.667271
+#define CS_REVERB_8000_A1 (-0.667271)
+#define CS_REVERB_8000_A2 0.000000
+#define CS_REVERB_8000_B1 (-0.668179)
+#define CS_REVERB_8000_B2 0.000000
+#define CS_REVERB_8000_SCALE 15
/* Reverb coefficients for 11025Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_11025_A0 0.699638
-#define CS_REVERB_11025_A1 (-0.699638)
-#define CS_REVERB_11025_A2 0.000000
-#define CS_REVERB_11025_B1 (-0.749096)
-#define CS_REVERB_11025_B2 0.000000
-#define CS_REVERB_11025_SCALE 15
+#define CS_REVERB_11025_A0 0.699638
+#define CS_REVERB_11025_A1 (-0.699638)
+#define CS_REVERB_11025_A2 0.000000
+#define CS_REVERB_11025_B1 (-0.749096)
+#define CS_REVERB_11025_B2 0.000000
+#define CS_REVERB_11025_SCALE 15
/* Reverb coefficients for 12000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_12000_A0 0.706931
-#define CS_REVERB_12000_A1 (-0.706931)
-#define CS_REVERB_12000_A2 0.000000
-#define CS_REVERB_12000_B1 (-0.767327)
-#define CS_REVERB_12000_B2 0.000000
-#define CS_REVERB_12000_SCALE 15
+#define CS_REVERB_12000_A0 0.706931
+#define CS_REVERB_12000_A1 (-0.706931)
+#define CS_REVERB_12000_A2 0.000000
+#define CS_REVERB_12000_B1 (-0.767327)
+#define CS_REVERB_12000_B2 0.000000
+#define CS_REVERB_12000_SCALE 15
/* Reverb coefficients for 16000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_16000_A0 0.728272
-#define CS_REVERB_16000_A1 (-0.728272)
-#define CS_REVERB_16000_A2 0.000000
-#define CS_REVERB_16000_B1 (-0.820679)
-#define CS_REVERB_16000_B2 0.000000
-#define CS_REVERB_16000_SCALE 15
+#define CS_REVERB_16000_A0 0.728272
+#define CS_REVERB_16000_A1 (-0.728272)
+#define CS_REVERB_16000_A2 0.000000
+#define CS_REVERB_16000_B1 (-0.820679)
+#define CS_REVERB_16000_B2 0.000000
+#define CS_REVERB_16000_SCALE 15
/* Reverb coefficients for 22050Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_22050_A0 0.516396
-#define CS_REVERB_22050_A1 0.000000
-#define CS_REVERB_22050_A2 (-0.516396)
-#define CS_REVERB_22050_B1 (-0.518512)
-#define CS_REVERB_22050_B2 (-0.290990)
-#define CS_REVERB_22050_SCALE 15
+#define CS_REVERB_22050_A0 0.516396
+#define CS_REVERB_22050_A1 0.000000
+#define CS_REVERB_22050_A2 (-0.516396)
+#define CS_REVERB_22050_B1 (-0.518512)
+#define CS_REVERB_22050_B2 (-0.290990)
+#define CS_REVERB_22050_SCALE 15
/* Reverb coefficients for 24000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_24000_A0 0.479565
-#define CS_REVERB_24000_A1 0.000000
-#define CS_REVERB_24000_A2 (-0.479565)
-#define CS_REVERB_24000_B1 (-0.637745)
-#define CS_REVERB_24000_B2 (-0.198912)
-#define CS_REVERB_24000_SCALE 15
+#define CS_REVERB_24000_A0 0.479565
+#define CS_REVERB_24000_A1 0.000000
+#define CS_REVERB_24000_A2 (-0.479565)
+#define CS_REVERB_24000_B1 (-0.637745)
+#define CS_REVERB_24000_B2 (-0.198912)
+#define CS_REVERB_24000_SCALE 15
/* Reverb coefficients for 32000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_32000_A0 0.380349
-#define CS_REVERB_32000_A1 0.000000
-#define CS_REVERB_32000_A2 (-0.380349)
-#define CS_REVERB_32000_B1 (-0.950873)
-#define CS_REVERB_32000_B2 0.049127
-#define CS_REVERB_32000_SCALE 15
+#define CS_REVERB_32000_A0 0.380349
+#define CS_REVERB_32000_A1 0.000000
+#define CS_REVERB_32000_A2 (-0.380349)
+#define CS_REVERB_32000_B1 (-0.950873)
+#define CS_REVERB_32000_B2 0.049127
+#define CS_REVERB_32000_SCALE 15
/* Reverb coefficients for 44100Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_44100_A0 0.297389
-#define CS_REVERB_44100_A1 0.000000
-#define CS_REVERB_44100_A2 (-0.297389)
-#define CS_REVERB_44100_B1 (-1.200423)
-#define CS_REVERB_44100_B2 0.256529
-#define CS_REVERB_44100_SCALE 14
+#define CS_REVERB_44100_A0 0.297389
+#define CS_REVERB_44100_A1 0.000000
+#define CS_REVERB_44100_A2 (-0.297389)
+#define CS_REVERB_44100_B1 (-1.200423)
+#define CS_REVERB_44100_B2 0.256529
+#define CS_REVERB_44100_SCALE 14
/* Reverb coefficients for 48000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_48000_A0 0.278661
-#define CS_REVERB_48000_A1 0.000000
-#define CS_REVERB_48000_A2 (-0.278661)
-#define CS_REVERB_48000_B1 (-1.254993)
-#define CS_REVERB_48000_B2 0.303347
-#define CS_REVERB_48000_SCALE 14
+#define CS_REVERB_48000_A0 0.278661
+#define CS_REVERB_48000_A1 0.000000
+#define CS_REVERB_48000_A2 (-0.278661)
+#define CS_REVERB_48000_B1 (-1.254993)
+#define CS_REVERB_48000_B2 0.303347
+#define CS_REVERB_48000_SCALE 14
/* Reverb coefficients for 88200Hz sample rate, scaled with 0.8 */
/* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_88200_A0 0.171901f
-#define CS_REVERB_88200_A1 0.000000f
-#define CS_REVERB_88200_A2 (-0.171901f)
-#define CS_REVERB_88200_B1 (-1.553948f)
-#define CS_REVERB_88200_B2 (0.570248f)
-#define CS_REVERB_88200_SCALE 14
+#define CS_REVERB_88200_A0 0.171901f
+#define CS_REVERB_88200_A1 0.000000f
+#define CS_REVERB_88200_A2 (-0.171901f)
+#define CS_REVERB_88200_B1 (-1.553948f)
+#define CS_REVERB_88200_B2 (0.570248f)
+#define CS_REVERB_88200_SCALE 14
/* Reverb coefficients for 96000Hz sample rate, scaled with 0.8 */
/* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_96000_A0 0.1602488
-#define CS_REVERB_96000_A1 0.000000
-#define CS_REVERB_96000_A2 (-0.1602488)
-#define CS_REVERB_96000_B1 (-1.585413)
-#define CS_REVERB_96000_B2 0.599377
-#define CS_REVERB_96000_SCALE 14
+#define CS_REVERB_96000_A0 0.1602488
+#define CS_REVERB_96000_A1 0.000000
+#define CS_REVERB_96000_A2 (-0.1602488)
+#define CS_REVERB_96000_B1 (-1.585413)
+#define CS_REVERB_96000_B2 0.599377
+#define CS_REVERB_96000_SCALE 14
/* Reverb coefficients for 176400Hz sample rate, scaled with 0.8 */
/* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_176400_A0 0.094763f
-#define CS_REVERB_176400_A1 0.000000f
-#define CS_REVERB_176400_A2 (-0.094763f)
-#define CS_REVERB_176400_B1 (-1.758593f)
-#define CS_REVERB_176400_B2 (0.763091f)
-#define CS_REVERB_176400_SCALE 14
+#define CS_REVERB_176400_A0 0.094763f
+#define CS_REVERB_176400_A1 0.000000f
+#define CS_REVERB_176400_A2 (-0.094763f)
+#define CS_REVERB_176400_B1 (-1.758593f)
+#define CS_REVERB_176400_B2 (0.763091f)
+#define CS_REVERB_176400_SCALE 14
/* Reverb coefficients for 192000Hz sample rate, scaled with 0.8 */
/* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_192000_A0 0.0878369
-#define CS_REVERB_192000_A1 0.000000
-#define CS_REVERB_192000_A2 (-0.0878369)
-#define CS_REVERB_192000_B1 (-1.7765764)
-#define CS_REVERB_192000_B2 0.7804076
-#define CS_REVERB_192000_SCALE 14
+#define CS_REVERB_192000_A0 0.0878369
+#define CS_REVERB_192000_A1 0.000000
+#define CS_REVERB_192000_A2 (-0.0878369)
+#define CS_REVERB_192000_B1 (-1.7765764)
+#define CS_REVERB_192000_B2 0.7804076
+#define CS_REVERB_192000_SCALE 14
/* Reverb Gain Settings */
-#define LVCS_HEADPHONE_DELAYGAIN 0.800000 /* Algorithm delay path gain */
-#define LVCS_HEADPHONE_OUTPUTGAIN 1.000000 /* Algorithm output gain */
-#define LVCS_HEADPHONE_PROCGAIN 18403 /* Processed path gain */
-#define LVCS_HEADPHONE_UNPROCGAIN 18403 /* Unprocessed path gain */
-#define LVCS_HEADPHONE_GAINCORRECT 1.009343 /* Delay mixer gain correction */
+#define LVCS_HEADPHONE_DELAYGAIN 0.800000 /* Algorithm delay path gain */
+#define LVCS_HEADPHONE_OUTPUTGAIN 1.000000 /* Algorithm output gain */
+#define LVCS_HEADPHONE_PROCGAIN 18403 /* Processed path gain */
+#define LVCS_HEADPHONE_UNPROCGAIN 18403 /* Unprocessed path gain */
+#define LVCS_HEADPHONE_GAINCORRECT 1.009343 /* Delay mixer gain correction */
/************************************************************************************/
/* */
@@ -363,205 +363,204 @@
/* Equaliser coefficients for 8000 Hz sample rate, \
CS scaled with 1.038497 and CSEX scaled with 0.775480 */
-#define CS_EQUALISER_8000_A0 1.263312
-#define CS_EQUALISER_8000_A1 (-0.601748)
-#define CS_EQUALISER_8000_A2 (-0.280681)
-#define CS_EQUALISER_8000_B1 (-0.475865)
-#define CS_EQUALISER_8000_B2 (-0.408154)
-#define CS_EQUALISER_8000_SCALE 14
-#define CSEX_EQUALISER_8000_A0 0.943357
-#define CSEX_EQUALISER_8000_A1 (-0.449345)
-#define CSEX_EQUALISER_8000_A2 (-0.209594)
-#define CSEX_EQUALISER_8000_B1 (-0.475865)
-#define CSEX_EQUALISER_8000_B2 (-0.408154)
-#define CSEX_EQUALISER_8000_SCALE 15
+#define CS_EQUALISER_8000_A0 1.263312
+#define CS_EQUALISER_8000_A1 (-0.601748)
+#define CS_EQUALISER_8000_A2 (-0.280681)
+#define CS_EQUALISER_8000_B1 (-0.475865)
+#define CS_EQUALISER_8000_B2 (-0.408154)
+#define CS_EQUALISER_8000_SCALE 14
+#define CSEX_EQUALISER_8000_A0 0.943357
+#define CSEX_EQUALISER_8000_A1 (-0.449345)
+#define CSEX_EQUALISER_8000_A2 (-0.209594)
+#define CSEX_EQUALISER_8000_B1 (-0.475865)
+#define CSEX_EQUALISER_8000_B2 (-0.408154)
+#define CSEX_EQUALISER_8000_SCALE 15
/* Equaliser coefficients for 11025Hz sample rate, \
CS scaled with 1.027761 and CSEX scaled with 0.767463 */
-#define CS_EQUALISER_11025_A0 1.101145
-#define CS_EQUALISER_11025_A1 0.139020
-#define CS_EQUALISER_11025_A2 (-0.864423)
-#define CS_EQUALISER_11025_B1 0.024541
-#define CS_EQUALISER_11025_B2 (-0.908930)
-#define CS_EQUALISER_11025_SCALE 14
-#define CSEX_EQUALISER_11025_A0 0.976058
-#define CSEX_EQUALISER_11025_A1 (-0.695326)
-#define CSEX_EQUALISER_11025_A2 (-0.090809)
-#define CSEX_EQUALISER_11025_B1 (-0.610594)
-#define CSEX_EQUALISER_11025_B2 (-0.311149)
-#define CSEX_EQUALISER_11025_SCALE 15
+#define CS_EQUALISER_11025_A0 1.101145
+#define CS_EQUALISER_11025_A1 0.139020
+#define CS_EQUALISER_11025_A2 (-0.864423)
+#define CS_EQUALISER_11025_B1 0.024541
+#define CS_EQUALISER_11025_B2 (-0.908930)
+#define CS_EQUALISER_11025_SCALE 14
+#define CSEX_EQUALISER_11025_A0 0.976058
+#define CSEX_EQUALISER_11025_A1 (-0.695326)
+#define CSEX_EQUALISER_11025_A2 (-0.090809)
+#define CSEX_EQUALISER_11025_B1 (-0.610594)
+#define CSEX_EQUALISER_11025_B2 (-0.311149)
+#define CSEX_EQUALISER_11025_SCALE 15
/* Equaliser coefficients for 12000Hz sample rate, \
CS scaled with 1.032521 and CSEX scaled with 0.771017 */
-#define CS_EQUALISER_12000_A0 1.276661
-#define CS_EQUALISER_12000_A1 (-1.017519)
-#define CS_EQUALISER_12000_A2 (-0.044128)
-#define CS_EQUALISER_12000_B1 (-0.729616)
-#define CS_EQUALISER_12000_B2 (-0.204532)
-#define CS_EQUALISER_12000_SCALE 14
-#define CSEX_EQUALISER_12000_A0 1.007095
-#define CSEX_EQUALISER_12000_A1 (-0.871912)
-#define CSEX_EQUALISER_12000_A2 0.023232
-#define CSEX_EQUALISER_12000_B1 (-0.745857)
-#define CSEX_EQUALISER_12000_B2 (-0.189171)
-#define CSEX_EQUALISER_12000_SCALE 14
+#define CS_EQUALISER_12000_A0 1.276661
+#define CS_EQUALISER_12000_A1 (-1.017519)
+#define CS_EQUALISER_12000_A2 (-0.044128)
+#define CS_EQUALISER_12000_B1 (-0.729616)
+#define CS_EQUALISER_12000_B2 (-0.204532)
+#define CS_EQUALISER_12000_SCALE 14
+#define CSEX_EQUALISER_12000_A0 1.007095
+#define CSEX_EQUALISER_12000_A1 (-0.871912)
+#define CSEX_EQUALISER_12000_A2 0.023232
+#define CSEX_EQUALISER_12000_B1 (-0.745857)
+#define CSEX_EQUALISER_12000_B2 (-0.189171)
+#define CSEX_EQUALISER_12000_SCALE 14
/* Equaliser coefficients for 16000Hz sample rate, \
CS scaled with 1.031378 and CSEX scaled with 0.770164 */
-#define CS_EQUALISER_16000_A0 1.281629
-#define CS_EQUALISER_16000_A1 (-1.075872)
-#define CS_EQUALISER_16000_A2 (-0.041365)
-#define CS_EQUALISER_16000_B1 (-0.725239)
-#define CS_EQUALISER_16000_B2 (-0.224358)
-#define CS_EQUALISER_16000_SCALE 14
-#define CSEX_EQUALISER_16000_A0 1.081091
-#define CSEX_EQUALISER_16000_A1 (-0.867183)
-#define CSEX_EQUALISER_16000_A2 (-0.070247)
-#define CSEX_EQUALISER_16000_B1 (-0.515121)
-#define CSEX_EQUALISER_16000_B2 (-0.425893)
-#define CSEX_EQUALISER_16000_SCALE 14
+#define CS_EQUALISER_16000_A0 1.281629
+#define CS_EQUALISER_16000_A1 (-1.075872)
+#define CS_EQUALISER_16000_A2 (-0.041365)
+#define CS_EQUALISER_16000_B1 (-0.725239)
+#define CS_EQUALISER_16000_B2 (-0.224358)
+#define CS_EQUALISER_16000_SCALE 14
+#define CSEX_EQUALISER_16000_A0 1.081091
+#define CSEX_EQUALISER_16000_A1 (-0.867183)
+#define CSEX_EQUALISER_16000_A2 (-0.070247)
+#define CSEX_EQUALISER_16000_B1 (-0.515121)
+#define CSEX_EQUALISER_16000_B2 (-0.425893)
+#define CSEX_EQUALISER_16000_SCALE 14
/* Equaliser coefficients for 22050Hz sample rate, \
CS scaled with 1.041576 and CSEX scaled with 0.777779 */
-#define CS_EQUALISER_22050_A0 1.388605
-#define CS_EQUALISER_22050_A1 (-1.305799)
-#define CS_EQUALISER_22050_A2 0.039922
-#define CS_EQUALISER_22050_B1 (-0.719494)
-#define CS_EQUALISER_22050_B2 (-0.243245)
-#define CS_EQUALISER_22050_SCALE 14
-#define CSEX_EQUALISER_22050_A0 1.272910
-#define CSEX_EQUALISER_22050_A1 (-1.341014)
-#define CSEX_EQUALISER_22050_A2 0.167462
-#define CSEX_EQUALISER_22050_B1 (-0.614219)
-#define CSEX_EQUALISER_22050_B2 (-0.345384)
-#define CSEX_EQUALISER_22050_SCALE 14
+#define CS_EQUALISER_22050_A0 1.388605
+#define CS_EQUALISER_22050_A1 (-1.305799)
+#define CS_EQUALISER_22050_A2 0.039922
+#define CS_EQUALISER_22050_B1 (-0.719494)
+#define CS_EQUALISER_22050_B2 (-0.243245)
+#define CS_EQUALISER_22050_SCALE 14
+#define CSEX_EQUALISER_22050_A0 1.272910
+#define CSEX_EQUALISER_22050_A1 (-1.341014)
+#define CSEX_EQUALISER_22050_A2 0.167462
+#define CSEX_EQUALISER_22050_B1 (-0.614219)
+#define CSEX_EQUALISER_22050_B2 (-0.345384)
+#define CSEX_EQUALISER_22050_SCALE 14
/* Equaliser coefficients for 24000Hz sample rate, \
CS scaled with 1.034495 and CSEX scaled with 0.772491 */
-#define CS_EQUALISER_24000_A0 1.409832
-#define CS_EQUALISER_24000_A1 (-1.456506)
-#define CS_EQUALISER_24000_A2 0.151410
-#define CS_EQUALISER_24000_B1 (-0.804201)
-#define CS_EQUALISER_24000_B2 (-0.163783)
-#define CS_EQUALISER_24000_SCALE 14
-#define CSEX_EQUALISER_24000_A0 1.299198
-#define CSEX_EQUALISER_24000_A1 (-1.452447)
-#define CSEX_EQUALISER_24000_A2 0.240489
-#define CSEX_EQUALISER_24000_B1 (-0.669303)
-#define CSEX_EQUALISER_24000_B2 (-0.294984)
-#define CSEX_EQUALISER_24000_SCALE 14
+#define CS_EQUALISER_24000_A0 1.409832
+#define CS_EQUALISER_24000_A1 (-1.456506)
+#define CS_EQUALISER_24000_A2 0.151410
+#define CS_EQUALISER_24000_B1 (-0.804201)
+#define CS_EQUALISER_24000_B2 (-0.163783)
+#define CS_EQUALISER_24000_SCALE 14
+#define CSEX_EQUALISER_24000_A0 1.299198
+#define CSEX_EQUALISER_24000_A1 (-1.452447)
+#define CSEX_EQUALISER_24000_A2 0.240489
+#define CSEX_EQUALISER_24000_B1 (-0.669303)
+#define CSEX_EQUALISER_24000_B2 (-0.294984)
+#define CSEX_EQUALISER_24000_SCALE 14
/* Equaliser coefficients for 32000Hz sample rate, \
CS scaled with 1.044559 and CSEX scaled with 0.780006 */
-#define CS_EQUALISER_32000_A0 1.560988
-#define CS_EQUALISER_32000_A1 (-1.877724)
-#define CS_EQUALISER_32000_A2 0.389741
-#define CS_EQUALISER_32000_B1 (-0.907410)
-#define CS_EQUALISER_32000_B2 (-0.070489)
-#define CS_EQUALISER_32000_SCALE 14
-#define CSEX_EQUALISER_32000_A0 1.785049
-#define CSEX_EQUALISER_32000_A1 (-2.233497)
-#define CSEX_EQUALISER_32000_A2 0.526431
-#define CSEX_EQUALISER_32000_B1 (-0.445939)
-#define CSEX_EQUALISER_32000_B2 (-0.522446)
-#define CSEX_EQUALISER_32000_SCALE 13
+#define CS_EQUALISER_32000_A0 1.560988
+#define CS_EQUALISER_32000_A1 (-1.877724)
+#define CS_EQUALISER_32000_A2 0.389741
+#define CS_EQUALISER_32000_B1 (-0.907410)
+#define CS_EQUALISER_32000_B2 (-0.070489)
+#define CS_EQUALISER_32000_SCALE 14
+#define CSEX_EQUALISER_32000_A0 1.785049
+#define CSEX_EQUALISER_32000_A1 (-2.233497)
+#define CSEX_EQUALISER_32000_A2 0.526431
+#define CSEX_EQUALISER_32000_B1 (-0.445939)
+#define CSEX_EQUALISER_32000_B2 (-0.522446)
+#define CSEX_EQUALISER_32000_SCALE 13
/* Equaliser coefficients for 44100Hz sample rate, \
CS scaled with 1.022170 and CSEX scaled with 0.763288 */
-#define CS_EQUALISER_44100_A0 1.623993
-#define CS_EQUALISER_44100_A1 (-2.270743)
-#define CS_EQUALISER_44100_A2 0.688829
-#define CS_EQUALISER_44100_B1 (-1.117190)
-#define CS_EQUALISER_44100_B2 0.130208
-#define CS_EQUALISER_44100_SCALE 13
-#define CSEX_EQUALISER_44100_A0 2.028315
-#define CSEX_EQUALISER_44100_A1 (-2.882459)
-#define CSEX_EQUALISER_44100_A2 0.904535
-#define CSEX_EQUALISER_44100_B1 (-0.593308)
-#define CSEX_EQUALISER_44100_B2 (-0.385816)
-#define CSEX_EQUALISER_44100_SCALE 13
+#define CS_EQUALISER_44100_A0 1.623993
+#define CS_EQUALISER_44100_A1 (-2.270743)
+#define CS_EQUALISER_44100_A2 0.688829
+#define CS_EQUALISER_44100_B1 (-1.117190)
+#define CS_EQUALISER_44100_B2 0.130208
+#define CS_EQUALISER_44100_SCALE 13
+#define CSEX_EQUALISER_44100_A0 2.028315
+#define CSEX_EQUALISER_44100_A1 (-2.882459)
+#define CSEX_EQUALISER_44100_A2 0.904535
+#define CSEX_EQUALISER_44100_B1 (-0.593308)
+#define CSEX_EQUALISER_44100_B2 (-0.385816)
+#define CSEX_EQUALISER_44100_SCALE 13
/* Equaliser coefficients for 48000Hz sample rate, \
CS scaled with 1.018635 and CSEX scaled with 0.760648 */
-#define CS_EQUALISER_48000_A0 1.641177
-#define CS_EQUALISER_48000_A1 (-2.364687)
-#define CS_EQUALISER_48000_A2 0.759910
-#define CS_EQUALISER_48000_B1 (-1.166774)
-#define CS_EQUALISER_48000_B2 0.178074
-#define CS_EQUALISER_48000_SCALE 13
-#define CSEX_EQUALISER_48000_A0 2.099655
-#define CSEX_EQUALISER_48000_A1 (-3.065220)
-#define CSEX_EQUALISER_48000_A2 1.010417
-#define CSEX_EQUALISER_48000_B1 (-0.634021)
-#define CSEX_EQUALISER_48000_B2 (-0.347332)
-#define CSEX_EQUALISER_48000_SCALE 13
+#define CS_EQUALISER_48000_A0 1.641177
+#define CS_EQUALISER_48000_A1 (-2.364687)
+#define CS_EQUALISER_48000_A2 0.759910
+#define CS_EQUALISER_48000_B1 (-1.166774)
+#define CS_EQUALISER_48000_B2 0.178074
+#define CS_EQUALISER_48000_SCALE 13
+#define CSEX_EQUALISER_48000_A0 2.099655
+#define CSEX_EQUALISER_48000_A1 (-3.065220)
+#define CSEX_EQUALISER_48000_A2 1.010417
+#define CSEX_EQUALISER_48000_B1 (-0.634021)
+#define CSEX_EQUALISER_48000_B2 (-0.347332)
+#define CSEX_EQUALISER_48000_SCALE 13
/* Equaliser coefficients for 88200Hz sample rate.
* The filter coefficients are obtained by carrying out
* state-space analysis using the coefficients available
* for 44100Hz.
*/
-#define CS_EQUALISER_88200_A0 1.771899f
-#define CS_EQUALISER_88200_A1 (-2.930762f)
-#define CS_EQUALISER_88200_A2 1.172175f
-#define CS_EQUALISER_88200_B1 (-1.438349f)
-#define CS_EQUALISER_88200_B2 0.442520f
-#define CS_EQUALISER_88200_SCALE 13
-#define CSEX_EQUALISER_88200_A0 2.675241f
-#define CSEX_EQUALISER_88200_A1 (-4.466154f)
-#define CSEX_EQUALISER_88200_A2 1.810305f
-#define CSEX_EQUALISER_88200_B1 (-0.925350f)
-#define CSEX_EQUALISER_88200_B2 (-0.066616f)
-#define CSEX_EQUALISER_88200_SCALE 13
+#define CS_EQUALISER_88200_A0 1.771899f
+#define CS_EQUALISER_88200_A1 (-2.930762f)
+#define CS_EQUALISER_88200_A2 1.172175f
+#define CS_EQUALISER_88200_B1 (-1.438349f)
+#define CS_EQUALISER_88200_B2 0.442520f
+#define CS_EQUALISER_88200_SCALE 13
+#define CSEX_EQUALISER_88200_A0 2.675241f
+#define CSEX_EQUALISER_88200_A1 (-4.466154f)
+#define CSEX_EQUALISER_88200_A2 1.810305f
+#define CSEX_EQUALISER_88200_B1 (-0.925350f)
+#define CSEX_EQUALISER_88200_B2 (-0.066616f)
+#define CSEX_EQUALISER_88200_SCALE 13
-#define CS_EQUALISER_96000_A0 1.784497
-#define CS_EQUALISER_96000_A1 (-3.001435)
-#define CS_EQUALISER_96000_A2 1.228422
-#define CS_EQUALISER_96000_B1 (-1.477804)
-#define CS_EQUALISER_96000_B2 0.481369
-#define CS_EQUALISER_96000_SCALE 13
-#define CSEX_EQUALISER_96000_A0 2.7573
-#define CSEX_EQUALISER_96000_A1 (-4.6721)
-#define CSEX_EQUALISER_96000_A2 1.9317
-#define CSEX_EQUALISER_96000_B1 (-0.971718)
-#define CSEX_EQUALISER_96000_B2 (-0.021216)
-#define CSEX_EQUALISER_96000_SCALE 13
+#define CS_EQUALISER_96000_A0 1.784497
+#define CS_EQUALISER_96000_A1 (-3.001435)
+#define CS_EQUALISER_96000_A2 1.228422
+#define CS_EQUALISER_96000_B1 (-1.477804)
+#define CS_EQUALISER_96000_B2 0.481369
+#define CS_EQUALISER_96000_SCALE 13
+#define CSEX_EQUALISER_96000_A0 2.7573
+#define CSEX_EQUALISER_96000_A1 (-4.6721)
+#define CSEX_EQUALISER_96000_A2 1.9317
+#define CSEX_EQUALISER_96000_B1 (-0.971718)
+#define CSEX_EQUALISER_96000_B2 (-0.021216)
+#define CSEX_EQUALISER_96000_SCALE 13
/* Equaliser coefficients for 176400Hz sample rate.
* The filter coefficients are obtained by carrying out
* state-space analysis using the coefficients available
* for 44100Hz.
*/
-#define CS_EQUALISER_176400_A0 1.883440f
-#define CS_EQUALISER_176400_A1 (-3.414272f)
-#define CS_EQUALISER_176400_A2 1.534702f
-#define CS_EQUALISER_176400_B1 (-1.674614f)
-#define CS_EQUALISER_176400_B2 0.675827f
-#define CS_EQUALISER_176400_SCALE 13
-#define CSEX_EQUALISER_176400_A0 3.355068f
-#define CSEX_EQUALISER_176400_A1 (-6.112578f)
-#define CSEX_EQUALISER_176400_A2 2.764135f
-#define CSEX_EQUALISER_176400_B1 (-1.268533f)
-#define CSEX_EQUALISER_176400_B2 0.271277f
-#define CSEX_EQUALISER_176400_SCALE 13
+#define CS_EQUALISER_176400_A0 1.883440f
+#define CS_EQUALISER_176400_A1 (-3.414272f)
+#define CS_EQUALISER_176400_A2 1.534702f
+#define CS_EQUALISER_176400_B1 (-1.674614f)
+#define CS_EQUALISER_176400_B2 0.675827f
+#define CS_EQUALISER_176400_SCALE 13
+#define CSEX_EQUALISER_176400_A0 3.355068f
+#define CSEX_EQUALISER_176400_A1 (-6.112578f)
+#define CSEX_EQUALISER_176400_A2 2.764135f
+#define CSEX_EQUALISER_176400_B1 (-1.268533f)
+#define CSEX_EQUALISER_176400_B2 0.271277f
+#define CSEX_EQUALISER_176400_SCALE 13
-#define CS_EQUALISER_192000_A0 1.889582
-#define CS_EQUALISER_192000_A1 (-3.456140)
-#define CS_EQUALISER_192000_A2 1.569864
-#define CS_EQUALISER_192000_B1 (-1.700798)
-#define CS_EQUALISER_192000_B2 0.701824
-#define CS_EQUALISER_192000_SCALE 13
-#define CSEX_EQUALISER_192000_A0 3.4273
-#define CSEX_EQUALISER_192000_A1 (-6.2936)
-#define CSEX_EQUALISER_192000_A2 2.8720
-#define CSEX_EQUALISER_192000_B1 (-1.31074)
-#define CSEX_EQUALISER_192000_B2 0.31312
-#define CSEX_EQUALISER_192000_SCALE 13
+#define CS_EQUALISER_192000_A0 1.889582
+#define CS_EQUALISER_192000_A1 (-3.456140)
+#define CS_EQUALISER_192000_A2 1.569864
+#define CS_EQUALISER_192000_B1 (-1.700798)
+#define CS_EQUALISER_192000_B2 0.701824
+#define CS_EQUALISER_192000_SCALE 13
+#define CSEX_EQUALISER_192000_A0 3.4273
+#define CSEX_EQUALISER_192000_A1 (-6.2936)
+#define CSEX_EQUALISER_192000_A2 2.8720
+#define CSEX_EQUALISER_192000_B1 (-1.31074)
+#define CSEX_EQUALISER_192000_B2 0.31312
+#define CSEX_EQUALISER_192000_SCALE 13
-#define LVCS_HEADPHONE_SHIFT 2 /* Output Shift */
-#define LVCS_HEADPHONE_SHIFTLOSS 0.8477735 /* Output Shift loss */
-#define LVCS_HEADPHONE_GAIN 0.2087465 /* Unprocessed path gain */
-#define LVCS_EX_HEADPHONE_SHIFT 3 /* EX Output Shift */
-#define LVCS_EX_HEADPHONE_SHIFTLOSS 0.569225 /* EX Output Shift loss */
-#define LVCS_EX_HEADPHONE_GAIN 0.07794425 /* EX Unprocessed path gain */
+#define LVCS_HEADPHONE_SHIFT 2 /* Output Shift */
+#define LVCS_HEADPHONE_SHIFTLOSS 0.8477735 /* Output Shift loss */
+#define LVCS_HEADPHONE_GAIN 0.2087465 /* Unprocessed path gain */
+#define LVCS_EX_HEADPHONE_SHIFT 3 /* EX Output Shift */
+#define LVCS_EX_HEADPHONE_SHIFTLOSS 0.569225 /* EX Output Shift loss */
+#define LVCS_EX_HEADPHONE_GAIN 0.07794425 /* EX Unprocessed path gain */
#endif
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index 630ecf7..5c8f1ae 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -20,99 +20,11 @@
/* Includes */
/* */
/************************************************************************************/
-
+#include <stdlib.h>
#include "LVCS.h"
#include "LVCS_Private.h"
#include "LVCS_Tables.h"
-/****************************************************************************************/
-/* */
-/* FUNCTION: LVCS_Memory */
-/* */
-/* DESCRIPTION: */
-/* This function is used for memory allocation and free. It can be called in */
-/* two ways: */
-/* */
-/* hInstance = NULL Returns the memory requirements */
-/* hInstance = Instance handle Returns the memory requirements and */
-/* allocated base addresses for the instance */
-/* */
-/* When this function is called for memory allocation (hInstance=NULL) it is */
-/* passed the default capabilities. */
-/* */
-/* When called for memory allocation the memory base address pointers are NULL on */
-/* return. */
-/* */
-/* When the function is called for free (hInstance = Instance Handle) the */
-/* capabilities are ignored and the memory table returns the allocated memory and */
-/* base addresses used during initialisation. */
-/* */
-/* PARAMETERS: */
-/* hInstance Instance Handle */
-/* pMemoryTable Pointer to an empty memory definition table */
-/* pCapabilities Pointer to the default capabilites */
-/* */
-/* RETURNS: */
-/* LVCS_Success Succeeded */
-/* */
-/* NOTES: */
-/* 1. This function may be interrupted by the LVCS_Process function */
-/* */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t hInstance,
- LVCS_MemTab_t *pMemoryTable,
- LVCS_Capabilities_t *pCapabilities)
-{
-
- LVM_UINT32 ScratchSize;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
-
- /*
- * Fill in the memory table
- */
- if (hInstance == LVM_NULL)
- {
- /*
- * Instance memory
- */
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Size = (LVM_UINT32)sizeof(LVCS_Instance_t);
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Type = LVCS_PERSISTENT;
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
- /*
- * Data memory
- */
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size = (LVM_UINT32)sizeof(LVCS_Data_t);
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Type = LVCS_DATA;
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
- /*
- * Coefficient memory
- */
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size = (LVM_UINT32)sizeof(LVCS_Coefficient_t);
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Type = LVCS_COEFFICIENT;
- pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
- /*
- * Scratch memory
- */
- /* Inplace processing */
- ScratchSize = (LVM_UINT32) \
- (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * pCapabilities->MaxBlockSize);
- pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Size = ScratchSize;
- pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Type = LVCS_SCRATCH;
- pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
- }
- else
- {
- /* Read back memory allocation table */
- *pMemoryTable = pInstance->MemoryTable;
- }
-
- return(LVCS_SUCCESS);
-}
-
/************************************************************************************/
/* */
/* FUNCTION: LVCS_Init */
@@ -120,91 +32,106 @@
/* DESCRIPTION: */
/* Create and initialisation function for the Concert Sound module */
/* */
-/* This function can be used to create an algorithm instance by calling with */
-/* hInstance set to LVM_NULL. In this case the algorithm returns the new instance */
-/* handle. */
-/* */
-/* This function can be used to force a full re-initialisation of the algorithm */
-/* by calling with hInstance = Instance Handle. In this case the memory table */
-/* should be correct for the instance, this can be ensured by calling the function */
-/* LVCS_Memory before calling this function. */
-/* */
/* PARAMETERS: */
-/* hInstance Instance handle */
-/* pMemoryTable Pointer to the memory definition table */
+/* phInstance Pointer to instance handle */
/* pCapabilities Pointer to the capabilities structure */
+/* pScratch Pointer to scratch buffer */
/* */
/* RETURNS: */
/* LVCS_Success Initialisation succeeded */
+/* LVDBE_NULLADDRESS One or more memory has a NULL pointer - malloc failure */
/* */
/* NOTES: */
-/* 1. The instance handle is the pointer to the base address of the first memory */
-/* region. */
-/* 2. This function must not be interrupted by the LVCS_Process function */
-/* 3. This function must be called with the same capabilities as used for the */
-/* call to the memory function */
+/* 1. This function must not be interrupted by the LVCS_Process function */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t *phInstance,
- LVCS_MemTab_t *pMemoryTable,
- LVCS_Capabilities_t *pCapabilities)
-{
-
- LVCS_Instance_t *pInstance;
- LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+ void* pScratch) {
+ LVCS_Instance_t* pInstance;
+ LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
/*
- * Set the instance handle if not already initialised
+ * Create the instance handle if not already initialised
*/
- if (*phInstance == LVM_NULL)
- {
- *phInstance = (LVCS_Handle_t)pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress;
+ if (*phInstance == LVM_NULL) {
+ *phInstance = calloc(1, sizeof(*pInstance));
}
- pInstance =(LVCS_Instance_t *)*phInstance;
+ if (*phInstance == LVM_NULL) {
+ return LVCS_NULLADDRESS;
+ }
+ pInstance = (LVCS_Instance_t*)*phInstance;
/*
* Save the capabilities in the instance structure
*/
pInstance->Capabilities = *pCapabilities;
- /*
- * Save the memory table in the instance structure
- */
- pInstance->MemoryTable = *pMemoryTable;
+ pInstance->pScratch = pScratch;
/*
* Set all initial parameters to invalid to force a full initialisation
*/
- pInstance->Params.OperatingMode = LVCS_OFF;
- pInstance->Params.SpeakerType = LVCS_SPEAKERTYPE_MAX;
- pInstance->OutputDevice = LVCS_HEADPHONE;
- pInstance->Params.SourceFormat = LVCS_SOURCEMAX;
+ pInstance->Params.OperatingMode = LVCS_OFF;
+ pInstance->Params.SpeakerType = LVCS_SPEAKERTYPE_MAX;
+ pInstance->OutputDevice = LVCS_HEADPHONE;
+ pInstance->Params.SourceFormat = LVCS_SOURCEMAX;
pInstance->Params.CompressorMode = LVM_MODE_OFF;
- pInstance->Params.SampleRate = LVM_FS_INVALID;
- pInstance->Params.EffectLevel = 0;
- pInstance->Params.ReverbLevel = (LVM_UINT16)0x8000;
- pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
- pInstance->VolCorrect = pLVCS_VolCorrectTable[0];
- pInstance->TransitionGain = 0;
+ pInstance->Params.SampleRate = LVM_FS_INVALID;
+ pInstance->Params.EffectLevel = 0;
+ pInstance->Params.ReverbLevel = (LVM_UINT16)0x8000;
+ pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
+ pInstance->VolCorrect = pLVCS_VolCorrectTable[0];
+ pInstance->TransitionGain = 0;
/* These current and target values are intialized again in LVCS_Control.c */
- LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],0,0);
+ LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
/* These current and target values are intialized again in LVCS_Control.c */
- LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],0,0);
+ LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, 0);
/*
* Initialise the bypass variables
*/
- pInstance->MSTarget0=0;
- pInstance->MSTarget1=0;
- pInstance->bInOperatingModeTransition = LVM_FALSE;
- pInstance->bTimerDone = LVM_FALSE;
- pInstance->TimerParams.CallBackParam = 0;
- pInstance->TimerParams.pCallBack = LVCS_TimerCallBack;
- pInstance->TimerParams.pCallbackInstance = pInstance;
- pInstance->TimerParams.pCallBackParams = LVM_NULL;
+ pInstance->MSTarget0 = 0;
+ pInstance->MSTarget1 = 0;
+ pInstance->bInOperatingModeTransition = LVM_FALSE;
+ pInstance->bTimerDone = LVM_FALSE;
+ pInstance->TimerParams.CallBackParam = 0;
+ pInstance->TimerParams.pCallBack = LVCS_TimerCallBack;
+ pInstance->TimerParams.pCallbackInstance = pInstance;
+ pInstance->TimerParams.pCallBackParams = LVM_NULL;
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
+/************************************************************************************/
+/* */
+/* FUNCTION: LVCS_DeInit */
+/* */
+/* DESCRIPTION: */
+/* Free memories created during the LVCS_Init call including instance handle */
+/* */
+/* PARAMETERS: */
+/* phInstance Pointer to instance handle */
+/* */
+/* NOTES: */
+/* 1. This function must not be interrupted by the LVCS_Process function */
+/* */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)*phInstance;
+ if (pInstance == LVM_NULL) {
+ return;
+ }
+ if (pInstance->pCoeff != LVM_NULL) {
+ free(pInstance->pCoeff);
+ pInstance->pCoeff = LVM_NULL;
+ }
+ if (pInstance->pData != LVM_NULL) {
+ free(pInstance->pData);
+ pInstance->pData = LVM_NULL;
+ }
+ free(pInstance);
+ *phInstance = LVM_NULL;
+ return;
+}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
index 154ea55..f9c23b3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
@@ -33,11 +33,11 @@
/* */
/************************************************************************************/
-#include "LVCS.h" /* Calling or Application layer definitions */
-#include "LVCS_StereoEnhancer.h" /* Stereo enhancer module definitions */
-#include "LVCS_ReverbGenerator.h" /* Reverberation module definitions */
-#include "LVCS_Equaliser.h" /* Equaliser module definitions */
-#include "LVCS_BypassMix.h" /* Bypass Mixer module definitions */
+#include "LVCS.h" /* Calling or Application layer definitions */
+#include "LVCS_StereoEnhancer.h" /* Stereo enhancer module definitions */
+#include "LVCS_ReverbGenerator.h" /* Reverberation module definitions */
+#include "LVCS_Equaliser.h" /* Equaliser module definitions */
+#include "LVCS_BypassMix.h" /* Bypass Mixer module definitions */
#include "LVM_Timer.h"
/************************************************************************************/
@@ -47,35 +47,29 @@
/************************************************************************************/
/* Configuration switch controls */
-#define LVCS_STEREOENHANCESWITCH 0x0001 /* Stereo enhancement enable control */
-#define LVCS_REVERBSWITCH 0x0002 /* Reverberation enable control */
-#define LVCS_EQUALISERSWITCH 0x0004 /* Equaliser enable control */
-#define LVCS_BYPASSMIXSWITCH 0x0008 /* Bypass mixer enable control */
-#define LVCS_COMPGAINFRAME 64 /* Compressor gain update interval */
+#define LVCS_STEREOENHANCESWITCH 0x0001 /* Stereo enhancement enable control */
+#define LVCS_REVERBSWITCH 0x0002 /* Reverberation enable control */
+#define LVCS_EQUALISERSWITCH 0x0004 /* Equaliser enable control */
+#define LVCS_BYPASSMIXSWITCH 0x0008 /* Bypass mixer enable control */
+#define LVCS_COMPGAINFRAME 64 /* Compressor gain update interval */
/* Memory */
-#ifdef SUPPORT_MC
-#define LVCS_SCRATCHBUFFERS 8 /* Number of buffers required for inplace processing */
-#else
-#define LVCS_SCRATCHBUFFERS 6 /* Number of buffers required for inplace processing */
-#endif
-#ifdef SUPPORT_MC
+#define LVCS_SCRATCHBUFFERS 8 /* Number of buffers required for inplace processing */
/*
* The Concert Surround module applies processing only on the first two
* channels of a multichannel input. The data of first two channels is copied
* from the multichannel input into scratch buffer. The buffers added here
* are used for this purpose
*/
-#define LVCS_MC_SCRATCHBUFFERS 2
-#endif
+#define LVCS_MC_SCRATCHBUFFERS 2
/* General */
-#define LVCS_INVALID 0xFFFF /* Invalid init parameter */
-#define LVCS_BYPASS_MIXER_TC 100 /* Bypass mixer time */
+#define LVCS_INVALID 0xFFFF /* Invalid init parameter */
+#define LVCS_BYPASS_MIXER_TC 100 /* Bypass mixer time */
/* Access to external coefficients table */
-#define LVCS_NR_OF_FS 9
-#define LVCS_NR_OF_CHAN_CFG 2
+#define LVCS_NR_OF_FS 9
+#define LVCS_NR_OF_CHAN_CFG 2
/************************************************************************************/
/* */
@@ -83,13 +77,9 @@
/* */
/************************************************************************************/
-typedef LVM_UINT16 LVCS_Configuration_t; /* Internal algorithm configuration */
+typedef LVM_UINT16 LVCS_Configuration_t; /* Internal algorithm configuration */
-typedef enum
-{
- LVCS_HEADPHONE = 0,
- LVCS_DEVICE_MAX = LVM_MAXENUM
-} LVCS_OutputDevice_en;
+typedef enum { LVCS_HEADPHONE = 0, LVCS_DEVICE_MAX = LVM_MAXENUM } LVCS_OutputDevice_en;
/************************************************************************************/
/* */
@@ -98,65 +88,60 @@
/************************************************************************************/
/* Volume correction structure */
-typedef struct
-{
- LVM_FLOAT CompFull; /* Post CS compression 100% effect */
- LVM_FLOAT CompMin; /* Post CS compression 0% effect */
- LVM_FLOAT GainFull; /* CS gain correct 100% effect */
- LVM_FLOAT GainMin; /* CS gain correct 0% effect */
+typedef struct {
+ LVM_FLOAT CompFull; /* Post CS compression 100% effect */
+ LVM_FLOAT CompMin; /* Post CS compression 0% effect */
+ LVM_FLOAT GainFull; /* CS gain correct 100% effect */
+ LVM_FLOAT GainMin; /* CS gain correct 0% effect */
} LVCS_VolCorrect_t;
/* Instance structure */
-typedef struct
-{
+typedef struct {
/* Public parameters */
- LVCS_MemTab_t MemoryTable; /* Instance memory allocation table */
- LVCS_Params_t Params; /* Instance parameters */
- LVCS_Capabilities_t Capabilities; /* Initialisation capabilities */
+ LVCS_Params_t Params; /* Instance parameters */
+ LVCS_Capabilities_t Capabilities; /* Initialisation capabilities */
/* Private parameters */
- LVCS_OutputDevice_en OutputDevice; /* Selected output device type */
- LVCS_VolCorrect_t VolCorrect; /* Volume correction settings */
- LVM_FLOAT TransitionGain; /* Transition gain */
- LVM_FLOAT CompressGain; /* Last used compressor gain*/
+ LVCS_OutputDevice_en OutputDevice; /* Selected output device type */
+ LVCS_VolCorrect_t VolCorrect; /* Volume correction settings */
+ LVM_FLOAT TransitionGain; /* Transition gain */
+    LVM_FLOAT CompressGain;               /* Last used compressor gain */
/* Sub-block configurations */
- LVCS_StereoEnhancer_t StereoEnhancer; /* Stereo enhancer configuration */
- LVCS_ReverbGenerator_t Reverberation; /* Reverberation configuration */
- LVCS_Equaliser_t Equaliser; /* Equaliser configuration */
- LVCS_BypassMix_t BypassMix; /* Bypass mixer configuration */
+ LVCS_StereoEnhancer_t StereoEnhancer; /* Stereo enhancer configuration */
+ LVCS_ReverbGenerator_t Reverberation; /* Reverberation configuration */
+ LVCS_Equaliser_t Equaliser; /* Equaliser configuration */
+ LVCS_BypassMix_t BypassMix; /* Bypass mixer configuration */
/* Bypass variable */
- LVM_INT16 MSTarget0; /* Mixer state control variable for smooth transtion */
- LVM_INT16 MSTarget1; /* Mixer state control variable for smooth transtion */
- LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
- LVM_INT16 bTimerDone; /* Timer completion flag */
- LVM_Timer_Params_t TimerParams; /* Timer parameters */
- LVM_Timer_Instance_t TimerInstance; /* Timer instance */
+ LVM_INT16 MSTarget0; /* Mixer state control variable for smooth transition */
+ LVM_INT16 MSTarget1; /* Mixer state control variable for smooth transition */
+ LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
+ LVM_INT16 bTimerDone; /* Timer completion flag */
+ LVM_Timer_Params_t TimerParams; /* Timer parameters */
+ LVM_Timer_Instance_t TimerInstance; /* Timer instance */
+ void* pCoeff; /* pointer to buffer for equaliser filter coeffs */
+ void* pData; /* pointer to buffer for equaliser filter states */
+ void* pScratch; /* Pointer to bundle scratch buffer */
} LVCS_Instance_t;
/* Coefficient Structure */
-typedef struct
-{
- Biquad_FLOAT_Instance_t EqualiserBiquadInstance;
- Biquad_FLOAT_Instance_t ReverbBiquadInstance;
- Biquad_FLOAT_Instance_t SEBiquadInstanceMid;
- Biquad_FLOAT_Instance_t SEBiquadInstanceSide;
+typedef struct {
+ Biquad_FLOAT_Instance_t EqualiserBiquadInstance;
+ Biquad_FLOAT_Instance_t ReverbBiquadInstance;
+ Biquad_FLOAT_Instance_t SEBiquadInstanceMid;
+ Biquad_FLOAT_Instance_t SEBiquadInstanceSide;
} LVCS_Coefficient_t;
/* Data Structure */
-typedef struct
-{
+typedef struct {
Biquad_2I_Order2_FLOAT_Taps_t EqualiserBiquadTaps;
Biquad_2I_Order2_FLOAT_Taps_t ReverbBiquadTaps;
Biquad_1I_Order1_FLOAT_Taps_t SEBiquadTapsMid;
Biquad_1I_Order2_FLOAT_Taps_t SEBiquadTapsSide;
} LVCS_Data_t;
-void LVCS_TimerCallBack ( void* hInstance,
- void* pCallBackParams,
- LVM_INT32 CallbackParam);
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam);
-#endif /* PRIVATE_H */
-
+#endif /* PRIVATE_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
index 8e09be2..d18f2c3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
@@ -65,18 +65,14 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples)
-{
- const LVM_FLOAT *pInput;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVM_FLOAT *pScratch;
+LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+ const LVM_FLOAT* pInput;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVM_FLOAT* pScratch;
LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
- LVM_FLOAT *pStIn;
- LVM_INT32 channels = pInstance->Params.NrChannels;
+ LVM_FLOAT* pStIn;
+ LVM_INT32 channels = pInstance->Params.NrChannels;
#define NrFrames NumSamples // alias for clarity
/*In case of mono processing, stereo input is created from mono
@@ -85,96 +81,64 @@
*at this point.
*So to treat the pInData as stereo we are setting channels to 2
*/
- if (channels == 1)
- {
+ if (channels == 1) {
channels = 2;
}
-#endif
- pScratch = (LVM_FLOAT *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+ pScratch = (LVM_FLOAT*)pInstance->pScratch;
/*
* Check if the processing is inplace
*/
-#ifdef SUPPORT_MC
/*
* The pInput buffer holds the first 2 (Left, Right) channels information.
* Hence the memory required by this buffer is 2 * NumFrames.
* The Concert Surround module carries out processing only on L, R.
*/
pInput = pScratch + (2 * NrFrames);
- pStIn = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
+ pStIn = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
/* The first two channel data is extracted from the input data and
* copied into pInput buffer
*/
- Copy_Float_Mc_Stereo((LVM_FLOAT *)pInData,
- (LVM_FLOAT *)pInput,
- NrFrames,
- channels);
- Copy_Float((LVM_FLOAT *)pInput,
- (LVM_FLOAT *)pStIn,
- (LVM_INT16)(2 * NrFrames));
-#else
- if (pInData == pOutData)
- {
- /* Processing inplace */
- pInput = pScratch + (2 * NumSamples);
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pInput, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Left and right */
- }
- else
- {
- /* Processing outplace */
- pInput = pInData;
- }
-#endif
+ Copy_Float_Mc_Stereo((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, NrFrames, channels);
+ Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)(2 * NrFrames));
/*
* Call the stereo enhancer
*/
-#ifdef SUPPORT_MC
- err = LVCS_StereoEnhancer(hInstance, /* Instance handle */
- pStIn, /* Pointer to the input data */
- pOutData, /* Pointer to the output data */
- NrFrames); /* Number of frames to process */
-#else
- err = LVCS_StereoEnhancer(hInstance, /* Instance handle */
- pInData, /* Pointer to the input data */
- pOutData, /* Pointer to the output data */
- NumSamples); /* Number of samples to process */
-#endif
+ err = LVCS_StereoEnhancer(hInstance, /* Instance handle */
+ pStIn, /* Pointer to the input data */
+ pOutData, /* Pointer to the output data */
+ NrFrames); /* Number of frames to process */
/*
* Call the reverb generator
*/
- err = LVCS_ReverbGenerator(hInstance, /* Instance handle */
- pOutData, /* Pointer to the input data */
- pOutData, /* Pointer to the output data */
- NumSamples); /* Number of samples to process */
+ err = LVCS_ReverbGenerator(hInstance, /* Instance handle */
+ pOutData, /* Pointer to the input data */
+ pOutData, /* Pointer to the output data */
+ NumSamples); /* Number of samples to process */
/*
* Call the equaliser
*/
- err = LVCS_Equaliser(hInstance, /* Instance handle */
- pOutData, /* Pointer to the input data */
- NumSamples); /* Number of samples to process */
+ err = LVCS_Equaliser(hInstance, /* Instance handle */
+ pOutData, /* Pointer to the input data */
+ NumSamples); /* Number of samples to process */
/*
* Call the bypass mixer
*/
- err = LVCS_BypassMixer(hInstance, /* Instance handle */
- pOutData, /* Pointer to the processed data */
- pInput, /* Pointer to the input (unprocessed) data */
- pOutData, /* Pointer to the output data */
- NumSamples); /* Number of samples to process */
+ err = LVCS_BypassMixer(hInstance, /* Instance handle */
+ pOutData, /* Pointer to the processed data */
+ pInput, /* Pointer to the input (unprocessed) data */
+ pOutData, /* Pointer to the output data */
+ NumSamples); /* Number of samples to process */
- if(err != LVCS_SUCCESS)
- {
+ if (err != LVCS_SUCCESS) {
return err;
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
/* */
@@ -202,171 +166,114 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples)
-{
-
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
/*Extract number of Channels info*/
LVM_INT32 channels = pInstance->Params.NrChannels;
#define NrFrames NumSamples // alias for clarity
- if (channels == 1)
- {
+ if (channels == 1) {
channels = 2;
}
-#endif
/*
* Check the number of samples is not too large
*/
- if (NumSamples > pInstance->Capabilities.MaxBlockSize)
- {
- return(LVCS_TOOMANYSAMPLES);
+ if (NumSamples > pInstance->Capabilities.MaxBlockSize) {
+ return (LVCS_TOOMANYSAMPLES);
}
/*
* Check if the algorithm is enabled
*/
- if (pInstance->Params.OperatingMode != LVCS_OFF)
- {
-#ifdef SUPPORT_MC
- LVM_FLOAT *pStereoOut;
+ if (pInstance->Params.OperatingMode != LVCS_OFF) {
+ LVM_FLOAT* pStereoOut;
/*
* LVCS_Process_CS uses output buffer to store intermediate outputs of StereoEnhancer,
* Equalizer, ReverbGenerator and BypassMixer.
* So, to avoid i/o data overlapping, when i/o buffers are common, use scratch buffer
* to store intermediate outputs.
*/
- if (pOutData == pInData)
- {
- /*
- * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
- * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
- * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
- * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
- */
- pStereoOut = (LVM_FLOAT *) \
- pInstance->MemoryTable. \
- Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress +
- ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
- }
- else
- {
+ if (pOutData == pInData) {
+ /*
+ * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
+ * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
+ * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
+ * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
+ */
+ pStereoOut = (LVM_FLOAT*)pInstance->pScratch + ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
+ } else {
pStereoOut = pOutData;
}
/*
* Call CS process function
*/
- err = LVCS_Process_CS(hInstance,
- pInData,
- pStereoOut,
- NrFrames);
-#else
- err = LVCS_Process_CS(hInstance,
- pInData,
- pOutData,
- NumSamples);
-#endif
+ err = LVCS_Process_CS(hInstance, pInData, pStereoOut, NrFrames);
/*
* Compress to reduce expansion effect of Concert Sound and correct volume
* differences for difference settings. Not applied in test modes
*/
- if ((pInstance->Params.OperatingMode == LVCS_ON)&& \
- (pInstance->Params.CompressorMode == LVM_MODE_ON))
- {
+ if ((pInstance->Params.OperatingMode == LVCS_ON) &&
+ (pInstance->Params.CompressorMode == LVM_MODE_ON)) {
LVM_FLOAT Gain = pInstance->VolCorrect.CompMin;
LVM_FLOAT Current1;
Current1 = LVC_Mixer_GetCurrent(&pInstance->BypassMix.Mixer_Instance.MixerStream[0]);
- Gain = (LVM_FLOAT)( pInstance->VolCorrect.CompMin
- - (((LVM_FLOAT)pInstance->VolCorrect.CompMin * (Current1)))
- + (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
+ Gain = (LVM_FLOAT)(pInstance->VolCorrect.CompMin -
+ (((LVM_FLOAT)pInstance->VolCorrect.CompMin * (Current1))) +
+ (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
- if(NumSamples < LVCS_COMPGAINFRAME)
- {
-#ifdef SUPPORT_MC
- NonLinComp_Float(Gain, /* Compressor gain setting */
- pStereoOut,
- pStereoOut,
- (LVM_INT32)(2 * NrFrames));
-#else
- NonLinComp_Float(Gain, /* Compressor gain setting */
- pOutData,
- pOutData,
- (LVM_INT32)(2 * NumSamples));
-#endif
- }
- else
- {
- LVM_FLOAT GainStep;
- LVM_FLOAT FinalGain;
- LVM_INT16 SampleToProcess = NumSamples;
- LVM_FLOAT *pOutPtr;
+ if (NumSamples < LVCS_COMPGAINFRAME) {
+ NonLinComp_Float(Gain, /* Compressor gain setting */
+ pStereoOut, pStereoOut, (LVM_INT32)(2 * NrFrames));
+ } else {
+ LVM_FLOAT GainStep;
+ LVM_FLOAT FinalGain;
+ LVM_INT16 SampleToProcess = NumSamples;
+ LVM_FLOAT* pOutPtr;
/* Large changes in Gain can cause clicks in output
Split data into small blocks and use interpolated gain values */
- GainStep = (LVM_FLOAT)(((Gain-pInstance->CompressGain) * \
- LVCS_COMPGAINFRAME) / NumSamples);
+ GainStep = (LVM_FLOAT)(((Gain - pInstance->CompressGain) * LVCS_COMPGAINFRAME) /
+ NumSamples);
- if((GainStep == 0) && (pInstance->CompressGain < Gain))
- {
+ if ((GainStep == 0) && (pInstance->CompressGain < Gain)) {
GainStep = 1;
- }
- else
- {
- if((GainStep == 0) && (pInstance->CompressGain > Gain))
- {
+ } else {
+ if ((GainStep == 0) && (pInstance->CompressGain > Gain)) {
GainStep = -1;
}
}
FinalGain = Gain;
Gain = pInstance->CompressGain;
-#ifdef SUPPORT_MC
pOutPtr = pStereoOut;
-#else
- pOutPtr = pOutData;
-#endif
- while(SampleToProcess > 0)
- {
+ while (SampleToProcess > 0) {
Gain = (LVM_FLOAT)(Gain + GainStep);
- if((GainStep > 0) && (FinalGain <= Gain))
- {
+ if ((GainStep > 0) && (FinalGain <= Gain)) {
Gain = FinalGain;
GainStep = 0;
}
- if((GainStep < 0) && (FinalGain > Gain))
- {
+ if ((GainStep < 0) && (FinalGain > Gain)) {
Gain = FinalGain;
GainStep = 0;
}
- if(SampleToProcess > LVCS_COMPGAINFRAME)
- {
- NonLinComp_Float(Gain, /* Compressor gain setting */
- pOutPtr,
- pOutPtr,
- (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
+ if (SampleToProcess > LVCS_COMPGAINFRAME) {
+ NonLinComp_Float(Gain, /* Compressor gain setting */
+ pOutPtr, pOutPtr, (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
pOutPtr += (2 * LVCS_COMPGAINFRAME);
SampleToProcess = (LVM_INT16)(SampleToProcess - LVCS_COMPGAINFRAME);
- }
- else
- {
- NonLinComp_Float(Gain, /* Compressor gain setting */
- pOutPtr,
- pOutPtr,
- (LVM_INT32)(2 * SampleToProcess));
+ } else {
+ NonLinComp_Float(Gain, /* Compressor gain setting */
+ pOutPtr, pOutPtr, (LVM_INT32)(2 * SampleToProcess));
SampleToProcess = 0;
}
-
}
}
@@ -374,57 +281,33 @@
pInstance->CompressGain = Gain;
}
- if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-
+ if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
/*
* Re-init bypass mix when timer has completed
*/
if ((pInstance->bTimerDone == LVM_TRUE) &&
- (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0))
- {
- err = LVCS_BypassMixInit(hInstance,
- &pInstance->Params);
+ (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0)) {
+ err = LVCS_BypassMixInit(hInstance, &pInstance->Params);
- if(err != LVCS_SUCCESS)
- {
+ if (err != LVCS_SUCCESS) {
return err;
}
- }
- else{
- LVM_Timer ( &pInstance->TimerInstance,
- (LVM_INT16)NumSamples);
+ } else {
+ LVM_Timer(&pInstance->TimerInstance, (LVM_INT16)NumSamples);
}
}
-#ifdef SUPPORT_MC
- Copy_Float_Stereo_Mc(pInData,
- pStereoOut,
- pOutData,
- NrFrames,
- channels);
-#endif
- }
- else
- {
- if (pInData != pOutData)
- {
-#ifdef SUPPORT_MC
+ Copy_Float_Stereo_Mc(pInData, pStereoOut, pOutData, NrFrames, channels);
+ } else {
+ if (pInData != pOutData) {
/*
* The algorithm is disabled so just copy the data
*/
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pOutData, /* Destination */
- (LVM_INT16)(channels * NrFrames)); /* All Channels*/
-#else
- /*
- * The algorithm is disabled so just copy the data
- */
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pOutData, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Left and right */
-#endif
+ Copy_Float((LVM_FLOAT*)pInData, /* Source */
+ (LVM_FLOAT*)pOutData, /* Destination */
+                       (LVM_INT16)(channels * NrFrames)); /* All Channels */
}
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index d0e6e09..f6d2453 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -20,7 +20,7 @@
/* Includes */
/* */
/************************************************************************************/
-
+#include <stdlib.h>
#include "LVCS.h"
#include "LVCS_Private.h"
#include "LVCS_ReverbGenerator.h"
@@ -57,31 +57,39 @@
/* 2. The numerator coefficients of the filter are negated to cause an inversion. */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVM_UINT16 Delay;
+ LVM_UINT16 Offset;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+ LVCS_Data_t* pData;
+ LVCS_Coefficient_t* pCoefficients;
+ BQ_FLOAT_Coefs_t Coeffs;
+ const BiquadA012B12CoefsSP_t* pReverbCoefTable;
- LVM_UINT16 Delay;
- LVM_UINT16 Offset;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_ReverbGenerator_t *pConfig = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
- LVCS_Data_t *pData;
- LVCS_Coefficient_t *pCoefficients;
- BQ_FLOAT_Coefs_t Coeffs;
- const BiquadA012B12CoefsSP_t *pReverbCoefTable;
-
- pData = (LVCS_Data_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
- pCoefficients = (LVCS_Coefficient_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+ if (pInstance->pData == LVM_NULL) {
+ pInstance->pData = pData = (LVCS_Data_t*)calloc(1, sizeof(*pData));
+ if (pData == LVM_NULL) {
+ return LVCS_NULLADDRESS;
+ }
+ } else {
+ pData = (LVCS_Data_t*)pInstance->pData;
+ }
+ if (pInstance->pCoeff == LVM_NULL) {
+ pInstance->pCoeff = pCoefficients = (LVCS_Coefficient_t*)calloc(1, sizeof(*pCoefficients));
+ if (pCoefficients == LVM_NULL) {
+ return LVCS_NULLADDRESS;
+ }
+ } else {
+ pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
+ }
/*
* Initialise the delay and filters if:
* - the sample rate has changed
* - the speaker type has changed to or from the mobile speaker
*/
- if(pInstance->Params.SampleRate != pParams->SampleRate ) /* Sample rate change test */
+ if (pInstance->Params.SampleRate != pParams->SampleRate) /* Sample rate change test */
{
/*
@@ -89,10 +97,10 @@
*/
Delay = (LVM_UINT16)LVCS_StereoDelayCS[(LVM_UINT16)pParams->SampleRate];
- pConfig->DelaySize = (LVM_INT16)(2 * Delay);
- pConfig->DelayOffset = 0;
- LoadConst_Float(0, /* Value */
- (LVM_FLOAT *)&pConfig->StereoSamples[0], /* Destination */
+ pConfig->DelaySize = (LVM_INT16)(2 * Delay);
+ pConfig->DelayOffset = 0;
+ LoadConst_Float(0, /* Value */
+ (LVM_FLOAT*)&pConfig->StereoSamples[0], /* Destination */
/* Number of words */
(LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
/*
@@ -108,23 +116,21 @@
Coeffs.B1 = (LVM_FLOAT)-pReverbCoefTable[Offset].B1;
Coeffs.B2 = (LVM_FLOAT)-pReverbCoefTable[Offset].B2;
- LoadConst_Float(0, /* Value */
- (LVM_FLOAT *)&pData->ReverbBiquadTaps, /* Destination */
+ LoadConst_Float(0, /* Value */
+ (LVM_FLOAT*)&pData->ReverbBiquadTaps, /* Destination */
/* Number of words */
(LVM_UINT16)(sizeof(pData->ReverbBiquadTaps) / sizeof(LVM_FLOAT)));
BQ_2I_D16F16Css_TRC_WRA_01_Init(&pCoefficients->ReverbBiquadInstance,
- &pData->ReverbBiquadTaps,
- &Coeffs);
+ &pData->ReverbBiquadTaps, &Coeffs);
/* Callbacks */
- switch(pReverbCoefTable[Offset].Scale)
- {
+ switch (pReverbCoefTable[Offset].Scale) {
case 14:
- pConfig->pBiquadCallBack = BQ_2I_D16F16C14_TRC_WRA_01;
+ pConfig->pBiquadCallBack = BQ_2I_D16F16C14_TRC_WRA_01;
break;
case 15:
- pConfig->pBiquadCallBack = BQ_2I_D16F16C15_TRC_WRA_01;
+ pConfig->pBiquadCallBack = BQ_2I_D16F16C15_TRC_WRA_01;
break;
}
@@ -132,16 +138,15 @@
* Setup the mixer
*/
pConfig->ProcGain = (LVM_UINT16)(HEADPHONEGAINPROC);
- pConfig->UnprocGain = (LVM_UINT16)(HEADPHONEGAINUNPROC);
+ pConfig->UnprocGain = (LVM_UINT16)(HEADPHONEGAINUNPROC);
}
- if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
- {
- LVM_INT32 ReverbPercentage = 83886; // 1 Percent Reverb i.e 1/100 in Q 23 format
+ if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+        LVM_INT32 ReverbPercentage = 83886;  // 1 Percent Reverb i.e. 1/100 in Q 23 format
ReverbPercentage *= pParams->ReverbLevel; // Actual Reverb Level in Q 23 format
- pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage>>8)) / 32767.0f;
+ pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage >> 8)) / 32767.0f;
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
/* */
@@ -181,46 +186,37 @@
/* 2. The Gain is combined with the LPF and incorporated in to the coefficients */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples)
-{
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+ LVCS_Coefficient_t* pCoefficients;
+ LVM_FLOAT* pScratch;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_ReverbGenerator_t *pConfig = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
- LVCS_Coefficient_t *pCoefficients;
- LVM_FLOAT *pScratch;
-
- pCoefficients = (LVCS_Coefficient_t *)\
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
- pScratch = (LVM_FLOAT *)\
- pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+ pCoefficients = (LVCS_Coefficient_t*)pInstance->pCoeff;
+ pScratch = (LVM_FLOAT*)pInstance->pScratch;
/*
* Copy the data to the output in outplace processing
*/
- if (pInData != pOutData)
- {
+ if (pInData != pOutData) {
/*
* Reverb not required so just copy the data
*/
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pOutData, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Left and right */
+ Copy_Float((LVM_FLOAT*)pInData, /* Source */
+ (LVM_FLOAT*)pOutData, /* Destination */
+ (LVM_INT16)(2 * NumSamples)); /* Left and right */
}
/*
* Check if the reverb is required
*/
/* Disable when CS4MS in stereo mode */
- if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) || \
+ if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) ||
(pInstance->Params.SpeakerType == LVCS_EX_HEADPHONES) ||
- (pInstance->Params.SourceFormat != LVCS_STEREO)) &&
- /* For validation testing */
- ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) !=0))
- {
+ (pInstance->Params.SourceFormat != LVCS_STEREO)) &&
+ /* For validation testing */
+ ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) != 0)) {
/********************************************************************************/
/* */
/* Copy the input data to scratch memory and filter it */
@@ -230,34 +226,26 @@
/*
* Copy the input data to the scratch memory
*/
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pScratch, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Left and right */
+ Copy_Float((LVM_FLOAT*)pInData, /* Source */
+ (LVM_FLOAT*)pScratch, /* Destination */
+ (LVM_INT16)(2 * NumSamples)); /* Left and right */
/*
* Filter the data
*/
(pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*)&pCoefficients->ReverbBiquadInstance,
- (LVM_FLOAT *)pScratch,
- (LVM_FLOAT *)pScratch,
+ (LVM_FLOAT*)pScratch, (LVM_FLOAT*)pScratch,
(LVM_INT16)NumSamples);
- Mult3s_Float( (LVM_FLOAT *)pScratch,
- pConfig->ReverbLevel,
- (LVM_FLOAT *)pScratch,
- (LVM_INT16)(2 * NumSamples));
+ Mult3s_Float((LVM_FLOAT*)pScratch, pConfig->ReverbLevel, (LVM_FLOAT*)pScratch,
+ (LVM_INT16)(2 * NumSamples));
/*
* Apply the delay mix
*/
- DelayMix_Float((LVM_FLOAT *)pScratch,
- &pConfig->StereoSamples[0],
- pConfig->DelaySize,
- pOutData,
- &pConfig->DelayOffset,
- (LVM_INT16)NumSamples);
-
+ DelayMix_Float((LVM_FLOAT*)pScratch, &pConfig->StereoSamples[0], pConfig->DelaySize,
+ pOutData, &pConfig->DelayOffset, (LVM_INT16)NumSamples);
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
index 1bc4338..b666da3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
@@ -32,8 +32,8 @@
/* */
/************************************************************************************/
-#define HEADPHONEGAINPROC LVCS_HEADPHONE_PROCGAIN
-#define HEADPHONEGAINUNPROC LVCS_HEADPHONE_UNPROCGAIN
+#define HEADPHONEGAINPROC LVCS_HEADPHONE_PROCGAIN
+#define HEADPHONEGAINUNPROC LVCS_HEADPHONE_UNPROCGAIN
/************************************************************************************/
/* */
@@ -42,20 +42,17 @@
/************************************************************************************/
/* Reverberation module structure */
-typedef struct
-{
-
+typedef struct {
/* Stereo delay */
- LVM_INT16 DelaySize;
- LVM_INT16 DelayOffset;
- LVM_INT16 ProcGain;
- LVM_INT16 UnprocGain;
- LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
+ LVM_INT16 DelaySize;
+ LVM_INT16 DelayOffset;
+ LVM_INT16 ProcGain;
+ LVM_INT16 UnprocGain;
+ LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
/* Reverb Level */
- LVM_FLOAT ReverbLevel;
+ LVM_FLOAT ReverbLevel;
/* Filter */
- void (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*,
- LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+ void (*pBiquadCallBack)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
} LVCS_ReverbGenerator_t;
/************************************************************************************/
@@ -64,11 +61,8 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInput,
- LVM_FLOAT *pOutput,
- LVM_UINT16 NumSamples);
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInput,
+ LVM_FLOAT* pOutput, LVM_UINT16 NumSamples);
-#endif /* REVERB_H */
+#endif /* REVERB_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
index 7fd8444..ffa9c9b 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
@@ -49,31 +49,24 @@
/* NOTES: */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams)
-{
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+ LVM_UINT16 Offset;
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
+ LVCS_Data_t* pData;
+ LVCS_Coefficient_t* pCoefficient;
+ FO_FLOAT_Coefs_t CoeffsMid;
+ BQ_FLOAT_Coefs_t CoeffsSide;
+ const BiquadA012B12CoefsSP_t* pSESideCoefs;
- LVM_UINT16 Offset;
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_StereoEnhancer_t *pConfig = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
- LVCS_Data_t *pData;
- LVCS_Coefficient_t *pCoefficient;
- FO_FLOAT_Coefs_t CoeffsMid;
- BQ_FLOAT_Coefs_t CoeffsSide;
- const BiquadA012B12CoefsSP_t *pSESideCoefs;
-
- pData = (LVCS_Data_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
- pCoefficient = (LVCS_Coefficient_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+ pData = (LVCS_Data_t*)pInstance->pData;
+ pCoefficient = (LVCS_Coefficient_t*)pInstance->pCoeff;
/*
* If the sample rate or speaker type has changed update the filters
*/
if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
- (pInstance->Params.SpeakerType != pParams->SpeakerType))
- {
+ (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
/*
* Set the filter coefficients based on the sample rate
*/
@@ -81,24 +74,22 @@
Offset = (LVM_UINT16)pParams->SampleRate;
/* Convert incoming coefficients to the required format/ordering */
- CoeffsMid.A0 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A0;
- CoeffsMid.A1 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A1;
+ CoeffsMid.A0 = (LVM_FLOAT)LVCS_SEMidCoefTable[Offset].A0;
+ CoeffsMid.A1 = (LVM_FLOAT)LVCS_SEMidCoefTable[Offset].A1;
CoeffsMid.B1 = (LVM_FLOAT)-LVCS_SEMidCoefTable[Offset].B1;
/* Clear the taps */
- LoadConst_Float(0, /* Value */
- (LVM_FLOAT *)&pData->SEBiquadTapsMid, /* Destination */
+ LoadConst_Float(0, /* Value */
+ (LVM_FLOAT*)&pData->SEBiquadTapsMid, /* Destination */
/* Number of words */
(LVM_UINT16)(sizeof(pData->SEBiquadTapsMid) / sizeof(LVM_FLOAT)));
- FO_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceMid,
- &pData->SEBiquadTapsMid,
+ FO_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceMid, &pData->SEBiquadTapsMid,
&CoeffsMid);
/* Callbacks */
- if(LVCS_SEMidCoefTable[Offset].Scale == 15)
- {
- pConfig->pBiquadCallBack_Mid = FO_1I_D16F16C15_TRC_WRA_01;
+ if (LVCS_SEMidCoefTable[Offset].Scale == 15) {
+ pConfig->pBiquadCallBack_Mid = FO_1I_D16F16C15_TRC_WRA_01;
}
Offset = (LVM_UINT16)(pParams->SampleRate);
@@ -106,39 +97,35 @@
/* Side filter */
/* Convert incoming coefficients to the required format/ordering */
- CoeffsSide.A0 = (LVM_FLOAT) pSESideCoefs[Offset].A0;
- CoeffsSide.A1 = (LVM_FLOAT) pSESideCoefs[Offset].A1;
- CoeffsSide.A2 = (LVM_FLOAT) pSESideCoefs[Offset].A2;
+ CoeffsSide.A0 = (LVM_FLOAT)pSESideCoefs[Offset].A0;
+ CoeffsSide.A1 = (LVM_FLOAT)pSESideCoefs[Offset].A1;
+ CoeffsSide.A2 = (LVM_FLOAT)pSESideCoefs[Offset].A2;
CoeffsSide.B1 = (LVM_FLOAT)-pSESideCoefs[Offset].B1;
CoeffsSide.B2 = (LVM_FLOAT)-pSESideCoefs[Offset].B2;
/* Clear the taps */
- LoadConst_Float(0, /* Value */
- (LVM_FLOAT *)&pData->SEBiquadTapsSide, /* Destination */
+ LoadConst_Float(0, /* Value */
+ (LVM_FLOAT*)&pData->SEBiquadTapsSide, /* Destination */
/* Number of words */
(LVM_UINT16)(sizeof(pData->SEBiquadTapsSide) / sizeof(LVM_FLOAT)));
/* Callbacks */
- switch(pSESideCoefs[Offset].Scale)
- {
+ switch (pSESideCoefs[Offset].Scale) {
case 14:
BQ_1I_D16F32Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
- &pData->SEBiquadTapsSide,
- &CoeffsSide);
+ &pData->SEBiquadTapsSide, &CoeffsSide);
- pConfig->pBiquadCallBack_Side = BQ_1I_D16F32C14_TRC_WRA_01;
+ pConfig->pBiquadCallBack_Side = BQ_1I_D16F32C14_TRC_WRA_01;
break;
case 15:
BQ_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
- &pData->SEBiquadTapsSide,
- &CoeffsSide);
+ &pData->SEBiquadTapsSide, &CoeffsSide);
- pConfig->pBiquadCallBack_Side = BQ_1I_D16F16C15_TRC_WRA_01;
+ pConfig->pBiquadCallBack_Side = BQ_1I_D16F16C15_TRC_WRA_01;
break;
}
-
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
/************************************************************************************/
/* */
@@ -178,85 +165,61 @@
/* 1. The side filter is not used in Mobile Speaker mode */
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples)
-{
-
- LVCS_Instance_t *pInstance = (LVCS_Instance_t *)hInstance;
- LVCS_StereoEnhancer_t *pConfig = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
- LVCS_Coefficient_t *pCoefficient;
- LVM_FLOAT *pScratch;
-
- pCoefficient = (LVCS_Coefficient_t *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
- pScratch = (LVM_FLOAT *) \
- pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+ LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+ LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
+ LVCS_Coefficient_t* pCoefficient;
+ LVM_FLOAT* pScratch;
+ pCoefficient = (LVCS_Coefficient_t*)pInstance->pCoeff;
+ pScratch = (LVM_FLOAT*)pInstance->pScratch;
/*
* Check if the Stereo Enhancer is enabled
*/
- if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0)
- {
+ if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0) {
/*
* Convert from stereo to middle and side
*/
- From2iToMS_Float(pInData,
- pScratch,
- pScratch + NumSamples,
- (LVM_INT16)NumSamples);
+ From2iToMS_Float(pInData, pScratch, pScratch + NumSamples, (LVM_INT16)NumSamples);
/*
* Apply filter to the middle signal
*/
- if (pInstance->OutputDevice == LVCS_HEADPHONE)
- {
- (pConfig->pBiquadCallBack_Mid)((Biquad_FLOAT_Instance_t*)\
- &pCoefficient->SEBiquadInstanceMid,
- (LVM_FLOAT *)pScratch,
- (LVM_FLOAT *)pScratch,
- (LVM_INT16)NumSamples);
- }
- else
- {
- Mult3s_Float(pScratch, /* Source */
- (LVM_FLOAT)pConfig->MidGain, /* Gain */
- pScratch, /* Destination */
- (LVM_INT16)NumSamples); /* Number of samples */
+ if (pInstance->OutputDevice == LVCS_HEADPHONE) {
+ (pConfig->pBiquadCallBack_Mid)(
+ (Biquad_FLOAT_Instance_t*)&pCoefficient->SEBiquadInstanceMid,
+ (LVM_FLOAT*)pScratch, (LVM_FLOAT*)pScratch, (LVM_INT16)NumSamples);
+ } else {
+ Mult3s_Float(pScratch, /* Source */
+ (LVM_FLOAT)pConfig->MidGain, /* Gain */
+ pScratch, /* Destination */
+ (LVM_INT16)NumSamples); /* Number of samples */
}
/*
* Apply the filter the side signal only in stereo mode for headphones
* and in all modes for mobile speakers
*/
- if (pInstance->Params.SourceFormat == LVCS_STEREO)
- {
- (pConfig->pBiquadCallBack_Side)((Biquad_FLOAT_Instance_t*) \
- &pCoefficient->SEBiquadInstanceSide,
- (LVM_FLOAT *)(pScratch + NumSamples),
- (LVM_FLOAT *)(pScratch + NumSamples),
- (LVM_INT16)NumSamples);
+ if (pInstance->Params.SourceFormat == LVCS_STEREO) {
+ (pConfig->pBiquadCallBack_Side)(
+ (Biquad_FLOAT_Instance_t*)&pCoefficient->SEBiquadInstanceSide,
+ (LVM_FLOAT*)(pScratch + NumSamples), (LVM_FLOAT*)(pScratch + NumSamples),
+ (LVM_INT16)NumSamples);
}
/*
* Convert from middle and side to stereo
*/
- MSTo2i_Sat_Float(pScratch,
- pScratch + NumSamples,
- pOutData,
- (LVM_INT16)NumSamples);
+ MSTo2i_Sat_Float(pScratch, pScratch + NumSamples, pOutData, (LVM_INT16)NumSamples);
- }
- else
- {
+ } else {
/*
* The stereo enhancer is disabled so just copy the data
*/
- Copy_Float((LVM_FLOAT *)pInData, /* Source */
- (LVM_FLOAT *)pOutData, /* Destination */
- (LVM_INT16)(2 * NumSamples)); /* Left and right */
+ Copy_Float((LVM_FLOAT*)pInData, /* Source */
+ (LVM_FLOAT*)pOutData, /* Destination */
+ (LVM_INT16)(2 * NumSamples)); /* Left and right */
}
- return(LVCS_SUCCESS);
+ return (LVCS_SUCCESS);
}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
index 12a5982..c92f8a5 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
@@ -24,8 +24,8 @@
/* */
/************************************************************************************/
-#include "Filters.h" /* Filter definitions */
-#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
+#include "Filters.h" /* Filter definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
#include "BIQUAD.h"
/************************************************************************************/
@@ -35,21 +35,17 @@
/************************************************************************************/
/* Stereo enhancer structure */
-typedef struct
-{
-
+typedef struct {
/*
* Middle filter
*/
- void (*pBiquadCallBack_Mid)(Biquad_FLOAT_Instance_t*,
- LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+ void (*pBiquadCallBack_Mid)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
/*
* Side filter
*/
- void (*pBiquadCallBack_Side)(Biquad_FLOAT_Instance_t*,
- LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
- LVM_FLOAT MidGain; /* Middle gain in mobile speaker mode */
+ void (*pBiquadCallBack_Side)(Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+ LVM_FLOAT MidGain; /* Middle gain in mobile speaker mode */
} LVCS_StereoEnhancer_t;
/************************************************************************************/
@@ -58,12 +54,9 @@
/* */
/************************************************************************************/
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance,
- LVCS_Params_t *pParams);
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance,
- const LVM_FLOAT *pInData,
- LVM_FLOAT *pOutData,
- LVM_UINT16 NumSamples);
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+ LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
-#endif /* STEREOENHANCE_H */
+#endif /* STEREOENHANCE_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
index d79db61..55b5243 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
@@ -23,9 +23,9 @@
#include "LVCS_Private.h"
#include "LVCS_Tables.h"
-#include "Filters.h" /* Filter definitions */
-#include "BIQUAD.h" /* Biquad definitions */
-#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
+#include "Filters.h" /* Filter definitions */
+#include "BIQUAD.h" /* Biquad definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
/************************************************************************************/
/* */
@@ -35,144 +35,75 @@
/* Coefficient table for the middle filter */
const BiquadA01B1CoefsSP_t LVCS_SEMidCoefTable[] = {
- {CS_MIDDLE_8000_A0, /* 8kS/s coefficients */
- CS_MIDDLE_8000_A1,
- CS_MIDDLE_8000_B1,
- (LVM_UINT16 )CS_MIDDLE_8000_SCALE},
- {CS_MIDDLE_11025_A0, /* 11kS/s coefficients */
- CS_MIDDLE_11025_A1,
- CS_MIDDLE_11025_B1,
- (LVM_UINT16 )CS_MIDDLE_11025_SCALE},
- {CS_MIDDLE_12000_A0, /* 12kS/s coefficients */
- CS_MIDDLE_12000_A1,
- CS_MIDDLE_12000_B1,
- (LVM_UINT16 )CS_MIDDLE_12000_SCALE},
- {CS_MIDDLE_16000_A0, /* 16kS/s coefficients */
- CS_MIDDLE_16000_A1,
- CS_MIDDLE_16000_B1,
- (LVM_UINT16 )CS_MIDDLE_16000_SCALE},
- {CS_MIDDLE_22050_A0, /* 22kS/s coefficients */
- CS_MIDDLE_22050_A1,
- CS_MIDDLE_22050_B1,
- (LVM_UINT16 )CS_MIDDLE_22050_SCALE},
- {CS_MIDDLE_24000_A0, /* 24kS/s coefficients */
- CS_MIDDLE_24000_A1,
- CS_MIDDLE_24000_B1,
- (LVM_UINT16 )CS_MIDDLE_24000_SCALE},
- {CS_MIDDLE_32000_A0, /* 32kS/s coefficients */
- CS_MIDDLE_32000_A1,
- CS_MIDDLE_32000_B1,
- (LVM_UINT16 )CS_MIDDLE_32000_SCALE},
- {CS_MIDDLE_44100_A0, /* 44kS/s coefficients */
- CS_MIDDLE_44100_A1,
- CS_MIDDLE_44100_B1,
- (LVM_UINT16 )CS_MIDDLE_44100_SCALE},
- {CS_MIDDLE_48000_A0, /* 48kS/s coefficients */
- CS_MIDDLE_48000_A1,
- CS_MIDDLE_48000_B1,
- (LVM_UINT16 )CS_MIDDLE_48000_SCALE}
- ,
- {CS_MIDDLE_88200_A0, /* 88kS/s coefficients */
- CS_MIDDLE_88200_A1,
- CS_MIDDLE_88200_B1,
- (LVM_UINT16)CS_MIDDLE_88200_SCALE},
- {CS_MIDDLE_96000_A0, /* 96kS/s coefficients */
- CS_MIDDLE_96000_A1,
- CS_MIDDLE_96000_B1,
- (LVM_UINT16 )CS_MIDDLE_96000_SCALE},
- {CS_MIDDLE_176400_A0, /* 176kS/s coefficients */
- CS_MIDDLE_176400_A1,
- CS_MIDDLE_176400_B1,
- (LVM_UINT16)CS_MIDDLE_176400_SCALE},
- {CS_MIDDLE_192000_A0, /* 192kS/s coefficients */
- CS_MIDDLE_192000_A1,
- CS_MIDDLE_192000_B1,
- (LVM_UINT16 )CS_MIDDLE_192000_SCALE}
- };
+ {CS_MIDDLE_8000_A0, /* 8kS/s coefficients */
+ CS_MIDDLE_8000_A1, CS_MIDDLE_8000_B1, (LVM_UINT16)CS_MIDDLE_8000_SCALE},
+ {CS_MIDDLE_11025_A0, /* 11kS/s coefficients */
+ CS_MIDDLE_11025_A1, CS_MIDDLE_11025_B1, (LVM_UINT16)CS_MIDDLE_11025_SCALE},
+ {CS_MIDDLE_12000_A0, /* 12kS/s coefficients */
+ CS_MIDDLE_12000_A1, CS_MIDDLE_12000_B1, (LVM_UINT16)CS_MIDDLE_12000_SCALE},
+ {CS_MIDDLE_16000_A0, /* 16kS/s coefficients */
+ CS_MIDDLE_16000_A1, CS_MIDDLE_16000_B1, (LVM_UINT16)CS_MIDDLE_16000_SCALE},
+ {CS_MIDDLE_22050_A0, /* 22kS/s coefficients */
+ CS_MIDDLE_22050_A1, CS_MIDDLE_22050_B1, (LVM_UINT16)CS_MIDDLE_22050_SCALE},
+ {CS_MIDDLE_24000_A0, /* 24kS/s coefficients */
+ CS_MIDDLE_24000_A1, CS_MIDDLE_24000_B1, (LVM_UINT16)CS_MIDDLE_24000_SCALE},
+ {CS_MIDDLE_32000_A0, /* 32kS/s coefficients */
+ CS_MIDDLE_32000_A1, CS_MIDDLE_32000_B1, (LVM_UINT16)CS_MIDDLE_32000_SCALE},
+ {CS_MIDDLE_44100_A0, /* 44kS/s coefficients */
+ CS_MIDDLE_44100_A1, CS_MIDDLE_44100_B1, (LVM_UINT16)CS_MIDDLE_44100_SCALE},
+ {CS_MIDDLE_48000_A0, /* 48kS/s coefficients */
+ CS_MIDDLE_48000_A1, CS_MIDDLE_48000_B1, (LVM_UINT16)CS_MIDDLE_48000_SCALE},
+ {CS_MIDDLE_88200_A0, /* 88kS/s coefficients */
+ CS_MIDDLE_88200_A1, CS_MIDDLE_88200_B1, (LVM_UINT16)CS_MIDDLE_88200_SCALE},
+ {CS_MIDDLE_96000_A0, /* 96kS/s coefficients */
+ CS_MIDDLE_96000_A1, CS_MIDDLE_96000_B1, (LVM_UINT16)CS_MIDDLE_96000_SCALE},
+ {CS_MIDDLE_176400_A0, /* 176kS/s coefficients */
+ CS_MIDDLE_176400_A1, CS_MIDDLE_176400_B1, (LVM_UINT16)CS_MIDDLE_176400_SCALE},
+ {CS_MIDDLE_192000_A0, /* 192kS/s coefficients */
+ CS_MIDDLE_192000_A1, CS_MIDDLE_192000_B1, (LVM_UINT16)CS_MIDDLE_192000_SCALE}};
/* Coefficient table for the side filter */
const BiquadA012B12CoefsSP_t LVCS_SESideCoefTable[] = {
- /* Headphone Side coefficients */
- {CS_SIDE_8000_A0, /* 8kS/s coefficients */
- CS_SIDE_8000_A1,
- CS_SIDE_8000_A2,
- CS_SIDE_8000_B1,
- CS_SIDE_8000_B2,
- (LVM_UINT16 )CS_SIDE_8000_SCALE},
- {CS_SIDE_11025_A0, /* 11kS/s coefficients */
- CS_SIDE_11025_A1,
- CS_SIDE_11025_A2,
- CS_SIDE_11025_B1,
- CS_SIDE_11025_B2,
- (LVM_UINT16 )CS_SIDE_11025_SCALE},
- {CS_SIDE_12000_A0, /* 12kS/s coefficients */
- CS_SIDE_12000_A1,
- CS_SIDE_12000_A2,
- CS_SIDE_12000_B1,
- CS_SIDE_12000_B2,
- (LVM_UINT16 )CS_SIDE_12000_SCALE},
- {CS_SIDE_16000_A0, /* 16kS/s coefficients */
- CS_SIDE_16000_A1,
- CS_SIDE_16000_A2,
- CS_SIDE_16000_B1,
- CS_SIDE_16000_B2,
- (LVM_UINT16 )CS_SIDE_16000_SCALE},
- {CS_SIDE_22050_A0, /* 22kS/s coefficients */
- CS_SIDE_22050_A1,
- CS_SIDE_22050_A2,
- CS_SIDE_22050_B1,
- CS_SIDE_22050_B2,
- (LVM_UINT16 )CS_SIDE_22050_SCALE},
- {CS_SIDE_24000_A0, /* 24kS/s coefficients */
- CS_SIDE_24000_A1,
- CS_SIDE_24000_A2,
- CS_SIDE_24000_B1,
- CS_SIDE_24000_B2,
- (LVM_UINT16 )CS_SIDE_24000_SCALE},
- {CS_SIDE_32000_A0, /* 32kS/s coefficients */
- CS_SIDE_32000_A1,
- CS_SIDE_32000_A2,
- CS_SIDE_32000_B1,
- CS_SIDE_32000_B2,
- (LVM_UINT16 )CS_SIDE_32000_SCALE},
- {CS_SIDE_44100_A0, /* 44kS/s coefficients */
- CS_SIDE_44100_A1,
- CS_SIDE_44100_A2,
- CS_SIDE_44100_B1,
- CS_SIDE_44100_B2,
- (LVM_UINT16 )CS_SIDE_44100_SCALE},
- {CS_SIDE_48000_A0, /* 48kS/s coefficients */
- CS_SIDE_48000_A1,
- CS_SIDE_48000_A2,
- CS_SIDE_48000_B1,
- CS_SIDE_48000_B2,
- (LVM_UINT16 )CS_SIDE_48000_SCALE}
- ,
- {CS_SIDE_88200_A0, /* 88kS/s coefficients */
- CS_SIDE_88200_A1,
- CS_SIDE_88200_A2,
- CS_SIDE_88200_B1,
- CS_SIDE_88200_B2,
- (LVM_UINT16)CS_SIDE_88200_SCALE},
- {CS_SIDE_96000_A0, /* 96kS/s coefficients */
- CS_SIDE_96000_A1,
- CS_SIDE_96000_A2,
- CS_SIDE_96000_B1,
- CS_SIDE_96000_B2,
- (LVM_UINT16 )CS_SIDE_96000_SCALE},
- {CS_SIDE_176400_A0, /*176kS/s coefficients */
- CS_SIDE_176400_A1,
- CS_SIDE_176400_A2,
- CS_SIDE_176400_B1,
- CS_SIDE_176400_B2,
- (LVM_UINT16)CS_SIDE_176400_SCALE},
- {CS_SIDE_192000_A0, /* 192kS/s coefficients */
- CS_SIDE_192000_A1,
- CS_SIDE_192000_A2,
- CS_SIDE_192000_B1,
- CS_SIDE_192000_B2,
- (LVM_UINT16 )CS_SIDE_192000_SCALE}
-};
+ /* Headphone Side coefficients */
+ {CS_SIDE_8000_A0, /* 8kS/s coefficients */
+ CS_SIDE_8000_A1, CS_SIDE_8000_A2, CS_SIDE_8000_B1, CS_SIDE_8000_B2,
+ (LVM_UINT16)CS_SIDE_8000_SCALE},
+ {CS_SIDE_11025_A0, /* 11kS/s coefficients */
+ CS_SIDE_11025_A1, CS_SIDE_11025_A2, CS_SIDE_11025_B1, CS_SIDE_11025_B2,
+ (LVM_UINT16)CS_SIDE_11025_SCALE},
+ {CS_SIDE_12000_A0, /* 12kS/s coefficients */
+ CS_SIDE_12000_A1, CS_SIDE_12000_A2, CS_SIDE_12000_B1, CS_SIDE_12000_B2,
+ (LVM_UINT16)CS_SIDE_12000_SCALE},
+ {CS_SIDE_16000_A0, /* 16kS/s coefficients */
+ CS_SIDE_16000_A1, CS_SIDE_16000_A2, CS_SIDE_16000_B1, CS_SIDE_16000_B2,
+ (LVM_UINT16)CS_SIDE_16000_SCALE},
+ {CS_SIDE_22050_A0, /* 22kS/s coefficients */
+ CS_SIDE_22050_A1, CS_SIDE_22050_A2, CS_SIDE_22050_B1, CS_SIDE_22050_B2,
+ (LVM_UINT16)CS_SIDE_22050_SCALE},
+ {CS_SIDE_24000_A0, /* 24kS/s coefficients */
+ CS_SIDE_24000_A1, CS_SIDE_24000_A2, CS_SIDE_24000_B1, CS_SIDE_24000_B2,
+ (LVM_UINT16)CS_SIDE_24000_SCALE},
+ {CS_SIDE_32000_A0, /* 32kS/s coefficients */
+ CS_SIDE_32000_A1, CS_SIDE_32000_A2, CS_SIDE_32000_B1, CS_SIDE_32000_B2,
+ (LVM_UINT16)CS_SIDE_32000_SCALE},
+ {CS_SIDE_44100_A0, /* 44kS/s coefficients */
+ CS_SIDE_44100_A1, CS_SIDE_44100_A2, CS_SIDE_44100_B1, CS_SIDE_44100_B2,
+ (LVM_UINT16)CS_SIDE_44100_SCALE},
+ {CS_SIDE_48000_A0, /* 48kS/s coefficients */
+ CS_SIDE_48000_A1, CS_SIDE_48000_A2, CS_SIDE_48000_B1, CS_SIDE_48000_B2,
+ (LVM_UINT16)CS_SIDE_48000_SCALE},
+ {CS_SIDE_88200_A0, /* 88kS/s coefficients */
+ CS_SIDE_88200_A1, CS_SIDE_88200_A2, CS_SIDE_88200_B1, CS_SIDE_88200_B2,
+ (LVM_UINT16)CS_SIDE_88200_SCALE},
+ {CS_SIDE_96000_A0, /* 96kS/s coefficients */
+ CS_SIDE_96000_A1, CS_SIDE_96000_A2, CS_SIDE_96000_B1, CS_SIDE_96000_B2,
+ (LVM_UINT16)CS_SIDE_96000_SCALE},
+ {CS_SIDE_176400_A0, /* 176kS/s coefficients */
+ CS_SIDE_176400_A1, CS_SIDE_176400_A2, CS_SIDE_176400_B1, CS_SIDE_176400_B2,
+ (LVM_UINT16)CS_SIDE_176400_SCALE},
+ {CS_SIDE_192000_A0, /* 192kS/s coefficients */
+ CS_SIDE_192000_A1, CS_SIDE_192000_A2, CS_SIDE_192000_B1, CS_SIDE_192000_B2,
+ (LVM_UINT16)CS_SIDE_192000_SCALE}};
/************************************************************************************/
/* */
@@ -181,167 +112,87 @@
/************************************************************************************/
const BiquadA012B12CoefsSP_t LVCS_EqualiserCoefTable[] = {
- /* Headphone coefficients */
- {CS_EQUALISER_8000_A0, /* 8kS/s coefficients */
- CS_EQUALISER_8000_A1,
- CS_EQUALISER_8000_A2,
- CS_EQUALISER_8000_B1,
- CS_EQUALISER_8000_B2,
- (LVM_UINT16 )CS_EQUALISER_8000_SCALE},
- {CS_EQUALISER_11025_A0, /* 11kS/s coefficients */
- CS_EQUALISER_11025_A1,
- CS_EQUALISER_11025_A2,
- CS_EQUALISER_11025_B1,
- CS_EQUALISER_11025_B2,
- (LVM_UINT16 )CS_EQUALISER_11025_SCALE},
- {CS_EQUALISER_12000_A0, /* 12kS/s coefficients */
- CS_EQUALISER_12000_A1,
- CS_EQUALISER_12000_A2,
- CS_EQUALISER_12000_B1,
- CS_EQUALISER_12000_B2,
- (LVM_UINT16 )CS_EQUALISER_12000_SCALE},
- {CS_EQUALISER_16000_A0, /* 16kS/s coefficients */
- CS_EQUALISER_16000_A1,
- CS_EQUALISER_16000_A2,
- CS_EQUALISER_16000_B1,
- CS_EQUALISER_16000_B2,
- (LVM_UINT16 )CS_EQUALISER_16000_SCALE},
- {CS_EQUALISER_22050_A0, /* 22kS/s coefficients */
- CS_EQUALISER_22050_A1,
- CS_EQUALISER_22050_A2,
- CS_EQUALISER_22050_B1,
- CS_EQUALISER_22050_B2,
- (LVM_UINT16 )CS_EQUALISER_22050_SCALE},
- {CS_EQUALISER_24000_A0, /* 24kS/s coefficients */
- CS_EQUALISER_24000_A1,
- CS_EQUALISER_24000_A2,
- CS_EQUALISER_24000_B1,
- CS_EQUALISER_24000_B2,
- (LVM_UINT16 )CS_EQUALISER_24000_SCALE},
- {CS_EQUALISER_32000_A0, /* 32kS/s coefficients */
- CS_EQUALISER_32000_A1,
- CS_EQUALISER_32000_A2,
- CS_EQUALISER_32000_B1,
- CS_EQUALISER_32000_B2,
- (LVM_UINT16 )CS_EQUALISER_32000_SCALE},
- {CS_EQUALISER_44100_A0, /* 44kS/s coefficients */
- CS_EQUALISER_44100_A1,
- CS_EQUALISER_44100_A2,
- CS_EQUALISER_44100_B1,
- CS_EQUALISER_44100_B2,
- (LVM_UINT16 )CS_EQUALISER_44100_SCALE},
- {CS_EQUALISER_48000_A0, /* 48kS/s coefficients */
- CS_EQUALISER_48000_A1,
- CS_EQUALISER_48000_A2,
- CS_EQUALISER_48000_B1,
- CS_EQUALISER_48000_B2,
- (LVM_UINT16 )CS_EQUALISER_48000_SCALE},
- {CS_EQUALISER_88200_A0, /* 88kS/s coeffieients */
- CS_EQUALISER_88200_A1,
- CS_EQUALISER_88200_A2,
- CS_EQUALISER_88200_B1,
- CS_EQUALISER_88200_B2,
- (LVM_UINT16)CS_EQUALISER_88200_SCALE},
- {CS_EQUALISER_96000_A0, /* 96kS/s coefficients */
- CS_EQUALISER_96000_A1,
- CS_EQUALISER_96000_A2,
- CS_EQUALISER_96000_B1,
- CS_EQUALISER_96000_B2,
- (LVM_UINT16 )CS_EQUALISER_96000_SCALE},
- {CS_EQUALISER_176400_A0, /* 176kS/s coefficients */
- CS_EQUALISER_176400_A1,
- CS_EQUALISER_176400_A2,
- CS_EQUALISER_176400_B1,
- CS_EQUALISER_176400_B2,
- (LVM_UINT16)CS_EQUALISER_176400_SCALE},
- {CS_EQUALISER_192000_A0, /* 192kS/s coefficients */
- CS_EQUALISER_192000_A1,
- CS_EQUALISER_192000_A2,
- CS_EQUALISER_192000_B1,
- CS_EQUALISER_192000_B2,
- (LVM_UINT16 )CS_EQUALISER_192000_SCALE},
+ /* Headphone coefficients */
+ {CS_EQUALISER_8000_A0, /* 8kS/s coefficients */
+ CS_EQUALISER_8000_A1, CS_EQUALISER_8000_A2, CS_EQUALISER_8000_B1, CS_EQUALISER_8000_B2,
+ (LVM_UINT16)CS_EQUALISER_8000_SCALE},
+ {CS_EQUALISER_11025_A0, /* 11kS/s coefficients */
+ CS_EQUALISER_11025_A1, CS_EQUALISER_11025_A2, CS_EQUALISER_11025_B1, CS_EQUALISER_11025_B2,
+ (LVM_UINT16)CS_EQUALISER_11025_SCALE},
+ {CS_EQUALISER_12000_A0, /* 12kS/s coefficients */
+ CS_EQUALISER_12000_A1, CS_EQUALISER_12000_A2, CS_EQUALISER_12000_B1, CS_EQUALISER_12000_B2,
+ (LVM_UINT16)CS_EQUALISER_12000_SCALE},
+ {CS_EQUALISER_16000_A0, /* 16kS/s coefficients */
+ CS_EQUALISER_16000_A1, CS_EQUALISER_16000_A2, CS_EQUALISER_16000_B1, CS_EQUALISER_16000_B2,
+ (LVM_UINT16)CS_EQUALISER_16000_SCALE},
+ {CS_EQUALISER_22050_A0, /* 22kS/s coefficients */
+ CS_EQUALISER_22050_A1, CS_EQUALISER_22050_A2, CS_EQUALISER_22050_B1, CS_EQUALISER_22050_B2,
+ (LVM_UINT16)CS_EQUALISER_22050_SCALE},
+ {CS_EQUALISER_24000_A0, /* 24kS/s coefficients */
+ CS_EQUALISER_24000_A1, CS_EQUALISER_24000_A2, CS_EQUALISER_24000_B1, CS_EQUALISER_24000_B2,
+ (LVM_UINT16)CS_EQUALISER_24000_SCALE},
+ {CS_EQUALISER_32000_A0, /* 32kS/s coefficients */
+ CS_EQUALISER_32000_A1, CS_EQUALISER_32000_A2, CS_EQUALISER_32000_B1, CS_EQUALISER_32000_B2,
+ (LVM_UINT16)CS_EQUALISER_32000_SCALE},
+ {CS_EQUALISER_44100_A0, /* 44kS/s coefficients */
+ CS_EQUALISER_44100_A1, CS_EQUALISER_44100_A2, CS_EQUALISER_44100_B1, CS_EQUALISER_44100_B2,
+ (LVM_UINT16)CS_EQUALISER_44100_SCALE},
+ {CS_EQUALISER_48000_A0, /* 48kS/s coefficients */
+ CS_EQUALISER_48000_A1, CS_EQUALISER_48000_A2, CS_EQUALISER_48000_B1, CS_EQUALISER_48000_B2,
+ (LVM_UINT16)CS_EQUALISER_48000_SCALE},
+ {CS_EQUALISER_88200_A0, /* 88kS/s coefficients */
+ CS_EQUALISER_88200_A1, CS_EQUALISER_88200_A2, CS_EQUALISER_88200_B1, CS_EQUALISER_88200_B2,
+ (LVM_UINT16)CS_EQUALISER_88200_SCALE},
+ {CS_EQUALISER_96000_A0, /* 96kS/s coefficients */
+ CS_EQUALISER_96000_A1, CS_EQUALISER_96000_A2, CS_EQUALISER_96000_B1, CS_EQUALISER_96000_B2,
+ (LVM_UINT16)CS_EQUALISER_96000_SCALE},
+ {CS_EQUALISER_176400_A0, /* 176kS/s coefficients */
+ CS_EQUALISER_176400_A1, CS_EQUALISER_176400_A2, CS_EQUALISER_176400_B1,
+ CS_EQUALISER_176400_B2, (LVM_UINT16)CS_EQUALISER_176400_SCALE},
+ {CS_EQUALISER_192000_A0, /* 192kS/s coefficients */
+ CS_EQUALISER_192000_A1, CS_EQUALISER_192000_A2, CS_EQUALISER_192000_B1,
+ CS_EQUALISER_192000_B2, (LVM_UINT16)CS_EQUALISER_192000_SCALE},
- /* Concert Sound EX Headphone coefficients */
- {CSEX_EQUALISER_8000_A0, /* 8kS/s coefficients */
- CSEX_EQUALISER_8000_A1,
- CSEX_EQUALISER_8000_A2,
- CSEX_EQUALISER_8000_B1,
- CSEX_EQUALISER_8000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_8000_SCALE},
- {CSEX_EQUALISER_11025_A0, /* 11kS/s coefficients */
- CSEX_EQUALISER_11025_A1,
- CSEX_EQUALISER_11025_A2,
- CSEX_EQUALISER_11025_B1,
- CSEX_EQUALISER_11025_B2,
- (LVM_UINT16 )CSEX_EQUALISER_11025_SCALE},
- {CSEX_EQUALISER_12000_A0, /* 12kS/s coefficients */
- CSEX_EQUALISER_12000_A1,
- CSEX_EQUALISER_12000_A2,
- CSEX_EQUALISER_12000_B1,
- CSEX_EQUALISER_12000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_12000_SCALE},
- {CSEX_EQUALISER_16000_A0, /* 16kS/s coefficients */
- CSEX_EQUALISER_16000_A1,
- CSEX_EQUALISER_16000_A2,
- CSEX_EQUALISER_16000_B1,
- CSEX_EQUALISER_16000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_16000_SCALE},
- {CSEX_EQUALISER_22050_A0, /* 22kS/s coefficients */
- CSEX_EQUALISER_22050_A1,
- CSEX_EQUALISER_22050_A2,
- CSEX_EQUALISER_22050_B1,
- CSEX_EQUALISER_22050_B2,
- (LVM_UINT16 )CSEX_EQUALISER_22050_SCALE},
- {CSEX_EQUALISER_24000_A0, /* 24kS/s coefficients */
- CSEX_EQUALISER_24000_A1,
- CSEX_EQUALISER_24000_A2,
- CSEX_EQUALISER_24000_B1,
- CSEX_EQUALISER_24000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_24000_SCALE},
- {CSEX_EQUALISER_32000_A0, /* 32kS/s coefficients */
- CSEX_EQUALISER_32000_A1,
- CSEX_EQUALISER_32000_A2,
- CSEX_EQUALISER_32000_B1,
- CSEX_EQUALISER_32000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_32000_SCALE},
- {CSEX_EQUALISER_44100_A0, /* 44kS/s coefficients */
- CSEX_EQUALISER_44100_A1,
- CSEX_EQUALISER_44100_A2,
- CSEX_EQUALISER_44100_B1,
- CSEX_EQUALISER_44100_B2,
- (LVM_UINT16 )CSEX_EQUALISER_44100_SCALE},
- {CSEX_EQUALISER_48000_A0, /* 48kS/s coefficients */
- CSEX_EQUALISER_48000_A1,
- CSEX_EQUALISER_48000_A2,
- CSEX_EQUALISER_48000_B1,
- CSEX_EQUALISER_48000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_48000_SCALE}
- ,
- {CSEX_EQUALISER_88200_A0, /* 88kS/s coefficients */
- CSEX_EQUALISER_88200_A1,
- CSEX_EQUALISER_88200_A2,
- CSEX_EQUALISER_88200_B1,
- CSEX_EQUALISER_88200_B2,
- (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
- {CSEX_EQUALISER_96000_A0, /* 96kS/s coefficients */
- CSEX_EQUALISER_96000_A1,
- CSEX_EQUALISER_96000_A2,
- CSEX_EQUALISER_96000_B1,
- CSEX_EQUALISER_96000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_96000_SCALE},
- {CSEX_EQUALISER_176400_A0, /* 176kS/s coefficients */
- CSEX_EQUALISER_176400_A1,
- CSEX_EQUALISER_176400_A2,
- CSEX_EQUALISER_176400_B1,
- CSEX_EQUALISER_176400_B2,
- (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
- {CSEX_EQUALISER_192000_A0, /* 192kS/s coefficients */
- CSEX_EQUALISER_192000_A1,
- CSEX_EQUALISER_192000_A2,
- CSEX_EQUALISER_192000_B1,
- CSEX_EQUALISER_192000_B2,
- (LVM_UINT16 )CSEX_EQUALISER_192000_SCALE}
-};
+ /* Concert Sound EX Headphone coefficients */
+ {CSEX_EQUALISER_8000_A0, /* 8kS/s coefficients */
+ CSEX_EQUALISER_8000_A1, CSEX_EQUALISER_8000_A2, CSEX_EQUALISER_8000_B1,
+ CSEX_EQUALISER_8000_B2, (LVM_UINT16)CSEX_EQUALISER_8000_SCALE},
+ {CSEX_EQUALISER_11025_A0, /* 11kS/s coefficients */
+ CSEX_EQUALISER_11025_A1, CSEX_EQUALISER_11025_A2, CSEX_EQUALISER_11025_B1,
+ CSEX_EQUALISER_11025_B2, (LVM_UINT16)CSEX_EQUALISER_11025_SCALE},
+ {CSEX_EQUALISER_12000_A0, /* 12kS/s coefficients */
+ CSEX_EQUALISER_12000_A1, CSEX_EQUALISER_12000_A2, CSEX_EQUALISER_12000_B1,
+ CSEX_EQUALISER_12000_B2, (LVM_UINT16)CSEX_EQUALISER_12000_SCALE},
+ {CSEX_EQUALISER_16000_A0, /* 16kS/s coefficients */
+ CSEX_EQUALISER_16000_A1, CSEX_EQUALISER_16000_A2, CSEX_EQUALISER_16000_B1,
+ CSEX_EQUALISER_16000_B2, (LVM_UINT16)CSEX_EQUALISER_16000_SCALE},
+ {CSEX_EQUALISER_22050_A0, /* 22kS/s coefficients */
+ CSEX_EQUALISER_22050_A1, CSEX_EQUALISER_22050_A2, CSEX_EQUALISER_22050_B1,
+ CSEX_EQUALISER_22050_B2, (LVM_UINT16)CSEX_EQUALISER_22050_SCALE},
+ {CSEX_EQUALISER_24000_A0, /* 24kS/s coefficients */
+ CSEX_EQUALISER_24000_A1, CSEX_EQUALISER_24000_A2, CSEX_EQUALISER_24000_B1,
+ CSEX_EQUALISER_24000_B2, (LVM_UINT16)CSEX_EQUALISER_24000_SCALE},
+ {CSEX_EQUALISER_32000_A0, /* 32kS/s coefficients */
+ CSEX_EQUALISER_32000_A1, CSEX_EQUALISER_32000_A2, CSEX_EQUALISER_32000_B1,
+ CSEX_EQUALISER_32000_B2, (LVM_UINT16)CSEX_EQUALISER_32000_SCALE},
+ {CSEX_EQUALISER_44100_A0, /* 44kS/s coefficients */
+ CSEX_EQUALISER_44100_A1, CSEX_EQUALISER_44100_A2, CSEX_EQUALISER_44100_B1,
+ CSEX_EQUALISER_44100_B2, (LVM_UINT16)CSEX_EQUALISER_44100_SCALE},
+ {CSEX_EQUALISER_48000_A0, /* 48kS/s coefficients */
+ CSEX_EQUALISER_48000_A1, CSEX_EQUALISER_48000_A2, CSEX_EQUALISER_48000_B1,
+ CSEX_EQUALISER_48000_B2, (LVM_UINT16)CSEX_EQUALISER_48000_SCALE},
+ {CSEX_EQUALISER_88200_A0, /* 88kS/s coefficients */
+ CSEX_EQUALISER_88200_A1, CSEX_EQUALISER_88200_A2, CSEX_EQUALISER_88200_B1,
+ CSEX_EQUALISER_88200_B2, (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
+ {CSEX_EQUALISER_96000_A0, /* 96kS/s coefficients */
+ CSEX_EQUALISER_96000_A1, CSEX_EQUALISER_96000_A2, CSEX_EQUALISER_96000_B1,
+ CSEX_EQUALISER_96000_B2, (LVM_UINT16)CSEX_EQUALISER_96000_SCALE},
+ {CSEX_EQUALISER_176400_A0, /* 176kS/s coefficients */
+ CSEX_EQUALISER_176400_A1, CSEX_EQUALISER_176400_A2, CSEX_EQUALISER_176400_B1,
+ CSEX_EQUALISER_176400_B2, (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
+ {CSEX_EQUALISER_192000_A0, /* 192kS/s coefficients */
+ CSEX_EQUALISER_192000_A1, CSEX_EQUALISER_192000_A2, CSEX_EQUALISER_192000_B1,
+ CSEX_EQUALISER_192000_B2, (LVM_UINT16)CSEX_EQUALISER_192000_SCALE}};
/************************************************************************************/
/* */
@@ -350,20 +201,12 @@
/************************************************************************************/
/* Stereo delay table for Concert Sound */
-const LVM_UINT16 LVCS_StereoDelayCS[] = {
- LVCS_STEREODELAY_CS_8KHZ,
- LVCS_STEREODELAY_CS_11KHZ,
- LVCS_STEREODELAY_CS_12KHZ,
- LVCS_STEREODELAY_CS_16KHZ,
- LVCS_STEREODELAY_CS_22KHZ,
- LVCS_STEREODELAY_CS_24KHZ,
- LVCS_STEREODELAY_CS_32KHZ,
- LVCS_STEREODELAY_CS_44KHZ,
- LVCS_STEREODELAY_CS_48KHZ,
- LVCS_STEREODELAY_CS_88KHZ,
- LVCS_STEREODELAY_CS_96KHZ,
- LVCS_STEREODELAY_CS_176KHZ,
- LVCS_STEREODELAY_CS_192KHZ,
+const LVM_UINT16 LVCS_StereoDelayCS[] = {
+ LVCS_STEREODELAY_CS_8KHZ, LVCS_STEREODELAY_CS_11KHZ, LVCS_STEREODELAY_CS_12KHZ,
+ LVCS_STEREODELAY_CS_16KHZ, LVCS_STEREODELAY_CS_22KHZ, LVCS_STEREODELAY_CS_24KHZ,
+ LVCS_STEREODELAY_CS_32KHZ, LVCS_STEREODELAY_CS_44KHZ, LVCS_STEREODELAY_CS_48KHZ,
+ LVCS_STEREODELAY_CS_88KHZ, LVCS_STEREODELAY_CS_96KHZ, LVCS_STEREODELAY_CS_176KHZ,
+ LVCS_STEREODELAY_CS_192KHZ,
};
/************************************************************************************/
@@ -373,87 +216,46 @@
/************************************************************************************/
const BiquadA012B12CoefsSP_t LVCS_ReverbCoefTable[] = {
- /* Headphone coefficients */
- {CS_REVERB_8000_A0, /* 8kS/s coefficients */
- CS_REVERB_8000_A1,
- CS_REVERB_8000_A2,
- CS_REVERB_8000_B1,
- CS_REVERB_8000_B2,
- (LVM_UINT16 )CS_REVERB_8000_SCALE},
- {CS_REVERB_11025_A0, /* 11kS/s coefficients */
- CS_REVERB_11025_A1,
- CS_REVERB_11025_A2,
- CS_REVERB_11025_B1,
- CS_REVERB_11025_B2,
- (LVM_UINT16 )CS_REVERB_11025_SCALE},
- {CS_REVERB_12000_A0, /* 12kS/s coefficients */
- CS_REVERB_12000_A1,
- CS_REVERB_12000_A2,
- CS_REVERB_12000_B1,
- CS_REVERB_12000_B2,
- (LVM_UINT16 )CS_REVERB_12000_SCALE},
- {CS_REVERB_16000_A0, /* 16kS/s coefficients */
- CS_REVERB_16000_A1,
- CS_REVERB_16000_A2,
- CS_REVERB_16000_B1,
- CS_REVERB_16000_B2,
- (LVM_UINT16 )CS_REVERB_16000_SCALE},
- {CS_REVERB_22050_A0, /* 22kS/s coefficients */
- CS_REVERB_22050_A1,
- CS_REVERB_22050_A2,
- CS_REVERB_22050_B1,
- CS_REVERB_22050_B2,
- (LVM_UINT16 )CS_REVERB_22050_SCALE},
- {CS_REVERB_24000_A0, /* 24kS/s coefficients */
- CS_REVERB_24000_A1,
- CS_REVERB_24000_A2,
- CS_REVERB_24000_B1,
- CS_REVERB_24000_B2,
- (LVM_UINT16 )CS_REVERB_24000_SCALE},
- {CS_REVERB_32000_A0, /* 32kS/s coefficients */
- CS_REVERB_32000_A1,
- CS_REVERB_32000_A2,
- CS_REVERB_32000_B1,
- CS_REVERB_32000_B2,
- (LVM_UINT16 )CS_REVERB_32000_SCALE},
- {CS_REVERB_44100_A0, /* 44kS/s coefficients */
- CS_REVERB_44100_A1,
- CS_REVERB_44100_A2,
- CS_REVERB_44100_B1,
- CS_REVERB_44100_B2,
- (LVM_UINT16 )CS_REVERB_44100_SCALE},
- {CS_REVERB_48000_A0, /* 48kS/s coefficients */
- CS_REVERB_48000_A1,
- CS_REVERB_48000_A2,
- CS_REVERB_48000_B1,
- CS_REVERB_48000_B2,
- (LVM_UINT16 )CS_REVERB_48000_SCALE}
- ,
- {CS_REVERB_88200_A0, /* 88kS/s coefficients */
- CS_REVERB_88200_A1,
- CS_REVERB_88200_A2,
- CS_REVERB_88200_B1,
- CS_REVERB_88200_B2,
- (LVM_UINT16)CS_REVERB_88200_SCALE},
- {CS_REVERB_96000_A0, /* 96kS/s coefficients */
- CS_REVERB_96000_A1,
- CS_REVERB_96000_A2,
- CS_REVERB_96000_B1,
- CS_REVERB_96000_B2,
- (LVM_UINT16 )CS_REVERB_96000_SCALE},
- {CS_REVERB_176400_A0, /* 176kS/s coefficients */
- CS_REVERB_176400_A1,
- CS_REVERB_176400_A2,
- CS_REVERB_176400_B1,
- CS_REVERB_176400_B2,
- (LVM_UINT16)CS_REVERB_176400_SCALE},
- {CS_REVERB_192000_A0, /* 192kS/s coefficients */
- CS_REVERB_192000_A1,
- CS_REVERB_192000_A2,
- CS_REVERB_192000_B1,
- CS_REVERB_192000_B2,
- (LVM_UINT16 )CS_REVERB_192000_SCALE}
-};
+ /* Headphone coefficients */
+ {CS_REVERB_8000_A0, /* 8kS/s coefficients */
+ CS_REVERB_8000_A1, CS_REVERB_8000_A2, CS_REVERB_8000_B1, CS_REVERB_8000_B2,
+ (LVM_UINT16)CS_REVERB_8000_SCALE},
+ {CS_REVERB_11025_A0, /* 11kS/s coefficients */
+ CS_REVERB_11025_A1, CS_REVERB_11025_A2, CS_REVERB_11025_B1, CS_REVERB_11025_B2,
+ (LVM_UINT16)CS_REVERB_11025_SCALE},
+ {CS_REVERB_12000_A0, /* 12kS/s coefficients */
+ CS_REVERB_12000_A1, CS_REVERB_12000_A2, CS_REVERB_12000_B1, CS_REVERB_12000_B2,
+ (LVM_UINT16)CS_REVERB_12000_SCALE},
+ {CS_REVERB_16000_A0, /* 16kS/s coefficients */
+ CS_REVERB_16000_A1, CS_REVERB_16000_A2, CS_REVERB_16000_B1, CS_REVERB_16000_B2,
+ (LVM_UINT16)CS_REVERB_16000_SCALE},
+ {CS_REVERB_22050_A0, /* 22kS/s coefficients */
+ CS_REVERB_22050_A1, CS_REVERB_22050_A2, CS_REVERB_22050_B1, CS_REVERB_22050_B2,
+ (LVM_UINT16)CS_REVERB_22050_SCALE},
+ {CS_REVERB_24000_A0, /* 24kS/s coefficients */
+ CS_REVERB_24000_A1, CS_REVERB_24000_A2, CS_REVERB_24000_B1, CS_REVERB_24000_B2,
+ (LVM_UINT16)CS_REVERB_24000_SCALE},
+ {CS_REVERB_32000_A0, /* 32kS/s coefficients */
+ CS_REVERB_32000_A1, CS_REVERB_32000_A2, CS_REVERB_32000_B1, CS_REVERB_32000_B2,
+ (LVM_UINT16)CS_REVERB_32000_SCALE},
+ {CS_REVERB_44100_A0, /* 44kS/s coefficients */
+ CS_REVERB_44100_A1, CS_REVERB_44100_A2, CS_REVERB_44100_B1, CS_REVERB_44100_B2,
+ (LVM_UINT16)CS_REVERB_44100_SCALE},
+ {CS_REVERB_48000_A0, /* 48kS/s coefficients */
+ CS_REVERB_48000_A1, CS_REVERB_48000_A2, CS_REVERB_48000_B1, CS_REVERB_48000_B2,
+ (LVM_UINT16)CS_REVERB_48000_SCALE},
+ {CS_REVERB_88200_A0, /* 88kS/s coefficients */
+ CS_REVERB_88200_A1, CS_REVERB_88200_A2, CS_REVERB_88200_B1, CS_REVERB_88200_B2,
+ (LVM_UINT16)CS_REVERB_88200_SCALE},
+ {CS_REVERB_96000_A0, /* 96kS/s coefficients */
+ CS_REVERB_96000_A1, CS_REVERB_96000_A2, CS_REVERB_96000_B1, CS_REVERB_96000_B2,
+ (LVM_UINT16)CS_REVERB_96000_SCALE},
+ {CS_REVERB_176400_A0, /* 176kS/s coefficients */
+ CS_REVERB_176400_A1, CS_REVERB_176400_A2, CS_REVERB_176400_B1, CS_REVERB_176400_B2,
+ (LVM_UINT16)CS_REVERB_176400_SCALE},
+ {CS_REVERB_192000_A0, /* 192kS/s coefficients */
+ CS_REVERB_192000_A1, CS_REVERB_192000_A2, CS_REVERB_192000_B1, CS_REVERB_192000_B2,
+ (LVM_UINT16)CS_REVERB_192000_SCALE}};
/************************************************************************************/
/* */
@@ -461,20 +263,14 @@
/* */
/************************************************************************************/
-const Gain_t LVCS_OutputGainTable[] = {
- {LVCS_HEADPHONE_SHIFT, /* Headphone, stereo mode */
- LVCS_HEADPHONE_SHIFTLOSS,
- LVCS_HEADPHONE_GAIN},
- {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, stereo mode */
- LVCS_EX_HEADPHONE_SHIFTLOSS,
- LVCS_EX_HEADPHONE_GAIN},
- {LVCS_HEADPHONE_SHIFT, /* Headphone, mono mode */
- LVCS_HEADPHONE_SHIFTLOSS,
- LVCS_HEADPHONE_GAIN},
- {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, mono mode */
- LVCS_EX_HEADPHONE_SHIFTLOSS,
- LVCS_EX_HEADPHONE_GAIN}
-};
+const Gain_t LVCS_OutputGainTable[] = {{LVCS_HEADPHONE_SHIFT, /* Headphone, stereo mode */
+ LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+ {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, stereo mode */
+ LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN},
+ {LVCS_HEADPHONE_SHIFT, /* Headphone, mono mode */
+ LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+ {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, mono mode */
+ LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN}};
/************************************************************************************/
/* */
@@ -501,24 +297,14 @@
/* 1024 is -12dB gain */
/* */
/************************************************************************************/
-const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {
- {0.433362f, /* Headphone, stereo mode */
- 0.000000f,
- 1.000024f,
- 1.412640f},
- {0.433362f, /* EX Headphone, stereo mode */
- 0.000000f,
- 1.000024f,
- 1.412640f},
- {1.000000f, /* Headphone, mono mode */
- 0.000000f,
- 1.000024f,
- 1.412640f},
- {1.000000f, /* EX Headphone, mono mode */
- 0.000000f,
- 1.000024f,
- 1.412640f}
-};
+const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {{0.433362f, /* Headphone, stereo mode */
+ 0.000000f, 1.000024f, 1.412640f},
+ {0.433362f, /* EX Headphone, stereo mode */
+ 0.000000f, 1.000024f, 1.412640f},
+ {1.000000f, /* Headphone, mono mode */
+ 0.000000f, 1.000024f, 1.412640f},
+ {1.000000f, /* EX Headphone, mono mode */
+ 0.000000f, 1.000024f, 1.412640f}};
/************************************************************************************/
/* */
@@ -526,51 +312,32 @@
/* */
/************************************************************************************/
-#define LVCS_VOL_TC_Fs8000 32580 /* Floating point value 0.994262695 */
-#define LVCS_VOL_TC_Fs11025 32632 /* Floating point value 0.995849609 */
-#define LVCS_VOL_TC_Fs12000 32643 /* Floating point value 0.996185303 */
-#define LVCS_VOL_TC_Fs16000 32674 /* Floating point value 0.997131348 */
-#define LVCS_VOL_TC_Fs22050 32700 /* Floating point value 0.997924805 */
-#define LVCS_VOL_TC_Fs24000 32705 /* Floating point value 0.998077393 */
-#define LVCS_VOL_TC_Fs32000 32721 /* Floating point value 0.998565674 */
-#define LVCS_VOL_TC_Fs44100 32734 /* Floating point value 0.998962402 */
-#define LVCS_VOL_TC_Fs48000 32737 /* Floating point value 0.999053955 */
-#define LVCS_VOL_TC_Fs88200 32751 /* Floating point value 0.999481066 */
-#define LVCS_VOL_TC_Fs96000 32751 /* Floating point value 0.999511703 */ /* Todo @ need to re check this value*/
-#define LVCS_VOL_TC_Fs176400 32759 /* Floating point value 0.999740499 */
-#define LVCS_VOL_TC_Fs192000 32763 /* Floating point value 0.999877925 */ /* Todo @ need to re check this value*/
+#define LVCS_VOL_TC_Fs8000 32580 /* Floating point value 0.994262695 */
+#define LVCS_VOL_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVCS_VOL_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVCS_VOL_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVCS_VOL_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVCS_VOL_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVCS_VOL_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVCS_VOL_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVCS_VOL_TC_Fs48000 32737 /* Floating point value 0.999053955 */
+#define LVCS_VOL_TC_Fs88200 32751 /* Floating point value 0.999481066 */
+#define LVCS_VOL_TC_Fs96000 \
+ 32751 /* Floating point value 0.999511703 */ /* Todo @ need to re check this value*/
+#define LVCS_VOL_TC_Fs176400 32759 /* Floating point value 0.999740499 */
+#define LVCS_VOL_TC_Fs192000 \
+ 32763 /* Floating point value 0.999877925 */ /* Todo @ need to re check this value*/
-const LVM_INT16 LVCS_VolumeTCTable[13] = {LVCS_VOL_TC_Fs8000,
- LVCS_VOL_TC_Fs11025,
- LVCS_VOL_TC_Fs12000,
- LVCS_VOL_TC_Fs16000,
- LVCS_VOL_TC_Fs22050,
- LVCS_VOL_TC_Fs24000,
- LVCS_VOL_TC_Fs32000,
- LVCS_VOL_TC_Fs44100,
- LVCS_VOL_TC_Fs48000,
- LVCS_VOL_TC_Fs88200,
- LVCS_VOL_TC_Fs96000,
- LVCS_VOL_TC_Fs176400,
- LVCS_VOL_TC_Fs192000
-};
+const LVM_INT16 LVCS_VolumeTCTable[13] = {
+ LVCS_VOL_TC_Fs8000, LVCS_VOL_TC_Fs11025, LVCS_VOL_TC_Fs12000, LVCS_VOL_TC_Fs16000,
+ LVCS_VOL_TC_Fs22050, LVCS_VOL_TC_Fs24000, LVCS_VOL_TC_Fs32000, LVCS_VOL_TC_Fs44100,
+ LVCS_VOL_TC_Fs48000, LVCS_VOL_TC_Fs88200, LVCS_VOL_TC_Fs96000, LVCS_VOL_TC_Fs176400,
+ LVCS_VOL_TC_Fs192000};
/************************************************************************************/
/* */
/* Sample rate table */
/* */
/************************************************************************************/
-const LVM_INT32 LVCS_SampleRateTable[13] = {8000,
- 11025,
- 12000,
- 16000,
- 22050,
- 24000,
- 32000,
- 44100,
- 48000,
- 88200,
- 96000,
- 176400,
- 192000
-};
+const LVM_INT32 LVCS_SampleRateTable[13] = {8000, 11025, 12000, 16000, 22050, 24000, 32000,
+ 44100, 48000, 88200, 96000, 176400, 192000};
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
index 5490699..766f5f2 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
@@ -24,7 +24,7 @@
/* */
/************************************************************************************/
-#include "BIQUAD.h" /* Biquad definitions */
+#include "BIQUAD.h" /* Biquad definitions */
/************************************************************************************/
/* */
@@ -106,35 +106,34 @@
/* */
/************************************************************************************/
-extern const LVM_INT32 LVCS_SampleRateTable[];
+extern const LVM_INT32 LVCS_SampleRateTable[];
/*Speaker coeffient tables*/
-extern LVM_UINT16 LVCS_MS_Small_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Small_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Small_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Small_ReverbCoefTable[] ;
-extern LVM_UINT16 LVCS_MS_Small_StereoDelayCS4MS[];
-extern Gain_t LVCS_MS_Small_OutputGainTable[];
-extern LVCS_VolCorrect_t LVCS_MS_Small_VolCorrectTable[];
-extern LVM_UINT16 LVCS_MS_Small_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Small_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Small_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Small_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Small_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Small_ReverbGainTable[];
-extern LVM_UINT16 LVCS_MS_Medium_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_ReverbCoefTable[] ;
-extern LVM_UINT16 LVCS_MS_Medium_StereoDelayCS4MS[];
-extern Gain_t LVCS_MS_Medium_OutputGainTable[];
-extern LVCS_VolCorrect_t LVCS_MS_Medium_VolCorrectTable[];
-extern LVM_UINT16 LVCS_MS_Medium_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Medium_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Medium_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Medium_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Medium_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Medium_ReverbGainTable[];
-extern LVM_UINT16 LVCS_MS_Large_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Large_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Large_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t LVCS_MS_Large_ReverbCoefTable[] ;
-extern LVM_UINT16 LVCS_MS_Large_StereoDelayCS4MS[];
-extern Gain_t LVCS_MS_Large_OutputGainTable[];
-extern LVCS_VolCorrect_t LVCS_MS_Large_VolCorrectTable[];
-extern LVM_UINT16 LVCS_MS_Large_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Large_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Large_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Large_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Large_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Large_ReverbGainTable[];
#endif /* __LVCS_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 674c246..d026ab6 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -44,6 +44,36 @@
}
cc_test {
+ name: "reverb_test",
+ host_supported: false,
+ proprietary: true,
+
+ include_dirs: [
+ "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+ ],
+
+ header_libs: [
+ "libaudioeffects",
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "liblog",
+ "libreverbwrapper",
+ ],
+
+ srcs: [
+ "reverb_test.cpp",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
+
+cc_test {
name: "snr",
host_supported: false,
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index a97acc9..e96263c 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -23,7 +23,7 @@
echo "========================================"
echo "testing lvm"
adb shell mkdir -p $testdir
-adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
adb push $OUT/testcases/snr/arm64/snr $testdir
E_VAL=1
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
new file mode 100755
index 0000000..86b21ae
--- /dev/null
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+#
+# reverb test
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+ echo "Android build environment not set"
+ exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/revTest"
+
+echo "========================================"
+echo "testing reverb"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+
+E_VAL=1
+cmds="adb push $OUT/testcases/reverb_test/arm/reverb_test $testdir"
+
+fs_arr=(
+ 8000
+ 16000
+ 22050
+ 32000
+ 44100
+ 48000
+ 88200
+ 96000
+ 176400
+ 192000
+)
+
+flags_arr=(
+ "--M --fch 1"
+ "--fch 2"
+)
+
+# run reverb at different configs, saving only the stereo channel
+# pair.
+error_count=0
+testcase_count=0
+for cmd in "${cmds[@]}"
+do
+ $cmd
+ for flags in "${flags_arr[@]}"
+ do
+ for preset_val in {0..6}
+ do
+ for fs in ${fs_arr[*]}
+ do
+ for chMask in {0..22}
+ do
+ adb shell LD_LIBRARY_PATH=/system/vendor/lib/soundfx $testdir/reverb_test \
+ --input $testdir/sinesweepraw.raw \
+ --output $testdir/sinesweep_$((chMask))_$((fs)).raw \
+ --chMask $chMask $flags --fs $fs --preset $preset_val
+
+ shell_ret=$?
+ if [ $shell_ret -ne 0 ]; then
+ echo "error: $shell_ret"
+ ((++error_count))
+ fi
+
+ if [[ "$chMask" -gt 0 ]] && [[ $flags != *"--fch 2"* ]]
+ then
+ # single channel files should be identical to higher channel
+ # computation (first channel).
+ adb shell cmp $testdir/sinesweep_0_$((fs)).raw \
+ $testdir/sinesweep_$((chMask))_$((fs)).raw
+ elif [[ "$chMask" -gt 1 ]]
+ then
+ # two channel files should be identical to higher channel
+ # computation (first 2 channels).
+ adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+ $testdir/sinesweep_$((chMask))_$((fs)).raw
+ fi
+
+ # cmp returns EXIT_FAILURE on mismatch.
+ shell_ret=$?
+ if [ $shell_ret -ne 0 ]; then
+ echo "error: $shell_ret"
+ ((++error_count))
+ fi
+ ((++testcase_count))
+ done
+ done
+ done
+ done
+done
+
+adb shell rm -r $testdir
+echo "$testcase_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index a4ace6c..5c5f646 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -33,198 +33,148 @@
#define ALOGVV ALOGV
#else
#define ALOGVV(a...) \
- do { \
- } while (false)
+ do { \
+ } while (false)
#endif
-#define CHECK_ARG(cond) \
- { \
- if (!(cond)) { \
- ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
- return -EINVAL; \
- } \
- \
-}
+#define CHECK_ARG(cond) \
+ { \
+ if (!(cond)) { \
+ ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
+ return -EINVAL; \
+ } \
+ }
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc) \
- { \
- if ((LvmStatus) == LVM_NULLADDRESS) { \
- ALOGE( \
- "\tLVM_ERROR : Parameter error - " \
- "null pointer returned by %s in %s\n\n\n\n", \
- callingFunc, calledFunc); \
- } \
- if ((LvmStatus) == LVM_ALIGNMENTERROR) { \
- ALOGE( \
- "\tLVM_ERROR : Parameter error - " \
- "bad alignment returned by %s in %s\n\n\n\n", \
- callingFunc, calledFunc); \
- } \
- if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) { \
- ALOGE( \
- "\tLVM_ERROR : Parameter error - " \
- "bad number of samples returned by %s in %s\n\n\n\n", \
- callingFunc, calledFunc); \
- } \
- if ((LvmStatus) == LVM_OUTOFRANGE) { \
- ALOGE( \
- "\tLVM_ERROR : Parameter error - " \
- "out of range returned by %s in %s\n", \
- callingFunc, calledFunc); \
- } \
- }
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc) \
+ { \
+ if ((LvmStatus) == LVM_NULLADDRESS) { \
+ ALOGE("\tLVM_ERROR : Parameter error - " \
+ "null pointer returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_ALIGNMENTERROR) { \
+ ALOGE("\tLVM_ERROR : Parameter error - " \
+ "bad alignment returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) { \
+ ALOGE("\tLVM_ERROR : Parameter error - " \
+ "bad number of samples returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_OUTOFRANGE) { \
+ ALOGE("\tLVM_ERROR : Parameter error - " \
+ "out of range returned by %s in %s\n", \
+ callingFunc, calledFunc); \
+ } \
+ }
struct lvmConfigParams_t {
- int samplingFreq = 44100;
- int nrChannels = 2;
- int chMask = AUDIO_CHANNEL_OUT_STEREO;
- int vcBal = 0;
- int fChannels = 2;
- bool monoMode = false;
- int bassEffectLevel = 0;
- int eqPresetLevel = 0;
- int frameLength = 256;
- LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
- LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
- LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
- LVM_Mode_en csEnable = LVM_MODE_OFF;
+ int samplingFreq = 44100;
+ int nrChannels = 2;
+ int chMask = AUDIO_CHANNEL_OUT_STEREO;
+ int vcBal = 0;
+ int fChannels = 2;
+ bool monoMode = false;
+ int bassEffectLevel = 0;
+ int eqPresetLevel = 0;
+ int frameLength = 256;
+ LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
+ LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
+ LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
+ LVM_Mode_en csEnable = LVM_MODE_OFF;
};
constexpr audio_channel_mask_t lvmConfigChMask[] = {
- AUDIO_CHANNEL_OUT_MONO,
- AUDIO_CHANNEL_OUT_STEREO,
- AUDIO_CHANNEL_OUT_2POINT1,
- AUDIO_CHANNEL_OUT_2POINT0POINT2,
- AUDIO_CHANNEL_OUT_QUAD,
- AUDIO_CHANNEL_OUT_QUAD_BACK,
- AUDIO_CHANNEL_OUT_QUAD_SIDE,
- AUDIO_CHANNEL_OUT_SURROUND,
- (1 << 4) - 1,
- AUDIO_CHANNEL_OUT_2POINT1POINT2,
- AUDIO_CHANNEL_OUT_3POINT0POINT2,
- AUDIO_CHANNEL_OUT_PENTA,
- (1 << 5) - 1,
- AUDIO_CHANNEL_OUT_3POINT1POINT2,
- AUDIO_CHANNEL_OUT_5POINT1,
- AUDIO_CHANNEL_OUT_5POINT1_BACK,
- AUDIO_CHANNEL_OUT_5POINT1_SIDE,
- (1 << 6) - 1,
- AUDIO_CHANNEL_OUT_6POINT1,
- (1 << 7) - 1,
- AUDIO_CHANNEL_OUT_5POINT1POINT2,
- AUDIO_CHANNEL_OUT_7POINT1,
- (1 << 8) - 1,
+ AUDIO_CHANNEL_OUT_MONO,
+ AUDIO_CHANNEL_OUT_STEREO,
+ AUDIO_CHANNEL_OUT_2POINT1,
+ AUDIO_CHANNEL_OUT_2POINT0POINT2,
+ AUDIO_CHANNEL_OUT_QUAD,
+ AUDIO_CHANNEL_OUT_QUAD_BACK,
+ AUDIO_CHANNEL_OUT_QUAD_SIDE,
+ AUDIO_CHANNEL_OUT_SURROUND,
+ AUDIO_CHANNEL_INDEX_MASK_4,
+ AUDIO_CHANNEL_OUT_2POINT1POINT2,
+ AUDIO_CHANNEL_OUT_3POINT0POINT2,
+ AUDIO_CHANNEL_OUT_PENTA,
+ AUDIO_CHANNEL_INDEX_MASK_5,
+ AUDIO_CHANNEL_OUT_3POINT1POINT2,
+ AUDIO_CHANNEL_OUT_5POINT1,
+ AUDIO_CHANNEL_OUT_5POINT1_BACK,
+ AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+ AUDIO_CHANNEL_INDEX_MASK_6,
+ AUDIO_CHANNEL_OUT_6POINT1,
+ AUDIO_CHANNEL_INDEX_MASK_7,
+ AUDIO_CHANNEL_OUT_5POINT1POINT2,
+ AUDIO_CHANNEL_OUT_7POINT1,
+ AUDIO_CHANNEL_INDEX_MASK_8,
};
-
void printUsage() {
- printf("\nUsage: ");
- printf("\n <executable> -i:<input_file> -o:<out_file> [options]\n");
- printf("\nwhere, \n <inputfile> is the input file name");
- printf("\n on which LVM effects are applied");
- printf("\n <outputfile> processed output file");
- printf("\n and options are mentioned below");
- printf("\n");
- printf("\n -help (or) -h");
- printf("\n Prints this usage information");
- printf("\n");
- printf("\n -chMask:<channel_mask>\n");
- printf("\n 0 - AUDIO_CHANNEL_OUT_MONO");
- printf("\n 1 - AUDIO_CHANNEL_OUT_STEREO");
- printf("\n 2 - AUDIO_CHANNEL_OUT_2POINT1");
- printf("\n 3 - AUDIO_CHANNEL_OUT_2POINT0POINT2");
- printf("\n 4 - AUDIO_CHANNEL_OUT_QUAD");
- printf("\n 5 - AUDIO_CHANNEL_OUT_QUAD_BACK");
- printf("\n 6 - AUDIO_CHANNEL_OUT_QUAD_SIDE");
- printf("\n 7 - AUDIO_CHANNEL_OUT_SURROUND");
- printf("\n 8 - canonical channel index mask for 4 ch: (1 << 4) - 1");
- printf("\n 9 - AUDIO_CHANNEL_OUT_2POINT1POINT2");
- printf("\n 10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
- printf("\n 11 - AUDIO_CHANNEL_OUT_PENTA");
- printf("\n 12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
- printf("\n 13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
- printf("\n 14 - AUDIO_CHANNEL_OUT_5POINT1");
- printf("\n 15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
- printf("\n 16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
- printf("\n 17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
- printf("\n 18 - AUDIO_CHANNEL_OUT_6POINT1");
- printf("\n 19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
- printf("\n 20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
- printf("\n 21 - AUDIO_CHANNEL_OUT_7POINT1");
- printf("\n 22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
- printf("\n default 0");
- printf("\n -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
- printf("\n -ve values reduce Right channel while +ve value reduces Left channel");
- printf("\n default 0");
- printf("\n -fch:<file_channels> (1 through 8)\n\n");
- printf("\n -M");
- printf("\n Mono mode (force all input audio channels to be identical)");
- printf("\n -basslvl:<effect_level>");
- printf("\n A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
- LVM_BE_MAX_EFFECTLEVEL);
- printf("\n");
- printf("\n -eqPreset:<preset Value>");
- const size_t numPresetLvls = std::size(gEqualizerPresets);
- for (size_t i = 0; i < numPresetLvls; ++i) {
- printf("\n %zu - %s", i, gEqualizerPresets[i].name);
- }
- printf("\n default - 0");
- printf("\n -bE ");
- printf("\n Enable Dynamic Bass Enhancement");
- printf("\n");
- printf("\n -tE ");
- printf("\n Enable Treble Boost");
- printf("\n");
- printf("\n -csE ");
- printf("\n Enable Concert Surround");
- printf("\n");
- printf("\n -eqE ");
- printf("\n Enable Equalizer");
-}
-
-//----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-// pContext: effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(struct EffectContext *pContext) {
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
- LVM_MemTab_t MemTab;
-
- /* Free the algorithm memory */
- LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance, &MemTab,
- LVM_NULL);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
- for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
- if (MemTab.Region[i].Size != 0) {
- if (MemTab.Region[i].pBaseAddress != NULL) {
- ALOGV("\tLvmEffect_free - START freeing %" PRIu32
- " bytes for region %u at %p\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-
- free(MemTab.Region[i].pBaseAddress);
-
- ALOGV("\tLvmEffect_free - END freeing %" PRIu32
- " bytes for region %u at %p\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- } else {
- ALOGE(
- "\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer "
- "%" PRIu32 " bytes for region %u at %p ERROR\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- }
+ printf("\nUsage: ");
+ printf("\n <executable> -i:<input_file> -o:<out_file> [options]\n");
+ printf("\nwhere, \n <inputfile> is the input file name");
+ printf("\n on which LVM effects are applied");
+ printf("\n <outputfile> processed output file");
+ printf("\n and options are mentioned below");
+ printf("\n");
+ printf("\n -help (or) -h");
+ printf("\n Prints this usage information");
+ printf("\n");
+ printf("\n -chMask:<channel_mask>\n");
+ printf("\n 0 - AUDIO_CHANNEL_OUT_MONO");
+ printf("\n 1 - AUDIO_CHANNEL_OUT_STEREO");
+ printf("\n 2 - AUDIO_CHANNEL_OUT_2POINT1");
+ printf("\n 3 - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+ printf("\n 4 - AUDIO_CHANNEL_OUT_QUAD");
+ printf("\n 5 - AUDIO_CHANNEL_OUT_QUAD_BACK");
+ printf("\n 6 - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+ printf("\n 7 - AUDIO_CHANNEL_OUT_SURROUND");
+ printf("\n 8 - canonical channel index mask for 4 ch: (1 << 4) - 1");
+ printf("\n 9 - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+ printf("\n 10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+ printf("\n 11 - AUDIO_CHANNEL_OUT_PENTA");
+ printf("\n 12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+ printf("\n 13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+ printf("\n 14 - AUDIO_CHANNEL_OUT_5POINT1");
+ printf("\n 15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+ printf("\n 16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+ printf("\n 17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+ printf("\n 18 - AUDIO_CHANNEL_OUT_6POINT1");
+ printf("\n 19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+ printf("\n 20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+ printf("\n 21 - AUDIO_CHANNEL_OUT_7POINT1");
+ printf("\n 22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+ printf("\n default 0");
+ printf("\n -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
+ printf("\n -ve values reduce Right channel while +ve value reduces Left channel");
+ printf("\n default 0");
+ printf("\n -fch:<file_channels> (1 through 8)\n\n");
+ printf("\n -M");
+ printf("\n Mono mode (force all input audio channels to be identical)");
+ printf("\n -basslvl:<effect_level>");
+ printf("\n A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
+ LVM_BE_MAX_EFFECTLEVEL);
+ printf("\n");
+ printf("\n -eqPreset:<preset Value>");
+ const size_t numPresetLvls = std::size(gEqualizerPresets);
+ for (size_t i = 0; i < numPresetLvls; ++i) {
+ printf("\n %zu - %s", i, gEqualizerPresets[i].name);
}
- }
-} /* end LvmEffect_free */
+ printf("\n default - 0");
+ printf("\n -bE ");
+ printf("\n Enable Dynamic Bass Enhancement");
+ printf("\n");
+ printf("\n -tE ");
+ printf("\n Enable Treble Boost");
+ printf("\n");
+ printf("\n -csE ");
+ printf("\n Enable Concert Surround");
+ printf("\n");
+ printf("\n -eqE ");
+ printf("\n Enable Equalizer");
+}
//----------------------------------------------------------------------------
// LvmBundle_init()
@@ -239,586 +189,510 @@
//
//----------------------------------------------------------------------------
-int LvmBundle_init(struct EffectContext *pContext, LVM_ControlParams_t *params) {
- ALOGV("\tLvmBundle_init start");
+int LvmBundle_init(struct EffectContext* pContext, LVM_ControlParams_t* params) {
+ ALOGV("\tLvmBundle_init start");
- pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
- pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.inputCfg.samplingRate = 44100;
- pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.cookie = NULL;
- pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
- pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
- pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.outputCfg.samplingRate = 44100;
- pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
- pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.outputCfg.bufferProvider.cookie = NULL;
- pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+ pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.inputCfg.samplingRate = 44100;
+ pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.cookie = NULL;
+ pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+ pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.outputCfg.samplingRate = 44100;
+ pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
+ pContext->config.outputCfg.bufferProvider.cookie = NULL;
+ pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
- if (pContext->pBundledContext->hInstance != NULL) {
- ALOGV(
- "\tLvmBundle_init pContext->pBassBoost != NULL "
- "-> Calling pContext->pBassBoost->free()");
+ if (pContext->pBundledContext->hInstance != NULL) {
+ ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+ "-> Calling pContext->pBassBoost->free()");
+ LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
- LvmEffect_free(pContext);
-
- ALOGV(
- "\tLvmBundle_init pContext->pBassBoost != NULL "
- "-> Called pContext->pBassBoost->free()");
- }
-
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
- LVM_InstParams_t InstParams; /* Instance parameters */
- LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
- LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
- LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
- LVM_MemTab_t MemTab; /* Memory allocation table */
- bool bMallocFailure = LVM_FALSE;
-
- /* Set the capabilities */
- InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
- InstParams.MaxBlockSize = MAX_CALL_SIZE;
- InstParams.EQNB_NumBands = MAX_NUM_BANDS;
- InstParams.PSA_Included = LVM_PSA_ON;
-
- /* Allocate memory, forcing alignment */
- LvmStatus = LVM_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init");
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
- /* Allocate memory */
- for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
- if (MemTab.Region[i].Size != 0) {
- MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
- if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
- ALOGE(
- "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
- "%" PRIu32 " bytes for region %u\n",
- MemTab.Region[i].Size, i);
- bMallocFailure = LVM_TRUE;
- break;
- } else {
- ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
- " bytes for region %u at %p\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- }
+ ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+ "-> Called pContext->pBassBoost->free()");
}
- }
- /* If one or more of the memory regions failed to allocate, free the regions
- * that were
- * succesfully allocated and return with an error
- */
- if (bMallocFailure == LVM_TRUE) {
- for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
- if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
- ALOGE(
- "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
- "%" PRIu32 " bytes for region %u Not freeing\n",
- MemTab.Region[i].Size, i);
- } else {
- ALOGE(
- "\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated "
- "%" PRIu32 " bytes for region %u at %p- free\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- free(MemTab.Region[i].pBaseAddress);
- }
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_InstParams_t InstParams; /* Instance parameters */
+ LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
+ LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
+ LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
+
+ /* Set the capabilities */
+ InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+ InstParams.MaxBlockSize = MAX_CALL_SIZE;
+ InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+ InstParams.PSA_Included = LVM_PSA_ON;
+
+ LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
+
+ LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+ "LVM_GetInstanceHandle\n");
+
+ /* Set the initial process parameters */
+ /* General parameters */
+ params->OperatingMode = LVM_MODE_ON;
+ params->SampleRate = LVM_FS_44100;
+ params->SourceFormat = LVM_STEREO;
+ params->ChMask = AUDIO_CHANNEL_OUT_STEREO;
+ params->SpeakerType = LVM_HEADPHONES;
+
+ pContext->pBundledContext->SampleRate = LVM_FS_44100;
+
+ /* Concert Sound parameters */
+ params->VirtualizerOperatingMode = LVM_MODE_OFF;
+ params->VirtualizerType = LVM_CONCERTSOUND;
+ params->VirtualizerReverbLevel = 100;
+ params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+ /* N-Band Equaliser parameters */
+ params->EQNB_OperatingMode = LVM_EQNB_ON;
+ params->EQNB_NBands = FIVEBAND_NUMBANDS;
+ params->pEQNB_BandDefinition = &BandDefs[0];
+
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+ BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
}
- return -EINVAL;
- }
- ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
- /* Initialise */
- pContext->pBundledContext->hInstance = LVM_NULL;
+ /* Volume Control parameters */
+ params->VC_EffectLevel = 0;
+ params->VC_Balance = 0;
- /* Init sets the instance handle */
- LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
- &MemTab, &InstParams);
+ /* Treble Enhancement parameters */
+ params->TE_OperatingMode = LVM_TE_OFF;
+ params->TE_EffectLevel = 0;
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+ /* PSA Control parameters */
+ params->PSA_Enable = LVM_PSA_OFF;
+ params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
- ALOGV(
- "\tLvmBundle_init CreateInstance Succesfully called "
- "LVM_GetInstanceHandle\n");
+ /* Bass Enhancement parameters */
+ params->BE_OperatingMode = LVM_BE_ON;
+ params->BE_EffectLevel = 0;
+ params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ params->BE_HPF = LVM_BE_HPF_ON;
- /* Set the initial process parameters */
- /* General parameters */
- params->OperatingMode = LVM_MODE_ON;
- params->SampleRate = LVM_FS_44100;
- params->SourceFormat = LVM_STEREO;
- params->ChMask = AUDIO_CHANNEL_OUT_STEREO;
- params->SpeakerType = LVM_HEADPHONES;
+ /* PSA Control parameters */
+ params->PSA_Enable = LVM_PSA_OFF;
+ params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
- pContext->pBundledContext->SampleRate = LVM_FS_44100;
+ /* TE Control parameters */
+ params->TE_OperatingMode = LVM_TE_OFF;
+ params->TE_EffectLevel = 0;
- /* Concert Sound parameters */
- params->VirtualizerOperatingMode = LVM_MODE_OFF;
- params->VirtualizerType = LVM_CONCERTSOUND;
- params->VirtualizerReverbLevel = 100;
- params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
- /* N-Band Equaliser parameters */
- params->EQNB_OperatingMode = LVM_EQNB_ON;
- params->EQNB_NBands = FIVEBAND_NUMBANDS;
- params->pEQNB_BandDefinition = &BandDefs[0];
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
- BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
- }
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+ "LVM_SetControlParameters\n");
- /* Volume Control parameters */
- params->VC_EffectLevel = 0;
- params->VC_Balance = 0;
+ /* Set the headroom parameters */
+ HeadroomBandDef[0].Limit_Low = 20;
+ HeadroomBandDef[0].Limit_High = 4999;
+ HeadroomBandDef[0].Headroom_Offset = 0;
+ HeadroomBandDef[1].Limit_Low = 5000;
+ HeadroomBandDef[1].Limit_High = 24000;
+ HeadroomBandDef[1].Headroom_Offset = 0;
+ HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
+ HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
+ HeadroomParams.NHeadroomBands = 2;
- /* Treble Enhancement parameters */
- params->TE_OperatingMode = LVM_TE_OFF;
- params->TE_EffectLevel = 0;
+ LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
- /* PSA Control parameters */
- params->PSA_Enable = LVM_PSA_OFF;
- params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- /* Bass Enhancement parameters */
- params->BE_OperatingMode = LVM_BE_ON;
- params->BE_EffectLevel = 0;
- params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
- params->BE_HPF = LVM_BE_HPF_ON;
-
- /* PSA Control parameters */
- params->PSA_Enable = LVM_PSA_OFF;
- params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
-
- /* TE Control parameters */
- params->TE_OperatingMode = LVM_TE_OFF;
- params->TE_EffectLevel = 0;
-
- /* Activate the initial settings */
- LvmStatus =
- LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- ALOGV(
- "\tLvmBundle_init CreateInstance Succesfully called "
- "LVM_SetControlParameters\n");
-
- /* Set the headroom parameters */
- HeadroomBandDef[0].Limit_Low = 20;
- HeadroomBandDef[0].Limit_High = 4999;
- HeadroomBandDef[0].Headroom_Offset = 0;
- HeadroomBandDef[1].Limit_Low = 5000;
- HeadroomBandDef[1].Limit_High = 24000;
- HeadroomBandDef[1].Headroom_Offset = 0;
- HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
- HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
- HeadroomParams.NHeadroomBands = 2;
-
- LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
- &HeadroomParams);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- ALOGV(
- "\tLvmBundle_init CreateInstance Succesfully called "
- "LVM_SetHeadroomParams\n");
- ALOGV("\tLvmBundle_init End");
- return 0;
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+ "LVM_SetHeadroomParams\n");
+ ALOGV("\tLvmBundle_init End");
+ return 0;
} /* end LvmBundle_init */
-int lvmCreate(struct EffectContext *pContext,
- lvmConfigParams_t *plvmConfigParams,
- LVM_ControlParams_t *params) {
- int ret = 0;
- pContext->pBundledContext = NULL;
- pContext->pBundledContext = (BundledEffectContext *)malloc(sizeof(struct BundledEffectContext));
- if (NULL == pContext->pBundledContext) {
- return -EINVAL;
- }
-
- pContext->pBundledContext->SessionNo = 0;
- pContext->pBundledContext->SessionId = 0;
- pContext->pBundledContext->hInstance = NULL;
- pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
- pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
- pContext->pBundledContext->bBassEnabled = LVM_FALSE;
- pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
- pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
- pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
- pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
- pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
- pContext->pBundledContext->NumberEffectsEnabled = 0;
- pContext->pBundledContext->NumberEffectsCalled = 0;
- pContext->pBundledContext->firstVolume = LVM_TRUE;
- pContext->pBundledContext->volume = 0;
-
- /* Saved strength is used to return the exact strength that was used in the
- * set to the get
- * because we map the original strength range of 0:1000 to 1:15, and this will
- * avoid
- * quantisation like effect when returning
- */
- pContext->pBundledContext->BassStrengthSaved = 0;
- pContext->pBundledContext->VirtStrengthSaved = 0;
- pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
- pContext->pBundledContext->levelSaved = 0;
- pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
- pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
- pContext->pBundledContext->positionSaved = 0;
- pContext->pBundledContext->workBuffer = NULL;
- pContext->pBundledContext->frameCount = -1;
- pContext->pBundledContext->SamplesToExitCountVirt = 0;
- pContext->pBundledContext->SamplesToExitCountBb = 0;
- pContext->pBundledContext->SamplesToExitCountEq = 0;
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
- }
- pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
- ALOGV("\tEffectCreate - Calling LvmBundle_init");
- ret = LvmBundle_init(pContext, params);
-
- if (ret < 0) {
- ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
- return ret;
- }
- return 0;
-}
-
-int lvmControl(struct EffectContext *pContext,
- lvmConfigParams_t *plvmConfigParams,
- LVM_ControlParams_t *params) {
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-
- /* Set the initial process parameters */
- /* General parameters */
- params->OperatingMode = LVM_MODE_ON;
- params->SpeakerType = LVM_HEADPHONES;
-
- params->ChMask = plvmConfigParams->chMask;
- params->NrChannels = plvmConfigParams->nrChannels;
- if (params->NrChannels == 1) {
- params->SourceFormat = LVM_MONO;
- } else if (params->NrChannels == 2) {
- params->SourceFormat = LVM_STEREO;
- } else if (params->NrChannels > 2 && params->NrChannels <= 8) { // FCC_2 FCC_8
- params->SourceFormat = LVM_MULTICHANNEL;
- } else {
- return -EINVAL;
- }
-
- LVM_Fs_en sampleRate;
- switch (plvmConfigParams->samplingFreq) {
- case 8000:
- sampleRate = LVM_FS_8000;
- break;
- case 11025:
- sampleRate = LVM_FS_11025;
- break;
- case 12000:
- sampleRate = LVM_FS_12000;
- break;
- case 16000:
- sampleRate = LVM_FS_16000;
- break;
- case 22050:
- sampleRate = LVM_FS_22050;
- break;
- case 24000:
- sampleRate = LVM_FS_24000;
- break;
- case 32000:
- sampleRate = LVM_FS_32000;
- break;
- case 44100:
- sampleRate = LVM_FS_44100;
- break;
- case 48000:
- sampleRate = LVM_FS_48000;
- break;
- case 88200:
- sampleRate = LVM_FS_88200;
- break;
- case 96000:
- sampleRate = LVM_FS_96000;
- break;
- case 176400:
- sampleRate = LVM_FS_176400;
- break;
- case 192000:
- sampleRate = LVM_FS_192000;
- break;
- default:
- return -EINVAL;
- }
- params->SampleRate = sampleRate;
-
- /* Concert Sound parameters */
- params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
- params->VirtualizerType = LVM_CONCERTSOUND;
- params->VirtualizerReverbLevel = 100;
- params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
-
- /* N-Band Equaliser parameters */
- const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
- LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
- BandDefs[i].Gain =
- EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
- }
- params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
- // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
- params->pEQNB_BandDefinition = &BandDefs[0];
-
- /* Volume Control parameters */
- params->VC_EffectLevel = 0;
- params->VC_Balance = plvmConfigParams->vcBal;
-
- /* Treble Enhancement parameters */
- params->TE_OperatingMode = plvmConfigParams->trebleEnable;
-
- /* PSA Control parameters */
- params->PSA_Enable = LVM_PSA_ON;
-
- /* Bass Enhancement parameters */
- params->BE_OperatingMode = plvmConfigParams->bassEnable;
-
- /* Activate the initial settings */
- LvmStatus =
- LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
-
- if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- return 0;
-}
-
-int lvmExecute(float *floatIn, float *floatOut, struct EffectContext *pContext,
- lvmConfigParams_t *plvmConfigParams) {
- const int frameLength = plvmConfigParams->frameLength;
- return
- LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
- floatIn, /* Input buffer */
- floatOut, /* Output buffer */
- (LVM_UINT16)frameLength, /* Number of samples to read */
- 0); /* Audio Time */
-}
-
-int lvmMainProcess(EffectContext *pContext,
- LVM_ControlParams_t *pParams,
- lvmConfigParams_t *plvmConfigParams,
- FILE *finp,
- FILE *fout) {
- int errCode = lvmControl(pContext, plvmConfigParams, pParams);
- if (errCode) {
- ALOGE("Error: lvmControl returned with %d\n", errCode);
- return errCode;
- }
-
- const int channelCount = plvmConfigParams->nrChannels;
- const int frameLength = plvmConfigParams->frameLength;
- const int frameSize = channelCount * sizeof(float); // processing size
- const int ioChannelCount = plvmConfigParams->fChannels;
- const int ioFrameSize = ioChannelCount * sizeof(short); // file load size
- const int maxChannelCount = std::max(channelCount, ioChannelCount);
- /*
- * Mono input will be converted to 2 channels internally in the process call
- * by copying the same data into the second channel.
- * Hence when channelCount is 1, output buffer should be allocated for
- * 2 channels. The memAllocChCount takes care of allocation of sufficient
- * memory for the output buffer.
- */
- const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
-
- std::vector<short> in(frameLength * maxChannelCount);
- std::vector<short> out(frameLength * maxChannelCount);
- std::vector<float> floatIn(frameLength * channelCount);
- std::vector<float> floatOut(frameLength * memAllocChCount);
-
- int frameCounter = 0;
- while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
- if (ioChannelCount != channelCount) {
- adjust_channels(in.data(), ioChannelCount, in.data(), channelCount,
- sizeof(short), frameLength * ioFrameSize);
+int lvmCreate(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+ LVM_ControlParams_t* params) {
+ int ret = 0;
+ pContext->pBundledContext = NULL;
+ pContext->pBundledContext = (BundledEffectContext*)malloc(sizeof(struct BundledEffectContext));
+ if (NULL == pContext->pBundledContext) {
+ return -EINVAL;
}
- memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
- // Mono mode will replicate the first channel to all other channels.
- // This ensures all audio channels are identical. This is useful for testing
- // Bass Boost, which extracts a mono signal for processing.
- if (plvmConfigParams->monoMode && channelCount > 1) {
- for (int i = 0; i < frameLength; ++i) {
- auto *fp = &floatIn[i * channelCount];
- std::fill(fp + 1, fp + channelCount, *fp); // replicate ch 0
+ pContext->pBundledContext->SessionNo = 0;
+ pContext->pBundledContext->SessionId = 0;
+ pContext->pBundledContext->hInstance = NULL;
+ pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+ pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+ pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+ pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+ pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
+ pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
+ pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
+ pContext->pBundledContext->NumberEffectsEnabled = 0;
+ pContext->pBundledContext->NumberEffectsCalled = 0;
+ pContext->pBundledContext->firstVolume = LVM_TRUE;
+ pContext->pBundledContext->volume = 0;
+
+    /* Saved strength is used so that the get call returns the exact strength
+     * that was passed to the set call,
+     * because we map the original strength range of 0:1000 to 1:15, and this
+     * avoids
+     * a quantisation-like effect when returning the value
+     */
+ pContext->pBundledContext->BassStrengthSaved = 0;
+ pContext->pBundledContext->VirtStrengthSaved = 0;
+ pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+ pContext->pBundledContext->levelSaved = 0;
+ pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+ pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+ pContext->pBundledContext->positionSaved = 0;
+ pContext->pBundledContext->workBuffer = NULL;
+ pContext->pBundledContext->frameCount = -1;
+ pContext->pBundledContext->SamplesToExitCountVirt = 0;
+ pContext->pBundledContext->SamplesToExitCountBb = 0;
+ pContext->pBundledContext->SamplesToExitCountEq = 0;
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
+ }
+ pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
+ ALOGV("\tEffectCreate - Calling LvmBundle_init");
+ ret = LvmBundle_init(pContext, params);
+
+ if (ret < 0) {
+ ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
+ return ret;
+ }
+ return 0;
+}
+
+int lvmControl(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+ LVM_ControlParams_t* params) {
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+
+ /* Set the initial process parameters */
+ /* General parameters */
+ params->OperatingMode = LVM_MODE_ON;
+ params->SpeakerType = LVM_HEADPHONES;
+
+ params->ChMask = plvmConfigParams->chMask;
+ params->NrChannels = plvmConfigParams->nrChannels;
+ if (params->NrChannels == 1) {
+ params->SourceFormat = LVM_MONO;
+ } else if (params->NrChannels == 2) {
+ params->SourceFormat = LVM_STEREO;
+ } else if (params->NrChannels > 2 && params->NrChannels <= 8) { // FCC_2 FCC_8
+ params->SourceFormat = LVM_MULTICHANNEL;
+ } else {
+ return -EINVAL;
+ }
+
+ LVM_Fs_en sampleRate;
+ switch (plvmConfigParams->samplingFreq) {
+ case 8000:
+ sampleRate = LVM_FS_8000;
+ break;
+ case 11025:
+ sampleRate = LVM_FS_11025;
+ break;
+ case 12000:
+ sampleRate = LVM_FS_12000;
+ break;
+ case 16000:
+ sampleRate = LVM_FS_16000;
+ break;
+ case 22050:
+ sampleRate = LVM_FS_22050;
+ break;
+ case 24000:
+ sampleRate = LVM_FS_24000;
+ break;
+ case 32000:
+ sampleRate = LVM_FS_32000;
+ break;
+ case 44100:
+ sampleRate = LVM_FS_44100;
+ break;
+ case 48000:
+ sampleRate = LVM_FS_48000;
+ break;
+ case 88200:
+ sampleRate = LVM_FS_88200;
+ break;
+ case 96000:
+ sampleRate = LVM_FS_96000;
+ break;
+ case 176400:
+ sampleRate = LVM_FS_176400;
+ break;
+ case 192000:
+ sampleRate = LVM_FS_192000;
+ break;
+ default:
+ return -EINVAL;
+ }
+ params->SampleRate = sampleRate;
+
+ /* Concert Sound parameters */
+ params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
+ params->VirtualizerType = LVM_CONCERTSOUND;
+ params->VirtualizerReverbLevel = 100;
+ params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+ /* N-Band Equaliser parameters */
+ const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
+ LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+ BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ BandDefs[i].Gain = EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
+ }
+ params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
+ // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
+ params->pEQNB_BandDefinition = &BandDefs[0];
+
+ /* Volume Control parameters */
+ params->VC_EffectLevel = 0;
+ params->VC_Balance = plvmConfigParams->vcBal;
+
+ /* Treble Enhancement parameters */
+ params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+
+ /* PSA Control parameters */
+ params->PSA_Enable = LVM_PSA_ON;
+
+ /* Bass Enhancement parameters */
+ params->BE_OperatingMode = plvmConfigParams->bassEnable;
+
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
+
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+ LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
+
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+ return 0;
+}
+
+int lvmExecute(float* floatIn, float* floatOut, struct EffectContext* pContext,
+ lvmConfigParams_t* plvmConfigParams) {
+ const int frameLength = plvmConfigParams->frameLength;
+ return LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
+ floatIn, /* Input buffer */
+ floatOut, /* Output buffer */
+ (LVM_UINT16)frameLength, /* Number of samples to read */
+ 0); /* Audio Time */
+}
+
+int lvmMainProcess(EffectContext* pContext, LVM_ControlParams_t* pParams,
+ lvmConfigParams_t* plvmConfigParams, FILE* finp, FILE* fout) {
+ int errCode = lvmControl(pContext, plvmConfigParams, pParams);
+ if (errCode) {
+ ALOGE("Error: lvmControl returned with %d\n", errCode);
+ return errCode;
+ }
+
+ const int channelCount = plvmConfigParams->nrChannels;
+ const int frameLength = plvmConfigParams->frameLength;
+ const int frameSize = channelCount * sizeof(float); // processing size
+ const int ioChannelCount = plvmConfigParams->fChannels;
+ const int ioFrameSize = ioChannelCount * sizeof(short); // file load size
+ const int maxChannelCount = std::max(channelCount, ioChannelCount);
+ /*
+ * Mono input will be converted to 2 channels internally in the process call
+ * by copying the same data into the second channel.
+ * Hence when channelCount is 1, output buffer should be allocated for
+ * 2 channels. The memAllocChCount takes care of allocation of sufficient
+ * memory for the output buffer.
+ */
+ const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
+
+ std::vector<short> in(frameLength * maxChannelCount);
+ std::vector<short> out(frameLength * maxChannelCount);
+ std::vector<float> floatIn(frameLength * channelCount);
+ std::vector<float> floatOut(frameLength * memAllocChCount);
+
+ int frameCounter = 0;
+ while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
+ if (ioChannelCount != channelCount) {
+ adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+ frameLength * ioFrameSize);
+ }
+ memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+ // Mono mode will replicate the first channel to all other channels.
+ // This ensures all audio channels are identical. This is useful for testing
+ // Bass Boost, which extracts a mono signal for processing.
+ if (plvmConfigParams->monoMode && channelCount > 1) {
+ for (int i = 0; i < frameLength; ++i) {
+ auto* fp = &floatIn[i * channelCount];
+ std::fill(fp + 1, fp + channelCount, *fp); // replicate ch 0
+ }
+ }
+#ifndef BYPASS_EXEC
+ errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
+ if (errCode) {
+ printf("\nError: lvmExecute returned with %d\n", errCode);
+ return errCode;
+ }
+
+ (void)frameSize; // eliminate warning
+#else
+ memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+ memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
+ if (ioChannelCount != channelCount) {
+ adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+ frameLength * channelCount * sizeof(short));
+ }
+ (void)fwrite(out.data(), ioFrameSize, frameLength, fout);
+ frameCounter += frameLength;
+ }
+ printf("frameCounter: [%d]\n", frameCounter);
+ return 0;
+}
+
+int main(int argc, const char* argv[]) {
+ if (argc == 1) {
+ printUsage();
+ return -1;
+ }
+
+ lvmConfigParams_t lvmConfigParams{}; // default initialize
+ const char* infile = nullptr;
+ const char* outfile = nullptr;
+
+ for (int i = 1; i < argc; i++) {
+ printf("%s ", argv[i]);
+ if (!strncmp(argv[i], "-i:", 3)) {
+ infile = argv[i] + 3;
+ } else if (!strncmp(argv[i], "-o:", 3)) {
+ outfile = argv[i] + 3;
+ } else if (!strncmp(argv[i], "-fs:", 4)) {
+ const int samplingFreq = atoi(argv[i] + 4);
+ if (samplingFreq != 8000 && samplingFreq != 11025 && samplingFreq != 12000 &&
+ samplingFreq != 16000 && samplingFreq != 22050 && samplingFreq != 24000 &&
+ samplingFreq != 32000 && samplingFreq != 44100 && samplingFreq != 48000 &&
+ samplingFreq != 88200 && samplingFreq != 96000 && samplingFreq != 176400 &&
+ samplingFreq != 192000) {
+ printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
+ return -1;
+ }
+ lvmConfigParams.samplingFreq = samplingFreq;
+ } else if (!strncmp(argv[i], "-chMask:", 8)) {
+ const int chMaskConfigIdx = atoi(argv[i] + 8);
+ if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
+ ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
+ return -1;
+ }
+ const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
+ lvmConfigParams.chMask = chMask;
+ lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
+ } else if (!strncmp(argv[i], "-vcBal:", 7)) {
+ const int vcBalance = atoi(argv[i] + 7);
+ if (vcBalance > 96 || vcBalance < -96) {
+ ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
+ }
+ lvmConfigParams.vcBal = vcBalance;
+ } else if (!strncmp(argv[i], "-fch:", 5)) {
+ const int fChannels = atoi(argv[i] + 5);
+ if (fChannels > 8 || fChannels < 1) {
+ printf("Error: Unsupported number of file channels : %d\n", fChannels);
+ return -1;
+ }
+ lvmConfigParams.fChannels = fChannels;
+ } else if (!strcmp(argv[i], "-M")) {
+ lvmConfigParams.monoMode = true;
+ } else if (!strncmp(argv[i], "-basslvl:", 9)) {
+ const int bassEffectLevel = atoi(argv[i] + 9);
+ if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL ||
+ bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
+ printf("Error: Unsupported Bass Effect Level : %d\n", bassEffectLevel);
+ printUsage();
+ return -1;
+ }
+ lvmConfigParams.bassEffectLevel = bassEffectLevel;
+ } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
+ const int eqPresetLevel = atoi(argv[i] + 10);
+ const int numPresetLvls = std::size(gEqualizerPresets);
+ if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
+ printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
+ printUsage();
+ return -1;
+ }
+ lvmConfigParams.eqPresetLevel = eqPresetLevel;
+ } else if (!strcmp(argv[i], "-bE")) {
+ lvmConfigParams.bassEnable = LVM_BE_ON;
+ } else if (!strcmp(argv[i], "-eqE")) {
+ lvmConfigParams.eqEnable = LVM_EQNB_ON;
+ } else if (!strcmp(argv[i], "-tE")) {
+ lvmConfigParams.trebleEnable = LVM_TE_ON;
+ } else if (!strcmp(argv[i], "-csE")) {
+ lvmConfigParams.csEnable = LVM_MODE_ON;
+ } else if (!strcmp(argv[i], "-h")) {
+ printUsage();
+ return 0;
}
}
-#ifndef BYPASS_EXEC
- errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
- if (errCode) {
- printf("\nError: lvmExecute returned with %d\n", errCode);
- return errCode;
- }
- (void)frameSize; // eliminate warning
-#else
- memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
-#endif
- memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
- if (ioChannelCount != channelCount) {
- adjust_channels(out.data(), channelCount, out.data(), ioChannelCount,
- sizeof(short), frameLength * channelCount * sizeof(short));
- }
- (void) fwrite(out.data(), ioFrameSize, frameLength, fout);
- frameCounter += frameLength;
- }
- printf("frameCounter: [%d]\n", frameCounter);
- return 0;
-}
-
-int main(int argc, const char *argv[]) {
- if (argc == 1) {
- printUsage();
- return -1;
- }
-
- lvmConfigParams_t lvmConfigParams{}; // default initialize
- const char *infile = nullptr;
- const char *outfile = nullptr;
-
- for (int i = 1; i < argc; i++) {
- printf("%s ", argv[i]);
- if (!strncmp(argv[i], "-i:", 3)) {
- infile = argv[i] + 3;
- } else if (!strncmp(argv[i], "-o:", 3)) {
- outfile = argv[i] + 3;
- } else if (!strncmp(argv[i], "-fs:", 4)) {
- const int samplingFreq = atoi(argv[i] + 4);
- if (samplingFreq != 8000 && samplingFreq != 11025 &&
- samplingFreq != 12000 && samplingFreq != 16000 &&
- samplingFreq != 22050 && samplingFreq != 24000 &&
- samplingFreq != 32000 && samplingFreq != 44100 &&
- samplingFreq != 48000 && samplingFreq != 88200 &&
- samplingFreq != 96000 && samplingFreq != 176400 &&
- samplingFreq != 192000) {
- printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
- return -1;
- }
- lvmConfigParams.samplingFreq = samplingFreq;
- } else if (!strncmp(argv[i], "-chMask:", 8)) {
- const int chMaskConfigIdx = atoi(argv[i] + 8);
- if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
- ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
- return -1;
- }
- const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
- lvmConfigParams.chMask = chMask;
- lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
- } else if (!strncmp(argv[i], "-vcBal:", 7)) {
- const int vcBalance = atoi(argv[i] + 7);
- if (vcBalance > 96 || vcBalance < -96) {
- ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
- }
- lvmConfigParams.vcBal = vcBalance;
- } else if (!strncmp(argv[i], "-fch:", 5)) {
- const int fChannels = atoi(argv[i] + 5);
- if (fChannels > 8 || fChannels < 1) {
- printf("Error: Unsupported number of file channels : %d\n", fChannels);
- return -1;
- }
- lvmConfigParams.fChannels = fChannels;
- } else if (!strcmp(argv[i],"-M")) {
- lvmConfigParams.monoMode = true;
- } else if (!strncmp(argv[i], "-basslvl:", 9)) {
- const int bassEffectLevel = atoi(argv[i] + 9);
- if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL || bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
- printf("Error: Unsupported Bass Effect Level : %d\n",
- bassEffectLevel);
+ if (infile == nullptr || outfile == nullptr) {
+ printf("Error: missing input/output files\n");
printUsage();
return -1;
- }
- lvmConfigParams.bassEffectLevel = bassEffectLevel;
- } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
- const int eqPresetLevel = atoi(argv[i] + 10);
- const int numPresetLvls = std::size(gEqualizerPresets);
- if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
- printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
- printUsage();
- return -1;
- }
- lvmConfigParams.eqPresetLevel = eqPresetLevel;
- } else if (!strcmp(argv[i], "-bE")) {
- lvmConfigParams.bassEnable = LVM_BE_ON;
- } else if (!strcmp(argv[i], "-eqE")) {
- lvmConfigParams.eqEnable = LVM_EQNB_ON;
- } else if (!strcmp(argv[i], "-tE")) {
- lvmConfigParams.trebleEnable = LVM_TE_ON;
- } else if (!strcmp(argv[i], "-csE")) {
- lvmConfigParams.csEnable = LVM_MODE_ON;
- } else if (!strcmp(argv[i], "-h")) {
- printUsage();
- return 0;
}
- }
- if (infile == nullptr || outfile == nullptr) {
- printf("Error: missing input/output files\n");
- printUsage();
- return -1;
- }
+ FILE* finp = fopen(infile, "rb");
+ if (finp == nullptr) {
+ printf("Cannot open input file %s", infile);
+ return -1;
+ }
- FILE *finp = fopen(infile, "rb");
- if (finp == nullptr) {
- printf("Cannot open input file %s", infile);
- return -1;
- }
+ FILE* fout = fopen(outfile, "wb");
+ if (fout == nullptr) {
+ printf("Cannot open output file %s", outfile);
+ fclose(finp);
+ return -1;
+ }
- FILE *fout = fopen(outfile, "wb");
- if (fout == nullptr) {
- printf("Cannot open output file %s", outfile);
+ EffectContext context;
+ LVM_ControlParams_t params;
+ int errCode = lvmCreate(&context, &lvmConfigParams, ¶ms);
+ if (errCode == 0) {
+ errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
+ if (errCode != 0) {
+ printf("Error: lvmMainProcess returned with the error: %d", errCode);
+ }
+ } else {
+ printf("Error: lvmCreate returned with the error: %d", errCode);
+ }
fclose(finp);
- return -1;
- }
-
- EffectContext context;
- LVM_ControlParams_t params;
- int errCode = lvmCreate(&context, &lvmConfigParams, &params);
- if (errCode == 0) {
- errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
- if (errCode != 0) {
- printf("Error: lvmMainProcess returned with the error: %d",errCode);
+ fclose(fout);
+ /* Free the allocated buffers */
+ if (context.pBundledContext != nullptr) {
+ if (context.pBundledContext->hInstance != nullptr) {
+ LVM_DelInstanceHandle(&context.pBundledContext->hInstance);
+ }
+ free(context.pBundledContext);
}
- } else {
- printf("Error: lvmCreate returned with the error: %d", errCode);
- }
- fclose(finp);
- fclose(fout);
- /* Free the allocated buffers */
- if (context.pBundledContext != nullptr) {
- if (context.pBundledContext->hInstance != nullptr) {
- LvmEffect_free(&context);
- }
- free(context.pBundledContext);
- }
- if (errCode) {
- return -1;
- }
- return 0;
+ if (errCode) {
+ return -1;
+ }
+ return 0;
}
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
new file mode 100644
index 0000000..7cbca9b
--- /dev/null
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <getopt.h>
+#include <inttypes.h>
+#include <iterator>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <vector>
+
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <log/log.h>
+#include <system/audio.h>
+
+#include "EffectReverb.h"
+
+// This is the only symbol that needs to be exported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+// Global Variables
+enum ReverbParams {
+ ARG_HELP = 1,
+ ARG_INPUT,
+ ARG_OUTPUT,
+ ARG_FS,
+ ARG_CH_MASK,
+ ARG_PRESET,
+ ARG_AUX,
+ ARG_MONO_MODE,
+ ARG_FILE_CH,
+};
+
+const effect_uuid_t kReverbUuids[] = {
+ {0x172cdf00,
+ 0xa3bc,
+ 0x11df,
+ 0xa72f,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // preset-insert mode
+ {0xf29a1400,
+ 0xa3bb,
+ 0x11df,
+ 0x8ddc,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // preset-aux mode
+};
+
+// structures
+struct reverbConfigParams_t {
+ int fChannels = 2;
+ int monoMode = false;
+ int frameLength = 256;
+ int preset = 0;
+ int nrChannels = 2;
+ int sampleRate = 48000;
+ int auxiliary = 0;
+ audio_channel_mask_t chMask = AUDIO_CHANNEL_OUT_STEREO;
+};
+
+constexpr audio_channel_mask_t kReverbConfigChMask[] = {
+ AUDIO_CHANNEL_OUT_MONO,
+ AUDIO_CHANNEL_OUT_STEREO,
+ AUDIO_CHANNEL_OUT_2POINT1,
+ AUDIO_CHANNEL_OUT_2POINT0POINT2,
+ AUDIO_CHANNEL_OUT_QUAD,
+ AUDIO_CHANNEL_OUT_QUAD_BACK,
+ AUDIO_CHANNEL_OUT_QUAD_SIDE,
+ AUDIO_CHANNEL_OUT_SURROUND,
+ AUDIO_CHANNEL_INDEX_MASK_4,
+ AUDIO_CHANNEL_OUT_2POINT1POINT2,
+ AUDIO_CHANNEL_OUT_3POINT0POINT2,
+ AUDIO_CHANNEL_OUT_PENTA,
+ AUDIO_CHANNEL_INDEX_MASK_5,
+ AUDIO_CHANNEL_OUT_3POINT1POINT2,
+ AUDIO_CHANNEL_OUT_5POINT1,
+ AUDIO_CHANNEL_OUT_5POINT1_BACK,
+ AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+ AUDIO_CHANNEL_INDEX_MASK_6,
+ AUDIO_CHANNEL_OUT_6POINT1,
+ AUDIO_CHANNEL_INDEX_MASK_7,
+ AUDIO_CHANNEL_OUT_5POINT1POINT2,
+ AUDIO_CHANNEL_OUT_7POINT1,
+ AUDIO_CHANNEL_INDEX_MASK_8,
+};
+
+constexpr int kReverbConfigChMaskCount = std::size(kReverbConfigChMask);
+
+int reverbCreateEffect(effect_handle_t* pEffectHandle, effect_config_t* pConfig, int sessionId,
+ int ioId, int auxFlag) {
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kReverbUuids[auxFlag], sessionId,
+ ioId, pEffectHandle);
+ status != 0) {
+ ALOGE("Reverb create returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ (**pEffectHandle)
+ ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+ &replySize, &reply);
+ return reply;
+}
+
+int reverbSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ uint32_t paramData[2] = {paramType, paramValue};
+ effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+ memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+ effectParam->psize = sizeof(paramData[0]);
+ effectParam->vsize = sizeof(paramData[1]);
+ int status = (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_SET_PARAM,
+ sizeof(effect_param_t) + sizeof(paramData), effectParam,
+ &replySize, &reply);
+ free(effectParam);
+ if (status != 0) {
+ ALOGE("Reverb set config returned an error = %d\n", status);
+ return status;
+ }
+ return reply;
+}
+
+void printUsage() {
+ printf("\nUsage: ");
+ printf("\n <executable> [options]\n");
+ printf("\nwhere options are, ");
+ printf("\n --input <inputfile>");
+ printf("\n path to the input file");
+ printf("\n --output <outputfile>");
+ printf("\n path to the output file");
+ printf("\n --help");
+ printf("\n prints this usage information");
+ printf("\n --chMask <channel_mask>\n");
+ printf("\n 0 - AUDIO_CHANNEL_OUT_MONO");
+ printf("\n 1 - AUDIO_CHANNEL_OUT_STEREO");
+ printf("\n 2 - AUDIO_CHANNEL_OUT_2POINT1");
+ printf("\n 3 - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+ printf("\n 4 - AUDIO_CHANNEL_OUT_QUAD");
+ printf("\n 5 - AUDIO_CHANNEL_OUT_QUAD_BACK");
+ printf("\n 6 - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+ printf("\n 7 - AUDIO_CHANNEL_OUT_SURROUND");
+ printf("\n 8 - canonical channel index mask for 4 ch: (1 << 4) - 1");
+ printf("\n 9 - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+ printf("\n 10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+ printf("\n 11 - AUDIO_CHANNEL_OUT_PENTA");
+ printf("\n 12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+ printf("\n 13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+ printf("\n 14 - AUDIO_CHANNEL_OUT_5POINT1");
+ printf("\n 15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+ printf("\n 16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+ printf("\n 17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+ printf("\n 18 - AUDIO_CHANNEL_OUT_6POINT1");
+ printf("\n 19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+ printf("\n 20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+ printf("\n 21 - AUDIO_CHANNEL_OUT_7POINT1");
+ printf("\n 22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+ printf("\n default 0");
+ printf("\n --fs <sampling_freq>");
+ printf("\n Sampling frequency in Hz, default 48000.");
+ printf("\n --preset <preset_value>");
+ printf("\n 0 - None");
+ printf("\n 1 - Small Room");
+ printf("\n 2 - Medium Room");
+ printf("\n 3 - Large Room");
+ printf("\n 4 - Medium Hall");
+ printf("\n 5 - Large Hall");
+ printf("\n 6 - Plate");
+ printf("\n default 0");
+ printf("\n --fch <file_channels>");
+ printf("\n number of channels in input file (1 through 8), default 1");
+ printf("\n --M");
+ printf("\n Mono mode (force all input audio channels to be identical)");
+ printf("\n --aux <auxiliary_flag> ");
+ printf("\n 0 - Insert Mode on");
+ printf("\n 1 - auxiliary Mode on");
+ printf("\n default 0");
+ printf("\n");
+}
+
+int main(int argc, const char* argv[]) {
+ if (argc == 1) {
+ printUsage();
+ return EXIT_FAILURE;
+ }
+
+ reverbConfigParams_t revConfigParams{}; // default initialize
+ const char* inputFile = nullptr;
+ const char* outputFile = nullptr;
+
+ const option long_opts[] = {
+ {"help", no_argument, nullptr, ARG_HELP},
+ {"input", required_argument, nullptr, ARG_INPUT},
+ {"output", required_argument, nullptr, ARG_OUTPUT},
+ {"fs", required_argument, nullptr, ARG_FS},
+ {"chMask", required_argument, nullptr, ARG_CH_MASK},
+ {"preset", required_argument, nullptr, ARG_PRESET},
+ {"aux", required_argument, nullptr, ARG_AUX},
+ {"M", no_argument, &revConfigParams.monoMode, true},
+ {"fch", required_argument, nullptr, ARG_FILE_CH},
+ {nullptr, 0, nullptr, 0},
+ };
+
+ while (true) {
+ const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
+ if (opt == -1) {
+ break;
+ }
+ switch (opt) {
+ case ARG_HELP:
+ printUsage();
+ return EXIT_SUCCESS;
+ case ARG_INPUT: {
+ inputFile = (char*)optarg;
+ break;
+ }
+ case ARG_OUTPUT: {
+ outputFile = (char*)optarg;
+ break;
+ }
+ case ARG_FS: {
+ revConfigParams.sampleRate = atoi(optarg);
+ break;
+ }
+ case ARG_CH_MASK: {
+ int chMaskIdx = atoi(optarg);
+ if (chMaskIdx < 0 or chMaskIdx > kReverbConfigChMaskCount) {
+ ALOGE("Channel Mask index not in correct range\n");
+ printUsage();
+ return EXIT_FAILURE;
+ }
+ revConfigParams.chMask = kReverbConfigChMask[chMaskIdx];
+ break;
+ }
+ case ARG_PRESET: {
+ revConfigParams.preset = atoi(optarg);
+ break;
+ }
+ case ARG_AUX: {
+ revConfigParams.auxiliary = atoi(optarg);
+ break;
+ }
+ case ARG_MONO_MODE: {
+ break;
+ }
+ case ARG_FILE_CH: {
+ revConfigParams.fChannels = atoi(optarg);
+ break;
+ }
+ default:
+ break;
+ }
+ }
+
+ if (inputFile == nullptr) {
+ ALOGE("Error: missing input files\n");
+ printUsage();
+ return EXIT_FAILURE;
+ }
+ std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+
+ if (inputFp == nullptr) {
+ ALOGE("Cannot open input file %s\n", inputFile);
+ return EXIT_FAILURE;
+ }
+
+ if (outputFile == nullptr) {
+ ALOGE("Error: missing output files\n");
+ printUsage();
+ return EXIT_FAILURE;
+ }
+ std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+
+ if (outputFp == nullptr) {
+ ALOGE("Cannot open output file %s\n", outputFile);
+ return EXIT_FAILURE;
+ }
+
+ int32_t sessionId = 1;
+ int32_t ioId = 1;
+ effect_handle_t effectHandle = nullptr;
+ effect_config_t config;
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = revConfigParams.sampleRate;
+ config.inputCfg.channels = config.outputCfg.channels = revConfigParams.chMask;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+ if (AUDIO_CHANNEL_OUT_MONO == revConfigParams.chMask) {
+ config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ }
+ if (int status = reverbCreateEffect(&effectHandle, &config, sessionId, ioId,
+ revConfigParams.auxiliary);
+ status != 0) {
+ ALOGE("Create effect call returned error %i", status);
+ return EXIT_FAILURE;
+ }
+
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ (*effectHandle)->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+ if (reply != 0) {
+ ALOGE("Command enable call returned error %d\n", reply);
+ return EXIT_FAILURE;
+ }
+
+ if (int status = reverbSetConfigParam(REVERB_PARAM_PRESET, (uint32_t)revConfigParams.preset,
+ effectHandle);
+ status != 0) {
+ ALOGE("Invalid reverb preset. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+
+ revConfigParams.nrChannels = audio_channel_count_from_out_mask(revConfigParams.chMask);
+ const int channelCount = revConfigParams.nrChannels;
+ const int frameLength = revConfigParams.frameLength;
+#ifdef BYPASS_EXEC
+ const int frameSize = (int)channelCount * sizeof(float);
+#endif
+ const int ioChannelCount = revConfigParams.fChannels;
+ const int ioFrameSize = ioChannelCount * sizeof(short);
+ const int maxChannelCount = std::max(channelCount, ioChannelCount);
+ /*
+ * Mono input will be converted to 2 channels internally in the process call
+ * by copying the same data into the second channel.
+ * Hence when channelCount is 1, output buffer should be allocated for
+ * 2 channels. The outChannelCount takes care of allocation of sufficient
+ * memory for the output buffer.
+ */
+ const int outChannelCount = (channelCount == 1 ? 2 : channelCount);
+
+ std::vector<short> in(frameLength * maxChannelCount);
+ std::vector<short> out(frameLength * maxChannelCount);
+ std::vector<float> floatIn(frameLength * channelCount);
+ std::vector<float> floatOut(frameLength * outChannelCount);
+
+ int frameCounter = 0;
+
+ while (fread(in.data(), ioFrameSize, frameLength, inputFp.get()) == (size_t)frameLength) {
+ if (ioChannelCount != channelCount) {
+ adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+ frameLength * ioFrameSize);
+ }
+ memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+ // Mono mode will replicate the first channel to all other channels.
+ // This ensures all audio channels are identical. This is useful for testing
+ // Bass Boost, which extracts a mono signal for processing.
+ if (revConfigParams.monoMode && channelCount > 1) {
+ for (int i = 0; i < frameLength; ++i) {
+ auto* fp = &floatIn[i * channelCount];
+ std::fill(fp + 1, fp + channelCount, *fp); // replicate ch 0
+ }
+ }
+
+ audio_buffer_t inputBuffer, outputBuffer;
+ inputBuffer.frameCount = outputBuffer.frameCount = frameLength;
+ inputBuffer.f32 = floatIn.data();
+ outputBuffer.f32 = floatOut.data();
+#ifndef BYPASS_EXEC
+ if (int status = (*effectHandle)->process(effectHandle, &inputBuffer, &outputBuffer);
+ status != 0) {
+ ALOGE("\nError: Process returned with error %d\n", status);
+ return EXIT_FAILURE;
+ }
+#else
+ memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+ memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * outChannelCount);
+
+ if (ioChannelCount != outChannelCount) {
+ adjust_channels(out.data(), outChannelCount, out.data(), ioChannelCount, sizeof(short),
+ frameLength * outChannelCount * sizeof(short));
+ }
+ (void)fwrite(out.data(), ioFrameSize, frameLength, outputFp.get());
+ frameCounter += frameLength;
+ }
+
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+ ALOGE("Audio Preprocessing release returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ printf("frameCounter: [%d]\n", frameCounter);
+
+ return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/lvm/tests/snr.cpp b/media/libeffects/lvm/tests/snr.cpp
index 885994c..0fef334 100644
--- a/media/libeffects/lvm/tests/snr.cpp
+++ b/media/libeffects/lvm/tests/snr.cpp
@@ -22,84 +22,83 @@
#include <vector>
template <typename T, typename A = float>
-std::pair<A, A> getSignalNoise(FILE *finp, FILE *fref) {
- constexpr size_t framesize = 256;
- std::vector<T> in(framesize);
- std::vector<T> ref(framesize);
- A signal{};
- A noise{};
+std::pair<A, A> getSignalNoise(FILE* finp, FILE* fref) {
+ constexpr size_t framesize = 256;
+ std::vector<T> in(framesize);
+ std::vector<T> ref(framesize);
+ A signal{};
+ A noise{};
- for (;;) {
- size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
- const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
- if (read_samples_in != read_samples_ref) {
- printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
- read_samples_in = std::min(read_samples_in, read_samples_ref);
+ for (;;) {
+ size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
+ const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
+ if (read_samples_in != read_samples_ref) {
+ printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
+ read_samples_in = std::min(read_samples_in, read_samples_ref);
+ }
+ if (read_samples_in == 0) {
+ return {signal, noise};
+ }
+ for (size_t i = 0; i < read_samples_in; ++i) {
+ const A value(ref[i]);
+ const A diff(A(in[i]) - value);
+ signal += value * value;
+ noise += diff * diff;
+ }
}
- if (read_samples_in == 0) {
- return { signal, noise };
- }
- for (size_t i = 0; i < read_samples_in; ++i) {
- const A value(ref[i]);
- const A diff(A(in[i]) - value);
- signal += value * value;
- noise += diff * diff;
- }
- }
}
void printUsage() {
- printf("\nUsage: ");
- printf("\n snr <ref_file> <test_file> [options]\n");
- printf("\nwhere, \n <ref_file> is the reference file name");
- printf("\n on which will be taken as pure signal");
- printf("\n <test_file> is test file for snr calculation");
- printf("\n and options are mentioned below");
- printf("\n");
- printf("\n -pcm_format:<pcm format of input files>");
- printf("\n 0 - 16 bit pcm");
- printf("\n 1 - 32 bit float");
- printf("\n default 0");
- printf("\n -thr:<threshold value>");
- printf("\n default - negative infinity\n\n");
+ printf("\nUsage: ");
+ printf("\n snr <ref_file> <test_file> [options]\n");
+ printf("\nwhere, \n <ref_file> is the reference file name");
+ printf("\n on which will be taken as pure signal");
+ printf("\n <test_file> is test file for snr calculation");
+ printf("\n and options are mentioned below");
+ printf("\n");
+ printf("\n -pcm_format:<pcm format of input files>");
+ printf("\n 0 - 16 bit pcm");
+ printf("\n 1 - 32 bit float");
+ printf("\n default 0");
+ printf("\n -thr:<threshold value>");
+ printf("\n default - negative infinity\n\n");
}
-int main(int argc, const char *argv[]) {
- if (argc < 3) {
- printUsage();
- return -1;
- }
- int pcm_format = 0;
- float thr = - std::numeric_limits<float>::infinity();
- FILE *fref = fopen(argv[1], "rb");
- FILE *finp = fopen(argv[2], "rb");
- for (int i = 3; i < argc; i++) {
- if (!strncmp(argv[i], "-pcm_format:", 12)) {
- pcm_format = atoi(argv[i] + 12);
- } else if (!strncmp(argv[i], "-thr:", 5)) {
- thr = atof(argv[i] + 5);
+int main(int argc, const char* argv[]) {
+ if (argc < 3) {
+ printUsage();
+ return -1;
}
- }
- if (finp == nullptr || fref == nullptr) {
- printf("\nError: missing input/reference files\n");
- return -1;
- }
- int ret = EXIT_SUCCESS;
- auto sn = pcm_format == 0
- ? getSignalNoise<short>(finp, fref)
- : getSignalNoise<float>(finp, fref);
- if (sn.first > 0.f && sn.second > 0.f) {
- float snr = 10.f * log(sn.first / sn.second);
- // compare the measured snr value with threshold
- if (snr < thr) {
- printf("%.6f less than threshold %.6f\n", snr, thr);
- ret = EXIT_FAILURE;
- } else {
- printf("%.6f\n", snr);
+ int pcm_format = 0;
+ float thr = -std::numeric_limits<float>::infinity();
+ FILE* fref = fopen(argv[1], "rb");
+ FILE* finp = fopen(argv[2], "rb");
+ for (int i = 3; i < argc; i++) {
+ if (!strncmp(argv[i], "-pcm_format:", 12)) {
+ pcm_format = atoi(argv[i] + 12);
+ } else if (!strncmp(argv[i], "-thr:", 5)) {
+ thr = atof(argv[i] + 5);
+ }
}
- }
- fclose(finp);
- fclose(fref);
+ if (finp == nullptr || fref == nullptr) {
+ printf("\nError: missing input/reference files\n");
+ return -1;
+ }
+ int ret = EXIT_SUCCESS;
+ auto sn =
+ pcm_format == 0 ? getSignalNoise<short>(finp, fref) : getSignalNoise<float>(finp, fref);
+ if (sn.first > 0.f && sn.second > 0.f) {
+ float snr = 10.f * log(sn.first / sn.second);
+ // compare the measured snr value with threshold
+ if (snr < thr) {
+ printf("%.6f less than threshold %.6f\n", snr, thr);
+ ret = EXIT_FAILURE;
+ } else {
+ printf("%.6f\n", snr);
+ }
+ }
+ fclose(finp);
+ fclose(fref);
- return ret;
+ return ret;
}
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index afc4220..be60aae 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,5 @@
// music bundle wrapper
-cc_library_shared {
+cc_library {
name: "libbundlewrapper",
arch: {
@@ -13,7 +13,6 @@
cppflags: [
"-fvisibility=hidden",
- "-DSUPPORT_MC",
"-Wall",
"-Werror",
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 6fca0e7..865baad 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -18,7 +18,7 @@
typedef float LVM_FLOAT;
#endif
#define LOG_TAG "Bundle"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
//#define LOG_NDEBUG 0
#include <assert.h>
@@ -42,26 +42,33 @@
#ifdef VERY_VERY_VERBOSE_LOGGING
#define ALOGVV ALOGV
#else
-#define ALOGVV(a...) do { } while (false)
+#define ALOGVV(a...) \
+ do { \
+ } while (false)
#endif
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
- if ((LvmStatus) == LVM_NULLADDRESS){\
- ALOGV("\tLVM_ERROR : Parameter error - "\
- "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
- }\
- if ((LvmStatus) == LVM_ALIGNMENTERROR){\
- ALOGV("\tLVM_ERROR : Parameter error - "\
- "bad alignment returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
- }\
- if ((LvmStatus) == LVM_INVALIDNUMSAMPLES){\
- ALOGV("\tLVM_ERROR : Parameter error - "\
- "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
- }\
- if ((LvmStatus) == LVM_OUTOFRANGE){\
- ALOGV("\tLVM_ERROR : Parameter error - "\
- "out of range returned by %s in %s\n", callingFunc, calledFunc);\
- }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc) \
+ { \
+ if ((LvmStatus) == LVM_NULLADDRESS) { \
+ ALOGV("\tLVM_ERROR : Parameter error - " \
+ "null pointer returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_ALIGNMENTERROR) { \
+ ALOGV("\tLVM_ERROR : Parameter error - " \
+ "bad alignment returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) { \
+ ALOGV("\tLVM_ERROR : Parameter error - " \
+ "bad number of samples returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVM_OUTOFRANGE) { \
+ ALOGV("\tLVM_ERROR : Parameter error - " \
+ "out of range returned by %s in %s\n", \
+ callingFunc, calledFunc); \
+ } \
}
// Namespaces
@@ -74,20 +81,21 @@
int SessionIndex[LVM_MAX_SESSIONS];
/* local functions */
-#define CHECK_ARG(cond) { \
- if (!(cond)) { \
- ALOGV("\tLVM_ERROR : Invalid argument: "#cond); \
- return -EINVAL; \
- } \
-}
+#define CHECK_ARG(cond) \
+ { \
+ if (!(cond)) { \
+ ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+ return -EINVAL; \
+ } \
+ }
// NXP SW BassBoost UUID
const effect_descriptor_t gBassBoostDescriptor = {
- {0x0634f220, 0xddd4, 0x11db, 0xa0fc, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
- {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
+ {0x0634f220, 0xddd4, 0x11db, 0xa0fc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
EFFECT_CONTROL_API_VERSION,
- (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND
- | EFFECT_FLAG_VOLUME_CTRL),
+ (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND |
+ EFFECT_FLAG_VOLUME_CTRL),
BASS_BOOST_CUP_LOAD_ARM9E,
BUNDLE_MEM_USAGE,
"Dynamic Bass Boost",
@@ -99,8 +107,8 @@
{0x37cc2c00, 0xdddd, 0x11db, 0x8577, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
{0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
EFFECT_CONTROL_API_VERSION,
- (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND
- | EFFECT_FLAG_VOLUME_CTRL),
+ (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND |
+ EFFECT_FLAG_VOLUME_CTRL),
VIRTUALIZER_CUP_LOAD_ARM9E,
BUNDLE_MEM_USAGE,
"Virtualizer",
@@ -109,8 +117,8 @@
// NXP SW Equalizer UUID
const effect_descriptor_t gEqualizerDescriptor = {
- {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
- {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP
+ {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+ {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP
EFFECT_CONTROL_API_VERSION,
(EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL),
EQUALIZER_CUP_LOAD_ARM9E,
@@ -121,8 +129,8 @@
// NXP SW Volume UUID
const effect_descriptor_t gVolumeDescriptor = {
- {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
- {0x119341a0, 0x8469, 0x11df, 0x81f9, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, //uuid VOL NXP
+ {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid VOL NXP
EFFECT_CONTROL_API_VERSION,
(EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL),
VOLUME_CUP_LOAD_ARM9E,
@@ -132,77 +140,50 @@
};
//--- local function prototypes
-void LvmGlobalBundle_init (void);
-int LvmBundle_init (EffectContext *pContext);
-int LvmEffect_enable (EffectContext *pContext);
-int LvmEffect_disable (EffectContext *pContext);
-void LvmEffect_free (EffectContext *pContext);
-int Effect_setConfig (EffectContext *pContext, effect_config_t *pConfig);
-void Effect_getConfig (EffectContext *pContext, effect_config_t *pConfig);
-int BassBoost_setParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue);
-int BassBoost_getParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue);
-int Virtualizer_setParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue);
-int Virtualizer_getParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue);
-int Equalizer_setParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue);
-int Equalizer_getParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue);
-int Volume_setParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue);
-int Volume_getParameter (EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue);
-int Effect_setEnabled(EffectContext *pContext, bool enabled);
+void LvmGlobalBundle_init(void);
+int LvmBundle_init(EffectContext* pContext);
+int LvmEffect_enable(EffectContext* pContext);
+int LvmEffect_disable(EffectContext* pContext);
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig);
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig);
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue);
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue);
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue);
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue);
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue);
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue);
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue);
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue);
+int Effect_setEnabled(EffectContext* pContext, bool enabled);
/* Effect Library Interface Implementation */
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
- int32_t sessionId,
- int32_t ioId __unused,
- effect_handle_t *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId, int32_t ioId __unused,
+ effect_handle_t* pHandle) {
int ret = 0;
int sessionNo = -1;
int i;
- EffectContext *pContext = NULL;
+ EffectContext* pContext = NULL;
bool newBundle = false;
- SessionContext *pSessionContext;
+ SessionContext* pSessionContext;
ALOGV("\n\tEffectCreate start session %d", sessionId);
- if (pHandle == NULL || uuid == NULL){
+ if (pHandle == NULL || uuid == NULL) {
ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
ret = -EINVAL;
goto exit;
}
- if(LvmInitFlag == LVM_FALSE){
+ if (LvmInitFlag == LVM_FALSE) {
LvmInitFlag = LVM_TRUE;
ALOGV("\tEffectCreate - Initializing all global memory");
LvmGlobalBundle_init();
@@ -210,7 +191,7 @@
// Find sessionNo: if one already exists for the sessionId use it,
// otherwise choose the first available empty slot.
- for(i=0; i<LVM_MAX_SESSIONS; i++){
+ for (i = 0; i < LVM_MAX_SESSIONS; i++) {
if (SessionIndex[i] == sessionId) {
sessionNo = i;
break;
@@ -232,106 +213,102 @@
pContext = new EffectContext;
// If this is the first create in this session
- if(GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE){
+ if (GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE) {
ALOGV("\tEffectCreate - This is the first effect in current sessionId %d sessionNo %d",
- sessionId, sessionNo);
+ sessionId, sessionNo);
GlobalSessionMemory[sessionNo].bBundledEffectsEnabled = LVM_TRUE;
- GlobalSessionMemory[sessionNo].pBundledContext = new BundledEffectContext;
+ GlobalSessionMemory[sessionNo].pBundledContext = new BundledEffectContext;
newBundle = true;
pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
- pContext->pBundledContext->SessionNo = sessionNo;
- pContext->pBundledContext->SessionId = sessionId;
- pContext->pBundledContext->hInstance = NULL;
- pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
- pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
- pContext->pBundledContext->bBassEnabled = LVM_FALSE;
- pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
- pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
+ pContext->pBundledContext->SessionNo = sessionNo;
+ pContext->pBundledContext->SessionId = sessionId;
+ pContext->pBundledContext->hInstance = NULL;
+ pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+ pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+ pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+ pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+ pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
- pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
+ pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
- pContext->pBundledContext->NumberEffectsEnabled = 0;
- pContext->pBundledContext->NumberEffectsCalled = 0;
- pContext->pBundledContext->firstVolume = LVM_TRUE;
- pContext->pBundledContext->volume = 0;
-
+ pContext->pBundledContext->NumberEffectsEnabled = 0;
+ pContext->pBundledContext->NumberEffectsCalled = 0;
+ pContext->pBundledContext->firstVolume = LVM_TRUE;
+ pContext->pBundledContext->volume = 0;
/* Saved strength is used to return the exact strength that was used in the set to the get
* because we map the original strength range of 0:1000 to 1:15, and this will avoid
* quantisation like effect when returning
*/
- pContext->pBundledContext->BassStrengthSaved = 0;
- pContext->pBundledContext->VirtStrengthSaved = 0;
- pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
- pContext->pBundledContext->levelSaved = 0;
- pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
- pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
- pContext->pBundledContext->positionSaved = 0;
- pContext->pBundledContext->workBuffer = NULL;
- pContext->pBundledContext->frameCount = -1;
- pContext->pBundledContext->SamplesToExitCountVirt = 0;
- pContext->pBundledContext->SamplesToExitCountBb = 0;
- pContext->pBundledContext->SamplesToExitCountEq = 0;
+ pContext->pBundledContext->BassStrengthSaved = 0;
+ pContext->pBundledContext->VirtStrengthSaved = 0;
+ pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+ pContext->pBundledContext->levelSaved = 0;
+ pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+ pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+ pContext->pBundledContext->positionSaved = 0;
+ pContext->pBundledContext->workBuffer = NULL;
+ pContext->pBundledContext->frameCount = -1;
+ pContext->pBundledContext->SamplesToExitCountVirt = 0;
+ pContext->pBundledContext->SamplesToExitCountBb = 0;
+ pContext->pBundledContext->SamplesToExitCountEq = 0;
for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
}
- pContext->pBundledContext->effectProcessCalled = 0;
- pContext->pBundledContext->effectInDrain = 0;
+ pContext->pBundledContext->effectProcessCalled = 0;
+ pContext->pBundledContext->effectInDrain = 0;
ALOGV("\tEffectCreate - Calling LvmBundle_init");
ret = LvmBundle_init(pContext);
- if (ret < 0){
+ if (ret < 0) {
ALOGV("\tLVM_ERROR : EffectCreate() Bundle init failed");
goto exit;
}
- }
- else{
+ } else {
ALOGV("\tEffectCreate - Assigning memory for previously created effect on sessionNo %d",
- sessionNo);
- pContext->pBundledContext =
- GlobalSessionMemory[sessionNo].pBundledContext;
+ sessionNo);
+ pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
}
ALOGV("\tEffectCreate - pBundledContext is %p", pContext->pBundledContext);
pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
// Create each Effect
- if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+ if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
// Create Bass Boost
ALOGV("\tEffectCreate - Effect to be created is LVM_BASS_BOOST");
pSessionContext->bBassInstantiated = LVM_TRUE;
pContext->pBundledContext->SamplesToExitCountBb = 0;
- pContext->itfe = &gLvmEffectInterface;
+ pContext->itfe = &gLvmEffectInterface;
pContext->EffectType = LVM_BASS_BOOST;
- } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+ } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
// Create Virtualizer
ALOGV("\tEffectCreate - Effect to be created is LVM_VIRTUALIZER");
- pSessionContext->bVirtualizerInstantiated=LVM_TRUE;
+ pSessionContext->bVirtualizerInstantiated = LVM_TRUE;
pContext->pBundledContext->SamplesToExitCountVirt = 0;
- pContext->itfe = &gLvmEffectInterface;
+ pContext->itfe = &gLvmEffectInterface;
pContext->EffectType = LVM_VIRTUALIZER;
- } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+ } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
// Create Equalizer
ALOGV("\tEffectCreate - Effect to be created is LVM_EQUALIZER");
pSessionContext->bEqualizerInstantiated = LVM_TRUE;
pContext->pBundledContext->SamplesToExitCountEq = 0;
- pContext->itfe = &gLvmEffectInterface;
+ pContext->itfe = &gLvmEffectInterface;
pContext->EffectType = LVM_EQUALIZER;
- } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+ } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
// Create Volume
ALOGV("\tEffectCreate - Effect to be created is LVM_VOLUME");
pSessionContext->bVolumeInstantiated = LVM_TRUE;
- pContext->itfe = &gLvmEffectInterface;
+ pContext->itfe = &gLvmEffectInterface;
pContext->EffectType = LVM_VOLUME;
- }
- else{
+ } else {
ALOGV("\tLVM_ERROR : EffectCreate() invalid UUID");
ret = -EINVAL;
goto exit;
@@ -347,66 +324,64 @@
}
delete pContext;
}
- if (pHandle != NULL)
- *pHandle = (effect_handle_t)NULL;
+ if (pHandle != NULL) *pHandle = (effect_handle_t)NULL;
} else {
- if (pHandle != NULL)
- *pHandle = (effect_handle_t)pContext;
+ if (pHandle != NULL) *pHandle = (effect_handle_t)pContext;
}
ALOGV("\tEffectCreate end..\n\n");
return ret;
} /* end EffectCreate */
-extern "C" int EffectRelease(effect_handle_t handle){
+extern "C" int EffectRelease(effect_handle_t handle) {
ALOGV("\n\tEffectRelease start %p", handle);
- EffectContext * pContext = (EffectContext *)handle;
+ EffectContext* pContext = (EffectContext*)handle;
ALOGV("\tEffectRelease start handle: %p, context %p", handle, pContext->pBundledContext);
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
return -EINVAL;
}
- SessionContext *pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
+ SessionContext* pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
// Clear the instantiated flag for the effect
// protect agains the case where an effect is un-instantiated without being disabled
- int &effectInDrain = pContext->pBundledContext->effectInDrain;
- if(pContext->EffectType == LVM_BASS_BOOST) {
+ int& effectInDrain = pContext->pBundledContext->effectInDrain;
+ if (pContext->EffectType == LVM_BASS_BOOST) {
ALOGV("\tEffectRelease LVM_BASS_BOOST Clearing global intstantiated flag");
pSessionContext->bBassInstantiated = LVM_FALSE;
- if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+ if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
pContext->pBundledContext->NumberEffectsEnabled--;
}
pContext->pBundledContext->SamplesToExitCountBb = 0;
- } else if(pContext->EffectType == LVM_VIRTUALIZER) {
+ } else if (pContext->EffectType == LVM_VIRTUALIZER) {
ALOGV("\tEffectRelease LVM_VIRTUALIZER Clearing global intstantiated flag");
pSessionContext->bVirtualizerInstantiated = LVM_FALSE;
- if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
+ if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
pContext->pBundledContext->NumberEffectsEnabled--;
}
pContext->pBundledContext->SamplesToExitCountVirt = 0;
- } else if(pContext->EffectType == LVM_EQUALIZER) {
+ } else if (pContext->EffectType == LVM_EQUALIZER) {
ALOGV("\tEffectRelease LVM_EQUALIZER Clearing global intstantiated flag");
- pSessionContext->bEqualizerInstantiated =LVM_FALSE;
- if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+ pSessionContext->bEqualizerInstantiated = LVM_FALSE;
+ if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
pContext->pBundledContext->NumberEffectsEnabled--;
}
pContext->pBundledContext->SamplesToExitCountEq = 0;
- } else if(pContext->EffectType == LVM_VOLUME) {
+ } else if (pContext->EffectType == LVM_VOLUME) {
ALOGV("\tEffectRelease LVM_VOLUME Clearing global intstantiated flag");
pSessionContext->bVolumeInstantiated = LVM_FALSE;
// There is no samplesToExitCount for volume so we also use the drain flag to check
// if we should decrement the effects enabled.
- if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE
- || (effectInDrain & 1 << LVM_VOLUME) != 0) {
+ if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE ||
+ (effectInDrain & 1 << LVM_VOLUME) != 0) {
pContext->pBundledContext->NumberEffectsEnabled--;
}
} else {
ALOGV("\tLVM_ERROR : EffectRelease : Unsupported effect\n\n\n\n\n\n\n");
}
- effectInDrain &= ~(1 << pContext->EffectType); // no need to drain if released
+ effectInDrain &= ~(1 << pContext->EffectType); // no need to drain if released
// Disable effect, in this case ignore errors (return codes)
// if an effect has already been disabled
@@ -414,17 +389,15 @@
// if all effects are no longer instantiaed free the lvm memory and delete BundledEffectContext
if ((pSessionContext->bBassInstantiated == LVM_FALSE) &&
- (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
- (pSessionContext->bEqualizerInstantiated ==LVM_FALSE) &&
- (pSessionContext->bVirtualizerInstantiated==LVM_FALSE))
- {
-
+ (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
+ (pSessionContext->bEqualizerInstantiated == LVM_FALSE) &&
+ (pSessionContext->bVirtualizerInstantiated == LVM_FALSE)) {
// Clear the SessionIndex
- for(int i=0; i<LVM_MAX_SESSIONS; i++){
- if(SessionIndex[i] == pContext->pBundledContext->SessionId){
+ for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+ if (SessionIndex[i] == pContext->pBundledContext->SessionId) {
SessionIndex[i] = LVM_UNUSED_SESSION;
- ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n",
- i, pContext->pBundledContext->SessionId);
+ ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n", i,
+ pContext->pBundledContext->SessionId);
break;
}
}
@@ -433,7 +406,7 @@
pSessionContext->bBundledEffectsEnabled = LVM_FALSE;
pSessionContext->pBundledContext = LVM_NULL;
ALOGV("\tEffectRelease: Freeing LVM Bundle memory\n");
- LvmEffect_free(pContext);
+ LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
ALOGV("\tEffectRelease: Deleting LVM Bundle context %p\n", pContext->pBundledContext);
if (pContext->pBundledContext->workBuffer != NULL) {
free(pContext->pBundledContext->workBuffer);
@@ -449,11 +422,10 @@
} /* end EffectRelease */
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
- effect_descriptor_t *pDescriptor) {
- const effect_descriptor_t *desc = NULL;
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
+ const effect_descriptor_t* desc = NULL;
- if (pDescriptor == NULL || uuid == NULL){
+ if (pDescriptor == NULL || uuid == NULL) {
ALOGV("EffectGetDescriptor() called with NULL pointer");
return -EINVAL;
}
@@ -469,7 +441,7 @@
}
if (desc == NULL) {
- return -EINVAL;
+ return -EINVAL;
}
*pDescriptor = *desc;
@@ -477,15 +449,15 @@
return 0;
} /* end EffectGetDescriptor */
-void LvmGlobalBundle_init(){
+void LvmGlobalBundle_init() {
ALOGV("\tLvmGlobalBundle_init start");
- for(int i=0; i<LVM_MAX_SESSIONS; i++){
- GlobalSessionMemory[i].bBundledEffectsEnabled = LVM_FALSE;
- GlobalSessionMemory[i].bVolumeInstantiated = LVM_FALSE;
- GlobalSessionMemory[i].bEqualizerInstantiated = LVM_FALSE;
- GlobalSessionMemory[i].bBassInstantiated = LVM_FALSE;
+ for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+ GlobalSessionMemory[i].bBundledEffectsEnabled = LVM_FALSE;
+ GlobalSessionMemory[i].bVolumeInstantiated = LVM_FALSE;
+ GlobalSessionMemory[i].bEqualizerInstantiated = LVM_FALSE;
+ GlobalSessionMemory[i].bBassInstantiated = LVM_FALSE;
GlobalSessionMemory[i].bVirtualizerInstantiated = LVM_FALSE;
- GlobalSessionMemory[i].pBundledContext = LVM_NULL;
+ GlobalSessionMemory[i].pBundledContext = LVM_NULL;
SessionIndex[i] = LVM_UNUSED_SESSION;
}
@@ -504,203 +476,140 @@
//
//----------------------------------------------------------------------------
-int LvmBundle_init(EffectContext *pContext){
+int LvmBundle_init(EffectContext* pContext) {
ALOGV("\tLvmBundle_init start");
- pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
- pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.inputCfg.samplingRate = 44100;
- pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.cookie = NULL;
- pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
- pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
- pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.outputCfg.samplingRate = 44100;
- pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+ pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.inputCfg.samplingRate = 44100;
+ pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.cookie = NULL;
+ pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+ pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.outputCfg.samplingRate = 44100;
+ pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.outputCfg.bufferProvider.cookie = NULL;
- pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.outputCfg.bufferProvider.cookie = NULL;
+ pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
CHECK_ARG(pContext != NULL);
- if (pContext->pBundledContext->hInstance != NULL){
+ if (pContext->pBundledContext->hInstance != NULL) {
ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
- "-> Calling pContext->pBassBoost->free()");
-
- LvmEffect_free(pContext);
+ "-> Calling pContext->pBassBoost->free()");
+ LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
- "-> Called pContext->pBassBoost->free()");
+ "-> Called pContext->pBassBoost->free()");
}
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
- LVM_ControlParams_t params; /* Control Parameters */
- LVM_InstParams_t InstParams; /* Instance parameters */
- LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
- LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
- LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
- LVM_MemTab_t MemTab; /* Memory allocation table */
- bool bMallocFailure = LVM_FALSE;
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t params; /* Control Parameters */
+ LVM_InstParams_t InstParams; /* Instance parameters */
+ LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
+ LVM_HeadroomParams_t HeadroomParams; /* Headroom parameters */
+ LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
/* Set the capabilities */
- InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
- InstParams.MaxBlockSize = MAX_CALL_SIZE;
- InstParams.EQNB_NumBands = MAX_NUM_BANDS;
- InstParams.PSA_Included = LVM_PSA_ON;
+ InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+ InstParams.MaxBlockSize = MAX_CALL_SIZE;
+ InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+ InstParams.PSA_Included = LVM_PSA_ON;
- /* Allocate memory, forcing alignment */
- LvmStatus = LVM_GetMemoryTable(LVM_NULL,
- &MemTab,
- &InstParams);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
- /* Allocate memory */
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].Size != 0){
- MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
- if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
- " bytes for region %u\n", MemTab.Region[i].Size, i );
- bMallocFailure = LVM_TRUE;
- }else{
- ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
- " bytes for region %u at %p\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- }
- }
- }
-
- /* If one or more of the memory regions failed to allocate, free the regions that were
- * succesfully allocated and return with an error
- */
- if(bMallocFailure == LVM_TRUE){
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
- " bytes for region %u Not freeing\n", MemTab.Region[i].Size, i );
- }else{
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %" PRIu32
- " bytes for region %u at %p- free\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- free(MemTab.Region[i].pBaseAddress);
- }
- }
- return -EINVAL;
- }
- ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
-
- /* Initialise */
- pContext->pBundledContext->hInstance = LVM_NULL;
-
- /* Init sets the instance handle */
- LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
- &MemTab,
- &InstParams);
+ LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
/* Set the initial process parameters */
/* General parameters */
- params.OperatingMode = LVM_MODE_ON;
- params.SampleRate = LVM_FS_44100;
- params.SourceFormat = LVM_STEREO;
- params.SpeakerType = LVM_HEADPHONES;
+ params.OperatingMode = LVM_MODE_ON;
+ params.SampleRate = LVM_FS_44100;
+ params.SourceFormat = LVM_STEREO;
+ params.SpeakerType = LVM_HEADPHONES;
pContext->pBundledContext->SampleRate = LVM_FS_44100;
-#ifdef SUPPORT_MC
pContext->pBundledContext->ChMask = AUDIO_CHANNEL_OUT_STEREO;
-#endif
/* Concert Sound parameters */
- params.VirtualizerOperatingMode = LVM_MODE_OFF;
- params.VirtualizerType = LVM_CONCERTSOUND;
- params.VirtualizerReverbLevel = 100;
- params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
+ params.VirtualizerOperatingMode = LVM_MODE_OFF;
+ params.VirtualizerType = LVM_CONCERTSOUND;
+ params.VirtualizerReverbLevel = 100;
+ params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
/* N-Band Equaliser parameters */
- params.EQNB_OperatingMode = LVM_EQNB_OFF;
- params.EQNB_NBands = FIVEBAND_NUMBANDS;
- params.pEQNB_BandDefinition = &BandDefs[0];
+ params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ params.EQNB_NBands = FIVEBAND_NUMBANDS;
+ params.pEQNB_BandDefinition = &BandDefs[0];
- for (int i=0; i<FIVEBAND_NUMBANDS; i++)
- {
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
- BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
+ BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
}
/* Volume Control parameters */
- params.VC_EffectLevel = 0;
- params.VC_Balance = 0;
+ params.VC_EffectLevel = 0;
+ params.VC_Balance = 0;
/* Treble Enhancement parameters */
- params.TE_OperatingMode = LVM_TE_OFF;
- params.TE_EffectLevel = 0;
+ params.TE_OperatingMode = LVM_TE_OFF;
+ params.TE_EffectLevel = 0;
/* PSA Control parameters */
- params.PSA_Enable = LVM_PSA_OFF;
- params.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
+ params.PSA_Enable = LVM_PSA_OFF;
+ params.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
/* Bass Enhancement parameters */
- params.BE_OperatingMode = LVM_BE_OFF;
- params.BE_EffectLevel = 0;
- params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
- params.BE_HPF = LVM_BE_HPF_ON;
+ params.BE_OperatingMode = LVM_BE_OFF;
+ params.BE_EffectLevel = 0;
+ params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ params.BE_HPF = LVM_BE_HPF_ON;
/* PSA Control parameters */
- params.PSA_Enable = LVM_PSA_OFF;
- params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+ params.PSA_Enable = LVM_PSA_OFF;
+ params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
/* TE Control parameters */
- params.TE_OperatingMode = LVM_TE_OFF;
- params.TE_EffectLevel = 0;
+ params.TE_OperatingMode = LVM_TE_OFF;
+ params.TE_EffectLevel = 0;
-#ifdef SUPPORT_MC
- params.NrChannels =
- audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
- params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
-#endif
+ params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+ params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
/* Activate the initial settings */
- LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance,
- ¶ms);
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, ¶ms);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetControlParameters\n");
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetControlParameters\n");
/* Set the headroom parameters */
- HeadroomBandDef[0].Limit_Low = 20;
- HeadroomBandDef[0].Limit_High = 4999;
- HeadroomBandDef[0].Headroom_Offset = 0;
- HeadroomBandDef[1].Limit_Low = 5000;
- HeadroomBandDef[1].Limit_High = 24000;
- HeadroomBandDef[1].Headroom_Offset = 0;
- HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
+ HeadroomBandDef[0].Limit_Low = 20;
+ HeadroomBandDef[0].Limit_High = 4999;
+ HeadroomBandDef[0].Headroom_Offset = 0;
+ HeadroomBandDef[1].Limit_Low = 5000;
+ HeadroomBandDef[1].Limit_High = 24000;
+ HeadroomBandDef[1].Headroom_Offset = 0;
+ HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
- HeadroomParams.NHeadroomBands = 2;
+ HeadroomParams.NHeadroomBands = 2;
- LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
- &HeadroomParams);
+ LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetHeadroomParams\n");
+ ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetHeadroomParams\n");
ALOGV("\tLvmBundle_init End");
return 0;
-} /* end LvmBundle_init */
+} /* end LvmBundle_init */
//----------------------------------------------------------------------------
// LvmBundle_process()
@@ -719,25 +628,22 @@
// pOut: pointer to updated stereo 16 bit output data
//
//----------------------------------------------------------------------------
-int LvmBundle_process(effect_buffer_t *pIn,
- effect_buffer_t *pOut,
- int frameCount,
- EffectContext *pContext){
-
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
- effect_buffer_t *pOutTmp;
+int LvmBundle_process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount,
+ EffectContext* pContext) {
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ effect_buffer_t* pOutTmp;
const LVM_INT32 NrChannels =
- audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+ audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
- if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE){
+ if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE) {
pOutTmp = pOut;
- } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+ } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
if (pContext->pBundledContext->frameCount != frameCount) {
if (pContext->pBundledContext->workBuffer != NULL) {
free(pContext->pBundledContext->workBuffer);
}
pContext->pBundledContext->workBuffer =
- (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
+ (effect_buffer_t*)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
if (pContext->pBundledContext->workBuffer == NULL) {
return -ENOMEM;
}
@@ -749,7 +655,6 @@
return -EINVAL;
}
-
/* Process the samples */
LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
pIn, /* Input buffer */
@@ -757,16 +662,15 @@
(LVM_UINT16)frameCount, /* Number of samples to read */
0); /* Audio Time */
LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
- if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+ if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
for (int i = 0; i < frameCount * NrChannels; i++) {
pOut[i] = pOut[i] + pOutTmp[i];
}
}
return 0;
-} /* end LvmBundle_process */
+} /* end LvmBundle_process */
//----------------------------------------------------------------------------
// EqualizerUpdateActiveParams()
@@ -779,29 +683,28 @@
// Outputs:
//
//----------------------------------------------------------------------------
-void EqualizerUpdateActiveParams(EffectContext *pContext) {
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+void EqualizerUpdateActiveParams(EffectContext* pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerUpdateActiveParams")
- //ALOGV("\tEqualizerUpdateActiveParams Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
+ // ALOGV("\tEqualizerUpdateActiveParams Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
// ActiveParams.pEQNB_BandDefinition[band].Gain);
for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
- ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
- }
+ ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+ ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
+ }
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerUpdateActiveParams")
- //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
+ // ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
// ActiveParams.pEQNB_BandDefinition[band].Gain);
-
}
//----------------------------------------------------------------------------
@@ -816,19 +719,19 @@
// Outputs:
//
//----------------------------------------------------------------------------
-void LvmEffect_limitLevel(EffectContext *pContext) {
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+void LvmEffect_limitLevel(EffectContext* pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_limitLevel")
- //ALOGV("\tLvmEffect_limitLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
+ // ALOGV("\tLvmEffect_limitLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
// ActiveParams.pEQNB_BandDefinition[band].Gain);
int gainCorrection = 0;
- //Count the energy contribution per band for EQ and BassBoost only if they are active.
+ // Count the energy contribution per band for EQ and BassBoost only if they are active.
float energyContribution = 0;
float energyCross = 0;
float energyBassBoost = 0;
@@ -838,88 +741,83 @@
bool bbEnabled = pContext->pBundledContext->bBassEnabled == LVM_TRUE;
bool viEnabled = pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE;
- //EQ contribution
+ // EQ contribution
if (eqEnabled) {
for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+ float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
float bandCoefficient = LimitLevel_bandEnergyCoefficient[i];
float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
- if (bandEnergy > 0)
- energyContribution += bandEnergy;
+ if (bandEnergy > 0) energyContribution += bandEnergy;
}
- //cross EQ coefficients
+ // cross EQ coefficients
float bandFactorSum = 0;
- for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) {
- float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0;
- float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0;
+ for (int i = 0; i < FIVEBAND_NUMBANDS - 1; i++) {
+ float bandFactor1 = pContext->pBundledContext->bandGaindB[i] / 15.0;
+ float bandFactor2 = pContext->pBundledContext->bandGaindB[i + 1] / 15.0;
if (bandFactor1 > 0 && bandFactor2 > 0) {
- float crossEnergy = bandFactor1 * bandFactor2 *
- LimitLevel_bandEnergyCrossCoefficient[i];
+ float crossEnergy =
+ bandFactor1 * bandFactor2 * LimitLevel_bandEnergyCrossCoefficient[i];
bandFactorSum += bandFactor1 * bandFactor2;
- if (crossEnergy > 0)
- energyCross += crossEnergy;
+ if (crossEnergy > 0) energyCross += crossEnergy;
}
}
bandFactorSum -= 1.0;
- if (bandFactorSum > 0)
- crossCorrection = bandFactorSum * 0.7;
+ if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
}
- //BassBoost contribution
+ // BassBoost contribution
if (bbEnabled) {
- float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0;
+ float boostFactor = (pContext->pBundledContext->BassStrengthSaved) / 1000.0;
float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient;
energyContribution += boostFactor * boostCoefficient * boostCoefficient;
if (eqEnabled) {
for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+ float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i];
- float bandEnergy = boostFactor * bandFactor *
- bandCrossCoefficient;
- if (bandEnergy > 0)
- energyBassBoost += bandEnergy;
+ float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+ if (bandEnergy > 0) energyBassBoost += bandEnergy;
}
}
}
- //Virtualizer contribution
+ // Virtualizer contribution
if (viEnabled) {
- energyContribution += LimitLevel_virtualizerContribution *
- LimitLevel_virtualizerContribution;
+ energyContribution +=
+ LimitLevel_virtualizerContribution * LimitLevel_virtualizerContribution;
}
- double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) -
- crossCorrection;
+ double totalEnergyEstimation =
+ sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
ALOGV(" TOTAL energy estimation: %0.2f dB", totalEnergyEstimation);
- //roundoff
+ // roundoff
int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
if (maxLevelRound + pContext->pBundledContext->volume > 0) {
gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
}
- ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
+ ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
if (ActiveParams.VC_EffectLevel < -96) {
ActiveParams.VC_EffectLevel = -96;
}
ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume,
- gainCorrection, ActiveParams.VC_EffectLevel);
+ gainCorrection, ActiveParams.VC_EffectLevel);
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_limitLevel")
ALOGV("LVM_SetControlParameters return:%d", (int)LvmStatus);
- //ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
+ // ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
// ActiveParams.pEQNB_BandDefinition[band].Gain);
- //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
- if (pContext->pBundledContext->firstVolume == LVM_TRUE){
+ // ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
+ if (pContext->pBundledContext->firstVolume == LVM_TRUE) {
LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
@@ -939,42 +837,41 @@
//
//----------------------------------------------------------------------------
-int LvmEffect_enable(EffectContext *pContext){
- //ALOGV("\tLvmEffect_enable start");
+int LvmEffect_enable(EffectContext* pContext) {
+ // ALOGV("\tLvmEffect_enable start");
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_enable")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tLvmEffect_enable Succesfully called LVM_GetControlParameters\n");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+ // ALOGV("\tLvmEffect_enable Successfully called LVM_GetControlParameters\n");
- if(pContext->EffectType == LVM_BASS_BOOST) {
+ if (pContext->EffectType == LVM_BASS_BOOST) {
ALOGV("\tLvmEffect_enable : Enabling LVM_BASS_BOOST");
- ActiveParams.BE_OperatingMode = LVM_BE_ON;
+ ActiveParams.BE_OperatingMode = LVM_BE_ON;
}
- if(pContext->EffectType == LVM_VIRTUALIZER) {
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
ALOGV("\tLvmEffect_enable : Enabling LVM_VIRTUALIZER");
- ActiveParams.VirtualizerOperatingMode = LVM_MODE_ON;
+ ActiveParams.VirtualizerOperatingMode = LVM_MODE_ON;
}
- if(pContext->EffectType == LVM_EQUALIZER) {
+ if (pContext->EffectType == LVM_EQUALIZER) {
ALOGV("\tLvmEffect_enable : Enabling LVM_EQUALIZER");
- ActiveParams.EQNB_OperatingMode = LVM_EQNB_ON;
+ ActiveParams.EQNB_OperatingMode = LVM_EQNB_ON;
}
- if(pContext->EffectType == LVM_VOLUME) {
+ if (pContext->EffectType == LVM_VOLUME) {
ALOGV("\tLvmEffect_enable : Enabling LVM_VOLUME");
}
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_enable")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tLvmEffect_enable Succesfully called LVM_SetControlParameters\n");
- //ALOGV("\tLvmEffect_enable end");
+ // ALOGV("\tLvmEffect_enable Successfully called LVM_SetControlParameters\n");
+ // ALOGV("\tLvmEffect_enable end");
LvmEffect_limitLevel(pContext);
return 0;
}
@@ -991,82 +888,45 @@
//
//----------------------------------------------------------------------------
-int LvmEffect_disable(EffectContext *pContext){
- //ALOGV("\tLvmEffect_disable start");
+int LvmEffect_disable(EffectContext* pContext) {
+ // ALOGV("\tLvmEffect_disable start");
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_disable")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tLvmEffect_disable Succesfully called LVM_GetControlParameters\n");
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+ // ALOGV("\tLvmEffect_disable Successfully called LVM_GetControlParameters\n");
- if(pContext->EffectType == LVM_BASS_BOOST) {
+ if (pContext->EffectType == LVM_BASS_BOOST) {
ALOGV("\tLvmEffect_disable : Disabling LVM_BASS_BOOST");
- ActiveParams.BE_OperatingMode = LVM_BE_OFF;
+ ActiveParams.BE_OperatingMode = LVM_BE_OFF;
}
- if(pContext->EffectType == LVM_VIRTUALIZER) {
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
ALOGV("\tLvmEffect_disable : Disabling LVM_VIRTUALIZER");
- ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF;
+ ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF;
}
- if(pContext->EffectType == LVM_EQUALIZER) {
+ if (pContext->EffectType == LVM_EQUALIZER) {
ALOGV("\tLvmEffect_disable : Disabling LVM_EQUALIZER");
- ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF;
+ ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF;
}
- if(pContext->EffectType == LVM_VOLUME) {
+ if (pContext->EffectType == LVM_VOLUME) {
ALOGV("\tLvmEffect_disable : Disabling LVM_VOLUME");
}
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_disable")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tLvmEffect_disable Succesfully called LVM_SetControlParameters\n");
- //ALOGV("\tLvmEffect_disable end");
+ // ALOGV("\tLvmEffect_disable Successfully called LVM_SetControlParameters\n");
+ // ALOGV("\tLvmEffect_disable end");
LvmEffect_limitLevel(pContext);
return 0;
}
//----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-// pContext: effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(EffectContext *pContext){
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
- LVM_MemTab_t MemTab;
-
- /* Free the algorithm memory */
- LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance,
- &MemTab,
- LVM_NULL);
-
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].Size != 0){
- if (MemTab.Region[i].pBaseAddress != NULL){
- free(MemTab.Region[i].pBaseAddress);
- }else{
- ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %" PRIu32
- " bytes for region %u at %p ERROR\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
- }
- }
- }
-} /* end LvmEffect_free */
-
-//----------------------------------------------------------------------------
// Effect_setConfig()
//----------------------------------------------------------------------------
// Purpose: Set input and output audio configuration.
@@ -1080,9 +940,9 @@
//
//----------------------------------------------------------------------------
-int Effect_setConfig(EffectContext *pContext, effect_config_t *pConfig){
- LVM_Fs_en SampleRate;
- //ALOGV("\tEffect_setConfig start");
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig) {
+ LVM_Fs_en SampleRate;
+ // ALOGV("\tEffect_setConfig start");
CHECK_ARG(pContext != NULL);
CHECK_ARG(pConfig != NULL);
@@ -1090,107 +950,103 @@
CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
CHECK_ARG(pConfig->inputCfg.channels == pConfig->outputCfg.channels);
CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
-#ifdef SUPPORT_MC
CHECK_ARG(audio_channel_count_from_out_mask(pConfig->inputCfg.channels) <= LVM_MAX_CHANNELS);
-#else
- CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
-#endif
- CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
- || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+ CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+ pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
pContext->config = *pConfig;
const LVM_INT16 NrChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
switch (pConfig->inputCfg.samplingRate) {
- case 8000:
- SampleRate = LVM_FS_8000;
- pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
- break;
- case 16000:
- SampleRate = LVM_FS_16000;
- pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
- break;
- case 22050:
- SampleRate = LVM_FS_22050;
- pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
- break;
- case 32000:
- SampleRate = LVM_FS_32000;
- pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
- break;
- case 44100:
- SampleRate = LVM_FS_44100;
- pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
- break;
- case 48000:
- SampleRate = LVM_FS_48000;
- pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
- break;
- case 88200:
- SampleRate = LVM_FS_88200;
- pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
- break;
- case 96000:
- SampleRate = LVM_FS_96000;
- pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
- break;
- case 176400:
- SampleRate = LVM_FS_176400;
- pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
- break;
- case 192000:
- SampleRate = LVM_FS_192000;
- pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
- break;
- default:
- ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
- return -EINVAL;
+ case 8000:
+ SampleRate = LVM_FS_8000;
+ pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
+ break;
+ case 16000:
+ SampleRate = LVM_FS_16000;
+ pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
+ break;
+ case 22050:
+ SampleRate = LVM_FS_22050;
+ pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
+ break;
+ case 32000:
+ SampleRate = LVM_FS_32000;
+ pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
+ break;
+ case 44100:
+ SampleRate = LVM_FS_44100;
+ pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
+ break;
+ case 48000:
+ SampleRate = LVM_FS_48000;
+ pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
+ break;
+ case 88200:
+ SampleRate = LVM_FS_88200;
+ pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
+ break;
+ case 96000:
+ SampleRate = LVM_FS_96000;
+ pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
+ break;
+ case 176400:
+ SampleRate = LVM_FS_176400;
+ pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
+ break;
+ case 192000:
+ SampleRate = LVM_FS_192000;
+ pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
+ break;
+ default:
+ ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+ return -EINVAL;
}
-#ifdef SUPPORT_MC
if (pContext->pBundledContext->SampleRate != SampleRate ||
pContext->pBundledContext->ChMask != pConfig->inputCfg.channels) {
-#else
- if(pContext->pBundledContext->SampleRate != SampleRate){
-#endif
-
- LVM_ControlParams_t ActiveParams;
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS;
+ LVM_ControlParams_t ActiveParams;
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS;
ALOGV("\tEffect_setConfig change sampling rate to %d", SampleRate);
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "Effect_setConfig")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
ActiveParams.SampleRate = SampleRate;
-#ifdef SUPPORT_MC
ActiveParams.NrChannels = NrChannels;
ActiveParams.ChMask = pConfig->inputCfg.channels;
-#endif
+
+ if (NrChannels == 1) {
+ ActiveParams.SourceFormat = LVM_MONO;
+ } else if (NrChannels == 2) {
+ ActiveParams.SourceFormat = LVM_STEREO;
+ } else if (NrChannels > 2 && NrChannels <= LVM_MAX_CHANNELS) {
+ ActiveParams.SourceFormat = LVM_MULTICHANNEL;
+ } else {
+ return -EINVAL;
+ }
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "Effect_setConfig")
- ALOGV("\tEffect_setConfig Succesfully called LVM_SetControlParameters\n");
+ ALOGV("\tEffect_setConfig Successfully called LVM_SetControlParameters\n");
pContext->pBundledContext->SampleRate = SampleRate;
-#ifdef SUPPORT_MC
pContext->pBundledContext->ChMask = pConfig->inputCfg.channels;
-#endif
LvmEffect_limitLevel(pContext);
- }else{
- //ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
+ } else {
+ // ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
}
- //ALOGV("\tEffect_setConfig End....");
+ // ALOGV("\tEffect_setConfig End....");
return 0;
-} /* end Effect_setConfig */
+} /* end Effect_setConfig */
//----------------------------------------------------------------------------
// Effect_getConfig()
@@ -1206,10 +1062,9 @@
//
//----------------------------------------------------------------------------
-void Effect_getConfig(EffectContext *pContext, effect_config_t *pConfig)
-{
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig) {
*pConfig = pContext->config;
-} /* end Effect_getConfig */
+} /* end Effect_getConfig */
//----------------------------------------------------------------------------
// BassGetStrength()
@@ -1225,32 +1080,31 @@
//
//----------------------------------------------------------------------------
-uint32_t BassGetStrength(EffectContext *pContext){
- //ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
+uint32_t BassGetStrength(EffectContext* pContext) {
+ // ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassGetStrength")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tBassGetStrength Succesfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tBassGetStrength Successfully returned from LVM_GetControlParameters\n");
/* Check that the strength returned matches the strength that was set earlier */
- if(ActiveParams.BE_EffectLevel !=
- (LVM_INT16)((15*pContext->pBundledContext->BassStrengthSaved)/1000)){
+ if (ActiveParams.BE_EffectLevel !=
+ (LVM_INT16)((15 * pContext->pBundledContext->BassStrengthSaved) / 1000)) {
ALOGV("\tLVM_ERROR : BassGetStrength module strength does not match savedStrength %d %d\n",
- ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
+ ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
return -EINVAL;
}
- //ALOGV("\tBassGetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel );
- //ALOGV("\tBassGetStrength() (saved) -> %d\n", pContext->pBundledContext->BassStrengthSaved );
+ // ALOGV("\tBassGetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel );
+ // ALOGV("\tBassGetStrength() (saved) -> %d\n", pContext->pBundledContext->BassStrengthSaved );
return pContext->pBundledContext->BassStrengthSaved;
-} /* end BassGetStrength */
+} /* end BassGetStrength */
//----------------------------------------------------------------------------
// BassSetStrength()
@@ -1264,35 +1118,34 @@
//
//----------------------------------------------------------------------------
-void BassSetStrength(EffectContext *pContext, uint32_t strength){
- //ALOGV("\tBassSetStrength(%d)", strength);
+void BassSetStrength(EffectContext* pContext, uint32_t strength) {
+ // ALOGV("\tBassSetStrength(%d)", strength);
pContext->pBundledContext->BassStrengthSaved = (int)strength;
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassSetStrength")
- //ALOGV("\tBassSetStrength Succesfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tBassSetStrength Successfully returned from LVM_GetControlParameters\n");
/* Bass Enhancement parameters */
- ActiveParams.BE_EffectLevel = (LVM_INT16)((15*strength)/1000);
- ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ ActiveParams.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+ ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
- //ALOGV("\tBassSetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel );
+ // ALOGV("\tBassSetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel );
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "BassSetStrength")
- //ALOGV("\tBassSetStrength Succesfully called LVM_SetControlParameters\n");
+ // ALOGV("\tBassSetStrength Successfully called LVM_SetControlParameters\n");
LvmEffect_limitLevel(pContext);
-} /* end BassSetStrength */
+} /* end BassSetStrength */
//----------------------------------------------------------------------------
// VirtualizerGetStrength()
@@ -1308,21 +1161,23 @@
//
//----------------------------------------------------------------------------
-uint32_t VirtualizerGetStrength(EffectContext *pContext){
- //ALOGV("\tVirtualizerGetStrength (0-1000) -> %d\n",pContext->pBundledContext->VirtStrengthSaved);
+uint32_t VirtualizerGetStrength(EffectContext* pContext) {
+ // ALOGV("\tVirtualizerGetStrength (0-1000) ->
+ // %d\n",pContext->pBundledContext->VirtStrengthSaved);
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerGetStrength")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVirtualizerGetStrength Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tVirtualizerGetStrength() (0-100) -> %d\n", ActiveParams.VirtualizerReverbLevel*10);
+ // ALOGV("\tVirtualizerGetStrength Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tVirtualizerGetStrength() (0-100) -> %d\n",
+ // ActiveParams.VirtualizerReverbLevel*10);
return pContext->pBundledContext->VirtStrengthSaved;
-} /* end getStrength */
+} /* end getStrength */
//----------------------------------------------------------------------------
// VirtualizerSetStrength()
@@ -1336,31 +1191,31 @@
//
//----------------------------------------------------------------------------
-void VirtualizerSetStrength(EffectContext *pContext, uint32_t strength){
- //ALOGV("\tVirtualizerSetStrength(%d)", strength);
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+void VirtualizerSetStrength(EffectContext* pContext, uint32_t strength) {
+ // ALOGV("\tVirtualizerSetStrength(%d)", strength);
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
pContext->pBundledContext->VirtStrengthSaved = (int)strength;
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerSetStrength")
- //ALOGV("\tVirtualizerSetStrength Succesfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tVirtualizerSetStrength Successfully returned from LVM_GetControlParameters\n");
/* Virtualizer parameters */
- ActiveParams.CS_EffectLevel = (int)((strength*32767)/1000);
+ ActiveParams.CS_EffectLevel = (int)((strength * 32767) / 1000);
- ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength );
- ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel );
+ ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength);
+ ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel);
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VirtualizerSetStrength")
- //ALOGV("\tVirtualizerSetStrength Succesfully called LVM_SetControlParameters\n\n");
+ // ALOGV("\tVirtualizerSetStrength Successfully called LVM_SetControlParameters\n\n");
LvmEffect_limitLevel(pContext);
-} /* end setStrength */
+} /* end setStrength */
//----------------------------------------------------------------------------
// VirtualizerIsDeviceSupported()
@@ -1376,14 +1231,14 @@
//----------------------------------------------------------------------------
int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
switch (deviceType) {
- case AUDIO_DEVICE_OUT_WIRED_HEADSET:
- case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
- case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
- case AUDIO_DEVICE_OUT_USB_HEADSET:
- // case AUDIO_DEVICE_OUT_USB_DEVICE: // For USB testing of the virtualizer only.
- return 0;
- default :
- return -EINVAL;
+ case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+ case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+ case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+ case AUDIO_DEVICE_OUT_USB_HEADSET:
+ // case AUDIO_DEVICE_OUT_USB_DEVICE: // For USB testing of the virtualizer only.
+ return 0;
+ default:
+ return -EINVAL;
}
}
@@ -1401,9 +1256,9 @@
// 0 if the configuration is supported
//----------------------------------------------------------------------------
int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask,
- audio_devices_t deviceType) {
+ audio_devices_t deviceType) {
uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
- if (channelCount < 1 || channelCount > FCC_2) { // TODO: update to 8 channels when supported.
+ if (channelCount < 1 || channelCount > FCC_2) { // TODO: update to 8 channels when supported.
return -EINVAL;
}
return VirtualizerIsDeviceSupported(deviceType);
@@ -1423,16 +1278,16 @@
// 0 if the device is supported and the virtualization mode forced
//
//----------------------------------------------------------------------------
-int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t forcedDevice) {
+int VirtualizerForceVirtualizationMode(EffectContext* pContext, audio_devices_t forcedDevice) {
ALOGV("VirtualizerForceVirtualizationMode: forcedDev=0x%x enabled=%d tmpDisabled=%d",
- forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
- pContext->pBundledContext->bVirtualizerTempDisabled);
+ forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
+ pContext->pBundledContext->bVirtualizerTempDisabled);
int status = 0;
bool useVirtualizer = false;
if (VirtualizerIsDeviceSupported(forcedDevice) != 0) {
if (forcedDevice != AUDIO_DEVICE_NONE) {
- //forced device is not supported, make it behave as a reset of forced mode
+ // forced device is not supported, make it behave as a reset of forced mode
forcedDevice = AUDIO_DEVICE_NONE;
// but return an error
status = -EINVAL;
@@ -1472,8 +1327,8 @@
}
ALOGV("\tafter VirtualizerForceVirtualizationMode: enabled=%d tmpDisabled=%d",
- pContext->pBundledContext->bVirtualizerEnabled,
- pContext->pBundledContext->bVirtualizerTempDisabled);
+ pContext->pBundledContext->bVirtualizerEnabled,
+ pContext->pBundledContext->bVirtualizerTempDisabled);
return status;
}
@@ -1499,23 +1354,23 @@
//
//----------------------------------------------------------------------------
void VirtualizerGetSpeakerAngles(audio_channel_mask_t channelMask,
- audio_devices_t deviceType __unused, int32_t *pSpeakerAngles) {
+ audio_devices_t deviceType __unused, int32_t* pSpeakerAngles) {
// the channel count is guaranteed to be 1 or 2
// the device is guaranteed to be of type headphone
// this virtualizer is always using 2 virtual speakers at -90 and 90deg of azimuth, 0deg of
// elevation but the return information is sized for nbChannels * 3, so we have to consider
// the (false here) case of a single channel, and return only 3 fields.
if (audio_channel_count_from_out_mask(channelMask) == 1) {
- *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_MONO; // same as FRONT_LEFT
- *pSpeakerAngles++ = 0; // azimuth
- *pSpeakerAngles = 0; // elevation
+ *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_MONO; // same as FRONT_LEFT
+ *pSpeakerAngles++ = 0; // azimuth
+ *pSpeakerAngles = 0; // elevation
} else {
- *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_LEFT;
- *pSpeakerAngles++ = -90; // azimuth
- *pSpeakerAngles++ = 0; // elevation
- *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_RIGHT;
+ *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_LEFT;
+ *pSpeakerAngles++ = -90; // azimuth
+ *pSpeakerAngles++ = 0; // elevation
+ *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_RIGHT;
*pSpeakerAngles++ = 90; // azimuth
- *pSpeakerAngles = 0; // elevation
+ *pSpeakerAngles = 0; // elevation
}
}
@@ -1529,10 +1384,10 @@
// AUDIO_DEVICE_NONE if the effect is not virtualizing
// or the device type if the effect is virtualizing
//----------------------------------------------------------------------------
-audio_devices_t VirtualizerGetVirtualizationMode(EffectContext *pContext) {
+audio_devices_t VirtualizerGetVirtualizationMode(EffectContext* pContext) {
audio_devices_t virtDevice = AUDIO_DEVICE_NONE;
- if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE)
- && (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
+ if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) &&
+ (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
if (pContext->pBundledContext->nVirtualizerForcedDevice != AUDIO_DEVICE_NONE) {
// virtualization mode is forced, return that device
virtDevice = pContext->pBundledContext->nVirtualizerForcedDevice;
@@ -1557,8 +1412,8 @@
// Outputs:
//
//----------------------------------------------------------------------------
-int32_t EqualizerGetBandLevel(EffectContext *pContext, int32_t band){
- //ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
+int32_t EqualizerGetBandLevel(EffectContext* pContext, int32_t band) {
+ // ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
return pContext->pBundledContext->bandGaindB[band] * 100;
}
@@ -1576,14 +1431,14 @@
// Outputs:
//
//---------------------------------------------------------------------------
-void EqualizerSetBandLevel(EffectContext *pContext, int band, short Gain){
+void EqualizerSetBandLevel(EffectContext* pContext, int band, short Gain) {
int gainRounded;
- if(Gain > 0){
- gainRounded = (int)((Gain+50)/100);
- }else{
- gainRounded = (int)((Gain-50)/100);
+ if (Gain > 0) {
+ gainRounded = (int)((Gain + 50) / 100);
+ } else {
+ gainRounded = (int)((Gain - 50) / 100);
}
- //ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
+ // ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
pContext->pBundledContext->bandGaindB[band] = gainRounded;
pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
@@ -1603,23 +1458,22 @@
// Outputs:
//
//----------------------------------------------------------------------------
-int32_t EqualizerGetCentreFrequency(EffectContext *pContext, int32_t band){
- int32_t Frequency =0;
+int32_t EqualizerGetCentreFrequency(EffectContext* pContext, int32_t band) {
+ int32_t Frequency = 0;
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
- LVM_EQNB_BandDef_t *BandDef;
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_EQNB_BandDef_t* BandDef;
/* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerGetCentreFrequency")
- BandDef = ActiveParams.pEQNB_BandDefinition;
- Frequency = (int32_t)BandDef[band].Frequency*1000; // Convert to millibels
+ BandDef = ActiveParams.pEQNB_BandDefinition;
+ Frequency = (int32_t)BandDef[band].Frequency * 1000; // Convert Hz to milliHz
- //ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
- //ALOGV("\tEqualizerGetCentreFrequency Succesfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
+ // ALOGV("\tEqualizerGetCentreFrequency Successfully returned from LVM_GetControlParameters\n");
return Frequency;
}
@@ -1641,10 +1495,10 @@
// pLow: lower band range
// pLow: upper band range
//----------------------------------------------------------------------------
-int32_t EqualizerGetBandFreqRange(EffectContext *pContext __unused, int32_t band, uint32_t *pLow,
- uint32_t *pHi){
+int32_t EqualizerGetBandFreqRange(EffectContext* pContext __unused, int32_t band, uint32_t* pLow,
+ uint32_t* pHi) {
*pLow = bandFreqRange[band][0];
- *pHi = bandFreqRange[band][1];
+ *pHi = bandFreqRange[band][1];
return 0;
}
@@ -1665,16 +1519,16 @@
// pLow: lower band range
// pLow: upper band range
//----------------------------------------------------------------------------
-int32_t EqualizerGetBand(EffectContext *pContext __unused, uint32_t targetFreq){
+int32_t EqualizerGetBand(EffectContext* pContext __unused, uint32_t targetFreq) {
int band = 0;
- if(targetFreq < bandFreqRange[0][0]){
+ if (targetFreq < bandFreqRange[0][0]) {
return -EINVAL;
- }else if(targetFreq == bandFreqRange[0][0]){
+ } else if (targetFreq == bandFreqRange[0][0]) {
return 0;
}
- for(int i=0; i<FIVEBAND_NUMBANDS;i++){
- if((targetFreq > bandFreqRange[i][0])&&(targetFreq <= bandFreqRange[i][1])){
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ if ((targetFreq > bandFreqRange[i][0]) && (targetFreq <= bandFreqRange[i][1])) {
band = i;
}
}
@@ -1694,7 +1548,7 @@
// pContext: effect engine context
//
//----------------------------------------------------------------------------
-int32_t EqualizerGetPreset(EffectContext *pContext){
+int32_t EqualizerGetPreset(EffectContext* pContext) {
return pContext->pBundledContext->CurPreset;
}
@@ -1711,14 +1565,12 @@
// preset The preset ID.
//
//----------------------------------------------------------------------------
-void EqualizerSetPreset(EffectContext *pContext, int preset){
-
- //ALOGV("\tEqualizerSetPreset(%d)", preset);
+void EqualizerSetPreset(EffectContext* pContext, int preset) {
+ // ALOGV("\tEqualizerSetPreset(%d)", preset);
pContext->pBundledContext->CurPreset = preset;
- //ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
- for (int i=0; i<FIVEBAND_NUMBANDS; i++)
- {
+ // ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
pContext->pBundledContext->bandGaindB[i] =
EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS];
}
@@ -1726,11 +1578,11 @@
EqualizerUpdateActiveParams(pContext);
LvmEffect_limitLevel(pContext);
- //ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n");
+ // ALOGV("\tEqualizerSetPreset Successfully called LVM_SetControlParameters\n");
return;
}
-int32_t EqualizerGetNumPresets(){
+int32_t EqualizerGetNumPresets() {
return sizeof(gEqualizerPresets) / sizeof(PresetConfig);
}
@@ -1745,14 +1597,14 @@
// preset The preset ID. Must be less than number of presets.
//
//-------------------------------------------------------------------------
-const char * EqualizerGetPresetName(int32_t preset){
- //ALOGV("\tEqualizerGetPresetName start(%d)", preset);
+const char* EqualizerGetPresetName(int32_t preset) {
+ // ALOGV("\tEqualizerGetPresetName start(%d)", preset);
if (preset == PRESET_CUSTOM) {
return "Custom";
} else {
return gEqualizerPresets[preset].name;
}
- //ALOGV("\tEqualizerGetPresetName end(%d)", preset);
+ // ALOGV("\tEqualizerGetPresetName end(%d)", preset);
return 0;
}
@@ -1767,8 +1619,7 @@
//
//----------------------------------------------------------------------------
-int VolumeSetVolumeLevel(EffectContext *pContext, int16_t level){
-
+int VolumeSetVolumeLevel(EffectContext* pContext, int16_t level) {
if (level > 0 || level < -9600) {
return -EINVAL;
}
@@ -1782,7 +1633,7 @@
LvmEffect_limitLevel(pContext);
return 0;
-} /* end VolumeSetVolumeLevel */
+} /* end VolumeSetVolumeLevel */
//----------------------------------------------------------------------------
// VolumeGetVolumeLevel()
@@ -1794,15 +1645,14 @@
//
//----------------------------------------------------------------------------
-int VolumeGetVolumeLevel(EffectContext *pContext, int16_t *level){
-
+int VolumeGetVolumeLevel(EffectContext* pContext, int16_t* level) {
if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
*level = pContext->pBundledContext->levelSaved * 100;
} else {
*level = pContext->pBundledContext->volume * 100;
}
return 0;
-} /* end VolumeGetVolumeLevel */
+} /* end VolumeGetVolumeLevel */
//----------------------------------------------------------------------------
// VolumeSetMute()
@@ -1815,23 +1665,23 @@
//
//----------------------------------------------------------------------------
-int32_t VolumeSetMute(EffectContext *pContext, uint32_t mute){
- //ALOGV("\tVolumeSetMute start(%d)", mute);
+int32_t VolumeSetMute(EffectContext* pContext, uint32_t mute) {
+ // ALOGV("\tVolumeSetMute start(%d)", mute);
pContext->pBundledContext->bMuteEnabled = mute;
/* Set appropriate volume level */
- if(pContext->pBundledContext->bMuteEnabled == LVM_TRUE){
+ if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
pContext->pBundledContext->levelSaved = pContext->pBundledContext->volume;
pContext->pBundledContext->volume = -96;
- }else{
+ } else {
pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved;
}
LvmEffect_limitLevel(pContext);
return 0;
-} /* end setMute */
+} /* end setMute */
//----------------------------------------------------------------------------
// VolumeGetMute()
@@ -1845,26 +1695,25 @@
// mute: enable/disable flag
//----------------------------------------------------------------------------
-int32_t VolumeGetMute(EffectContext *pContext, uint32_t *mute){
- //ALOGV("\tVolumeGetMute start");
- if((pContext->pBundledContext->bMuteEnabled == LVM_FALSE)||
- (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)){
+int32_t VolumeGetMute(EffectContext* pContext, uint32_t* mute) {
+ // ALOGV("\tVolumeGetMute start");
+ if ((pContext->pBundledContext->bMuteEnabled == LVM_FALSE) ||
+ (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)) {
*mute = pContext->pBundledContext->bMuteEnabled;
return 0;
- }else{
+ } else {
ALOGV("\tLVM_ERROR : VolumeGetMute read an invalid value from context %d",
pContext->pBundledContext->bMuteEnabled);
return -EINVAL;
}
- //ALOGV("\tVolumeGetMute end");
-} /* end getMute */
+ // ALOGV("\tVolumeGetMute end");
+} /* end getMute */
-int16_t VolumeConvertStereoPosition(int16_t position){
+int16_t VolumeConvertStereoPosition(int16_t position) {
int16_t convertedPosition = 0;
- convertedPosition = (int16_t)(((float)position/1000)*96);
+ convertedPosition = (int16_t)(((float)position / 1000) * 96);
return convertedPosition;
-
}
//----------------------------------------------------------------------------
@@ -1879,55 +1728,55 @@
// Outputs:
//----------------------------------------------------------------------------
-int VolumeSetStereoPosition(EffectContext *pContext, int16_t position){
-
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
- LVM_INT16 Balance = 0;
+int VolumeSetStereoPosition(EffectContext* pContext, int16_t position) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_INT16 Balance = 0;
pContext->pBundledContext->positionSaved = position;
Balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
- //ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
- //pContext->pBundledContext->positionSaved);
+ // ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+ // pContext->pBundledContext->positionSaved);
- if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
-
- //ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
+ if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+ // ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
pContext->pBundledContext->positionSaved = position;
/* Get the current settings */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got:"
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+ // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters
+ // got:"
// " %d\n", ActiveParams.VC_Balance);
/* Volume parameters */
- ActiveParams.VC_Balance = Balance;
- //ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB) -> %d\n", ActiveParams.VC_Balance );
+ ActiveParams.VC_Balance = Balance;
+ // ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB) -> %d\n", ActiveParams.VC_Balance
+ // );
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeSetStereoPosition Succesfully called LVM_SetControlParameters\n");
+ // ALOGV("\tVolumeSetStereoPosition Successfully called LVM_SetControlParameters\n");
/* Get the current settings */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got: "
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+ // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters got:
+ // "
// "%d\n", ActiveParams.VC_Balance);
+ } else {
+ // ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
+ // position, Balance);
}
- else{
- //ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
- //position, Balance);
- }
- //ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
- //pContext->pBundledContext->positionSaved);
+ // ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
+ // pContext->pBundledContext->positionSaved);
return 0;
-} /* end VolumeSetStereoPosition */
+} /* end VolumeSetStereoPosition */
//----------------------------------------------------------------------------
// VolumeGetStereoPosition()
@@ -1941,35 +1790,35 @@
// position: stereo position
//----------------------------------------------------------------------------
-int32_t VolumeGetStereoPosition(EffectContext *pContext, int16_t *position){
- //ALOGV("\tVolumeGetStereoPosition start");
+int32_t VolumeGetStereoPosition(EffectContext* pContext, int16_t* position) {
+ // ALOGV("\tVolumeGetStereoPosition start");
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
- LVM_INT16 balance;
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+ LVM_INT16 balance;
- //ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
- //pContext->pBundledContext->positionSaved);
+ // ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+ // pContext->pBundledContext->positionSaved);
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeGetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
- //ALOGV("\tVolumeGetStereoPosition Succesfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
+ // ALOGV("\tVolumeGetStereoPosition Successfully returned from LVM_GetControlParameters\n");
balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
- if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
- if(balance != ActiveParams.VC_Balance){
+ if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+ if (balance != ActiveParams.VC_Balance) {
return -EINVAL;
}
}
- *position = (LVM_INT16)pContext->pBundledContext->positionSaved; // Convert dB to millibels
- //ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved =%d\n",
- //pContext->pBundledContext->positionSaved);
+ *position = (LVM_INT16)pContext->pBundledContext->positionSaved; // per-mille position, as set
+ // ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved
+ // =%d\n", pContext->pBundledContext->positionSaved);
return 0;
-} /* end VolumeGetStereoPosition */
+} /* end VolumeGetStereoPosition */
//----------------------------------------------------------------------------
// VolumeEnableStereoPosition()
@@ -1982,40 +1831,40 @@
//
//----------------------------------------------------------------------------
-int32_t VolumeEnableStereoPosition(EffectContext *pContext, uint32_t enabled){
- //ALOGV("\tVolumeEnableStereoPosition start()");
+int32_t VolumeEnableStereoPosition(EffectContext* pContext, uint32_t enabled) {
+ // ALOGV("\tVolumeEnableStereoPosition start()");
pContext->pBundledContext->bStereoPositionEnabled = enabled;
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeEnableStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeEnableStereoPosition Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
+ // ALOGV("\tVolumeEnableStereoPosition Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
// enabled, ActiveParams.VC_Balance );
/* Set appropriate stereo position */
- if(pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE){
+ if (pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE) {
ActiveParams.VC_Balance = 0;
- }else{
- ActiveParams.VC_Balance =
- VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
+ } else {
+ ActiveParams.VC_Balance =
+ VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
}
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeEnableStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
- //ALOGV("\tVolumeEnableStereoPosition Succesfully called LVM_SetControlParameters\n");
- //ALOGV("\tVolumeEnableStereoPosition end()\n");
+ // ALOGV("\tVolumeEnableStereoPosition Successfully called LVM_SetControlParameters\n");
+ // ALOGV("\tVolumeEnableStereoPosition end()\n");
return 0;
-} /* end VolumeEnableStereoPosition */
+} /* end VolumeEnableStereoPosition */
//----------------------------------------------------------------------------
// BassBoost_getParameter()
@@ -2038,13 +1887,10 @@
//
//----------------------------------------------------------------------------
-int BassBoost_getParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue) {
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2055,28 +1901,27 @@
switch (params[0]) {
case BASSBOOST_PARAM_STRENGTH_SUPPORTED:
if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
- ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
- __func__, *pValueSize);
+ ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- *(uint32_t *)pValue = 1;
- ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t *)pValue);
+ *(uint32_t*)pValue = 1;
+ ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t*)pValue);
break;
case BASSBOOST_PARAM_STRENGTH:
if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
- ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u",
- __func__, *pValueSize);
+ ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u", __func__, *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- *(int16_t *)pValue = BassGetStrength(pContext);
- ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+ *(int16_t*)pValue = BassGetStrength(pContext);
+ ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
break;
default:
@@ -2104,13 +1949,10 @@
//
//----------------------------------------------------------------------------
-int BassBoost_setParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue) {
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2126,7 +1968,7 @@
break;
}
- const int16_t strength = *(int16_t *)pValue;
+ const int16_t strength = *(int16_t*)pValue;
ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, strength);
ALOGVV("%s BASSBOOST_PARAM_STRENGTH Calling BassSetStrength", __func__);
BassSetStrength(pContext, (int32_t)strength);
@@ -2164,13 +2006,10 @@
//
//----------------------------------------------------------------------------
-int Virtualizer_getParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue) {
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2180,47 +2019,47 @@
}
switch (params[0]) {
case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED:
- if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
- ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
- __func__, *pValueSize);
+ if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
+ ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- *(uint32_t *)pValue = 1;
- ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t *)pValue);
+ *(uint32_t*)pValue = 1;
+ ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t*)pValue);
break;
case VIRTUALIZER_PARAM_STRENGTH:
- if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
- ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u",
- __func__, *pValueSize);
+ if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+ ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- *(int16_t *)pValue = VirtualizerGetStrength(pContext);
+ *(int16_t*)pValue = VirtualizerGetStrength(pContext);
- ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+ ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
break;
case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
if (paramSize < 3 * sizeof(int32_t)) {
- ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u",
- __func__, paramSize);
+ ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u", __func__,
+ paramSize);
status = -EINVAL;
break;
}
- const audio_channel_mask_t channelMask = (audio_channel_mask_t) params[1];
- const audio_devices_t deviceType = (audio_devices_t) params[2];
+ const audio_channel_mask_t channelMask = (audio_channel_mask_t)params[1];
+ const audio_devices_t deviceType = (audio_devices_t)params[2];
const uint32_t nbChannels = audio_channel_count_from_out_mask(channelMask);
const uint32_t valueSizeRequired = 3 * nbChannels * sizeof(int32_t);
if (*pValueSize < valueSizeRequired) {
- ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u",
- __func__, *pValueSize);
+ ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
@@ -2229,23 +2068,23 @@
// verify the configuration is supported
status = VirtualizerIsConfigurationSupported(channelMask, deviceType);
if (status == 0) {
- ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x",
- __func__, channelMask, deviceType);
+ ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x", __func__,
+ channelMask, deviceType);
// configuration is supported, get the angles
- VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t *)pValue);
+ VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t*)pValue);
}
} break;
case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE:
- if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
- ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u",
- __func__, *pValueSize);
+ if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
+ ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- *(uint32_t *)pValue = (uint32_t) VirtualizerGetVirtualizationMode(pContext);
+ *(uint32_t*)pValue = (uint32_t)VirtualizerGetVirtualizationMode(pContext);
break;
default:
@@ -2273,17 +2112,14 @@
//
//----------------------------------------------------------------------------
-int Virtualizer_setParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue) {
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
- if (paramSize != sizeof(int32_t)) { // legacy: check equality here.
+ if (paramSize != sizeof(int32_t)) { // legacy: check equality here.
ALOGV("%s invalid paramSize: %u", __func__, paramSize);
return -EINVAL;
}
@@ -2295,7 +2131,7 @@
break;
}
- const int16_t strength = *(int16_t *)pValue;
+ const int16_t strength = *(int16_t*)pValue;
ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, strength);
ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH Calling VirtualizerSetStrength", __func__);
VirtualizerSetStrength(pContext, (int32_t)strength);
@@ -2305,16 +2141,16 @@
case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
if (valueSize < sizeof(int32_t)) {
ALOGV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE invalid valueSize: %u",
- __func__, valueSize);
+ __func__, valueSize);
android_errorWriteLog(0x534e4554, "64478003");
status = -EINVAL;
break;
}
- const audio_devices_t deviceType = (audio_devices_t)*(int32_t *)pValue;
+ const audio_devices_t deviceType = (audio_devices_t) * (int32_t*)pValue;
status = VirtualizerForceVirtualizationMode(pContext, deviceType);
- ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d",
- __func__, deviceType, status);
+ ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d", __func__,
+ deviceType, status);
} break;
default:
@@ -2347,13 +2183,10 @@
// Side Effects:
//
//----------------------------------------------------------------------------
-int Equalizer_getParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue) {
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2362,211 +2195,210 @@
return -EINVAL;
}
switch (params[0]) {
- case EQ_PARAM_NUM_BANDS:
- if (*pValueSize < sizeof(uint16_t)) {
- ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(uint16_t);
-
- *(uint16_t *)pValue = (uint16_t)FIVEBAND_NUMBANDS;
- ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t *)pValue);
- break;
-
- case EQ_PARAM_CUR_PRESET:
- if (*pValueSize < sizeof(uint16_t)) {
- ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(uint16_t);
-
- *(uint16_t *)pValue = (uint16_t)EqualizerGetPreset(pContext);
- ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t *)pValue);
- break;
-
- case EQ_PARAM_GET_NUM_OF_PRESETS:
- if (*pValueSize < sizeof(uint16_t)) {
- ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(uint16_t);
-
- *(uint16_t *)pValue = (uint16_t)EqualizerGetNumPresets();
- ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t *)pValue);
- break;
-
- case EQ_PARAM_GET_BAND: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
- status = -EINVAL;
- break;
- }
- if (*pValueSize < sizeof(uint16_t)) {
- ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(uint16_t);
-
- const int32_t frequency = params[1];
- *(uint16_t *)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
- ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u",
- __func__, frequency, *(uint16_t *)pValue);
- } break;
-
- case EQ_PARAM_BAND_LEVEL: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
- status = -EINVAL;
- break;
- }
- if (*pValueSize < sizeof(int16_t)) {
- ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(int16_t);
-
- const int32_t band = params[1];
- if (band < 0 || band >= FIVEBAND_NUMBANDS) {
- if (band < 0) {
- android_errorWriteLog(0x534e4554, "32438598");
- ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+ case EQ_PARAM_NUM_BANDS:
+ if (*pValueSize < sizeof(uint16_t)) {
+ ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
}
- status = -EINVAL;
- break;
- }
- *(int16_t *)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
- ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d",
- __func__, band, *(int16_t *)pValue);
- } break;
+ *pValueSize = sizeof(uint16_t);
- case EQ_PARAM_LEVEL_RANGE:
- if (*pValueSize < 2 * sizeof(int16_t)) {
- ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
+ *(uint16_t*)pValue = (uint16_t)FIVEBAND_NUMBANDS;
+ ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t*)pValue);
break;
- }
- *pValueSize = 2 * sizeof(int16_t);
- *(int16_t *)pValue = -1500;
- *((int16_t *)pValue + 1) = 1500;
- ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d",
- __func__, *(int16_t *)pValue, *((int16_t *)pValue + 1));
- break;
-
- case EQ_PARAM_BAND_FREQ_RANGE: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
- status = -EINVAL;
- break;
- }
- if (*pValueSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = 2 * sizeof(int32_t);
-
- const int32_t band = params[1];
- if (band < 0 || band >= FIVEBAND_NUMBANDS) {
- if (band < 0) {
- android_errorWriteLog(0x534e4554, "32247948");
- ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d",
- __func__, band);
+ case EQ_PARAM_CUR_PRESET:
+ if (*pValueSize < sizeof(uint16_t)) {
+ ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
}
- status = -EINVAL;
- break;
- }
- EqualizerGetBandFreqRange(pContext, band, (uint32_t *)pValue, ((uint32_t *)pValue + 1));
- ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d",
- __func__, band, *(int32_t *)pValue, *((int32_t *)pValue + 1));
+ *pValueSize = sizeof(uint16_t);
- } break;
-
- case EQ_PARAM_CENTER_FREQ: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
- status = -EINVAL;
+ *(uint16_t*)pValue = (uint16_t)EqualizerGetPreset(pContext);
+ ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t*)pValue);
break;
- }
- if (*pValueSize < sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = sizeof(int32_t);
- const int32_t band = params[1];
- if (band < 0 || band >= FIVEBAND_NUMBANDS) {
- status = -EINVAL;
- if (band < 0) {
- android_errorWriteLog(0x534e4554, "32436341");
- ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+ case EQ_PARAM_GET_NUM_OF_PRESETS:
+ if (*pValueSize < sizeof(uint16_t)) {
+ ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__,
+ *pValueSize);
+ status = -EINVAL;
+ break;
}
- break;
- }
- *(int32_t *)pValue = EqualizerGetCentreFrequency(pContext, band);
- ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d",
- __func__, band, *(int32_t *)pValue);
- } break;
+ *pValueSize = sizeof(uint16_t);
- case EQ_PARAM_GET_PRESET_NAME: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
- status = -EINVAL;
+ *(uint16_t*)pValue = (uint16_t)EqualizerGetNumPresets();
+ ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t*)pValue);
break;
- }
- if (*pValueSize < 1) {
- android_errorWriteLog(0x534e4554, "37536407");
- status = -EINVAL;
- break;
- }
- const int32_t preset = params[1];
- if ((preset < 0 && preset != PRESET_CUSTOM) || preset >= EqualizerGetNumPresets()) {
- if (preset < 0) {
- android_errorWriteLog(0x534e4554, "32448258");
- ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+ case EQ_PARAM_GET_BAND: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
}
+ if (*pValueSize < sizeof(uint16_t)) {
+ ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = sizeof(uint16_t);
+
+ const int32_t frequency = params[1];
+ *(uint16_t*)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
+ ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u", __func__, frequency,
+ *(uint16_t*)pValue);
+ } break;
+
+ case EQ_PARAM_BAND_LEVEL: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
+ }
+ if (*pValueSize < sizeof(int16_t)) {
+ ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = sizeof(int16_t);
+
+ const int32_t band = params[1];
+ if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+ if (band < 0) {
+ android_errorWriteLog(0x534e4554, "32438598");
+ ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+ }
+ status = -EINVAL;
+ break;
+ }
+ *(int16_t*)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
+ ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, *(int16_t*)pValue);
+ } break;
+
+ case EQ_PARAM_LEVEL_RANGE:
+ if (*pValueSize < 2 * sizeof(int16_t)) {
+ ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = 2 * sizeof(int16_t);
+
+ *(int16_t*)pValue = -1500;
+ *((int16_t*)pValue + 1) = 1500;
+ ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d", __func__, *(int16_t*)pValue,
+ *((int16_t*)pValue + 1));
+ break;
+
+ case EQ_PARAM_BAND_FREQ_RANGE: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
+ }
+ if (*pValueSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = 2 * sizeof(int32_t);
+
+ const int32_t band = params[1];
+ if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+ if (band < 0) {
+ android_errorWriteLog(0x534e4554, "32247948");
+ ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d", __func__, band);
+ }
+ status = -EINVAL;
+ break;
+ }
+ EqualizerGetBandFreqRange(pContext, band, (uint32_t*)pValue, ((uint32_t*)pValue + 1));
+ ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d", __func__, band,
+ *(int32_t*)pValue, *((int32_t*)pValue + 1));
+
+ } break;
+
+ case EQ_PARAM_CENTER_FREQ: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
+ }
+ if (*pValueSize < sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = sizeof(int32_t);
+
+ const int32_t band = params[1];
+ if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+ status = -EINVAL;
+ if (band < 0) {
+ android_errorWriteLog(0x534e4554, "32436341");
+ ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+ }
+ break;
+ }
+ *(int32_t*)pValue = EqualizerGetCentreFrequency(pContext, band);
+ ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d", __func__, band,
+ *(int32_t*)pValue);
+ } break;
+
+ case EQ_PARAM_GET_PRESET_NAME: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
+ }
+ if (*pValueSize < 1) {
+ android_errorWriteLog(0x534e4554, "37536407");
+ status = -EINVAL;
+ break;
+ }
+
+ const int32_t preset = params[1];
+ if ((preset < 0 && preset != PRESET_CUSTOM) || preset >= EqualizerGetNumPresets()) {
+ if (preset < 0) {
+ android_errorWriteLog(0x534e4554, "32448258");
+ ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+ }
+ status = -EINVAL;
+ break;
+ }
+
+ char* const name = (char*)pValue;
+ strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
+ name[*pValueSize - 1] = 0;
+ *pValueSize = strlen(name) + 1;
+ ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d", __func__, preset, name,
+ *pValueSize);
+
+ } break;
+
+ case EQ_PARAM_PROPERTIES: {
+ constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
+ if (*pValueSize < requiredValueSize) {
+ ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
+ status = -EINVAL;
+ break;
+ }
+ *pValueSize = requiredValueSize;
+
+ int16_t* p = (int16_t*)pValue;
+ ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
+ p[0] = (int16_t)EqualizerGetPreset(pContext);
+ p[1] = (int16_t)FIVEBAND_NUMBANDS;
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
+ }
+ } break;
+
+ default:
+ ALOGV("%s invalid param %d", __func__, params[0]);
status = -EINVAL;
break;
- }
-
- char * const name = (char *)pValue;
- strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
- name[*pValueSize - 1] = 0;
- *pValueSize = strlen(name) + 1;
- ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d",
- __func__, preset, name, *pValueSize);
-
- } break;
-
- case EQ_PARAM_PROPERTIES: {
- constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
- if (*pValueSize < requiredValueSize) {
- ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
- status = -EINVAL;
- break;
- }
- *pValueSize = requiredValueSize;
-
- int16_t *p = (int16_t *)pValue;
- ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
- p[0] = (int16_t)EqualizerGetPreset(pContext);
- p[1] = (int16_t)FIVEBAND_NUMBANDS;
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
- }
- } break;
-
- default:
- ALOGV("%s invalid param %d", __func__, params[0]);
- status = -EINVAL;
- break;
}
ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2589,13 +2421,10 @@
// Outputs:
//
//----------------------------------------------------------------------------
-int Equalizer_setParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue) {
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2604,87 +2433,87 @@
return -EINVAL;
}
switch (params[0]) {
- case EQ_PARAM_CUR_PRESET: {
- if (valueSize < sizeof(int16_t)) {
- ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
- status = -EINVAL;
- break;
- }
- const int32_t preset = (int32_t)*(uint16_t *)pValue;
-
- ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
- if (preset >= EqualizerGetNumPresets() || preset < 0) {
- ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
- status = -EINVAL;
- break;
- }
- EqualizerSetPreset(pContext, preset);
- } break;
-
- case EQ_PARAM_BAND_LEVEL: {
- if (paramSize < 2 * sizeof(int32_t)) {
- ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
- status = -EINVAL;
- break;
- }
- if (valueSize < sizeof(int16_t)) {
- ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
- status = -EINVAL;
- break;
- }
- const int32_t band = params[1];
- const int32_t level = (int32_t)*(int16_t *)pValue;
- ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
- if (band < 0 || band >= FIVEBAND_NUMBANDS) {
- if (band < 0) {
- android_errorWriteLog(0x534e4554, "32095626");
- ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
- }
- status = -EINVAL;
- break;
- }
- EqualizerSetBandLevel(pContext, band, level);
- } break;
-
- case EQ_PARAM_PROPERTIES: {
- ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
- if (valueSize < sizeof(int16_t)) {
- ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
- status = -EINVAL;
- break;
- }
- int16_t *p = (int16_t *)pValue;
- if ((int)p[0] >= EqualizerGetNumPresets()) {
- ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
- status = -EINVAL;
- break;
- }
- if (p[0] >= 0) {
- EqualizerSetPreset(pContext, (int)p[0]);
- } else {
- constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
- if (valueSize < valueSizeRequired) {
- android_errorWriteLog(0x534e4554, "37563371");
- ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u",
- __func__, valueSize, valueSizeRequired);
- status = -EINVAL;
- break;
- }
- if ((int)p[1] != FIVEBAND_NUMBANDS) {
- ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+ case EQ_PARAM_CUR_PRESET: {
+ if (valueSize < sizeof(int16_t)) {
+ ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
status = -EINVAL;
break;
}
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
- }
- }
- } break;
+ const int32_t preset = (int32_t) * (uint16_t*)pValue;
- default:
- ALOGV("%s invalid param %d", __func__, params[0]);
- status = -EINVAL;
- break;
+ ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
+ if (preset >= EqualizerGetNumPresets() || preset < 0) {
+ ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
+ status = -EINVAL;
+ break;
+ }
+ EqualizerSetPreset(pContext, preset);
+ } break;
+
+ case EQ_PARAM_BAND_LEVEL: {
+ if (paramSize < 2 * sizeof(int32_t)) {
+ ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
+ status = -EINVAL;
+ break;
+ }
+ if (valueSize < sizeof(int16_t)) {
+ ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
+ status = -EINVAL;
+ break;
+ }
+ const int32_t band = params[1];
+ const int32_t level = (int32_t) * (int16_t*)pValue;
+ ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
+ if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+ if (band < 0) {
+ android_errorWriteLog(0x534e4554, "32095626");
+ ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+ }
+ status = -EINVAL;
+ break;
+ }
+ EqualizerSetBandLevel(pContext, band, level);
+ } break;
+
+ case EQ_PARAM_PROPERTIES: {
+ ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
+ if (valueSize < sizeof(int16_t)) {
+ ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
+ status = -EINVAL;
+ break;
+ }
+ int16_t* p = (int16_t*)pValue;
+ if ((int)p[0] >= EqualizerGetNumPresets()) {
+ ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
+ status = -EINVAL;
+ break;
+ }
+ if (p[0] >= 0) {
+ EqualizerSetPreset(pContext, (int)p[0]);
+ } else {
+ constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
+ if (valueSize < valueSizeRequired) {
+ android_errorWriteLog(0x534e4554, "37563371");
+ ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u", __func__, valueSize,
+ valueSizeRequired);
+ status = -EINVAL;
+ break;
+ }
+ if ((int)p[1] != FIVEBAND_NUMBANDS) {
+ ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+ status = -EINVAL;
+ break;
+ }
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
+ }
+ }
+ } break;
+
+ default:
+ ALOGV("%s invalid param %d", __func__, params[0]);
+ status = -EINVAL;
+ break;
}
ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2712,13 +2541,10 @@
//
//----------------------------------------------------------------------------
-int Volume_getParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue) {
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t* pValueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2728,19 +2554,19 @@
}
switch (params[0]) {
case VOLUME_PARAM_LEVEL:
- if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+ if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
ALOGV("%s VOLUME_PARAM_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- status = VolumeGetVolumeLevel(pContext, (int16_t *)(pValue));
- ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t *)pValue);
+ status = VolumeGetVolumeLevel(pContext, (int16_t*)(pValue));
+ ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t*)pValue);
break;
case VOLUME_PARAM_MAXLEVEL:
- if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+ if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
ALOGV("%s VOLUME_PARAM_MAXLEVEL invalid *pValueSize %u", __func__, *pValueSize);
status = -EINVAL;
break;
@@ -2748,21 +2574,21 @@
// no need to set *pValueSize
// in millibel
- *(int16_t *)pValue = 0;
- ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t *)pValue);
+ *(int16_t*)pValue = 0;
+ ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t*)pValue);
break;
case VOLUME_PARAM_STEREOPOSITION:
- if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
- ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u",
- __func__, *pValueSize);
+ if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+ ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
// no need to set *pValueSize
- VolumeGetStereoPosition(pContext, (int16_t *)pValue);
- ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t *)pValue);
+ VolumeGetStereoPosition(pContext, (int16_t*)pValue);
+ ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t*)pValue);
break;
case VOLUME_PARAM_MUTE:
@@ -2773,21 +2599,21 @@
}
*pValueSize = sizeof(uint32_t);
- status = VolumeGetMute(pContext, (uint32_t *)pValue);
- ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t *)pValue);
+ status = VolumeGetMute(pContext, (uint32_t*)pValue);
+ ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t*)pValue);
break;
case VOLUME_PARAM_ENABLESTEREOPOSITION:
if (*pValueSize < sizeof(int32_t)) {
- ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u",
- __func__, *pValueSize);
+ ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u", __func__,
+ *pValueSize);
status = -EINVAL;
break;
}
*pValueSize = sizeof(int32_t);
- *(int32_t *)pValue = pContext->pBundledContext->bStereoPositionEnabled;
- ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t *)pValue);
+ *(int32_t*)pValue = pContext->pBundledContext->bStereoPositionEnabled;
+ ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t*)pValue);
break;
@@ -2816,13 +2642,10 @@
//
//----------------------------------------------------------------------------
-int Volume_setParameter(EffectContext *pContext,
- uint32_t paramSize,
- void *pParam,
- uint32_t valueSize,
- void *pValue) {
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+ uint32_t valueSize, void* pValue) {
int status = 0;
- int32_t *params = (int32_t *)pParam;
+ int32_t* params = (int32_t*)pParam;
ALOGVV("%s start", __func__);
@@ -2838,7 +2661,7 @@
break;
}
- const int16_t level = *(int16_t *)pValue;
+ const int16_t level = *(int16_t*)pValue;
ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, level);
ALOGVV("%s VOLUME_PARAM_LEVEL Calling VolumeSetVolumeLevel", __func__);
status = VolumeSetVolumeLevel(pContext, level);
@@ -2853,7 +2676,7 @@
break;
}
- const uint32_t mute = *(uint32_t *)pValue;
+ const uint32_t mute = *(uint32_t*)pValue;
ALOGVV("%s VOLUME_PARAM_MUTE %d", __func__, mute);
ALOGVV("%s VOLUME_PARAM_MUTE Calling VolumeSetMute", __func__);
status = VolumeSetMute(pContext, mute);
@@ -2862,15 +2685,16 @@
case VOLUME_PARAM_ENABLESTEREOPOSITION: {
if (valueSize < sizeof(uint32_t)) {
- ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u",
- __func__, valueSize);
+ ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u", __func__,
+ valueSize);
status = -EINVAL;
break;
}
- const uint32_t positionEnabled = *(uint32_t *)pValue;
+ const uint32_t positionEnabled = *(uint32_t*)pValue;
status = VolumeEnableStereoPosition(pContext, positionEnabled)
- ?: VolumeSetStereoPosition(pContext, pContext->pBundledContext->positionSaved);
+ ?: VolumeSetStereoPosition(pContext,
+ pContext->pBundledContext->positionSaved);
ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION called", __func__);
} break;
@@ -2881,13 +2705,11 @@
break;
}
- const int16_t position = *(int16_t *)pValue;
+ const int16_t position = *(int16_t*)pValue;
ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, position);
- ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition",
- __func__);
+ ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition", __func__);
status = VolumeSetStereoPosition(pContext, position);
- ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition",
- __func__);
+ ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition", __func__);
} break;
default:
@@ -2912,18 +2734,15 @@
* Remarks :
****************************************************************************************/
-LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix)
-{
- LVM_INT16 db_fix;
- LVM_INT16 Shift;
- LVM_INT16 SmallRemainder;
- LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
+LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) {
+ LVM_INT16 db_fix;
+ LVM_INT16 Shift;
+ LVM_INT16 SmallRemainder;
+ LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
/* Count leading bits, 1 cycle in assembly*/
- for (Shift = 0; Shift<32; Shift++)
- {
- if ((Remainder & 0x80000000U)!=0)
- {
+ for (Shift = 0; Shift < 32; Shift++) {
+ if ((Remainder & 0x80000000U) != 0) {
break;
}
Remainder = Remainder << 1;
@@ -2934,9 +2753,9 @@
*
* dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2)
*/
- db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
+ db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24);
- db_fix = (LVM_INT16)(db_fix + SmallRemainder );
+ db_fix = (LVM_INT16)(db_fix + SmallRemainder);
SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder);
db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9));
@@ -2960,11 +2779,10 @@
//
//----------------------------------------------------------------------------
-int Effect_setEnabled(EffectContext *pContext, bool enabled)
-{
- ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__,
- pContext->EffectType, enabled, pContext->pBundledContext->NumberEffectsEnabled);
- int &effectInDrain = pContext->pBundledContext->effectInDrain;
+int Effect_setEnabled(EffectContext* pContext, bool enabled) {
+ ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__, pContext->EffectType,
+ enabled, pContext->pBundledContext->NumberEffectsEnabled);
+ int& effectInDrain = pContext->pBundledContext->effectInDrain;
if (enabled) {
// Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due
// to their nature.
@@ -2972,15 +2790,15 @@
switch (pContext->EffectType) {
case LVM_BASS_BOOST:
if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
- ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
- return -EINVAL;
+ ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
+ return -EINVAL;
}
- if(pContext->pBundledContext->SamplesToExitCountBb <= 0){
+ if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
pContext->pBundledContext->NumberEffectsEnabled++;
}
effectInDrain &= ~(1 << LVM_BASS_BOOST);
pContext->pBundledContext->SamplesToExitCountBb =
- (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+ (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
pContext->pBundledContext->bBassEnabled = LVM_TRUE;
tempDisabled = pContext->pBundledContext->bBassTempDisabled;
break;
@@ -2989,12 +2807,12 @@
ALOGV("\tEffect_setEnabled() LVM_EQUALIZER is already enabled");
return -EINVAL;
}
- if(pContext->pBundledContext->SamplesToExitCountEq <= 0){
+ if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
pContext->pBundledContext->NumberEffectsEnabled++;
}
effectInDrain &= ~(1 << LVM_EQUALIZER);
pContext->pBundledContext->SamplesToExitCountEq =
- (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+ (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
pContext->pBundledContext->bEqualizerEnabled = LVM_TRUE;
break;
case LVM_VIRTUALIZER:
@@ -3002,12 +2820,12 @@
ALOGV("\tEffect_setEnabled() LVM_VIRTUALIZER is already enabled");
return -EINVAL;
}
- if(pContext->pBundledContext->SamplesToExitCountVirt <= 0){
+ if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
pContext->pBundledContext->NumberEffectsEnabled++;
}
effectInDrain &= ~(1 << LVM_VIRTUALIZER);
pContext->pBundledContext->SamplesToExitCountVirt =
- (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+ (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled;
break;
@@ -3084,41 +2902,39 @@
//
//-----------------------------------------------------------------------
-int16_t LVC_Convert_VolToDb(uint32_t vol){
- int16_t dB;
+int16_t LVC_Convert_VolToDb(uint32_t vol) {
+ int16_t dB;
- dB = LVC_ToDB_s32Tos16(vol <<7);
- dB = (dB +8)>>4;
- dB = (dB <-96) ? -96 : dB ;
+ dB = LVC_ToDB_s32Tos16(vol << 7);
+ dB = (dB + 8) >> 4;
+ dB = (dB < -96) ? -96 : dB;
return dB;
}
-} // namespace
-} // namespace
+} // namespace
+} // namespace android
extern "C" {
/* Effect Control Interface Implementation: Process */
-int Effect_process(effect_handle_t self,
- audio_buffer_t *inBuffer,
- audio_buffer_t *outBuffer){
- EffectContext * pContext = (EffectContext *) self;
- int status = 0;
- int processStatus = 0;
+int Effect_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+ EffectContext* pContext = (EffectContext*)self;
+ int status = 0;
+ int processStatus = 0;
const int NrChannels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
-//ALOGV("\tEffect_process Start : Enabled = %d Called = %d (%8d %8d %8d)",
-//pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
-// pContext->pBundledContext->SamplesToExitCountBb,
-// pContext->pBundledContext->SamplesToExitCountVirt,
-// pContext->pBundledContext->SamplesToExitCountEq);
+ // ALOGV("\tEffect_process Start : Enabled = %d Called = %d (%8d %8d %8d)",
+ // pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
+ // pContext->pBundledContext->SamplesToExitCountBb,
+ // pContext->pBundledContext->SamplesToExitCountVirt,
+ // pContext->pBundledContext->SamplesToExitCountEq);
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : Effect_process() ERROR pContext == NULL");
return -EINVAL;
}
- //if(pContext->EffectType == LVM_BASS_BOOST){
+ // if(pContext->EffectType == LVM_BASS_BOOST){
// ALOGV("\tEffect_process: Effect type is BASS_BOOST");
//}else if(pContext->EffectType == LVM_EQUALIZER){
// ALOGV("\tEffect_process: Effect type is LVM_EQUALIZER");
@@ -3126,15 +2942,14 @@
// ALOGV("\tEffect_process: Effect type is LVM_VIRTUALIZER");
//}
- if (inBuffer == NULL || inBuffer->raw == NULL ||
- outBuffer == NULL || outBuffer->raw == NULL ||
- inBuffer->frameCount != outBuffer->frameCount){
+ if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+ inBuffer->frameCount != outBuffer->frameCount) {
ALOGV("\tLVM_ERROR : Effect_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
return -EINVAL;
}
- int &effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
- int &effectInDrain = pContext->pBundledContext->effectInDrain;
+ int& effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
+ int& effectInDrain = pContext->pBundledContext->effectInDrain;
if ((effectProcessCalled & 1 << pContext->EffectType) != 0) {
ALOGW("Effect %d already called", pContext->EffectType);
const int undrainedEffects = effectInDrain & ~effectProcessCalled;
@@ -3164,12 +2979,12 @@
}
effectProcessCalled |= 1 << pContext->EffectType;
- if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE)&&
- (pContext->EffectType == LVM_BASS_BOOST)){
- //ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
- if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+ if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE) &&
+ (pContext->EffectType == LVM_BASS_BOOST)) {
+ // ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
+ if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
pContext->pBundledContext->SamplesToExitCountBb -= outBuffer->frameCount * NrChannels;
- //ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
+ // ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
// pContext->pBundledContext->SamplesToExitCountBb);
}
if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
@@ -3181,21 +2996,21 @@
ALOGV("\tEffect_process() this is the last frame for LVM_BASS_BOOST");
}
}
- if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE)&&
- (pContext->EffectType == LVM_VOLUME)){
- //ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
+ if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE) &&
+ (pContext->EffectType == LVM_VOLUME)) {
+ // ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
status = -ENODATA;
if ((effectInDrain & 1 << LVM_VOLUME) != 0) {
pContext->pBundledContext->NumberEffectsEnabled--;
effectInDrain &= ~(1 << LVM_VOLUME);
}
}
- if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE)&&
- (pContext->EffectType == LVM_EQUALIZER)){
- //ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
- if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+ if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE) &&
+ (pContext->EffectType == LVM_EQUALIZER)) {
+ // ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
+ if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
pContext->pBundledContext->SamplesToExitCountEq -= outBuffer->frameCount * NrChannels;
- //ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
+ // ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
// pContext->pBundledContext->SamplesToExitCountEq);
}
if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
@@ -3207,13 +3022,12 @@
ALOGV("\tEffect_process() this is the last frame for LVM_EQUALIZER");
}
}
- if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE)&&
- (pContext->EffectType == LVM_VIRTUALIZER)){
- //ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
- if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
- pContext->pBundledContext->SamplesToExitCountVirt -=
- outBuffer->frameCount * NrChannels;
- //ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
+ if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE) &&
+ (pContext->EffectType == LVM_VIRTUALIZER)) {
+ // ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
+ if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
+ pContext->pBundledContext->SamplesToExitCountVirt -= outBuffer->frameCount * NrChannels;
+ // ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
// pContext->pBundledContext->SamplesToExitCountVirt);
}
if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
@@ -3226,37 +3040,34 @@
}
}
- if(status != -ENODATA){
+ if (status != -ENODATA) {
pContext->pBundledContext->NumberEffectsCalled++;
}
if (pContext->pBundledContext->NumberEffectsCalled >=
- pContext->pBundledContext->NumberEffectsEnabled) {
-
+ pContext->pBundledContext->NumberEffectsEnabled) {
// We expect the # effects called to be equal to # effects enabled in sequence (including
// draining effects). Warn if this is not the case due to inconsistent calls.
ALOGW_IF(pContext->pBundledContext->NumberEffectsCalled >
- pContext->pBundledContext->NumberEffectsEnabled,
- "%s Number of effects called %d is greater than number of effects enabled %d",
- __func__, pContext->pBundledContext->NumberEffectsCalled,
- pContext->pBundledContext->NumberEffectsEnabled);
- effectProcessCalled = 0; // reset our consistency check.
+ pContext->pBundledContext->NumberEffectsEnabled,
+ "%s Number of effects called %d is greater than number of effects enabled %d",
+ __func__, pContext->pBundledContext->NumberEffectsCalled,
+ pContext->pBundledContext->NumberEffectsEnabled);
+ effectProcessCalled = 0; // reset our consistency check.
- //ALOGV("\tEffect_process Calling process with %d effects enabled, %d called: Effect %d",
- //pContext->pBundledContext->NumberEffectsEnabled,
- //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+ // ALOGV("\tEffect_process Calling process with %d effects enabled, %d called: Effect
+ // %d", pContext->pBundledContext->NumberEffectsEnabled,
+ // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
- if (status == -ENODATA){
+ if (status == -ENODATA) {
ALOGV("\tEffect_process() processing last frame");
}
pContext->pBundledContext->NumberEffectsCalled = 0;
/* Process all the available frames, block processing is
handled internalLY by the LVM bundle */
- processStatus = android::LvmBundle_process(inBuffer->f32,
- outBuffer->f32,
- outBuffer->frameCount,
- pContext);
- if (processStatus != 0){
+ processStatus = android::LvmBundle_process(inBuffer->f32, outBuffer->f32,
+ outBuffer->frameCount, pContext);
+ if (processStatus != 0) {
ALOGV("\tLVM_ERROR : LvmBundle_process returned error %d", processStatus);
if (status == 0) {
status = processStatus;
@@ -3264,133 +3075,126 @@
return status;
}
} else {
- //ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
- //pContext->pBundledContext->NumberEffectsEnabled,
- //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+ // ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect
+ // %d", pContext->pBundledContext->NumberEffectsEnabled,
+ // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
for (size_t i = 0; i < outBuffer->frameCount * NrChannels; ++i) {
outBuffer->f32[i] += inBuffer->f32[i];
}
} else if (outBuffer->raw != inBuffer->raw) {
- memcpy(outBuffer->raw,
- inBuffer->raw,
- outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
+ memcpy(outBuffer->raw, inBuffer->raw,
+ outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
}
}
return status;
-} /* end Effect_process */
+} /* end Effect_process */
// The value offset of an effect parameter is computed by rounding up
// the parameter size to the next 32 bit alignment.
-static inline uint32_t computeParamVOffset(const effect_param_t *p) {
- return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) *
- sizeof(int32_t);
+static inline uint32_t computeParamVOffset(const effect_param_t* p) {
+ return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) * sizeof(int32_t);
}
/* Effect Control Interface Implementation: Command */
-int Effect_command(effect_handle_t self,
- uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData){
- EffectContext * pContext = (EffectContext *) self;
+int Effect_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ EffectContext* pContext = (EffectContext*)self;
- //ALOGV("\t\nEffect_command start");
+ // ALOGV("\t\nEffect_command start");
- if(pContext->EffectType == LVM_BASS_BOOST){
- //ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
+ if (pContext->EffectType == LVM_BASS_BOOST) {
+ // ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
}
- if(pContext->EffectType == LVM_VIRTUALIZER){
- //ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
+ // ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
}
- if(pContext->EffectType == LVM_EQUALIZER){
- //ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
+ if (pContext->EffectType == LVM_EQUALIZER) {
+ // ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
}
- if(pContext->EffectType == LVM_VOLUME){
- //ALOGV("\tEffect_command setting command for LVM_VOLUME");
+ if (pContext->EffectType == LVM_VOLUME) {
+ // ALOGV("\tEffect_command setting command for LVM_VOLUME");
}
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : Effect_command ERROR pContext == NULL");
return -EINVAL;
}
- //ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+ // ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
// Incase we disable an effect, next time process is
// called the number of effect called could be greater
// pContext->pBundledContext->NumberEffectsCalled = 0;
- //ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
+ // ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
// pContext->pBundledContext->NumberEffectsCalled,
// pContext->pBundledContext->NumberEffectsEnabled);
- switch (cmdCode){
+ switch (cmdCode) {
case EFFECT_CMD_INIT:
- if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR, EFFECT_CMD_INIT: ERROR for effect type %d",
- pContext->EffectType);
+ pContext->EffectType);
return -EINVAL;
}
- *(int *) pReplyData = 0;
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
- if(pContext->EffectType == LVM_BASS_BOOST){
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
+ *(int*)pReplyData = 0;
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
+ if (pContext->EffectType == LVM_BASS_BOOST) {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
android::BassSetStrength(pContext, 0);
}
- if(pContext->EffectType == LVM_VIRTUALIZER){
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
android::VirtualizerSetStrength(pContext, 0);
}
- if(pContext->EffectType == LVM_EQUALIZER){
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
+ if (pContext->EffectType == LVM_EQUALIZER) {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
android::EqualizerSetPreset(pContext, 0);
}
- if(pContext->EffectType == LVM_VOLUME){
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
- *(int *) pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
+ if (pContext->EffectType == LVM_VOLUME) {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
+ *(int*)pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
}
break;
case EFFECT_CMD_SET_CONFIG:
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
- if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
+ if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+ replySize == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
- "EFFECT_CMD_SET_CONFIG: ERROR");
+ "EFFECT_CMD_SET_CONFIG: ERROR");
return -EINVAL;
}
- *(int *) pReplyData = android::Effect_setConfig(pContext, (effect_config_t *) pCmdData);
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
+ *(int*)pReplyData = android::Effect_setConfig(pContext, (effect_config_t*)pCmdData);
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
break;
case EFFECT_CMD_GET_CONFIG:
if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
- "EFFECT_CMD_GET_CONFIG: ERROR");
+ "EFFECT_CMD_GET_CONFIG: ERROR");
return -EINVAL;
}
- android::Effect_getConfig(pContext, (effect_config_t *)pReplyData);
+ android::Effect_getConfig(pContext, (effect_config_t*)pReplyData);
break;
case EFFECT_CMD_RESET:
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
android::Effect_setConfig(pContext, &pContext->config);
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
break;
- case EFFECT_CMD_GET_PARAM:{
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+ case EFFECT_CMD_GET_PARAM: {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
- effect_param_t *p = (effect_param_t *)pCmdData;
+ effect_param_t* p = (effect_param_t*)pCmdData;
if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) ||
- cmdSize < (sizeof(effect_param_t) + p->psize) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize < (sizeof(effect_param_t) + p->psize)) {
+ cmdSize < (sizeof(effect_param_t) + p->psize) || pReplyData == NULL ||
+ replySize == NULL || *replySize < (sizeof(effect_param_t) + p->psize)) {
ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: ERROR");
return -EINVAL;
}
@@ -3401,75 +3205,62 @@
}
const uint32_t paddedParamSize = computeParamVOffset(p);
if ((EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) < paddedParamSize) ||
- (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize <
- p->vsize)) {
+ (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize < p->vsize)) {
ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: padded_psize or vsize too big");
return -EINVAL;
}
uint32_t expectedReplySize = sizeof(effect_param_t) + paddedParamSize + p->vsize;
if (*replySize < expectedReplySize) {
ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: min. replySize %u, got %u bytes",
- expectedReplySize, *replySize);
+ expectedReplySize, *replySize);
android_errorWriteLog(0x534e4554, "32705438");
return -EINVAL;
}
memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
- p = (effect_param_t *)pReplyData;
+ p = (effect_param_t*)pReplyData;
uint32_t voffset = paddedParamSize;
- if(pContext->EffectType == LVM_BASS_BOOST){
- p->status = android::BassBoost_getParameter(pContext,
- p->psize,
- p->data,
- &p->vsize,
+ if (pContext->EffectType == LVM_BASS_BOOST) {
+ p->status = android::BassBoost_getParameter(pContext, p->psize, p->data, &p->vsize,
p->data + voffset);
- //ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
+ // ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
// "*pCmdData %d, *replySize %d, *pReplyData %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
}
- if(pContext->EffectType == LVM_VIRTUALIZER){
- p->status = android::Virtualizer_getParameter(pContext,
- p->psize,
- (void *)p->data,
- &p->vsize,
- p->data + voffset);
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
+ p->status = android::Virtualizer_getParameter(pContext, p->psize, (void*)p->data,
+ &p->vsize, p->data + voffset);
- //ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
+ // ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
// "*pCmdData %d, *replySize %d, *pReplyData %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
}
- if(pContext->EffectType == LVM_EQUALIZER){
- //ALOGV("\tEqualizer_command cmdCode Case: "
+ if (pContext->EffectType == LVM_EQUALIZER) {
+ // ALOGV("\tEqualizer_command cmdCode Case: "
// "EFFECT_CMD_GET_PARAM start");
- p->status = android::Equalizer_getParameter(pContext,
- p->psize,
- p->data,
- &p->vsize,
+ p->status = android::Equalizer_getParameter(pContext, p->psize, p->data, &p->vsize,
p->data + voffset);
- //ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
+ // ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
// "*pReplyData %08x %08x",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), *replySize,
// *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset),
// *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset +
// sizeof(int32_t)));
}
- if(pContext->EffectType == LVM_VOLUME){
- //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
- p->status = android::Volume_getParameter(pContext,
- p->psize,
- (void *)p->data,
- &p->vsize,
- p->data + voffset);
+ if (pContext->EffectType == LVM_VOLUME) {
+ // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+ p->status = android::Volume_getParameter(pContext, p->psize, (void*)p->data,
+ &p->vsize, p->data + voffset);
- //ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
+ // ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
// "*pCmdData %d, *replySize %d, *pReplyData %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
@@ -3477,123 +3268,114 @@
}
*replySize = sizeof(effect_param_t) + voffset + p->vsize;
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
} break;
- case EFFECT_CMD_SET_PARAM:{
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
- if(pContext->EffectType == LVM_BASS_BOOST){
- //ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
+ case EFFECT_CMD_SET_PARAM: {
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+ if (pContext->EffectType == LVM_BASS_BOOST) {
+ // ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+ // %d",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
- if (pCmdData == NULL ||
- cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+ if (pCmdData == NULL ||
+ cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+ pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : BassBoost_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t * const p = (effect_param_t *) pCmdData;
+ effect_param_t* const p = (effect_param_t*)pCmdData;
const uint32_t voffset = computeParamVOffset(p);
- //ALOGV("\tnBassBoost_command cmdSize is %d\n"
+ // ALOGV("\tnBassBoost_command cmdSize is %d\n"
// "\tsizeof(effect_param_t) is %d\n"
// "\tp->psize is %d\n"
// "\tp->vsize is %d"
// "\n",
// cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
- *(int *)pReplyData = android::BassBoost_setParameter(pContext,
- p->psize,
- (void *)p->data,
- p->vsize,
- p->data + voffset);
+ *(int*)pReplyData = android::BassBoost_setParameter(
+ pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
}
- if(pContext->EffectType == LVM_VIRTUALIZER){
- // Warning this log will fail to properly read an int32_t value, assumes int16_t
- //ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
- // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
- // *replySize,
- // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+ if (pContext->EffectType == LVM_VIRTUALIZER) {
+ // Warning this log will fail to properly read an int32_t value, assumes int16_t
+ // ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+ // %d",
+ // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+ // *replySize,
+ // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+ // sizeof(int32_t)));
- if (pCmdData == NULL ||
- // legal parameters are int16_t or int32_t
- cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int32_t)) ||
- cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+ if (pCmdData == NULL ||
+ // legal parameters are int16_t or int32_t
+ cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int32_t)) ||
+ cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+ pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t * const p = (effect_param_t *) pCmdData;
+ effect_param_t* const p = (effect_param_t*)pCmdData;
const uint32_t voffset = computeParamVOffset(p);
- //ALOGV("\tnVirtualizer_command cmdSize is %d\n"
+ // ALOGV("\tnVirtualizer_command cmdSize is %d\n"
// "\tsizeof(effect_param_t) is %d\n"
// "\tp->psize is %d\n"
// "\tp->vsize is %d"
// "\n",
// cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
- *(int *)pReplyData = android::Virtualizer_setParameter(pContext,
- p->psize,
- (void *)p->data,
- p->vsize,
- p->data + voffset);
+ *(int*)pReplyData = android::Virtualizer_setParameter(
+ pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
}
- if(pContext->EffectType == LVM_EQUALIZER){
- //ALOGV("\tEqualizer_command cmdCode Case: "
- // "EFFECT_CMD_SET_PARAM start");
- //ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
- // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
- // *replySize,
- // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+ if (pContext->EffectType == LVM_EQUALIZER) {
+ // ALOGV("\tEqualizer_command cmdCode Case: "
+ // "EFFECT_CMD_SET_PARAM start");
+ // ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d
+ // ",
+ // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+ // *replySize,
+ // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+ // sizeof(int32_t)));
if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+ pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : Equalizer_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t * const p = (effect_param_t *) pCmdData;
+ effect_param_t* const p = (effect_param_t*)pCmdData;
const uint32_t voffset = computeParamVOffset(p);
- *(int *)pReplyData = android::Equalizer_setParameter(pContext,
- p->psize,
- (void *)p->data,
- p->vsize,
- p->data + voffset);
+ *(int*)pReplyData = android::Equalizer_setParameter(
+ pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
}
- if(pContext->EffectType == LVM_VOLUME){
- //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
- //ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+ if (pContext->EffectType == LVM_VOLUME) {
+ // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+ // ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +sizeof(int32_t)));
- if (pCmdData == NULL ||
- cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize != sizeof(int32_t)) {
+ if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
+ pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : Volume_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t * const p = (effect_param_t *) pCmdData;
+ effect_param_t* const p = (effect_param_t*)pCmdData;
const uint32_t voffset = computeParamVOffset(p);
- *(int *)pReplyData = android::Volume_setParameter(pContext,
- p->psize,
- (void *)p->data,
- p->vsize,
- p->data + voffset);
+ *(int*)pReplyData = android::Volume_setParameter(pContext, p->psize, (void*)p->data,
+ p->vsize, p->data + voffset);
}
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
} break;
case EFFECT_CMD_ENABLE:
@@ -3603,57 +3385,56 @@
return -EINVAL;
}
- *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
+ *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
break;
case EFFECT_CMD_DISABLE:
- //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
+ // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_DISABLE: ERROR");
return -EINVAL;
}
- *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
+ *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
break;
- case EFFECT_CMD_SET_DEVICE:
- {
+ case EFFECT_CMD_SET_DEVICE: {
ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
- if (pCmdData == NULL){
+ if (pCmdData == NULL) {
ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR");
return -EINVAL;
}
- uint32_t device = *(uint32_t *)pCmdData;
- pContext->pBundledContext->nOutputDevice = (audio_devices_t) device;
+ audio_devices_t device = *(audio_devices_t *)pCmdData;
+ pContext->pBundledContext->nOutputDevice = device;
if (pContext->EffectType == LVM_BASS_BOOST) {
- if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+ if ((device == AUDIO_DEVICE_OUT_SPEAKER) ||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
+ (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)) {
ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
- // If a device doesnt support bassboost the effect must be temporarily disabled
+ // If a device doesn't support bassboost the effect must be temporarily disabled
// the effect must still report its original state as this can only be changed
// by the ENABLE/DISABLE command
if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
android::LvmEffect_disable(pContext);
}
pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
} else {
ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
// If a device supports bassboost and the effect has been temporarily disabled
// previously then re-enable it
if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
android::LvmEffect_enable(pContext);
}
pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
@@ -3664,129 +3445,128 @@
// default case unless configuration is forced
if (android::VirtualizerIsDeviceSupported(device) != 0) {
ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
- //If a device doesnt support virtualizer the effect must be temporarily
+ // If a device doesn't support virtualizer the effect must be temporarily
// disabled the effect must still report its original state as this can
// only be changed by the ENABLE/DISABLE command
if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
android::LvmEffect_disable(pContext);
}
pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
} else {
ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
// If a device supports virtualizer and the effect has been temporarily
// disabled previously then re-enable it
- if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+ if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
+ *(int32_t*)pCmdData);
android::LvmEffect_enable(pContext);
}
pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
}
- } // else virtualization mode is forced to a certain device, nothing to do
+ } // else virtualization mode is forced to a certain device, nothing to do
}
ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
break;
}
- case EFFECT_CMD_SET_VOLUME:
- {
+ case EFFECT_CMD_SET_VOLUME: {
uint32_t leftVolume, rightVolume;
- int16_t leftdB, rightdB;
- int16_t maxdB, pandB;
- int32_t vol_ret[2] = {1<<24,1<<24}; // Apply no volume
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+ int16_t leftdB, rightdB;
+ int16_t maxdB, pandB;
+ int32_t vol_ret[2] = {1 << 24, 1 << 24}; // Apply no volume
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
// if pReplyData is NULL, VOL_CTRL is delegated to another effect
- if(pReplyData == LVM_NULL){
+ if (pReplyData == LVM_NULL) {
break;
}
if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL ||
- replySize == NULL || *replySize < 2*sizeof(int32_t)) {
+ replySize == NULL || *replySize < 2 * sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
- "EFFECT_CMD_SET_VOLUME: ERROR");
+ "EFFECT_CMD_SET_VOLUME: ERROR");
return -EINVAL;
}
- leftVolume = ((*(uint32_t *)pCmdData));
- rightVolume = ((*((uint32_t *)pCmdData + 1)));
+ leftVolume = ((*(uint32_t*)pCmdData));
+ rightVolume = ((*((uint32_t*)pCmdData + 1)));
- if(leftVolume == 0x1000000){
+ if (leftVolume == 0x1000000) {
leftVolume -= 1;
}
- if(rightVolume == 0x1000000){
+ if (rightVolume == 0x1000000) {
rightVolume -= 1;
}
// Convert volume to dB
- leftdB = android::LVC_Convert_VolToDb(leftVolume);
+ leftdB = android::LVC_Convert_VolToDb(leftVolume);
rightdB = android::LVC_Convert_VolToDb(rightVolume);
pandB = rightdB - leftdB;
// Calculate max volume in dB
maxdB = leftdB;
- if(rightdB > maxdB){
+ if (rightdB > maxdB) {
maxdB = rightdB;
}
- //ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
+ // ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
// "effect is %d",
- //pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
+ // pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
//(int32_t)maxdB, pContext->EffectType);
- //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
- //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
+ // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
+ // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
// leftdB, rightdB, pandB);
- memcpy(pReplyData, vol_ret, sizeof(int32_t)*2);
- android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB*100));
+ memcpy(pReplyData, vol_ret, sizeof(int32_t) * 2);
+ android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB * 100));
/* Get the current settings */
- LvmStatus =LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+ LvmStatus =
+ LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
/* Volume parameters */
- ActiveParams.VC_Balance = pandB;
- ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance );
+ ActiveParams.VC_Balance = pandB;
+ ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance);
/* Activate the initial settings */
- LvmStatus =LVM_SetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+ LvmStatus =
+ LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
- if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVM_SUCCESS) return -EINVAL;
break;
- }
+ }
case EFFECT_CMD_SET_AUDIO_MODE:
break;
default:
return -EINVAL;
}
- //ALOGV("\tEffect_command end...\n\n");
+ // ALOGV("\tEffect_command end...\n\n");
return 0;
-} /* end Effect_command */
+} /* end Effect_command */
/* Effect Control Interface Implementation: get_descriptor */
-int Effect_getDescriptor(effect_handle_t self,
- effect_descriptor_t *pDescriptor)
-{
- EffectContext * pContext = (EffectContext *) self;
- const effect_descriptor_t *desc;
+int Effect_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+ EffectContext* pContext = (EffectContext*)self;
+ const effect_descriptor_t* desc;
if (pContext == NULL || pDescriptor == NULL) {
ALOGV("Effect_getDescriptor() invalid param");
return -EINVAL;
}
- switch(pContext->EffectType) {
+ switch (pContext->EffectType) {
case LVM_BASS_BOOST:
desc = &android::gBassBoostDescriptor;
break;
@@ -3806,26 +3586,24 @@
*pDescriptor = *desc;
return 0;
-} /* end Effect_getDescriptor */
+} /* end Effect_getDescriptor */
// effect_handle_t interface implementation for effect
const struct effect_interface_s gLvmEffectInterface = {
- Effect_process,
- Effect_command,
- Effect_getDescriptor,
- NULL,
-}; /* end gLvmEffectInterface */
+ Effect_process,
+ Effect_command,
+ Effect_getDescriptor,
+ NULL,
+}; /* end gLvmEffectInterface */
// This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
- .tag = AUDIO_EFFECT_LIBRARY_TAG,
- .version = EFFECT_LIBRARY_API_VERSION,
- .name = "Effect Bundle Library",
- .implementor = "NXP Software Ltd.",
- .create_effect = android::EffectCreate,
- .release_effect = android::EffectRelease,
- .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+ .tag = AUDIO_EFFECT_LIBRARY_TAG,
+ .version = EFFECT_LIBRARY_API_VERSION,
+ .name = "Effect Bundle Library",
+ .implementor = "NXP Software Ltd.",
+ .create_effect = android::EffectCreate,
+ .release_effect = android::EffectRelease,
+ .get_descriptor = android::EffectGetDescriptor,
};
-
}
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 524e103..f3e7884 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -23,176 +23,148 @@
#include <LVM.h>
#include <limits.h>
-#define FIVEBAND_NUMBANDS 5
-#define MAX_NUM_BANDS 5
-#define MAX_CALL_SIZE 256
-#define LVM_MAX_SESSIONS 32
-#define LVM_UNUSED_SESSION INT_MAX
-#define BASS_BOOST_CUP_LOAD_ARM9E 150 // Expressed in 0.1 MIPS
-#define VIRTUALIZER_CUP_LOAD_ARM9E 120 // Expressed in 0.1 MIPS
-#define EQUALIZER_CUP_LOAD_ARM9E 220 // Expressed in 0.1 MIPS
-#define VOLUME_CUP_LOAD_ARM9E 0 // Expressed in 0.1 MIPS
-#define BUNDLE_MEM_USAGE 25 // Expressed in kB
+#define FIVEBAND_NUMBANDS 5
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVM_MAX_SESSIONS 32
+#define LVM_UNUSED_SESSION INT_MAX
+#define BASS_BOOST_CUP_LOAD_ARM9E 150 // Expressed in 0.1 MIPS
+#define VIRTUALIZER_CUP_LOAD_ARM9E 120 // Expressed in 0.1 MIPS
+#define EQUALIZER_CUP_LOAD_ARM9E 220 // Expressed in 0.1 MIPS
+#define VOLUME_CUP_LOAD_ARM9E 0 // Expressed in 0.1 MIPS
+#define BUNDLE_MEM_USAGE 25 // Expressed in kB
#ifndef OPENSL_ES_H_
-static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6,
- { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
-const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_;
-#endif //OPENSL_ES_H_
+static const effect_uuid_t SL_IID_VOLUME_ = {
+ 0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+const effect_uuid_t* const SL_IID_VOLUME = &SL_IID_VOLUME_;
+#endif // OPENSL_ES_H_
-typedef enum
-{
- LVM_BASS_BOOST,
- LVM_VIRTUALIZER,
- LVM_EQUALIZER,
- LVM_VOLUME
-} lvm_effect_en;
+typedef enum { LVM_BASS_BOOST, LVM_VIRTUALIZER, LVM_EQUALIZER, LVM_VOLUME } lvm_effect_en;
// Preset configuration.
struct PresetConfig {
// Human-readable name.
- const char * name;
+ const char* name;
// An array of size nBands where each element is a configuration for the
// corresponding band.
- //const BandConfig * bandConfigs;
+ // const BandConfig * bandConfigs;
};
/* BundledEffectContext : One per session */
-struct BundledEffectContext{
- LVM_Handle_t hInstance; /* Instance handle */
- int SessionNo; /* Current session number */
- int SessionId; /* Current session id */
- bool bVolumeEnabled; /* Flag for Volume */
- bool bEqualizerEnabled; /* Flag for EQ */
- bool bBassEnabled; /* Flag for Bass */
- bool bBassTempDisabled; /* Flag for Bass to be re-enabled */
- bool bVirtualizerEnabled; /* Flag for Virtualizer */
- bool bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */
- audio_devices_t nOutputDevice; /* Output device for the effect */
- audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode*/
- int NumberEffectsEnabled; /* Effects in this session */
- int NumberEffectsCalled; /* Effects called so far */
- bool firstVolume; /* No smoothing on first Vol change */
+struct BundledEffectContext {
+ LVM_Handle_t hInstance; /* Instance handle */
+ int SessionNo; /* Current session number */
+ int SessionId; /* Current session id */
+ bool bVolumeEnabled; /* Flag for Volume */
+ bool bEqualizerEnabled; /* Flag for EQ */
+ bool bBassEnabled; /* Flag for Bass */
+ bool bBassTempDisabled; /* Flag for Bass to be re-enabled */
+ bool bVirtualizerEnabled; /* Flag for Virtualizer */
+ bool bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */
+ audio_devices_t nOutputDevice; /* Output device for the effect */
+ audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode*/
+ int NumberEffectsEnabled; /* Effects in this session */
+ int NumberEffectsCalled; /* Effects called so far */
+ bool firstVolume; /* No smoothing on first Vol change */
// Saved parameters for each effect */
// Bass Boost
- int BassStrengthSaved; /* Conversion between Get/Set */
+ int BassStrengthSaved; /* Conversion between Get/Set */
// Equalizer
- int CurPreset; /* Current preset being used */
+ int CurPreset; /* Current preset being used */
// Virtualzer
- int VirtStrengthSaved; /* Conversion between Get/Set */
+ int VirtStrengthSaved; /* Conversion between Get/Set */
// Volume
- int levelSaved; /* for when mute is set, level must be saved */
- int positionSaved;
- bool bMuteEnabled; /* Must store as mute = -96dB level */
- bool bStereoPositionEnabled;
- LVM_Fs_en SampleRate;
- int SamplesPerSecond;
- int SamplesToExitCountEq;
- int SamplesToExitCountBb;
- int SamplesToExitCountVirt;
- effect_buffer_t *workBuffer;
- int frameCount;
- int32_t bandGaindB[FIVEBAND_NUMBANDS];
- int volume;
-#ifdef SUPPORT_MC
- LVM_INT32 ChMask;
-#endif
+ int levelSaved; /* for when mute is set, level must be saved */
+ int positionSaved;
+ bool bMuteEnabled; /* Must store as mute = -96dB level */
+ bool bStereoPositionEnabled;
+ LVM_Fs_en SampleRate;
+ int SamplesPerSecond;
+ int SamplesToExitCountEq;
+ int SamplesToExitCountBb;
+ int SamplesToExitCountVirt;
+ effect_buffer_t* workBuffer;
+ int frameCount;
+ int32_t bandGaindB[FIVEBAND_NUMBANDS];
+ int volume;
+ LVM_INT32 ChMask;
/* Bitmask whether drain is in progress due to disabling the effect.
The corresponding bit to an effect is set by 1 << lvm_effect_en. */
- int effectInDrain;
+ int effectInDrain;
/* Bitmask whether process() was called for a particular effect.
The corresponding bit to an effect is set by 1 << lvm_effect_en. */
- int effectProcessCalled;
+ int effectProcessCalled;
};
/* SessionContext : One session */
-struct SessionContext{
- bool bBundledEffectsEnabled;
- bool bVolumeInstantiated;
- bool bEqualizerInstantiated;
- bool bBassInstantiated;
- bool bVirtualizerInstantiated;
- BundledEffectContext *pBundledContext;
+struct SessionContext {
+ bool bBundledEffectsEnabled;
+ bool bVolumeInstantiated;
+ bool bEqualizerInstantiated;
+ bool bBassInstantiated;
+ bool bVirtualizerInstantiated;
+ BundledEffectContext* pBundledContext;
};
-struct EffectContext{
- const struct effect_interface_s *itfe;
- effect_config_t config;
- lvm_effect_en EffectType;
- BundledEffectContext *pBundledContext;
+struct EffectContext {
+ const struct effect_interface_s* itfe;
+ effect_config_t config;
+ lvm_effect_en EffectType;
+ BundledEffectContext* pBundledContext;
};
/* enumerated parameter settings for Volume effect */
-typedef enum
-{
- VOLUME_PARAM_LEVEL, // type SLmillibel = typedef SLuint16 (set & get)
- VOLUME_PARAM_MAXLEVEL, // type SLmillibel = typedef SLuint16 (get)
- VOLUME_PARAM_MUTE, // type SLboolean = typedef SLuint32 (set & get)
- VOLUME_PARAM_ENABLESTEREOPOSITION, // type SLboolean = typedef SLuint32 (set & get)
- VOLUME_PARAM_STEREOPOSITION, // type SLpermille = typedef SLuint16 (set & get)
+typedef enum {
+ VOLUME_PARAM_LEVEL, // type SLmillibel = typedef SLuint16 (set & get)
+ VOLUME_PARAM_MAXLEVEL, // type SLmillibel = typedef SLuint16 (get)
+ VOLUME_PARAM_MUTE, // type SLboolean = typedef SLuint32 (set & get)
+ VOLUME_PARAM_ENABLESTEREOPOSITION, // type SLboolean = typedef SLuint32 (set & get)
+ VOLUME_PARAM_STEREOPOSITION, // type SLpermille = typedef SLuint16 (set & get)
} t_volume_params;
static const int PRESET_CUSTOM = -1;
-static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {
- {30000, 120000},
- {120001, 460000},
- {460001, 1800000},
- {1800001, 7000000},
- {7000001, 20000000}};
+static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {{30000, 120000},
+ {120001, 460000},
+ {460001, 1800000},
+ {1800001, 7000000},
+ {7000001, 20000000}};
-//Note: If these frequencies change, please update LimitLevel values accordingly.
-static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {
- 60, /* Frequencies in Hz */
- 230,
- 910,
- 3600,
- 14000};
+// Note: If these frequencies change, please update LimitLevel values accordingly.
+static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {60, /* Frequencies in Hz */
+ 230, 910, 3600, 14000};
-static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {
- 96, /* Q factor multiplied by 100 */
- 96,
- 96,
- 96,
- 96};
+static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {96, /* Q factor multiplied by 100 */
+ 96, 96, 96, 96};
-static const LVM_INT16 EQNB_5BandNormalPresets[] = {
- 3, 0, 0, 0, 3, /* Normal Preset */
- 8, 5, -3, 5, 6, /* Classical Preset */
- 15, -6, 7, 13, 10, /* Dance Preset */
- 0, 0, 0, 0, 0, /* Flat Preset */
- 6, -2, -2, 6, -3, /* Folk Preset */
- 8, -8, 13, -1, -4, /* Heavy Metal Preset */
- 10, 6, -4, 5, 8, /* Hip Hop Preset */
- 8, 5, -4, 5, 9, /* Jazz Preset */
- -6, 4, 9, 4, -5, /* Pop Preset */
- 10, 6, -1, 8, 10}; /* Rock Preset */
+static const LVM_INT16 EQNB_5BandNormalPresets[] = {3, 0, 0, 0, 3, /* Normal Preset */
+ 8, 5, -3, 5, 6, /* Classical Preset */
+ 15, -6, 7, 13, 10, /* Dance Preset */
+ 0, 0, 0, 0, 0, /* Flat Preset */
+ 6, -2, -2, 6, -3, /* Folk Preset */
+ 8, -8, 13, -1, -4, /* Heavy Metal Preset */
+ 10, 6, -4, 5, 8, /* Hip Hop Preset */
+ 8, 5, -4, 5, 9, /* Jazz Preset */
+ -6, 4, 9, 4, -5, /* Pop Preset */
+ 10, 6, -1, 8, 10}; /* Rock Preset */
-static const LVM_INT16 EQNB_5BandSoftPresets[] = {
- 3, 0, 0, 0, 3, /* Normal Preset */
- 5, 3, -2, 4, 4, /* Classical Preset */
- 6, 0, 2, 4, 1, /* Dance Preset */
- 0, 0, 0, 0, 0, /* Flat Preset */
- 3, 0, 0, 2, -1, /* Folk Preset */
- 4, 1, 9, 3, 0, /* Heavy Metal Preset */
- 5, 3, 0, 1, 3, /* Hip Hop Preset */
- 4, 2, -2, 2, 5, /* Jazz Preset */
- -1, 2, 5, 1, -2, /* Pop Preset */
- 5, 3, -1, 3, 5}; /* Rock Preset */
+static const LVM_INT16 EQNB_5BandSoftPresets[] = {3, 0, 0, 0, 3, /* Normal Preset */
+ 5, 3, -2, 4, 4, /* Classical Preset */
+ 6, 0, 2, 4, 1, /* Dance Preset */
+ 0, 0, 0, 0, 0, /* Flat Preset */
+ 3, 0, 0, 2, -1, /* Folk Preset */
+ 4, 1, 9, 3, 0, /* Heavy Metal Preset */
+ 5, 3, 0, 1, 3, /* Hip Hop Preset */
+ 4, 2, -2, 2, 5, /* Jazz Preset */
+ -1, 2, 5, 1, -2, /* Pop Preset */
+ 5, 3, -1, 3, 5}; /* Rock Preset */
-static const PresetConfig gEqualizerPresets[] = {
- {"Normal"},
- {"Classical"},
- {"Dance"},
- {"Flat"},
- {"Folk"},
- {"Heavy Metal"},
- {"Hip Hop"},
- {"Jazz"},
- {"Pop"},
- {"Rock"}};
+static const PresetConfig gEqualizerPresets[] = {{"Normal"}, {"Classical"}, {"Dance"}, {"Flat"},
+ {"Folk"}, {"Heavy Metal"}, {"Hip Hop"}, {"Jazz"},
+ {"Pop"}, {"Rock"}};
/* The following tables have been computed using the actual levels measured by the output of
* white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
@@ -201,14 +173,14 @@
* updated.
*/
-static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {
- 7.56, 9.69, 9.59, 7.37, 2.88};
+static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {7.56, 9.69, 9.59, 7.37,
+ 2.88};
-static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = {
- 126.0, 115.0, 125.0, 104.0 };
+static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS - 1] = {126.0, 115.0,
+ 125.0, 104.0};
static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = {
- 221.21, 208.10, 28.16, 0.0, 0.0 };
+ 221.21, 208.10, 28.16, 0.0, 0.0};
static const float LimitLevel_bassBoostEnergyCoefficient = 9.00;
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 39f5bb6..9ea70ce 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -18,7 +18,7 @@
typedef float LVM_FLOAT;
#endif
#define LOG_TAG "Reverb"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
//#define LOG_NDEBUG 0
#include <assert.h>
@@ -37,19 +37,23 @@
// effect_handle_t interface implementation for reverb
extern "C" const struct effect_interface_s gReverbInterface;
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
- if ((LvmStatus) == LVREV_NULLADDRESS){\
- ALOGV("\tLVREV_ERROR : Parameter error - "\
- "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
- }\
- if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES){\
- ALOGV("\tLVREV_ERROR : Parameter error - "\
- "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
- }\
- if ((LvmStatus) == LVREV_OUTOFRANGE){\
- ALOGV("\tLVREV_ERROR : Parameter error - "\
- "out of range returned by %s in %s\n", callingFunc, calledFunc);\
- }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc) \
+ { \
+ if ((LvmStatus) == LVREV_NULLADDRESS) { \
+ ALOGV("\tLVREV_ERROR : Parameter error - " \
+ "null pointer returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES) { \
+ ALOGV("\tLVREV_ERROR : Parameter error - " \
+ "bad number of samples returned by %s in %s\n\n\n\n", \
+ callingFunc, calledFunc); \
+ } \
+ if ((LvmStatus) == LVREV_OUTOFRANGE) { \
+ ALOGV("\tLVREV_ERROR : Parameter error - " \
+ "out of range returned by %s in %s\n", \
+ callingFunc, calledFunc); \
+ } \
}
// Namespaces
@@ -81,8 +85,8 @@
// NXP SW auxiliary environmental reverb
const effect_descriptor_t gAuxEnvReverbDescriptor = {
- { 0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, { 0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e } },
- { 0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } },
+ {0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}},
+ {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
EFFECT_CONTROL_API_VERSION,
EFFECT_FLAG_TYPE_AUXILIARY,
LVREV_CUP_LOAD_ARM9E,
@@ -128,42 +132,39 @@
};
// gDescriptors contains pointers to all defined effect descriptor in this library
-static const effect_descriptor_t * const gDescriptors[] = {
- &gAuxEnvReverbDescriptor,
- &gInsertEnvReverbDescriptor,
- &gAuxPresetReverbDescriptor,
- &gInsertPresetReverbDescriptor
-};
+static const effect_descriptor_t* const gDescriptors[] = {
+ &gAuxEnvReverbDescriptor, &gInsertEnvReverbDescriptor, &gAuxPresetReverbDescriptor,
+ &gInsertPresetReverbDescriptor};
-typedef float process_buffer_t; // process in float
+typedef float process_buffer_t; // process in float
-struct ReverbContext{
- const struct effect_interface_s *itfe;
- effect_config_t config;
- LVREV_Handle_t hInstance;
- int16_t SavedRoomLevel;
- int16_t SavedHfLevel;
- int16_t SavedDecayTime;
- int16_t SavedDecayHfRatio;
- int16_t SavedReverbLevel;
- int16_t SavedDiffusion;
- int16_t SavedDensity;
- bool bEnabled;
- LVM_Fs_en SampleRate;
- process_buffer_t *InFrames;
- process_buffer_t *OutFrames;
- size_t bufferSizeIn;
- size_t bufferSizeOut;
- bool auxiliary;
- bool preset;
- uint16_t curPreset;
- uint16_t nextPreset;
- int SamplesToExitCount;
- LVM_INT16 leftVolume;
- LVM_INT16 rightVolume;
- LVM_INT16 prevLeftVolume;
- LVM_INT16 prevRightVolume;
- int volumeMode;
+struct ReverbContext {
+ const struct effect_interface_s* itfe;
+ effect_config_t config;
+ LVREV_Handle_t hInstance;
+ int16_t SavedRoomLevel;
+ int16_t SavedHfLevel;
+ int16_t SavedDecayTime;
+ int16_t SavedDecayHfRatio;
+ int16_t SavedReverbLevel;
+ int16_t SavedDiffusion;
+ int16_t SavedDensity;
+ bool bEnabled;
+ LVM_Fs_en SampleRate;
+ process_buffer_t* InFrames;
+ process_buffer_t* OutFrames;
+ size_t bufferSizeIn;
+ size_t bufferSizeOut;
+ bool auxiliary;
+ bool preset;
+ uint16_t curPreset;
+ uint16_t nextPreset;
+ int SamplesToExitCount;
+ LVM_INT16 leftVolume;
+ LVM_INT16 rightVolume;
+ LVM_INT16 prevLeftVolume;
+ LVM_INT16 prevRightVolume;
+ int volumeMode;
};
enum {
@@ -174,44 +175,38 @@
#define REVERB_DEFAULT_PRESET REVERB_PRESET_NONE
-#define REVERB_SEND_LEVEL 0.75f // 0.75 in 4.12 format
-#define REVERB_UNIT_VOLUME (0x1000) // 1.0 in 4.12 format
+#define REVERB_SEND_LEVEL 0.75f // 0.75 in 4.12 format
+#define REVERB_UNIT_VOLUME (0x1000) // 1.0 in 4.12 format
//--- local function prototypes
-int Reverb_init (ReverbContext *pContext);
-void Reverb_free (ReverbContext *pContext);
-int Reverb_setConfig (ReverbContext *pContext, effect_config_t *pConfig);
-void Reverb_getConfig (ReverbContext *pContext, effect_config_t *pConfig);
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize);
-int Reverb_getParameter (ReverbContext *pContext,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue);
-int Reverb_LoadPreset (ReverbContext *pContext);
-int Reverb_paramValueSize (int32_t param);
+int Reverb_init(ReverbContext* pContext);
+void Reverb_free(ReverbContext* pContext);
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig);
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig);
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize);
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue);
+int Reverb_LoadPreset(ReverbContext* pContext);
+int Reverb_paramValueSize(int32_t param);
/* Effect Library Interface Implementation */
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
- int32_t sessionId __unused,
- int32_t ioId __unused,
- effect_handle_t *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId __unused,
+ int32_t ioId __unused, effect_handle_t* pHandle) {
int ret;
int i;
- int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
- const effect_descriptor_t *desc;
+ int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
+ const effect_descriptor_t* desc;
ALOGV("\t\nEffectCreate start");
- if (pHandle == NULL || uuid == NULL){
+ if (pHandle == NULL || uuid == NULL) {
ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
return -EINVAL;
}
for (i = 0; i < length; i++) {
desc = gDescriptors[i];
- if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t))
- == 0) {
+ if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) == 0) {
ALOGV("\tEffectCreate - UUID matched Reverb type %d, UUID = %x", i, desc->uuid.timeLow);
break;
}
@@ -221,16 +216,16 @@
return -ENOENT;
}
- ReverbContext *pContext = new ReverbContext;
+ ReverbContext* pContext = new ReverbContext;
- pContext->itfe = &gReverbInterface;
+ pContext->itfe = &gReverbInterface;
pContext->hInstance = NULL;
pContext->auxiliary = false;
- if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY){
+ if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
pContext->auxiliary = true;
ALOGV("\tEffectCreate - AUX");
- }else{
+ } else {
ALOGV("\tEffectCreate - INS");
}
@@ -241,14 +236,14 @@
pContext->curPreset = REVERB_PRESET_LAST + 1;
pContext->nextPreset = REVERB_DEFAULT_PRESET;
ALOGV("\tEffectCreate - PRESET");
- }else{
+ } else {
ALOGV("\tEffectCreate - ENVIRONMENTAL");
}
ALOGV("\tEffectCreate - Calling Reverb_init");
ret = Reverb_init(pContext);
- if (ret < 0){
+ if (ret < 0) {
ALOGV("\tLVM_ERROR : EffectCreate() init failed");
delete pContext;
return ret;
@@ -256,25 +251,25 @@
*pHandle = (effect_handle_t)pContext;
-
int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+ channels = (pContext->auxiliary == true) ? channels : FCC_2;
// Allocate memory for reverb process (*2 is for STEREO)
pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * channels;
pContext->bufferSizeOut = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * FCC_2;
- pContext->InFrames = (process_buffer_t *)calloc(pContext->bufferSizeIn, 1 /* size */);
- pContext->OutFrames = (process_buffer_t *)calloc(pContext->bufferSizeOut, 1 /* size */);
+ pContext->InFrames = (process_buffer_t*)calloc(pContext->bufferSizeIn, 1 /* size */);
+ pContext->OutFrames = (process_buffer_t*)calloc(pContext->bufferSizeOut, 1 /* size */);
ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
ALOGV("\tEffectCreate end\n");
return 0;
} /* end EffectCreate */
-extern "C" int EffectRelease(effect_handle_t handle){
- ReverbContext * pContext = (ReverbContext *)handle;
+extern "C" int EffectRelease(effect_handle_t handle) {
+ ReverbContext* pContext = (ReverbContext*)handle;
ALOGV("\tEffectRelease %p", handle);
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
return -EINVAL;
}
@@ -288,12 +283,11 @@
return 0;
} /* end EffectRelease */
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
- effect_descriptor_t *pDescriptor) {
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
int i;
- int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
+ int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
- if (pDescriptor == NULL || uuid == NULL){
+ if (pDescriptor == NULL || uuid == NULL) {
ALOGV("EffectGetDescriptor() called with NULL pointer");
return -EINVAL;
}
@@ -301,8 +295,8 @@
for (i = 0; i < length; i++) {
if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
*pDescriptor = *gDescriptors[i];
- ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x",
- i, gDescriptors[i]->uuid.timeLow);
+ ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x", i,
+ gDescriptors[i]->uuid.timeLow);
return 0;
}
}
@@ -311,12 +305,13 @@
} /* end EffectGetDescriptor */
/* local functions */
-#define CHECK_ARG(cond) { \
- if (!(cond)) { \
- ALOGV("\tLVM_ERROR : Invalid argument: "#cond); \
- return -EINVAL; \
- } \
-}
+#define CHECK_ARG(cond) \
+ { \
+ if (!(cond)) { \
+ ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+ return -EINVAL; \
+ } \
+ }
//----------------------------------------------------------------------------
// process()
@@ -335,91 +330,100 @@
// pOut: pointer to updated stereo 16 bit output data
//
//----------------------------------------------------------------------------
-int process( effect_buffer_t *pIn,
- effect_buffer_t *pOut,
- int frameCount,
- ReverbContext *pContext){
-
+int process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount, ReverbContext* pContext) {
int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
- LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
- // Check that the input is either mono or stereo
- if (!(channels == 1 || channels == FCC_2) ) {
- ALOGE("\tLVREV_ERROR : process invalid PCM format");
+ // Reverb only effects the stereo channels in multichannel source.
+ if (channels < 1 || channels > LVM_MAX_CHANNELS) {
+ ALOGE("\tLVREV_ERROR : process invalid PCM channels %d", channels);
return -EINVAL;
}
size_t inSize = frameCount * sizeof(process_buffer_t) * channels;
size_t outSize = frameCount * sizeof(process_buffer_t) * FCC_2;
- if (pContext->InFrames == NULL ||
- pContext->bufferSizeIn < inSize) {
+ if (pContext->InFrames == NULL || pContext->bufferSizeIn < inSize) {
free(pContext->InFrames);
pContext->bufferSizeIn = inSize;
- pContext->InFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeIn);
+ pContext->InFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeIn);
}
- if (pContext->OutFrames == NULL ||
- pContext->bufferSizeOut < outSize) {
+ if (pContext->OutFrames == NULL || pContext->bufferSizeOut < outSize) {
free(pContext->OutFrames);
pContext->bufferSizeOut = outSize;
- pContext->OutFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeOut);
+ pContext->OutFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeOut);
}
-
// Check for NULL pointers
if ((pContext->InFrames == NULL) || (pContext->OutFrames == NULL)) {
ALOGE("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
return -EINVAL;
}
-
if (pContext->preset && pContext->nextPreset != pContext->curPreset) {
Reverb_LoadPreset(pContext);
}
if (pContext->auxiliary) {
static_assert(std::is_same<decltype(*pIn), decltype(*pContext->InFrames)>::value,
- "pIn and InFrames must be same type");
+ "pIn and InFrames must be same type");
memcpy(pContext->InFrames, pIn, frameCount * channels * sizeof(*pIn));
+ } else {
+ // mono input is duplicated
+ if (channels >= FCC_2) {
+ for (int i = 0; i < frameCount; i++) {
+ pContext->InFrames[FCC_2 * i] =
+ (process_buffer_t)pIn[channels * i] * REVERB_SEND_LEVEL;
+ pContext->InFrames[FCC_2 * i + 1] =
+ (process_buffer_t)pIn[channels * i + 1] * REVERB_SEND_LEVEL;
+ }
} else {
- // insert reverb input is always stereo
- for (int i = 0; i < frameCount; i++) {
- pContext->InFrames[2 * i] = (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL;
- pContext->InFrames[2 * i + 1] = (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL;
+ for (int i = 0; i < frameCount; i++) {
+ pContext->InFrames[FCC_2 * i] = pContext->InFrames[FCC_2 * i + 1] =
+ (process_buffer_t)pIn[i] * REVERB_SEND_LEVEL;
+ }
}
}
if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) {
memset(pContext->OutFrames, 0,
- frameCount * sizeof(*pContext->OutFrames) * FCC_2); //always stereo here
+ frameCount * sizeof(*pContext->OutFrames) * FCC_2); // always stereo here
} else {
- if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
- memset(pContext->InFrames, 0,
- frameCount * sizeof(*pContext->OutFrames) * channels);
+ if (pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
+ memset(pContext->InFrames, 0, frameCount * sizeof(*pContext->OutFrames) * channels);
ALOGV("\tZeroing %d samples per frame at the end of call", channels);
}
/* Process the samples, producing a stereo output */
- LvmStatus = LVREV_Process(pContext->hInstance, /* Instance handle */
- pContext->InFrames, /* Input buffer */
- pContext->OutFrames, /* Output buffer */
- frameCount); /* Number of samples to read */
+ LvmStatus = LVREV_Process(pContext->hInstance, /* Instance handle */
+ pContext->InFrames, /* Input buffer */
+ pContext->OutFrames, /* Output buffer */
+ frameCount); /* Number of samples to read */
}
LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
// Convert to 16 bits
if (pContext->auxiliary) {
// nothing to do here
} else {
- for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
- // Mix with dry input
- pContext->OutFrames[i] += pIn[i];
+ if (channels >= FCC_2) {
+ for (int i = 0; i < frameCount; i++) {
+ // Mix with dry input
+ pContext->OutFrames[FCC_2 * i] += pIn[channels * i];
+ pContext->OutFrames[FCC_2 * i + 1] += pIn[channels * i + 1];
+ }
+ } else {
+ for (int i = 0; i < frameCount; i++) {
+ // Mix with dry input
+ pContext->OutFrames[FCC_2 * i] += pIn[i];
+ pContext->OutFrames[FCC_2 * i + 1] += pIn[i];
+ }
}
// apply volume with ramp if needed
if ((pContext->leftVolume != pContext->prevLeftVolume ||
- pContext->rightVolume != pContext->prevRightVolume) &&
- pContext->volumeMode == REVERB_VOLUME_RAMP) {
+ pContext->rightVolume != pContext->prevRightVolume) &&
+ pContext->volumeMode == REVERB_VOLUME_RAMP) {
// FIXME: still using int16 volumes.
// For reference: REVERB_UNIT_VOLUME (0x1000) // 1.0 in 4.12 format
float vl = (float)pContext->prevLeftVolume / 4096;
@@ -450,20 +454,35 @@
}
}
-
- // Accumulate if required
- if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
- //ALOGV("\tBuffer access is ACCUMULATE");
- for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
- pOut[i] += pContext->OutFrames[i];
+ if (channels > 2) {
+ // Accumulate if required
+ if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ for (int i = 0; i < frameCount; i++) {
+ pOut[channels * i] += pContext->OutFrames[FCC_2 * i];
+ pOut[channels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
+ }
+ } else {
+ for (int i = 0; i < frameCount; i++) {
+ pOut[channels * i] = pContext->OutFrames[FCC_2 * i];
+ pOut[channels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
+ }
}
- }else{
- //ALOGV("\tBuffer access is WRITE");
- memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+ for (int i = 0; i < frameCount; i++) {
+ for (int j = FCC_2; j < channels; j++) {
+ pOut[channels * i + j] = pIn[channels * i + j];
+ }
+ }
+ } else {
+ if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ pOut[i] += pContext->OutFrames[i];
+ }
+ } else {
+ memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+ }
}
-
return 0;
-} /* end process */
+} /* end process */
//----------------------------------------------------------------------------
// Reverb_free()
@@ -477,30 +496,28 @@
//
//----------------------------------------------------------------------------
-void Reverb_free(ReverbContext *pContext){
-
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVREV_MemoryTable_st MemTab;
+void Reverb_free(ReverbContext* pContext) {
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVREV_MemoryTable_st MemTab;
/* Free the algorithm memory */
- LvmStatus = LVREV_GetMemoryTable(pContext->hInstance,
- &MemTab,
- LVM_NULL);
+ LvmStatus = LVREV_GetMemoryTable(pContext->hInstance, &MemTab, LVM_NULL);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free")
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].Size != 0){
- if (MemTab.Region[i].pBaseAddress != NULL){
+ for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+ if (MemTab.Region[i].Size != 0) {
+ if (MemTab.Region[i].pBaseAddress != NULL) {
free(MemTab.Region[i].pBaseAddress);
- }else{
- ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32 " bytes "
- "for region %u at %p ERROR\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+ } else {
+ ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32
+ " bytes "
+ "for region %u at %p ERROR\n",
+ MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
}
-} /* end Reverb_free */
+} /* end Reverb_free */
//----------------------------------------------------------------------------
// Reverb_setConfig()
@@ -516,89 +533,89 @@
//
//----------------------------------------------------------------------------
-int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){
- LVM_Fs_en SampleRate;
- //ALOGV("\tReverb_setConfig start");
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig) {
+ LVM_Fs_en SampleRate;
+ // ALOGV("\tReverb_setConfig start");
CHECK_ARG(pContext != NULL);
CHECK_ARG(pConfig != NULL);
CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
+ int inputChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) ||
- ((!pContext->auxiliary) && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO));
- CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
- CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
- || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+ ((!pContext->auxiliary) && (inputChannels <= LVM_MAX_CHANNELS)));
+ int outputChannels = audio_channel_count_from_out_mask(pConfig->outputCfg.channels);
+ CHECK_ARG(outputChannels >= FCC_2 && outputChannels <= LVM_MAX_CHANNELS);
+ CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+ pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
- //ALOGV("\tReverb_setConfig calling memcpy");
+ // ALOGV("\tReverb_setConfig calling memcpy");
pContext->config = *pConfig;
switch (pConfig->inputCfg.samplingRate) {
- case 8000:
- SampleRate = LVM_FS_8000;
- break;
- case 16000:
- SampleRate = LVM_FS_16000;
- break;
- case 22050:
- SampleRate = LVM_FS_22050;
- break;
- case 32000:
- SampleRate = LVM_FS_32000;
- break;
- case 44100:
- SampleRate = LVM_FS_44100;
- break;
- case 48000:
- SampleRate = LVM_FS_48000;
- break;
- case 88200:
- SampleRate = LVM_FS_88200;
- break;
- case 96000:
- SampleRate = LVM_FS_96000;
- break;
- case 176400:
- SampleRate = LVM_FS_176400;
- break;
- case 192000:
- SampleRate = LVM_FS_192000;
- break;
- default:
- ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
- return -EINVAL;
+ case 8000:
+ SampleRate = LVM_FS_8000;
+ break;
+ case 16000:
+ SampleRate = LVM_FS_16000;
+ break;
+ case 22050:
+ SampleRate = LVM_FS_22050;
+ break;
+ case 32000:
+ SampleRate = LVM_FS_32000;
+ break;
+ case 44100:
+ SampleRate = LVM_FS_44100;
+ break;
+ case 48000:
+ SampleRate = LVM_FS_48000;
+ break;
+ case 88200:
+ SampleRate = LVM_FS_88200;
+ break;
+ case 96000:
+ SampleRate = LVM_FS_96000;
+ break;
+ case 176400:
+ SampleRate = LVM_FS_176400;
+ break;
+ case 192000:
+ SampleRate = LVM_FS_192000;
+ break;
+ default:
+            ALOGV("\tReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+ return -EINVAL;
}
if (pContext->SampleRate != SampleRate) {
+ LVREV_ControlParams_st ActiveParams;
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS;
- LVREV_ControlParams_st ActiveParams;
- LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS;
-
- //ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
+ // ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
/* Get the current settings */
- LvmStatus = LVREV_GetControlParameters(pContext->hInstance,
- &ActiveParams);
+ LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
ActiveParams.SampleRate = SampleRate;
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
- //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n");
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ // ALOGV("\tReverb_setConfig Successfully called LVREV_SetControlParameters\n");
pContext->SampleRate = SampleRate;
- }else{
- //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
+ } else {
+ // ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
}
- //ALOGV("\tReverb_setConfig End");
+ // ALOGV("\tReverb_setConfig End");
return 0;
-} /* end Reverb_setConfig */
+} /* end Reverb_setConfig */
//----------------------------------------------------------------------------
// Reverb_getConfig()
@@ -614,10 +631,9 @@
//
//----------------------------------------------------------------------------
-void Reverb_getConfig(ReverbContext *pContext, effect_config_t *pConfig)
-{
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig) {
*pConfig = pContext->config;
-} /* end Reverb_getConfig */
+} /* end Reverb_getConfig */
//----------------------------------------------------------------------------
// Reverb_init()
@@ -631,35 +647,35 @@
//
//----------------------------------------------------------------------------
-int Reverb_init(ReverbContext *pContext){
+int Reverb_init(ReverbContext* pContext) {
ALOGV("\tReverb_init start");
CHECK_ARG(pContext != NULL);
- if (pContext->hInstance != NULL){
+ if (pContext->hInstance != NULL) {
Reverb_free(pContext);
}
- pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+ pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
if (pContext->auxiliary) {
- pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO;
+ pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO;
} else {
- pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
}
- pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.inputCfg.samplingRate = 44100;
- pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.inputCfg.bufferProvider.cookie = NULL;
- pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
- pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
- pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
- pContext->config.outputCfg.samplingRate = 44100;
- pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.inputCfg.samplingRate = 44100;
+ pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+ pContext->config.inputCfg.bufferProvider.cookie = NULL;
+ pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+ pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+ pContext->config.outputCfg.samplingRate = 44100;
+ pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
- pContext->config.outputCfg.bufferProvider.cookie = NULL;
- pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+ pContext->config.outputCfg.bufferProvider.cookie = NULL;
+ pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
pContext->leftVolume = REVERB_UNIT_VOLUME;
pContext->rightVolume = REVERB_UNIT_VOLUME;
@@ -667,40 +683,39 @@
pContext->prevRightVolume = REVERB_UNIT_VOLUME;
pContext->volumeMode = REVERB_VOLUME_FLAT;
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVREV_ControlParams_st params; /* Control Parameters */
- LVREV_InstanceParams_st InstParams; /* Instance parameters */
- LVREV_MemoryTable_st MemTab; /* Memory allocation table */
- bool bMallocFailure = LVM_FALSE;
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st params; /* Control Parameters */
+ LVREV_InstanceParams_st InstParams; /* Instance parameters */
+ LVREV_MemoryTable_st MemTab; /* Memory allocation table */
+ bool bMallocFailure = LVM_FALSE;
/* Set the capabilities */
- InstParams.MaxBlockSize = MAX_CALL_SIZE;
- InstParams.SourceFormat = LVM_STEREO; // Max format, could be mono during process
- InstParams.NumDelays = LVREV_DELAYLINES_4;
+ InstParams.MaxBlockSize = MAX_CALL_SIZE;
+ InstParams.SourceFormat = LVM_STEREO; // Max format, could be mono during process
+ InstParams.NumDelays = LVREV_DELAYLINES_4;
/* Allocate memory, forcing alignment */
- LvmStatus = LVREV_GetMemoryTable(LVM_NULL,
- &MemTab,
- &InstParams);
+ LvmStatus = LVREV_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
- ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
+ ALOGV("\tCreateInstance Successfully called LVM_GetMemoryTable\n");
/* Allocate memory */
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].Size != 0){
- MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
+ for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+ if (MemTab.Region[i].Size != 0) {
+ MemTab.Region[i].pBaseAddress = calloc(1, MemTab.Region[i].Size);
- if (MemTab.Region[i].pBaseAddress == LVM_NULL){
+ if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
- " bytes for region %u\n", MemTab.Region[i].Size, i );
+ " bytes for region %u\n",
+ MemTab.Region[i].Size, i);
bMallocFailure = LVM_TRUE;
- }else{
+ } else {
ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
- " bytes for region %u at %p\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+ " bytes for region %u at %p\n",
+ MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
}
@@ -708,82 +723,83 @@
/* If one or more of the memory regions failed to allocate, free the regions that were
* succesfully allocated and return with an error
*/
- if(bMallocFailure == LVM_TRUE){
- for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
- if (MemTab.Region[i].pBaseAddress == LVM_NULL){
+ if (bMallocFailure == LVM_TRUE) {
+ for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
+ if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
- " bytes for region %u - Not freeing\n", MemTab.Region[i].Size, i );
- }else{
+ " bytes for region %u - Not freeing\n",
+ MemTab.Region[i].Size, i);
+ } else {
ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
- " bytes for region %u at %p- free\n",
- MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
+ " bytes for region %u at %p- free\n",
+ MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
}
}
return -EINVAL;
}
- ALOGV("\tReverb_init CreateInstance Succesfully malloc'd memory\n");
+ ALOGV("\tReverb_init CreateInstance Successfully malloc'd memory\n");
/* Initialise */
pContext->hInstance = LVM_NULL;
/* Init sets the instance handle */
- LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance,
- &MemTab,
- &InstParams);
+ LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &MemTab, &InstParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
- ALOGV("\tReverb_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+ ALOGV("\tReverb_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
/* Set the initial process parameters */
/* General parameters */
- params.OperatingMode = LVM_MODE_ON;
- params.SampleRate = LVM_FS_44100;
- pContext->SampleRate = LVM_FS_44100;
+ params.OperatingMode = LVM_MODE_ON;
+ params.SampleRate = LVM_FS_44100;
+ pContext->SampleRate = LVM_FS_44100;
- if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){
- params.SourceFormat = LVM_MONO;
+ if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) {
+ params.SourceFormat = LVM_MONO;
} else {
- params.SourceFormat = LVM_STEREO;
+ params.SourceFormat = LVM_STEREO;
}
+ if ((pContext->auxiliary == false) && (params.SourceFormat == LVM_MONO)) {
+ params.SourceFormat = LVM_STEREO;
+ }
/* Reverb parameters */
- params.Level = 0;
- params.LPF = 23999;
- params.HPF = 50;
- params.T60 = 1490;
- params.Density = 100;
- params.Damping = 21;
- params.RoomSize = 100;
+ params.Level = 0;
+ params.LPF = 23999;
+ params.HPF = 50;
+ params.T60 = 1490;
+ params.Density = 100;
+ params.Damping = 21;
+ params.RoomSize = 100;
- pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate)/1000;
+ pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate) / 1000;
/* Saved strength is used to return the exact strength that was used in the set to the get
* because we map the original strength range of 0:1000 to 1:15, and this will avoid
* quantisation like effect when returning
*/
- pContext->SavedRoomLevel = -6000;
- pContext->SavedHfLevel = 0;
- pContext->bEnabled = LVM_FALSE;
- pContext->SavedDecayTime = params.T60;
- pContext->SavedDecayHfRatio = params.Damping*20;
- pContext->SavedDensity = params.RoomSize*10;
- pContext->SavedDiffusion = params.Density*10;
- pContext->SavedReverbLevel = -6000;
+ pContext->SavedRoomLevel = -6000;
+ pContext->SavedHfLevel = 0;
+ pContext->bEnabled = LVM_FALSE;
+ pContext->SavedDecayTime = params.T60;
+ pContext->SavedDecayHfRatio = params.Damping * 20;
+ pContext->SavedDensity = params.RoomSize * 10;
+ pContext->SavedDiffusion = params.Density * 10;
+ pContext->SavedReverbLevel = -6000;
/* Activate the initial settings */
- LvmStatus = LVREV_SetControlParameters(pContext->hInstance,
- ¶ms);
+ LvmStatus = LVREV_SetControlParameters(pContext->hInstance, ¶ms);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_init")
- if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+ if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
- ALOGV("\tReverb_init CreateInstance Succesfully called LVREV_SetControlParameters\n");
+ ALOGV("\tReverb_init CreateInstance Successfully called LVREV_SetControlParameters\n");
ALOGV("\tReverb_init End");
return 0;
-} /* end Reverb_init */
+} /* end Reverb_init */
//----------------------------------------------------------------------------
// ReverbConvertLevel()
@@ -796,27 +812,21 @@
//
//----------------------------------------------------------------------------
-int16_t ReverbConvertLevel(int16_t level){
- static int16_t LevelArray[101] =
- {
- -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092,
- -2000, -1918, -1842, -1773, -1708, -1648, -1592, -1540, -1490, -1443,
- -1398, -1356, -1316, -1277, -1240, -1205, -1171, -1138, -1106, -1076,
- -1046, -1018, -990, -963, -938, -912, -888, -864, -841, -818,
- -796, -775, -754, -734, -714, -694, -675, -656, -638, -620,
- -603, -585, -568, -552, -536, -520, -504, -489, -474, -459,
- -444, -430, -416, -402, -388, -375, -361, -348, -335, -323,
- -310, -298, -286, -274, -262, -250, -239, -228, -216, -205,
- -194, -184, -173, -162, -152, -142, -132, -121, -112, -102,
- -92, -82, -73, -64, -54, -45, -36, -27, -18, -9,
- 0
- };
+int16_t ReverbConvertLevel(int16_t level) {
+ static int16_t LevelArray[101] = {
+ -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, -2000, -1918,
+ -1842, -1773, -1708, -1648, -1592, -1540, -1490, -1443, -1398, -1356, -1316, -1277,
+ -1240, -1205, -1171, -1138, -1106, -1076, -1046, -1018, -990, -963, -938, -912,
+ -888, -864, -841, -818, -796, -775, -754, -734, -714, -694, -675, -656,
+ -638, -620, -603, -585, -568, -552, -536, -520, -504, -489, -474, -459,
+ -444, -430, -416, -402, -388, -375, -361, -348, -335, -323, -310, -298,
+ -286, -274, -262, -250, -239, -228, -216, -205, -194, -184, -173, -162,
+ -152, -142, -132, -121, -112, -102, -92, -82, -73, -64, -54, -45,
+ -36, -27, -18, -9, 0};
int16_t i;
- for(i = 0; i < 101; i++)
- {
- if(level <= LevelArray[i])
- break;
+ for (i = 0; i < 101; i++) {
+ if (level <= LevelArray[i]) break;
}
return i;
}
@@ -832,37 +842,31 @@
//
//----------------------------------------------------------------------------
-int16_t ReverbConvertHfLevel(int16_t Hflevel){
+int16_t ReverbConvertHfLevel(int16_t Hflevel) {
int16_t i;
- static LPFPair_t LPFArray[97] =
- { // Limit range to 50 for LVREV parameter range
- {-10000, 50}, { -5000, 50 }, { -4000, 50}, { -3000, 158}, { -2000, 502},
- {-1000, 1666},{ -900, 1897}, { -800, 2169}, { -700, 2496}, { -600, 2895},
- {-500, 3400}, { -400, 4066}, { -300, 5011}, { -200, 6537}, { -100, 9826},
- {-99, 9881 }, { -98, 9937 }, { -97, 9994 }, { -96, 10052}, { -95, 10111},
- {-94, 10171}, { -93, 10231}, { -92, 10293}, { -91, 10356}, { -90, 10419},
- {-89, 10484}, { -88, 10549}, { -87, 10616}, { -86, 10684}, { -85, 10753},
- {-84, 10823}, { -83, 10895}, { -82, 10968}, { -81, 11042}, { -80, 11117},
- {-79, 11194}, { -78, 11272}, { -77, 11352}, { -76, 11433}, { -75, 11516},
- {-74, 11600}, { -73, 11686}, { -72, 11774}, { -71, 11864}, { -70, 11955},
- {-69, 12049}, { -68, 12144}, { -67, 12242}, { -66, 12341}, { -65, 12443},
- {-64, 12548}, { -63, 12654}, { -62, 12763}, { -61, 12875}, { -60, 12990},
- {-59, 13107}, { -58, 13227}, { -57, 13351}, { -56, 13477}, { -55, 13607},
- {-54, 13741}, { -53, 13878}, { -52, 14019}, { -51, 14164}, { -50, 14313},
- {-49, 14467}, { -48, 14626}, { -47, 14789}, { -46, 14958}, { -45, 15132},
- {-44, 15312}, { -43, 15498}, { -42, 15691}, { -41, 15890}, { -40, 16097},
- {-39, 16311}, { -38, 16534}, { -37, 16766}, { -36, 17007}, { -35, 17259},
- {-34, 17521}, { -33, 17795}, { -32, 18081}, { -31, 18381}, { -30, 18696},
- {-29, 19027}, { -28, 19375}, { -27, 19742}, { -26, 20129}, { -25, 20540},
- {-24, 20976}, { -23, 21439}, { -22, 21934}, { -21, 22463}, { -20, 23031},
- {-19, 23643}, { -18, 23999}
- };
+ static LPFPair_t LPFArray[97] = {
+ // Limit range to 50 for LVREV parameter range
+ {-10000, 50}, {-5000, 50}, {-4000, 50}, {-3000, 158}, {-2000, 502}, {-1000, 1666},
+ {-900, 1897}, {-800, 2169}, {-700, 2496}, {-600, 2895}, {-500, 3400}, {-400, 4066},
+ {-300, 5011}, {-200, 6537}, {-100, 9826}, {-99, 9881}, {-98, 9937}, {-97, 9994},
+ {-96, 10052}, {-95, 10111}, {-94, 10171}, {-93, 10231}, {-92, 10293}, {-91, 10356},
+ {-90, 10419}, {-89, 10484}, {-88, 10549}, {-87, 10616}, {-86, 10684}, {-85, 10753},
+ {-84, 10823}, {-83, 10895}, {-82, 10968}, {-81, 11042}, {-80, 11117}, {-79, 11194},
+ {-78, 11272}, {-77, 11352}, {-76, 11433}, {-75, 11516}, {-74, 11600}, {-73, 11686},
+ {-72, 11774}, {-71, 11864}, {-70, 11955}, {-69, 12049}, {-68, 12144}, {-67, 12242},
+ {-66, 12341}, {-65, 12443}, {-64, 12548}, {-63, 12654}, {-62, 12763}, {-61, 12875},
+ {-60, 12990}, {-59, 13107}, {-58, 13227}, {-57, 13351}, {-56, 13477}, {-55, 13607},
+ {-54, 13741}, {-53, 13878}, {-52, 14019}, {-51, 14164}, {-50, 14313}, {-49, 14467},
+ {-48, 14626}, {-47, 14789}, {-46, 14958}, {-45, 15132}, {-44, 15312}, {-43, 15498},
+ {-42, 15691}, {-41, 15890}, {-40, 16097}, {-39, 16311}, {-38, 16534}, {-37, 16766},
+ {-36, 17007}, {-35, 17259}, {-34, 17521}, {-33, 17795}, {-32, 18081}, {-31, 18381},
+ {-30, 18696}, {-29, 19027}, {-28, 19375}, {-27, 19742}, {-26, 20129}, {-25, 20540},
+ {-24, 20976}, {-23, 21439}, {-22, 21934}, {-21, 22463}, {-20, 23031}, {-19, 23643},
+ {-18, 23999}};
- for(i = 0; i < 96; i++)
- {
- if(Hflevel <= LPFArray[i].Room_HF)
- break;
+ for (i = 0; i < 96; i++) {
+ if (Hflevel <= LPFArray[i].Room_HF) break;
}
return LPFArray[i].LPF;
}
@@ -879,26 +883,26 @@
//
//----------------------------------------------------------------------------
-void ReverbSetRoomHfLevel(ReverbContext *pContext, int16_t level){
- //ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
+void ReverbSetRoomHfLevel(ReverbContext* pContext, int16_t level) {
+ // ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomHfLevel")
- //ALOGV("\tReverbSetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+ // ALOGV("\tReverbSetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
ActiveParams.LPF = ReverbConvertHfLevel(level);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomHfLevel")
- //ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
+ // ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
pContext->SavedHfLevel = level;
- //ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
+ // ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
return;
}
@@ -913,30 +917,31 @@
//
//----------------------------------------------------------------------------
-int16_t ReverbGetRoomHfLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomHfLevel(ReverbContext* pContext) {
int16_t level;
- //ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
+ // ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomHfLevel")
- //ALOGV("\tReverbGetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+ // ALOGV("\tReverbGetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
level = ReverbConvertHfLevel(pContext->SavedHfLevel);
- //ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
+ // ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
// "converted level: %d\n", ActiveParams.LPF, pContext->SavedHfLevel, level);
- if((int16_t)ActiveParams.LPF != level){
- ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d %d\n",
- ActiveParams.Level, level);
+ if ((int16_t)ActiveParams.LPF != level) {
+ ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d "
+ "%d\n",
+ ActiveParams.Level, level);
}
- //ALOGV("\tReverbGetRoomHfLevel end");
+ // ALOGV("\tReverbGetRoomHfLevel end");
return pContext->SavedHfLevel;
}
@@ -952,35 +957,35 @@
//
//----------------------------------------------------------------------------
-void ReverbSetReverbLevel(ReverbContext *pContext, int16_t level){
- //ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
+void ReverbSetReverbLevel(ReverbContext* pContext, int16_t level) {
+ // ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetReverbLevel")
- //ALOGV("\tReverbSetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbSetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
// needs to subtract max levels for both RoomLevel and ReverbLevel
- CombinedLevel = (level + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
- //ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
+ CombinedLevel = (level + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
+ // ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
// CombinedLevel, level, pContext->SavedRoomLevel);
ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
- //ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetReverbLevel")
- //ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
pContext->SavedReverbLevel = level;
- //ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
+ // ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
// pContext->SavedReverbLevel);
return;
}
@@ -996,37 +1001,40 @@
//
//----------------------------------------------------------------------------
-int16_t ReverbGetReverbLevel(ReverbContext *pContext){
+int16_t ReverbGetReverbLevel(ReverbContext* pContext) {
int16_t level;
- //ALOGV("\tReverbGetReverbLevel start");
+ // ALOGV("\tReverbGetReverbLevel start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetReverbLevel")
- //ALOGV("\tReverbGetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbGetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
// needs to subtract max levels for both RoomLevel and ReverbLevel
- CombinedLevel = (pContext->SavedReverbLevel + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
+ CombinedLevel =
+ (pContext->SavedReverbLevel + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
- //ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
- //CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
+ // ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
+ // CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
level = ReverbConvertLevel(CombinedLevel);
- //ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
+ // ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
//"pContext->SavedRoomLevel: %d, CombinedLevel: %d, converted level: %d\n",
- //ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel, CombinedLevel,level);
+ // ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel,
+ // CombinedLevel,level);
- if(ActiveParams.Level != level){
- ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d %d\n",
- ActiveParams.Level, level);
+ if (ActiveParams.Level != level) {
+ ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d "
+ "%d\n",
+ ActiveParams.Level, level);
}
- //ALOGV("\tReverbGetReverbLevel end\n");
+ // ALOGV("\tReverbGetReverbLevel end\n");
return pContext->SavedReverbLevel;
}
@@ -1043,30 +1051,30 @@
//
//----------------------------------------------------------------------------
-void ReverbSetRoomLevel(ReverbContext *pContext, int16_t level){
- //ALOGV("\tReverbSetRoomLevel start (%d)", level);
+void ReverbSetRoomLevel(ReverbContext* pContext, int16_t level) {
+ // ALOGV("\tReverbSetRoomLevel start (%d)", level);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomLevel")
- //ALOGV("\tReverbSetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbSetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
// needs to subtract max levels for both RoomLevel and ReverbLevel
- CombinedLevel = (level + pContext->SavedReverbLevel)-LVREV_MAX_REVERB_LEVEL;
+ CombinedLevel = (level + pContext->SavedReverbLevel) - LVREV_MAX_REVERB_LEVEL;
ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomLevel")
- //ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
pContext->SavedRoomLevel = level;
- //ALOGV("\tReverbSetRoomLevel end");
+ // ALOGV("\tReverbSetRoomLevel end");
return;
}
@@ -1081,35 +1089,36 @@
//
//----------------------------------------------------------------------------
-int16_t ReverbGetRoomLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomLevel(ReverbContext* pContext) {
int16_t level;
- //ALOGV("\tReverbGetRoomLevel start");
+ // ALOGV("\tReverbGetRoomLevel start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT32 CombinedLevel; // Sum of room and reverb level controls
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomLevel")
- //ALOGV("\tReverbGetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+ // ALOGV("\tReverbGetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
// needs to subtract max levels for both RoomLevel and ReverbLevel
- CombinedLevel = (pContext->SavedRoomLevel + pContext->SavedReverbLevel-LVREV_MAX_REVERB_LEVEL);
+ CombinedLevel =
+ (pContext->SavedRoomLevel + pContext->SavedReverbLevel - LVREV_MAX_REVERB_LEVEL);
level = ReverbConvertLevel(CombinedLevel);
- //ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
+ // ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
// "pContext->SavedReverbLevel = %d, CombinedLevel = %d, level = %d",
// ActiveParams.Level, pContext->SavedRoomLevel,
// pContext->SavedReverbLevel, CombinedLevel, level);
- if(ActiveParams.Level != level){
+ if (ActiveParams.Level != level) {
ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomLevel() has wrong level -> %d %d\n",
ActiveParams.Level, level);
}
- //ALOGV("\tReverbGetRoomLevel end");
+ // ALOGV("\tReverbGetRoomLevel end");
return pContext->SavedRoomLevel;
}
@@ -1125,34 +1134,35 @@
//
//----------------------------------------------------------------------------
-void ReverbSetDecayTime(ReverbContext *pContext, uint32_t time){
- //ALOGV("\tReverbSetDecayTime start (%d)", time);
+void ReverbSetDecayTime(ReverbContext* pContext, uint32_t time) {
+ // ALOGV("\tReverbSetDecayTime start (%d)", time);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayTime")
- //ALOGV("\tReverbSetDecayTime Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
+ // ALOGV("\tReverbSetDecayTime Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
if (time <= LVREV_MAX_T60) {
ActiveParams.T60 = (LVM_UINT16)time;
- }
- else {
+ } else {
ActiveParams.T60 = LVREV_MAX_T60;
}
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayTime")
- //ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
+ // ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
- pContext->SamplesToExitCount = (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
- //ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount-> %d\n",pContext->SamplesToExitCount);
+ pContext->SamplesToExitCount =
+ (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
+ // ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount->
+ // %d\n",pContext->SamplesToExitCount);
pContext->SavedDecayTime = (int16_t)time;
- //ALOGV("\tReverbSetDecayTime end");
+ // ALOGV("\tReverbSetDecayTime end");
return;
}
@@ -1167,25 +1177,25 @@
//
//----------------------------------------------------------------------------
-uint32_t ReverbGetDecayTime(ReverbContext *pContext){
- //ALOGV("\tReverbGetDecayTime start");
+uint32_t ReverbGetDecayTime(ReverbContext* pContext) {
+ // ALOGV("\tReverbGetDecayTime start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayTime")
- //ALOGV("\tReverbGetDecayTime Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
+ // ALOGV("\tReverbGetDecayTime Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
- if(ActiveParams.T60 != pContext->SavedDecayTime){
+ if (ActiveParams.T60 != pContext->SavedDecayTime) {
// This will fail if the decay time is set to more than 7000
- ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n",
- ActiveParams.T60, pContext->SavedDecayTime);
+ ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n", ActiveParams.T60,
+ pContext->SavedDecayTime);
}
- //ALOGV("\tReverbGetDecayTime end");
+ // ALOGV("\tReverbGetDecayTime end");
return (uint32_t)ActiveParams.T60;
}
@@ -1201,27 +1211,27 @@
//
//----------------------------------------------------------------------------
-void ReverbSetDecayHfRatio(ReverbContext *pContext, int16_t ratio){
- //ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio);
+void ReverbSetDecayHfRatio(ReverbContext* pContext, int16_t ratio) {
+ // ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayHfRatio")
- //ALOGV("\tReverbSetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+ // ALOGV("\tReverbSetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
- ActiveParams.Damping = (LVM_INT16)(ratio/20);
+ ActiveParams.Damping = (LVM_INT16)(ratio / 20);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayHfRatio")
- //ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
+ // ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
pContext->SavedDecayHfRatio = ratio;
- //ALOGV("\tReverbSetDecayHfRatio end");
+ // ALOGV("\tReverbSetDecayHfRatio end");
return;
}
@@ -1236,24 +1246,24 @@
//
//----------------------------------------------------------------------------
-int32_t ReverbGetDecayHfRatio(ReverbContext *pContext){
- //ALOGV("\tReverbGetDecayHfRatio start");
+int32_t ReverbGetDecayHfRatio(ReverbContext* pContext) {
+ // ALOGV("\tReverbGetDecayHfRatio start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayHfRatio")
- //ALOGV("\tReverbGetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+ // ALOGV("\tReverbGetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
- if(ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)){
+ if (ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)) {
ALOGV("\tLVM_ERROR : ReverbGetDecayHfRatio() has wrong level -> %d %d\n",
- ActiveParams.Damping, pContext->SavedDecayHfRatio);
+ ActiveParams.Damping, pContext->SavedDecayHfRatio);
}
- //ALOGV("\tReverbGetDecayHfRatio end");
+ // ALOGV("\tReverbGetDecayHfRatio end");
return pContext->SavedDecayHfRatio;
}
@@ -1269,27 +1279,27 @@
//
//----------------------------------------------------------------------------
-void ReverbSetDiffusion(ReverbContext *pContext, int16_t level){
- //ALOGV("\tReverbSetDiffusion start (%d)", level);
+void ReverbSetDiffusion(ReverbContext* pContext, int16_t level) {
+ // ALOGV("\tReverbSetDiffusion start (%d)", level);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDiffusion")
- //ALOGV("\tReverbSetDiffusion Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
+ // ALOGV("\tReverbSetDiffusion Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
- ActiveParams.Density = (LVM_INT16)(level/10);
+ ActiveParams.Density = (LVM_INT16)(level / 10);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDiffusion")
- //ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
+ // ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
pContext->SavedDiffusion = level;
- //ALOGV("\tReverbSetDiffusion end");
+ // ALOGV("\tReverbSetDiffusion end");
return;
}
@@ -1304,26 +1314,26 @@
//
//----------------------------------------------------------------------------
-int32_t ReverbGetDiffusion(ReverbContext *pContext){
- //ALOGV("\tReverbGetDiffusion start");
+int32_t ReverbGetDiffusion(ReverbContext* pContext) {
+ // ALOGV("\tReverbGetDiffusion start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT16 Temp;
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT16 Temp;
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDiffusion")
- //ALOGV("\tReverbGetDiffusion Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
+ // ALOGV("\tReverbGetDiffusion Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
- Temp = (LVM_INT16)(pContext->SavedDiffusion/10);
+ Temp = (LVM_INT16)(pContext->SavedDiffusion / 10);
- if(ActiveParams.Density != Temp){
+ if (ActiveParams.Density != Temp) {
ALOGV("\tLVM_ERROR : ReverbGetDiffusion invalid value %d %d", Temp, ActiveParams.Density);
}
- //ALOGV("\tReverbGetDiffusion end");
+ // ALOGV("\tReverbGetDiffusion end");
return pContext->SavedDiffusion;
}
@@ -1339,27 +1349,27 @@
//
//----------------------------------------------------------------------------
-void ReverbSetDensity(ReverbContext *pContext, int16_t level){
- //ALOGV("\tReverbSetDensity start (%d)", level);
+void ReverbSetDensity(ReverbContext* pContext, int16_t level) {
+ // ALOGV("\tReverbSetDensity start (%d)", level);
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDensity")
- //ALOGV("\tReverbSetDensity Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
+ // ALOGV("\tReverbSetDensity Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
ActiveParams.RoomSize = (LVM_INT16)(((level * 99) / 1000) + 1);
/* Activate the initial settings */
LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDensity")
- //ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
+ // ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
pContext->SavedDensity = level;
- //ALOGV("\tReverbSetDensity end");
+ // ALOGV("\tReverbSetDensity end");
return;
}
@@ -1374,25 +1384,25 @@
//
//----------------------------------------------------------------------------
-int32_t ReverbGetDensity(ReverbContext *pContext){
- //ALOGV("\tReverbGetDensity start");
+int32_t ReverbGetDensity(ReverbContext* pContext) {
+ // ALOGV("\tReverbGetDensity start");
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
- LVM_INT16 Temp;
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+ LVM_INT16 Temp;
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDensity")
- //ALOGV("\tReverbGetDensity Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
+ // ALOGV("\tReverbGetDensity Successfully returned from LVM_GetControlParameters\n");
+ // ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
Temp = (LVM_INT16)(((pContext->SavedDensity * 99) / 1000) + 1);
- if(Temp != ActiveParams.RoomSize){
+ if (Temp != ActiveParams.RoomSize) {
ALOGV("\tLVM_ERROR : ReverbGetDensity invalid value %d %d", Temp, ActiveParams.RoomSize);
}
- //ALOGV("\tReverbGetDensity end");
+ // ALOGV("\tReverbGetDensity end");
return pContext->SavedDensity;
}
@@ -1410,20 +1420,19 @@
// Side Effects:
//
//----------------------------------------------------------------------------
-int Reverb_LoadPreset(ReverbContext *pContext)
-{
- //TODO: add reflections delay, level and reverb delay when early reflections are
+int Reverb_LoadPreset(ReverbContext* pContext) {
+ // TODO: add reflections delay, level and reverb delay when early reflections are
// implemented
pContext->curPreset = pContext->nextPreset;
if (pContext->curPreset != REVERB_PRESET_NONE) {
- const t_reverb_settings *preset = &sReverbPresets[pContext->curPreset];
+ const t_reverb_settings* preset = &sReverbPresets[pContext->curPreset];
ReverbSetRoomLevel(pContext, preset->roomLevel);
ReverbSetRoomHfLevel(pContext, preset->roomHFLevel);
ReverbSetDecayTime(pContext, preset->decayTime);
ReverbSetDecayHfRatio(pContext, preset->decayHFRatio);
- //reflectionsLevel
- //reflectionsDelay
+ // reflectionsLevel
+ // reflectionsDelay
ReverbSetReverbLevel(pContext, preset->reverbLevel);
// reverbDelay
ReverbSetDiffusion(pContext, preset->diffusion);
@@ -1454,99 +1463,96 @@
//
//----------------------------------------------------------------------------
-int Reverb_getParameter(ReverbContext *pContext,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue){
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue) {
int status = 0;
- int32_t *pParamTemp = (int32_t *)pParam;
+ int32_t* pParamTemp = (int32_t*)pParam;
int32_t param = *pParamTemp++;
- t_reverb_settings *pProperties;
+ t_reverb_settings* pProperties;
- //ALOGV("\tReverb_getParameter start");
+ // ALOGV("\tReverb_getParameter start");
if (pContext->preset) {
if (param != REVERB_PARAM_PRESET || *pValueSize < sizeof(uint16_t)) {
return -EINVAL;
}
- *(uint16_t *)pValue = pContext->nextPreset;
+ *(uint16_t*)pValue = pContext->nextPreset;
ALOGV("get REVERB_PARAM_PRESET, preset %d", pContext->nextPreset);
return 0;
}
- switch (param){
+ switch (param) {
case REVERB_PARAM_ROOM_LEVEL:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize1 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_ROOM_HF_LEVEL:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize12 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_DECAY_TIME:
- if (*pValueSize != sizeof(uint32_t)){
+ if (*pValueSize != sizeof(uint32_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize3 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(uint32_t);
break;
case REVERB_PARAM_DECAY_HF_RATIO:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize4 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_REFLECTIONS_LEVEL:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize5 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_REFLECTIONS_DELAY:
- if (*pValueSize != sizeof(uint32_t)){
+ if (*pValueSize != sizeof(uint32_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize6 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(uint32_t);
break;
case REVERB_PARAM_REVERB_LEVEL:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize7 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_REVERB_DELAY:
- if (*pValueSize != sizeof(uint32_t)){
+ if (*pValueSize != sizeof(uint32_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize8 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(uint32_t);
break;
case REVERB_PARAM_DIFFUSION:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize9 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_DENSITY:
- if (*pValueSize != sizeof(int16_t)){
+ if (*pValueSize != sizeof(int16_t)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize10 %d", *pValueSize);
return -EINVAL;
}
*pValueSize = sizeof(int16_t);
break;
case REVERB_PARAM_PROPERTIES:
- if (*pValueSize != sizeof(t_reverb_settings)){
+ if (*pValueSize != sizeof(t_reverb_settings)) {
ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize11 %d", *pValueSize);
return -EINVAL;
}
@@ -1558,9 +1564,9 @@
return -EINVAL;
}
- pProperties = (t_reverb_settings *) pValue;
+ pProperties = (t_reverb_settings*)pValue;
- switch (param){
+ switch (param) {
case REVERB_PARAM_PROPERTIES:
pProperties->roomLevel = ReverbGetRoomLevel(pContext);
pProperties->roomHFLevel = ReverbGetRoomHfLevel(pContext);
@@ -1574,74 +1580,74 @@
pProperties->density = ReverbGetDensity(pContext);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomLevel %d",
- pProperties->roomLevel);
+ pProperties->roomLevel);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomHFLevel %d",
- pProperties->roomHFLevel);
+ pProperties->roomHFLevel);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayTime %d",
- pProperties->decayTime);
+ pProperties->decayTime);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayHFRatio %d",
- pProperties->decayHFRatio);
+ pProperties->decayHFRatio);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsLevel %d",
- pProperties->reflectionsLevel);
+ pProperties->reflectionsLevel);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsDelay %d",
- pProperties->reflectionsDelay);
+ pProperties->reflectionsDelay);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbDelay %d",
- pProperties->reverbDelay);
+ pProperties->reverbDelay);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbLevel %d",
- pProperties->reverbLevel);
+ pProperties->reverbLevel);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is diffusion %d",
- pProperties->diffusion);
+ pProperties->diffusion);
ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is density %d",
- pProperties->density);
+ pProperties->density);
break;
case REVERB_PARAM_ROOM_LEVEL:
- *(int16_t *)pValue = ReverbGetRoomLevel(pContext);
+ *(int16_t*)pValue = ReverbGetRoomLevel(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
// *(int16_t *)pValue);
break;
case REVERB_PARAM_ROOM_HF_LEVEL:
- *(int16_t *)pValue = ReverbGetRoomHfLevel(pContext);
+ *(int16_t*)pValue = ReverbGetRoomHfLevel(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
// *(int16_t *)pValue);
break;
case REVERB_PARAM_DECAY_TIME:
- *(uint32_t *)pValue = ReverbGetDecayTime(pContext);
+ *(uint32_t*)pValue = ReverbGetDecayTime(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
// *(int32_t *)pValue);
break;
case REVERB_PARAM_DECAY_HF_RATIO:
- *(int16_t *)pValue = ReverbGetDecayHfRatio(pContext);
+ *(int16_t*)pValue = ReverbGetDecayHfRatio(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d",
// *(int16_t *)pValue);
break;
case REVERB_PARAM_REVERB_LEVEL:
- *(int16_t *)pValue = ReverbGetReverbLevel(pContext);
+ *(int16_t*)pValue = ReverbGetReverbLevel(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
// *(int16_t *)pValue);
break;
case REVERB_PARAM_DIFFUSION:
- *(int16_t *)pValue = ReverbGetDiffusion(pContext);
+ *(int16_t*)pValue = ReverbGetDiffusion(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d",
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d",
// *(int16_t *)pValue);
break;
case REVERB_PARAM_DENSITY:
- *(int16_t *)pValue = ReverbGetDensity(pContext);
- //ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
+ *(int16_t*)pValue = ReverbGetDensity(pContext);
+ // ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
// *(uint32_t *)pValue);
break;
case REVERB_PARAM_REFLECTIONS_LEVEL:
- *(uint16_t *)pValue = 0;
+ *(uint16_t*)pValue = 0;
break;
case REVERB_PARAM_REFLECTIONS_DELAY:
case REVERB_PARAM_REVERB_DELAY:
- *(uint32_t *)pValue = 0;
+ *(uint32_t*)pValue = 0;
break;
default:
@@ -1650,7 +1656,7 @@
break;
}
- //ALOGV("\tReverb_getParameter end");
+ // ALOGV("\tReverb_getParameter end");
return status;
} /* end Reverb_getParameter */
@@ -1670,16 +1676,16 @@
//
//----------------------------------------------------------------------------
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize){
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize) {
int status = 0;
int16_t level;
int16_t ratio;
uint32_t time;
- t_reverb_settings *pProperties;
- int32_t *pParamTemp = (int32_t *)pParam;
+ t_reverb_settings* pProperties;
+ int32_t* pParamTemp = (int32_t*)pParam;
int32_t param = *pParamTemp++;
- //ALOGV("\tReverb_setParameter start");
+ // ALOGV("\tReverb_setParameter start");
if (pContext->preset) {
if (param != REVERB_PARAM_PRESET) {
return -EINVAL;
@@ -1689,7 +1695,7 @@
return -EINVAL;
}
- uint16_t preset = *(uint16_t *)pValue;
+ uint16_t preset = *(uint16_t*)pValue;
ALOGV("set REVERB_PARAM_PRESET, preset %d", preset);
if (preset > REVERB_PRESET_LAST) {
return -EINVAL;
@@ -1703,10 +1709,10 @@
return -EINVAL;
}
- switch (param){
+ switch (param) {
case REVERB_PARAM_PROPERTIES:
ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES");
- pProperties = (t_reverb_settings *) pValue;
+ pProperties = (t_reverb_settings*)pValue;
ReverbSetRoomLevel(pContext, pProperties->roomLevel);
ReverbSetRoomHfLevel(pContext, pProperties->roomHFLevel);
ReverbSetDecayTime(pContext, pProperties->decayTime);
@@ -1716,55 +1722,55 @@
ReverbSetDensity(pContext, pProperties->density);
break;
case REVERB_PARAM_ROOM_LEVEL:
- level = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
+ level = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
ReverbSetRoomLevel(pContext, level);
- //ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
- break;
- case REVERB_PARAM_ROOM_HF_LEVEL:
- level = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
- ReverbSetRoomHfLevel(pContext, level);
- //ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
- break;
- case REVERB_PARAM_DECAY_TIME:
- time = *(uint32_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
- ReverbSetDecayTime(pContext, time);
- //ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
- break;
- case REVERB_PARAM_DECAY_HF_RATIO:
- ratio = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
- ReverbSetDecayHfRatio(pContext, ratio);
- //ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+ // ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
break;
- case REVERB_PARAM_REVERB_LEVEL:
- level = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
+ case REVERB_PARAM_ROOM_HF_LEVEL:
+ level = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
+ ReverbSetRoomHfLevel(pContext, level);
+ // ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
+ break;
+ case REVERB_PARAM_DECAY_TIME:
+ time = *(uint32_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
+ ReverbSetDecayTime(pContext, time);
+ // ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
+ break;
+ case REVERB_PARAM_DECAY_HF_RATIO:
+ ratio = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
+ ReverbSetDecayHfRatio(pContext, ratio);
+ // ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+ break;
+ case REVERB_PARAM_REVERB_LEVEL:
+ level = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
ReverbSetReverbLevel(pContext, level);
- //ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
- break;
+ // ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
+ break;
case REVERB_PARAM_DIFFUSION:
- ratio = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
+ ratio = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
ReverbSetDiffusion(pContext, ratio);
- //ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
+ // ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
break;
case REVERB_PARAM_DENSITY:
- ratio = *(int16_t *)pValue;
- //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio);
- //ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
+ ratio = *(int16_t*)pValue;
+ // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio);
+ // ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
ReverbSetDensity(pContext, ratio);
- //ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
+ // ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
break;
- break;
+ break;
case REVERB_PARAM_REFLECTIONS_LEVEL:
case REVERB_PARAM_REFLECTIONS_DELAY:
case REVERB_PARAM_REVERB_DELAY:
@@ -1774,7 +1780,7 @@
break;
}
- //ALOGV("\tReverb_setParameter end");
+ // ALOGV("\tReverb_setParameter end");
return status;
} /* end Reverb_setParameter */
@@ -1783,52 +1789,46 @@
*/
int Reverb_paramValueSize(int32_t param) {
switch (param) {
- case REVERB_PARAM_ROOM_LEVEL:
- case REVERB_PARAM_ROOM_HF_LEVEL:
- case REVERB_PARAM_REFLECTIONS_LEVEL:
- case REVERB_PARAM_REVERB_LEVEL:
- return sizeof(int16_t); // millibel
- case REVERB_PARAM_DECAY_TIME:
- case REVERB_PARAM_REFLECTIONS_DELAY:
- case REVERB_PARAM_REVERB_DELAY:
- return sizeof(uint32_t); // milliseconds
- case REVERB_PARAM_DECAY_HF_RATIO:
- case REVERB_PARAM_DIFFUSION:
- case REVERB_PARAM_DENSITY:
- return sizeof(int16_t); // permille
- case REVERB_PARAM_PROPERTIES:
- return sizeof(s_reverb_settings); // struct of all reverb properties
+ case REVERB_PARAM_ROOM_LEVEL:
+ case REVERB_PARAM_ROOM_HF_LEVEL:
+ case REVERB_PARAM_REFLECTIONS_LEVEL:
+ case REVERB_PARAM_REVERB_LEVEL:
+ return sizeof(int16_t); // millibel
+ case REVERB_PARAM_DECAY_TIME:
+ case REVERB_PARAM_REFLECTIONS_DELAY:
+ case REVERB_PARAM_REVERB_DELAY:
+ return sizeof(uint32_t); // milliseconds
+ case REVERB_PARAM_DECAY_HF_RATIO:
+ case REVERB_PARAM_DIFFUSION:
+ case REVERB_PARAM_DENSITY:
+ return sizeof(int16_t); // permille
+ case REVERB_PARAM_PROPERTIES:
+ return sizeof(s_reverb_settings); // struct of all reverb properties
}
return sizeof(int32_t);
}
-} // namespace
-} // namespace
+} // namespace
+} // namespace android
extern "C" {
/* Effect Control Interface Implementation: Process */
-int Reverb_process(effect_handle_t self,
- audio_buffer_t *inBuffer,
- audio_buffer_t *outBuffer){
- android::ReverbContext * pContext = (android::ReverbContext *) self;
- int status = 0;
+int Reverb_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+ android::ReverbContext* pContext = (android::ReverbContext*)self;
+ int status = 0;
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : Reverb_process() ERROR pContext == NULL");
return -EINVAL;
}
- if (inBuffer == NULL || inBuffer->raw == NULL ||
- outBuffer == NULL || outBuffer->raw == NULL ||
- inBuffer->frameCount != outBuffer->frameCount){
+ if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+ inBuffer->frameCount != outBuffer->frameCount) {
ALOGV("\tLVM_ERROR : Reverb_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
return -EINVAL;
}
- //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
+ // ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
/* Process all the available frames, block processing is handled internalLY by the LVM bundle */
- status = process( inBuffer->f32,
- outBuffer->f32,
- outBuffer->frameCount,
- pContext);
+ status = process(inBuffer->f32, outBuffer->f32, outBuffer->frameCount, pContext);
if (pContext->bEnabled == LVM_FALSE) {
if (pContext->SamplesToExitCount > 0) {
@@ -1840,72 +1840,67 @@
}
return status;
-} /* end Reverb_process */
+} /* end Reverb_process */
/* Effect Control Interface Implementation: Command */
-int Reverb_command(effect_handle_t self,
- uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData){
- android::ReverbContext * pContext = (android::ReverbContext *) self;
- LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
- LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */
+int Reverb_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ android::ReverbContext* pContext = (android::ReverbContext*)self;
+ LVREV_ControlParams_st ActiveParams; /* Current control Parameters */
+ LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
- if (pContext == NULL){
+ if (pContext == NULL) {
ALOGV("\tLVM_ERROR : Reverb_command ERROR pContext == NULL");
return -EINVAL;
}
- //ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+ // ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
- switch (cmdCode){
+ switch (cmdCode) {
case EFFECT_CMD_INIT:
- //ALOGV("\tReverb_command cmdCode Case: "
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_INIT start");
- if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_INIT: ERROR");
+ "EFFECT_CMD_INIT: ERROR");
return -EINVAL;
}
- *(int *) pReplyData = 0;
+ *(int*)pReplyData = 0;
break;
case EFFECT_CMD_SET_CONFIG:
- //ALOGV("\tReverb_command cmdCode Case: "
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_SET_CONFIG start");
- if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+ if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+ replySize == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_SET_CONFIG: ERROR");
+ "EFFECT_CMD_SET_CONFIG: ERROR");
return -EINVAL;
}
- *(int *) pReplyData = android::Reverb_setConfig(pContext,
- (effect_config_t *) pCmdData);
+ *(int*)pReplyData = android::Reverb_setConfig(pContext, (effect_config_t*)pCmdData);
break;
case EFFECT_CMD_GET_CONFIG:
if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_GET_CONFIG: ERROR");
+ "EFFECT_CMD_GET_CONFIG: ERROR");
return -EINVAL;
}
- android::Reverb_getConfig(pContext, (effect_config_t *)pReplyData);
+ android::Reverb_getConfig(pContext, (effect_config_t*)pReplyData);
break;
case EFFECT_CMD_RESET:
- //ALOGV("\tReverb_command cmdCode Case: "
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_RESET start");
Reverb_setConfig(pContext, &pContext->config);
break;
- case EFFECT_CMD_GET_PARAM:{
- //ALOGV("\tReverb_command cmdCode Case: "
+ case EFFECT_CMD_GET_PARAM: {
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_GET_PARAM start");
- effect_param_t *p = (effect_param_t *)pCmdData;
+ effect_param_t* p = (effect_param_t*)pCmdData;
if (pCmdData == nullptr) {
ALOGW("\tLVM_ERROR : pCmdData is NULL");
return -EINVAL;
@@ -1914,163 +1909,156 @@
android_errorWriteLog(0x534e4554, "26347509");
return -EINVAL;
}
- if (cmdSize < sizeof(effect_param_t) ||
- cmdSize < (sizeof(effect_param_t) + p->psize) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize < (sizeof(effect_param_t) + p->psize)) {
+ if (cmdSize < sizeof(effect_param_t) || cmdSize < (sizeof(effect_param_t) + p->psize) ||
+ pReplyData == NULL || replySize == NULL ||
+ *replySize < (sizeof(effect_param_t) + p->psize)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_GET_PARAM: ERROR");
+ "EFFECT_CMD_GET_PARAM: ERROR");
return -EINVAL;
}
memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
- p = (effect_param_t *)pReplyData;
+ p = (effect_param_t*)pReplyData;
int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
- p->status = android::Reverb_getParameter(pContext,
- (void *)p->data,
- &p->vsize,
- p->data + voffset);
+ p->status = android::Reverb_getParameter(pContext, (void*)p->data, &p->vsize,
+ p->data + voffset);
*replySize = sizeof(effect_param_t) + voffset + p->vsize;
- //ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
+ // ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
// "*pCmdData %d, *replySize %d, *pReplyData %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
} break;
- case EFFECT_CMD_SET_PARAM:{
-
- //ALOGV("\tReverb_command cmdCode Case: "
+ case EFFECT_CMD_SET_PARAM: {
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_SET_PARAM start");
- //ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+ // ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
if (pCmdData == NULL || (cmdSize < (sizeof(effect_param_t) + sizeof(int32_t))) ||
- pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+ pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t *p = (effect_param_t *) pCmdData;
+ effect_param_t* p = (effect_param_t*)pCmdData;
- if (p->psize != sizeof(int32_t)){
+ if (p->psize != sizeof(int32_t)) {
ALOGV("\t4LVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
+ "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
return -EINVAL;
}
- //ALOGV("\tn5Reverb_command cmdSize is %d\n"
+ // ALOGV("\tn5Reverb_command cmdSize is %d\n"
// "\tsizeof(effect_param_t) is %d\n"
// "\tp->psize is %d\n"
// "\tp->vsize is %d"
// "\n",
// cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
- *(int *)pReplyData = android::Reverb_setParameter(pContext,
- (void *)p->data,
- p->data + p->psize,
- p->vsize);
+ *(int*)pReplyData = android::Reverb_setParameter(pContext, (void*)p->data,
+ p->data + p->psize, p->vsize);
} break;
case EFFECT_CMD_ENABLE:
- //ALOGV("\tReverb_command cmdCode Case: "
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_ENABLE start");
- if (pReplyData == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_ENABLE: ERROR");
+ "EFFECT_CMD_ENABLE: ERROR");
return -EINVAL;
}
- if(pContext->bEnabled == LVM_TRUE){
- ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
- return -EINVAL;
- }
- *(int *)pReplyData = 0;
+ if (pContext->bEnabled == LVM_TRUE) {
+ ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+ "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
+ return -EINVAL;
+ }
+ *(int*)pReplyData = 0;
pContext->bEnabled = LVM_TRUE;
/* Get the current settings */
LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "EFFECT_CMD_ENABLE")
pContext->SamplesToExitCount =
- (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
+ (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
// force no volume ramp for first buffer processed after enabling the effect
pContext->volumeMode = android::REVERB_VOLUME_FLAT;
- //ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
+ // ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
break;
case EFFECT_CMD_DISABLE:
- //ALOGV("\tReverb_command cmdCode Case: "
+ // ALOGV("\tReverb_command cmdCode Case: "
// "EFFECT_CMD_DISABLE start");
- if (pReplyData == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_DISABLE: ERROR");
+ "EFFECT_CMD_DISABLE: ERROR");
return -EINVAL;
}
- if(pContext->bEnabled == LVM_FALSE){
- ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
- return -EINVAL;
- }
- *(int *)pReplyData = 0;
+ if (pContext->bEnabled == LVM_FALSE) {
+ ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+ "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
+ return -EINVAL;
+ }
+ *(int*)pReplyData = 0;
pContext->bEnabled = LVM_FALSE;
break;
case EFFECT_CMD_SET_VOLUME:
- if (pCmdData == NULL ||
- cmdSize != 2 * sizeof(uint32_t)) {
+ if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t)) {
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "EFFECT_CMD_SET_VOLUME: ERROR");
+ "EFFECT_CMD_SET_VOLUME: ERROR");
return -EINVAL;
}
- if (pReplyData != NULL) { // we have volume control
- pContext->leftVolume = (LVM_INT16)((*(uint32_t *)pCmdData + (1 << 11)) >> 12);
- pContext->rightVolume = (LVM_INT16)((*((uint32_t *)pCmdData + 1) + (1 << 11)) >> 12);
- *(uint32_t *)pReplyData = (1 << 24);
- *((uint32_t *)pReplyData + 1) = (1 << 24);
+ if (pReplyData != NULL) { // we have volume control
+ pContext->leftVolume = (LVM_INT16)((*(uint32_t*)pCmdData + (1 << 11)) >> 12);
+ pContext->rightVolume = (LVM_INT16)((*((uint32_t*)pCmdData + 1) + (1 << 11)) >> 12);
+ *(uint32_t*)pReplyData = (1 << 24);
+ *((uint32_t*)pReplyData + 1) = (1 << 24);
if (pContext->volumeMode == android::REVERB_VOLUME_OFF) {
// force no volume ramp for first buffer processed after getting volume control
pContext->volumeMode = android::REVERB_VOLUME_FLAT;
}
- } else { // we don't have volume control
+ } else { // we don't have volume control
pContext->leftVolume = REVERB_UNIT_VOLUME;
pContext->rightVolume = REVERB_UNIT_VOLUME;
pContext->volumeMode = android::REVERB_VOLUME_OFF;
}
- ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d",
- pContext->leftVolume, pContext->rightVolume, pContext->volumeMode);
+ ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d", pContext->leftVolume,
+ pContext->rightVolume, pContext->volumeMode);
break;
case EFFECT_CMD_SET_DEVICE:
case EFFECT_CMD_SET_AUDIO_MODE:
- //ALOGV("\tReverb_command cmdCode Case: "
- // "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE start");
+ // ALOGV("\tReverb_command cmdCode Case: "
+ // "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE
+ // start");
break;
default:
ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
- "DEFAULT start %d ERROR",cmdCode);
+ "DEFAULT start %d ERROR",
+ cmdCode);
return -EINVAL;
}
- //ALOGV("\tReverb_command end\n\n");
+ // ALOGV("\tReverb_command end\n\n");
return 0;
-} /* end Reverb_command */
+} /* end Reverb_command */
/* Effect Control Interface Implementation: get_descriptor */
-int Reverb_getDescriptor(effect_handle_t self,
- effect_descriptor_t *pDescriptor)
-{
- android::ReverbContext * pContext = (android::ReverbContext *)self;
- const effect_descriptor_t *desc;
+int Reverb_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+ android::ReverbContext* pContext = (android::ReverbContext*)self;
+ const effect_descriptor_t* desc;
if (pContext == NULL || pDescriptor == NULL) {
ALOGV("Reverb_getDescriptor() invalid param");
@@ -2094,26 +2082,24 @@
*pDescriptor = *desc;
return 0;
-} /* end Reverb_getDescriptor */
+} /* end Reverb_getDescriptor */
// effect_handle_t interface implementation for Reverb effect
const struct effect_interface_s gReverbInterface = {
- Reverb_process,
- Reverb_command,
- Reverb_getDescriptor,
- NULL,
-}; /* end gReverbInterface */
+ Reverb_process,
+ Reverb_command,
+ Reverb_getDescriptor,
+ NULL,
+}; /* end gReverbInterface */
// This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
- .tag = AUDIO_EFFECT_LIBRARY_TAG,
- .version = EFFECT_LIBRARY_API_VERSION,
- .name = "Reverb Library",
- .implementor = "NXP Software Ltd.",
- .create_effect = android::EffectCreate,
- .release_effect = android::EffectRelease,
- .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+ .tag = AUDIO_EFFECT_LIBRARY_TAG,
+ .version = EFFECT_LIBRARY_API_VERSION,
+ .name = "Reverb Library",
+ .implementor = "NXP Software Ltd.",
+ .create_effect = android::EffectCreate,
+ .release_effect = android::EffectRelease,
+ .get_descriptor = android::EffectGetDescriptor,
};
-
}
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
index 96223a8..227d953 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
@@ -20,16 +20,15 @@
#include <audio_effects/effect_environmentalreverb.h>
#include <audio_effects/effect_presetreverb.h>
-#define MAX_NUM_BANDS 5
-#define MAX_CALL_SIZE 256
-#define LVREV_MAX_T60 7000
-#define LVREV_MAX_REVERB_LEVEL 2000
-#define LVREV_MAX_FRAME_SIZE 2560
-#define LVREV_CUP_LOAD_ARM9E 470 // Expressed in 0.1 MIPS
-#define LVREV_MEM_USAGE (71+(LVREV_MAX_FRAME_SIZE>>7)) // Expressed in kB
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVREV_MAX_T60 7000
+#define LVREV_MAX_REVERB_LEVEL 2000
+#define LVREV_MAX_FRAME_SIZE 2560
+#define LVREV_CUP_LOAD_ARM9E 470 // Expressed in 0.1 MIPS
+#define LVREV_MEM_USAGE (71 + (LVREV_MAX_FRAME_SIZE >> 7)) // Expressed in kB
-typedef struct _LPFPair_t
-{
+typedef struct _LPFPair_t {
int16_t Room_HF;
int16_t LPF;
} LPFPair_t;
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
new file mode 120000
index 0000000..f1b4f69
--- /dev/null
+++ b/media/libeffects/preprocessing/.clang-format
@@ -0,0 +1 @@
+../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c87635f..681e247 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,35 +1,31 @@
// audio preprocessing wrapper
cc_library_shared {
name: "libaudiopreprocessing",
-
vendor: true,
-
relative_install_path: "soundfx",
-
srcs: ["PreProcessing.cpp"],
-
- include_dirs: [
- "external/webrtc",
- "external/webrtc/webrtc/modules/include",
- "external/webrtc/webrtc/modules/audio_processing/include",
+ local_include_dirs: [
+ ".",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-Wno-unused-parameter",
],
shared_libs: [
- "libwebrtc_audio_preprocessing",
- "libspeexresampler",
- "libutils",
"liblog",
+ "libutils",
],
- cflags: [
- "-DWEBRTC_POSIX",
- "-fvisibility=hidden",
- "-Wall",
- "-Werror",
+ static_libs: [
+ "webrtc_audio_processing",
],
header_libs: [
"libaudioeffects",
"libhardware_headers",
+ "libwebrtc_absl_headers",
],
}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 5fab5be..03ccc34 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -18,15 +18,15 @@
#include <string.h>
#define LOG_TAG "PreProcessing"
//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <hardware/audio_effect.h>
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_agc.h>
+#include <hardware/audio_effect.h>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+#include <audio_effects/effect_agc2.h>
#include <audio_effects/effect_ns.h>
-#include <module_common_types.h>
#include <audio_processing.h>
-#include "speex/speex_resampler.h"
+#include <module_common_types.h>
// undefine to perform multi channels API functional tests
//#define DUAL_MIC_TEST
@@ -39,26 +39,26 @@
#define PREPROC_NUM_SESSIONS 8
// types of pre processing modules
-enum preproc_id
-{
- PREPROC_AGC, // Automatic Gain Control
- PREPROC_AEC, // Acoustic Echo Canceler
- PREPROC_NS, // Noise Suppressor
+enum preproc_id {
+ PREPROC_AGC, // Automatic Gain Control
+ PREPROC_AGC2, // Automatic Gain Control 2
+ PREPROC_AEC, // Acoustic Echo Canceler
+ PREPROC_NS, // Noise Suppressor
PREPROC_NUM_EFFECTS
};
// Session state
enum preproc_session_state {
- PREPROC_SESSION_STATE_INIT, // initialized
- PREPROC_SESSION_STATE_CONFIG // configuration received
+ PREPROC_SESSION_STATE_INIT, // initialized
+ PREPROC_SESSION_STATE_CONFIG // configuration received
};
// Effect/Preprocessor state
enum preproc_effect_state {
- PREPROC_EFFECT_STATE_INIT, // initialized
- PREPROC_EFFECT_STATE_CREATED, // webRTC engine created
- PREPROC_EFFECT_STATE_CONFIG, // configuration received/disabled
- PREPROC_EFFECT_STATE_ACTIVE // active/enabled
+ PREPROC_EFFECT_STATE_INIT, // initialized
+ PREPROC_EFFECT_STATE_CREATED, // webRTC engine created
+ PREPROC_EFFECT_STATE_CONFIG, // configuration received/disabled
+ PREPROC_EFFECT_STATE_ACTIVE // active/enabled
};
// handle on webRTC engine
@@ -71,74 +71,76 @@
// Effect operation table. Functions for all pre processors are declared in sPreProcOps[] table.
// Function pointer can be null if no action required.
struct preproc_ops_s {
- int (* create)(preproc_effect_t *fx);
- int (* init)(preproc_effect_t *fx);
- int (* reset)(preproc_effect_t *fx);
- void (* enable)(preproc_effect_t *fx);
- void (* disable)(preproc_effect_t *fx);
- int (* set_parameter)(preproc_effect_t *fx, void *param, void *value);
- int (* get_parameter)(preproc_effect_t *fx, void *param, uint32_t *size, void *value);
- int (* set_device)(preproc_effect_t *fx, uint32_t device);
+ int (*create)(preproc_effect_t* fx);
+ int (*init)(preproc_effect_t* fx);
+ int (*reset)(preproc_effect_t* fx);
+ void (*enable)(preproc_effect_t* fx);
+ void (*disable)(preproc_effect_t* fx);
+ int (*set_parameter)(preproc_effect_t* fx, void* param, void* value);
+ int (*get_parameter)(preproc_effect_t* fx, void* param, uint32_t* size, void* value);
+ int (*set_device)(preproc_effect_t* fx, uint32_t device);
};
// Effect context
struct preproc_effect_s {
- const struct effect_interface_s *itfe;
- uint32_t procId; // type of pre processor (enum preproc_id)
- uint32_t state; // current state (enum preproc_effect_state)
- preproc_session_t *session; // session the effect is on
- const preproc_ops_t *ops; // effect ops table
- preproc_fx_handle_t engine; // handle on webRTC engine
- uint32_t type; // subtype of effect
+ const struct effect_interface_s* itfe;
+ uint32_t procId; // type of pre processor (enum preproc_id)
+ uint32_t state; // current state (enum preproc_effect_state)
+ preproc_session_t* session; // session the effect is on
+ const preproc_ops_t* ops; // effect ops table
+ preproc_fx_handle_t engine; // handle on webRTC engine
+ uint32_t type; // subtype of effect
#ifdef DUAL_MIC_TEST
- bool aux_channels_on; // support auxiliary channels
- size_t cur_channel_config; // current auciliary channel configuration
+ bool aux_channels_on; // support auxiliary channels
+ size_t cur_channel_config; // current auciliary channel configuration
#endif
};
// Session context
struct preproc_session_s {
- struct preproc_effect_s effects[PREPROC_NUM_EFFECTS]; // effects in this session
- uint32_t state; // current state (enum preproc_session_state)
- int id; // audio session ID
- int io; // handle of input stream this session is on
- webrtc::AudioProcessing* apm; // handle on webRTC audio processing module (APM)
- size_t apmFrameCount; // buffer size for webRTC process (10 ms)
- uint32_t apmSamplingRate; // webRTC APM sampling rate (8/16 or 32 kHz)
- size_t frameCount; // buffer size before input resampler ( <=> apmFrameCount)
- uint32_t samplingRate; // sampling rate at effect process interface
- uint32_t inChannelCount; // input channel count
- uint32_t outChannelCount; // output channel count
- uint32_t createdMsk; // bit field containing IDs of crested pre processors
- uint32_t enabledMsk; // bit field containing IDs of enabled pre processors
- uint32_t processedMsk; // bit field containing IDs of pre processors already
- // processed in current round
- webrtc::AudioFrame *procFrame; // audio frame passed to webRTC AMP ProcessStream()
- int16_t *inBuf; // input buffer used when resampling
- size_t inBufSize; // input buffer size in frames
- size_t framesIn; // number of frames in input buffer
- SpeexResamplerState *inResampler; // handle on input speex resampler
- int16_t *outBuf; // output buffer used when resampling
- size_t outBufSize; // output buffer size in frames
- size_t framesOut; // number of frames in output buffer
- SpeexResamplerState *outResampler; // handle on output speex resampler
- uint32_t revChannelCount; // number of channels on reverse stream
- uint32_t revEnabledMsk; // bit field containing IDs of enabled pre processors
- // with reverse channel
- uint32_t revProcessedMsk; // bit field containing IDs of pre processors with reverse
- // channel already processed in current round
- webrtc::AudioFrame *revFrame; // audio frame passed to webRTC AMP AnalyzeReverseStream()
- int16_t *revBuf; // reverse channel input buffer
- size_t revBufSize; // reverse channel input buffer size
- size_t framesRev; // number of frames in reverse channel input buffer
- SpeexResamplerState *revResampler; // handle on reverse channel input speex resampler
+ struct preproc_effect_s effects[PREPROC_NUM_EFFECTS]; // effects in this session
+ uint32_t state; // current state (enum preproc_session_state)
+ int id; // audio session ID
+ int io; // handle of input stream this session is on
+ webrtc::AudioProcessing* apm; // handle on webRTC audio processing module (APM)
+ // Audio Processing module builder
+ webrtc::AudioProcessingBuilder ap_builder;
+ size_t apmFrameCount; // buffer size for webRTC process (10 ms)
+ uint32_t apmSamplingRate; // webRTC APM sampling rate (8/16 or 32 kHz)
+ size_t frameCount; // buffer size before input resampler ( <=> apmFrameCount)
+ uint32_t samplingRate; // sampling rate at effect process interface
+ uint32_t inChannelCount; // input channel count
+ uint32_t outChannelCount; // output channel count
+ uint32_t createdMsk; // bit field containing IDs of crested pre processors
+ uint32_t enabledMsk; // bit field containing IDs of enabled pre processors
+ uint32_t processedMsk; // bit field containing IDs of pre processors already
+ // processed in current round
+ // audio config strucutre
+ webrtc::AudioProcessing::Config config;
+ webrtc::StreamConfig inputConfig; // input stream configuration
+ webrtc::StreamConfig outputConfig; // output stream configuration
+ int16_t* inBuf; // input buffer used when resampling
+ size_t inBufSize; // input buffer size in frames
+ size_t framesIn; // number of frames in input buffer
+ int16_t* outBuf; // output buffer used when resampling
+ size_t outBufSize; // output buffer size in frames
+ size_t framesOut; // number of frames in output buffer
+ uint32_t revChannelCount; // number of channels on reverse stream
+ uint32_t revEnabledMsk; // bit field containing IDs of enabled pre processors
+ // with reverse channel
+ uint32_t revProcessedMsk; // bit field containing IDs of pre processors with reverse
+ // channel already processed in current round
+ webrtc::StreamConfig revConfig; // reverse stream configuration.
+ int16_t* revBuf; // reverse channel input buffer
+ size_t revBufSize; // reverse channel input buffer size
+ size_t framesRev; // number of frames in reverse channel input buffer
};
#ifdef DUAL_MIC_TEST
enum {
- PREPROC_CMD_DUAL_MIC_ENABLE = EFFECT_CMD_FIRST_PROPRIETARY, // enable dual mic mode
- PREPROC_CMD_DUAL_MIC_PCM_DUMP_START, // start pcm capture
- PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP // stop pcm capture
+ PREPROC_CMD_DUAL_MIC_ENABLE = EFFECT_CMD_FIRST_PROPRIETARY, // enable dual mic mode
+ PREPROC_CMD_DUAL_MIC_PCM_DUMP_START, // start pcm capture
+ PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP // stop pcm capture
};
enum {
@@ -151,24 +153,22 @@
};
const channel_config_t sDualMicConfigs[CHANNEL_CFG_CNT] = {
- {AUDIO_CHANNEL_IN_MONO , 0},
- {AUDIO_CHANNEL_IN_STEREO , 0},
- {AUDIO_CHANNEL_IN_FRONT , AUDIO_CHANNEL_IN_BACK},
- {AUDIO_CHANNEL_IN_STEREO , AUDIO_CHANNEL_IN_RIGHT}
-};
+ {AUDIO_CHANNEL_IN_MONO, 0},
+ {AUDIO_CHANNEL_IN_STEREO, 0},
+ {AUDIO_CHANNEL_IN_FRONT, AUDIO_CHANNEL_IN_BACK},
+ {AUDIO_CHANNEL_IN_STEREO, AUDIO_CHANNEL_IN_RIGHT}};
bool sHasAuxChannels[PREPROC_NUM_EFFECTS] = {
- false, // PREPROC_AGC
+ false, // PREPROC_AGC
true, // PREPROC_AEC
true, // PREPROC_NS
};
bool gDualMicEnabled;
-FILE *gPcmDumpFh;
+FILE* gPcmDumpFh;
static pthread_mutex_t gPcmDumpLock = PTHREAD_MUTEX_INITIALIZER;
#endif
-
//------------------------------------------------------------------------------
// Effect descriptors
//------------------------------------------------------------------------------
@@ -178,68 +178,69 @@
// Automatic Gain Control
static const effect_descriptor_t sAgcDescriptor = {
- { 0x0a8abfe0, 0x654c, 0x11e0, 0xba26, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
- { 0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+ {0x0a8abfe0, 0x654c, 0x11e0, 0xba26, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+ {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
EFFECT_CONTROL_API_VERSION,
- (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
- 0, //FIXME indicate CPU load
- 0, //FIXME indicate memory usage
+ (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+ 0, // FIXME indicate CPU load
+ 0, // FIXME indicate memory usage
"Automatic Gain Control",
- "The Android Open Source Project"
-};
+ "The Android Open Source Project"};
+
+// Automatic Gain Control 2
+static const effect_descriptor_t sAgc2Descriptor = {
+ {0xae3c653b, 0xbe18, 0x4ab8, 0x8938, {0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}}, // type
+ {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}}, // uuid
+ EFFECT_CONTROL_API_VERSION,
+ (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+ 0, // FIXME indicate CPU load
+ 0, // FIXME indicate memory usage
+ "Automatic Gain Control 2",
+ "The Android Open Source Project"};
// Acoustic Echo Cancellation
static const effect_descriptor_t sAecDescriptor = {
- { 0x7b491460, 0x8d4d, 0x11e0, 0xbd61, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
- { 0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+ {0x7b491460, 0x8d4d, 0x11e0, 0xbd61, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+ {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
EFFECT_CONTROL_API_VERSION,
- (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
- 0, //FIXME indicate CPU load
- 0, //FIXME indicate memory usage
+ (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+ 0, // FIXME indicate CPU load
+ 0, // FIXME indicate memory usage
"Acoustic Echo Canceler",
- "The Android Open Source Project"
-};
+ "The Android Open Source Project"};
// Noise suppression
static const effect_descriptor_t sNsDescriptor = {
- { 0x58b4b260, 0x8e06, 0x11e0, 0xaa8e, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
- { 0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+ {0x58b4b260, 0x8e06, 0x11e0, 0xaa8e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
+ {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
EFFECT_CONTROL_API_VERSION,
- (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
- 0, //FIXME indicate CPU load
- 0, //FIXME indicate memory usage
+ (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+ 0, // FIXME indicate CPU load
+ 0, // FIXME indicate memory usage
"Noise Suppression",
- "The Android Open Source Project"
-};
+ "The Android Open Source Project"};
-
-static const effect_descriptor_t *sDescriptors[PREPROC_NUM_EFFECTS] = {
- &sAgcDescriptor,
- &sAecDescriptor,
- &sNsDescriptor
-};
+static const effect_descriptor_t* sDescriptors[PREPROC_NUM_EFFECTS] = {&sAgcDescriptor,
+ &sAgc2Descriptor,
+ &sAecDescriptor,
+ &sNsDescriptor};
//------------------------------------------------------------------------------
// Helper functions
//------------------------------------------------------------------------------
-const effect_uuid_t * const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {
- FX_IID_AGC,
- FX_IID_AEC,
- FX_IID_NS
-};
+const effect_uuid_t* const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {FX_IID_AGC,
+ FX_IID_AGC2,
+ FX_IID_AEC, FX_IID_NS};
-
-const effect_uuid_t * ProcIdToUuid(int procId)
-{
+const effect_uuid_t* ProcIdToUuid(int procId) {
if (procId >= PREPROC_NUM_EFFECTS) {
return EFFECT_UUID_NULL;
}
return sUuidToPreProcTable[procId];
}
-uint32_t UuidToProcId(const effect_uuid_t * uuid)
-{
+uint32_t UuidToProcId(const effect_uuid_t* uuid) {
size_t i;
for (i = 0; i < PREPROC_NUM_EFFECTS; i++) {
if (memcmp(uuid, sUuidToPreProcTable[i], sizeof(*uuid)) == 0) {
@@ -249,15 +250,13 @@
return i;
}
-bool HasReverseStream(uint32_t procId)
-{
+bool HasReverseStream(uint32_t procId) {
if (procId == PREPROC_AEC) {
return true;
}
return false;
}
-
//------------------------------------------------------------------------------
// Automatic Gain Control (AGC)
//------------------------------------------------------------------------------
@@ -266,499 +265,538 @@
static const int kAgcDefaultCompGain = 9;
static const bool kAgcDefaultLimiter = true;
-int AgcInit (preproc_effect_t *effect)
-{
- ALOGV("AgcInit");
- webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
- agc->set_mode(webrtc::GainControl::kFixedDigital);
- agc->set_target_level_dbfs(kAgcDefaultTargetLevel);
- agc->set_compression_gain_db(kAgcDefaultCompGain);
- agc->enable_limiter(kAgcDefaultLimiter);
+int Agc2Init(preproc_effect_t* effect) {
+ ALOGV("Agc2Init");
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.fixed_digital.gain_db = 0.f;
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ effect->session->config.gain_controller2.kRms;
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db = 2.f;
+ effect->session->apm->ApplyConfig(effect->session->config);
return 0;
}
-int AgcCreate(preproc_effect_t *effect)
-{
- webrtc::GainControl *agc = effect->session->apm->gain_control();
- ALOGV("AgcCreate got agc %p", agc);
- if (agc == NULL) {
- ALOGW("AgcCreate Error");
- return -ENOMEM;
- }
- effect->engine = static_cast<preproc_fx_handle_t>(agc);
+int AgcInit(preproc_effect_t* effect) {
+ ALOGV("AgcInit");
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+ effect->session->config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+ effect->session->config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+ effect->session->apm->ApplyConfig(effect->session->config);
+ return 0;
+}
+
+int Agc2Create(preproc_effect_t* effect) {
+ Agc2Init(effect);
+ return 0;
+}
+
+int AgcCreate(preproc_effect_t* effect) {
AgcInit(effect);
return 0;
}
-int AgcGetParameter(preproc_effect_t *effect,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue)
-{
+int Agc2GetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
int status = 0;
- uint32_t param = *(uint32_t *)pParam;
- t_agc_settings *pProperties = (t_agc_settings *)pValue;
- webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
+ uint32_t param = *(uint32_t*)pParam;
+ agc2_settings_t* pProperties = (agc2_settings_t*)pValue;
switch (param) {
- case AGC_PARAM_TARGET_LEVEL:
- case AGC_PARAM_COMP_GAIN:
- if (*pValueSize < sizeof(int16_t)) {
- *pValueSize = 0;
- return -EINVAL;
- }
- break;
- case AGC_PARAM_LIMITER_ENA:
- if (*pValueSize < sizeof(bool)) {
- *pValueSize = 0;
- return -EINVAL;
- }
- break;
- case AGC_PARAM_PROPERTIES:
- if (*pValueSize < sizeof(t_agc_settings)) {
- *pValueSize = 0;
- return -EINVAL;
- }
- break;
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+ if (*pValueSize < sizeof(float)) {
+                *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ if (*pValueSize < sizeof(int32_t)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+ if (*pValueSize < sizeof(float)) {
+                *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ if (*pValueSize < sizeof(agc2_settings_t)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
- default:
- ALOGW("AgcGetParameter() unknown param %08x", param);
- status = -EINVAL;
- break;
+ default:
+ ALOGW("Agc2GetParameter() unknown param %08x", param);
+ status = -EINVAL;
+ break;
}
+ effect->session->config = effect->session->apm->GetConfig();
switch (param) {
- case AGC_PARAM_TARGET_LEVEL:
- *(int16_t *) pValue = (int16_t)(agc->target_level_dbfs() * -100);
- ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t *) pValue);
- break;
- case AGC_PARAM_COMP_GAIN:
- *(int16_t *) pValue = (int16_t)(agc->compression_gain_db() * 100);
- ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t *) pValue);
- break;
- case AGC_PARAM_LIMITER_ENA:
- *(bool *) pValue = (bool)agc->is_limiter_enabled();
- ALOGV("AgcGetParameter() limiter enabled %s",
- (*(int16_t *) pValue != 0) ? "true" : "false");
- break;
- case AGC_PARAM_PROPERTIES:
- pProperties->targetLevel = (int16_t)(agc->target_level_dbfs() * -100);
- pProperties->compGain = (int16_t)(agc->compression_gain_db() * 100);
- pProperties->limiterEnabled = (bool)agc->is_limiter_enabled();
- break;
- default:
- ALOGW("AgcGetParameter() unknown param %d", param);
- status = -EINVAL;
- break;
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+ *(float*)pValue =
+ (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+ ALOGV("Agc2GetParameter() target level %f dB", *(float*)pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ *(uint32_t*)pValue = (uint32_t)(
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator);
+ ALOGV("Agc2GetParameter() level estimator %d",
+ *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+ *(float*)pValue = (float)(effect->session->config.gain_controller2.adaptive_digital
+ .extra_saturation_margin_db);
+ ALOGV("Agc2GetParameter() extra saturation margin %f dB", *(float*)pValue);
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ pProperties->fixedDigitalGain =
+ (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+ pProperties->level_estimator = (uint32_t)(
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator);
+ pProperties->extraSaturationMargin =
+ (float)(effect->session->config.gain_controller2.adaptive_digital
+ .extra_saturation_margin_db);
+ break;
+ default:
+ ALOGW("Agc2GetParameter() unknown param %d", param);
+ status = -EINVAL;
+ break;
+ }
+
+ return status;
+}
+
+int AgcGetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
+ int status = 0;
+ uint32_t param = *(uint32_t*)pParam;
+ t_agc_settings* pProperties = (t_agc_settings*)pValue;
+
+ switch (param) {
+ case AGC_PARAM_TARGET_LEVEL:
+ case AGC_PARAM_COMP_GAIN:
+ if (*pValueSize < sizeof(int16_t)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC_PARAM_LIMITER_ENA:
+ if (*pValueSize < sizeof(bool)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+ case AGC_PARAM_PROPERTIES:
+ if (*pValueSize < sizeof(t_agc_settings)) {
+ *pValueSize = 0;
+ return -EINVAL;
+ }
+ break;
+
+ default:
+ ALOGW("AgcGetParameter() unknown param %08x", param);
+ status = -EINVAL;
+ break;
+ }
+
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
+ case AGC_PARAM_TARGET_LEVEL:
+ *(int16_t*)pValue =
+ (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+ ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t*)pValue);
+ break;
+ case AGC_PARAM_COMP_GAIN:
+ *(int16_t*)pValue =
+                (int16_t)(effect->session->config.gain_controller1.compression_gain_db * 100);
+ ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t*)pValue);
+ break;
+ case AGC_PARAM_LIMITER_ENA:
+ *(bool*)pValue = (bool)(effect->session->config.gain_controller1.enable_limiter);
+ ALOGV("AgcGetParameter() limiter enabled %s",
+ (*(int16_t*)pValue != 0) ? "true" : "false");
+ break;
+ case AGC_PARAM_PROPERTIES:
+ pProperties->targetLevel =
+ (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+ pProperties->compGain =
+                (int16_t)(effect->session->config.gain_controller1.compression_gain_db * 100);
+ pProperties->limiterEnabled =
+ (bool)(effect->session->config.gain_controller1.enable_limiter);
+ break;
+ default:
+ ALOGW("AgcGetParameter() unknown param %d", param);
+ status = -EINVAL;
+ break;
}
return status;
}
-int AgcSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int Agc2SetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
int status = 0;
- uint32_t param = *(uint32_t *)pParam;
- t_agc_settings *pProperties = (t_agc_settings *)pValue;
- webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
-
+ uint32_t param = *(uint32_t*)pParam;
+ float valueFloat = 0.f;
+ agc2_settings_t* pProperties = (agc2_settings_t*)pValue;
+ effect->session->config = effect->session->apm->GetConfig();
switch (param) {
- case AGC_PARAM_TARGET_LEVEL:
- ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t *)pValue);
- status = agc->set_target_level_dbfs(-(*(int16_t *)pValue / 100));
- break;
- case AGC_PARAM_COMP_GAIN:
- ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t *)pValue);
- status = agc->set_compression_gain_db(*(int16_t *)pValue / 100);
- break;
- case AGC_PARAM_LIMITER_ENA:
- ALOGV("AgcSetParameter() limiter enabled %s", *(bool *)pValue ? "true" : "false");
- status = agc->enable_limiter(*(bool *)pValue);
- break;
- case AGC_PARAM_PROPERTIES:
- ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
- pProperties->targetLevel,
- pProperties->compGain,
- pProperties->limiterEnabled);
- status = agc->set_target_level_dbfs(-(pProperties->targetLevel / 100));
- if (status != 0) break;
- status = agc->set_compression_gain_db(pProperties->compGain / 100);
- if (status != 0) break;
- status = agc->enable_limiter(pProperties->limiterEnabled);
- break;
- default:
- ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
- status = -EINVAL;
- break;
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+            valueFloat = *(float*)pValue;
+ ALOGV("Agc2SetParameter() fixed digital gain %f dB", valueFloat);
+ effect->session->config.gain_controller2.fixed_digital.gain_db = valueFloat;
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+ ALOGV("Agc2SetParameter() level estimator %d",
+ *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ (*(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+ break;
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+            valueFloat = *(float*)pValue;
+ ALOGV("Agc2SetParameter() extra saturation margin %f dB", valueFloat);
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+ valueFloat;
+ break;
+ case AGC2_PARAM_PROPERTIES:
+ ALOGV("Agc2SetParameter() properties gain %f, level %d margin %f",
+ pProperties->fixedDigitalGain, pProperties->level_estimator,
+ pProperties->extraSaturationMargin);
+ effect->session->config.gain_controller2.fixed_digital.gain_db =
+ pProperties->fixedDigitalGain;
+ effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+ (webrtc::AudioProcessing::Config::GainController2::LevelEstimator)
+ pProperties->level_estimator;
+ effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+ pProperties->extraSaturationMargin;
+ break;
+ default:
+ ALOGW("Agc2SetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+ status = -EINVAL;
+ break;
}
+ effect->session->apm->ApplyConfig(effect->session->config);
+
+ ALOGV("Agc2SetParameter() done status %d", status);
+
+ return status;
+}
+
+int AgcSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
+ int status = 0;
+ uint32_t param = *(uint32_t*)pParam;
+ t_agc_settings* pProperties = (t_agc_settings*)pValue;
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
+ case AGC_PARAM_TARGET_LEVEL:
+ ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t*)pValue);
+ effect->session->config.gain_controller1.target_level_dbfs =
+ (-(*(int16_t*)pValue / 100));
+ break;
+ case AGC_PARAM_COMP_GAIN:
+ ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t*)pValue);
+ effect->session->config.gain_controller1.compression_gain_db =
+ (*(int16_t*)pValue / 100);
+ break;
+ case AGC_PARAM_LIMITER_ENA:
+ ALOGV("AgcSetParameter() limiter enabled %s", *(bool*)pValue ? "true" : "false");
+ effect->session->config.gain_controller1.enable_limiter = (*(bool*)pValue);
+ break;
+ case AGC_PARAM_PROPERTIES:
+ ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
+ pProperties->targetLevel, pProperties->compGain, pProperties->limiterEnabled);
+ effect->session->config.gain_controller1.target_level_dbfs =
+ -(pProperties->targetLevel / 100);
+ effect->session->config.gain_controller1.compression_gain_db =
+ pProperties->compGain / 100;
+ effect->session->config.gain_controller1.enable_limiter = pProperties->limiterEnabled;
+ break;
+ default:
+ ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+ status = -EINVAL;
+ break;
+ }
+ effect->session->apm->ApplyConfig(effect->session->config);
ALOGV("AgcSetParameter() done status %d", status);
return status;
}
-void AgcEnable(preproc_effect_t *effect)
-{
- webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
- ALOGV("AgcEnable agc %p", agc);
- agc->Enable(true);
+void Agc2Enable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
-void AgcDisable(preproc_effect_t *effect)
-{
- ALOGV("AgcDisable");
- webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
- agc->Enable(false);
+void AgcEnable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
+void Agc2Disable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller2.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+}
-static const preproc_ops_t sAgcOps = {
- AgcCreate,
- AgcInit,
- NULL,
- AgcEnable,
- AgcDisable,
- AgcSetParameter,
- AgcGetParameter,
- NULL
-};
+void AgcDisable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.gain_controller1.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
+}
+static const preproc_ops_t sAgcOps = {AgcCreate, AgcInit, NULL, AgcEnable, AgcDisable,
+ AgcSetParameter, AgcGetParameter, NULL};
+
+static const preproc_ops_t sAgc2Ops = {Agc2Create, Agc2Init, NULL,
+ Agc2Enable, Agc2Disable, Agc2SetParameter,
+ Agc2GetParameter, NULL};
//------------------------------------------------------------------------------
// Acoustic Echo Canceler (AEC)
//------------------------------------------------------------------------------
-static const webrtc::EchoControlMobile::RoutingMode kAecDefaultMode =
- webrtc::EchoControlMobile::kEarpiece;
-static const bool kAecDefaultComfortNoise = true;
-int AecInit (preproc_effect_t *effect)
-{
+int AecInit(preproc_effect_t* effect) {
ALOGV("AecInit");
- webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
- aec->set_routing_mode(kAecDefaultMode);
- aec->enable_comfort_noise(kAecDefaultComfortNoise);
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.mobile_mode = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
return 0;
}
-int AecCreate(preproc_effect_t *effect)
-{
- webrtc::EchoControlMobile *aec = effect->session->apm->echo_control_mobile();
- ALOGV("AecCreate got aec %p", aec);
- if (aec == NULL) {
- ALOGW("AgcCreate Error");
- return -ENOMEM;
- }
- effect->engine = static_cast<preproc_fx_handle_t>(aec);
- AecInit (effect);
+int AecCreate(preproc_effect_t* effect) {
+ AecInit(effect);
return 0;
}
-int AecGetParameter(preproc_effect_t *effect,
- void *pParam,
- uint32_t *pValueSize,
- void *pValue)
-{
+int AecGetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
int status = 0;
- uint32_t param = *(uint32_t *)pParam;
+ uint32_t param = *(uint32_t*)pParam;
if (*pValueSize < sizeof(uint32_t)) {
return -EINVAL;
}
switch (param) {
- case AEC_PARAM_ECHO_DELAY:
- case AEC_PARAM_PROPERTIES:
- *(uint32_t *)pValue = 1000 * effect->session->apm->stream_delay_ms();
- ALOGV("AecGetParameter() echo delay %d us", *(uint32_t *)pValue);
- break;
- default:
- ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
- status = -EINVAL;
- break;
+ case AEC_PARAM_ECHO_DELAY:
+ case AEC_PARAM_PROPERTIES:
+ *(uint32_t*)pValue = 1000 * effect->session->apm->stream_delay_ms();
+ ALOGV("AecGetParameter() echo delay %d us", *(uint32_t*)pValue);
+ break;
+ case AEC_PARAM_MOBILE_MODE:
+ effect->session->config = effect->session->apm->GetConfig();
+ *(uint32_t*)pValue = effect->session->config.echo_canceller.mobile_mode;
+            ALOGV("AecGetParameter() mobile mode %d", *(uint32_t*)pValue);
+ break;
+ default:
+ ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+ status = -EINVAL;
+ break;
}
return status;
}
-int AecSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int AecSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
int status = 0;
- uint32_t param = *(uint32_t *)pParam;
- uint32_t value = *(uint32_t *)pValue;
+ uint32_t param = *(uint32_t*)pParam;
+ uint32_t value = *(uint32_t*)pValue;
switch (param) {
- case AEC_PARAM_ECHO_DELAY:
- case AEC_PARAM_PROPERTIES:
- status = effect->session->apm->set_stream_delay_ms(value/1000);
- ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
- break;
- default:
- ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
- status = -EINVAL;
- break;
+ case AEC_PARAM_ECHO_DELAY:
+ case AEC_PARAM_PROPERTIES:
+ status = effect->session->apm->set_stream_delay_ms(value / 1000);
+ ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
+ break;
+ case AEC_PARAM_MOBILE_MODE:
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.mobile_mode = value;
+            ALOGV("AecSetParameter() mobile mode %d", value);
+ effect->session->apm->ApplyConfig(effect->session->config);
+ break;
+ default:
+ ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+ status = -EINVAL;
+ break;
}
return status;
}
-void AecEnable(preproc_effect_t *effect)
-{
- webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
- ALOGV("AecEnable aec %p", aec);
- aec->Enable(true);
+void AecEnable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
-void AecDisable(preproc_effect_t *effect)
-{
- ALOGV("AecDisable");
- webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
- aec->Enable(false);
+void AecDisable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.echo_canceller.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
-int AecSetDevice(preproc_effect_t *effect, uint32_t device)
-{
+int AecSetDevice(preproc_effect_t* effect, uint32_t device) {
ALOGV("AecSetDevice %08x", device);
- webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
- webrtc::EchoControlMobile::RoutingMode mode = webrtc::EchoControlMobile::kQuietEarpieceOrHeadset;
if (audio_is_input_device(device)) {
return 0;
}
- switch(device) {
- case AUDIO_DEVICE_OUT_EARPIECE:
- mode = webrtc::EchoControlMobile::kEarpiece;
- break;
- case AUDIO_DEVICE_OUT_SPEAKER:
- mode = webrtc::EchoControlMobile::kSpeakerphone;
- break;
- case AUDIO_DEVICE_OUT_WIRED_HEADSET:
- case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
- case AUDIO_DEVICE_OUT_USB_HEADSET:
- default:
- break;
- }
- aec->set_routing_mode(mode);
return 0;
}
-static const preproc_ops_t sAecOps = {
- AecCreate,
- AecInit,
- NULL,
- AecEnable,
- AecDisable,
- AecSetParameter,
- AecGetParameter,
- AecSetDevice
-};
+static const preproc_ops_t sAecOps = {AecCreate, AecInit, NULL,
+ AecEnable, AecDisable, AecSetParameter,
+ AecGetParameter, AecSetDevice};
//------------------------------------------------------------------------------
// Noise Suppression (NS)
//------------------------------------------------------------------------------
-static const webrtc::NoiseSuppression::Level kNsDefaultLevel = webrtc::NoiseSuppression::kModerate;
+static const webrtc::AudioProcessing::Config::NoiseSuppression::Level kNsDefaultLevel =
+ webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
-int NsInit (preproc_effect_t *effect)
-{
+int NsInit(preproc_effect_t* effect) {
ALOGV("NsInit");
- webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
- ns->set_level(kNsDefaultLevel);
- webrtc::Config config;
- std::vector<webrtc::Point> geometry;
- // TODO(aluebs): Make the geometry settable.
- geometry.push_back(webrtc::Point(-0.03f, 0.f, 0.f));
- geometry.push_back(webrtc::Point(-0.01f, 0.f, 0.f));
- geometry.push_back(webrtc::Point(0.01f, 0.f, 0.f));
- geometry.push_back(webrtc::Point(0.03f, 0.f, 0.f));
- // The geometry needs to be set with Beamforming enabled.
- config.Set<webrtc::Beamforming>(
- new webrtc::Beamforming(true, geometry));
- effect->session->apm->SetExtraOptions(config);
- config.Set<webrtc::Beamforming>(
- new webrtc::Beamforming(false, geometry));
- effect->session->apm->SetExtraOptions(config);
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.level = kNsDefaultLevel;
+ effect->session->apm->ApplyConfig(effect->session->config);
effect->type = NS_TYPE_SINGLE_CHANNEL;
return 0;
}
-int NsCreate(preproc_effect_t *effect)
-{
- webrtc::NoiseSuppression *ns = effect->session->apm->noise_suppression();
- ALOGV("NsCreate got ns %p", ns);
- if (ns == NULL) {
- ALOGW("AgcCreate Error");
- return -ENOMEM;
- }
- effect->engine = static_cast<preproc_fx_handle_t>(ns);
- NsInit (effect);
+int NsCreate(preproc_effect_t* effect) {
+ NsInit(effect);
return 0;
}
-int NsGetParameter(preproc_effect_t *effect __unused,
- void *pParam __unused,
- uint32_t *pValueSize __unused,
- void *pValue __unused)
-{
+int NsGetParameter(preproc_effect_t* effect __unused, void* pParam __unused,
+ uint32_t* pValueSize __unused, void* pValue __unused) {
int status = 0;
return status;
}
-int NsSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int NsSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
int status = 0;
- webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
- uint32_t param = *(uint32_t *)pParam;
- uint32_t value = *(uint32_t *)pValue;
- switch(param) {
+ uint32_t param = *(uint32_t*)pParam;
+ uint32_t value = *(uint32_t*)pValue;
+ effect->session->config = effect->session->apm->GetConfig();
+ switch (param) {
case NS_PARAM_LEVEL:
- ns->set_level((webrtc::NoiseSuppression::Level)value);
+ effect->session->config.noise_suppression.level =
+ (webrtc::AudioProcessing::Config::NoiseSuppression::Level)value;
ALOGV("NsSetParameter() level %d", value);
break;
- case NS_PARAM_TYPE:
- {
- webrtc::Config config;
- std::vector<webrtc::Point> geometry;
- bool is_beamforming_enabled =
- value == NS_TYPE_MULTI_CHANNEL && ns->is_enabled();
- config.Set<webrtc::Beamforming>(
- new webrtc::Beamforming(is_beamforming_enabled, geometry));
- effect->session->apm->SetExtraOptions(config);
- effect->type = value;
- ALOGV("NsSetParameter() type %d", value);
- break;
- }
default:
ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
status = -EINVAL;
}
+ effect->session->apm->ApplyConfig(effect->session->config);
return status;
}
-void NsEnable(preproc_effect_t *effect)
-{
- webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
- ALOGV("NsEnable ns %p", ns);
- ns->Enable(true);
- if (effect->type == NS_TYPE_MULTI_CHANNEL) {
- webrtc::Config config;
- std::vector<webrtc::Point> geometry;
- config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry));
- effect->session->apm->SetExtraOptions(config);
- }
+void NsEnable(preproc_effect_t* effect) {
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.enabled = true;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
-void NsDisable(preproc_effect_t *effect)
-{
+void NsDisable(preproc_effect_t* effect) {
ALOGV("NsDisable");
- webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
- ns->Enable(false);
- webrtc::Config config;
- std::vector<webrtc::Point> geometry;
- config.Set<webrtc::Beamforming>(new webrtc::Beamforming(false, geometry));
- effect->session->apm->SetExtraOptions(config);
+ effect->session->config = effect->session->apm->GetConfig();
+ effect->session->config.noise_suppression.enabled = false;
+ effect->session->apm->ApplyConfig(effect->session->config);
}
-static const preproc_ops_t sNsOps = {
- NsCreate,
- NsInit,
- NULL,
- NsEnable,
- NsDisable,
- NsSetParameter,
- NsGetParameter,
- NULL
-};
+static const preproc_ops_t sNsOps = {NsCreate, NsInit, NULL, NsEnable,
+ NsDisable, NsSetParameter, NsGetParameter, NULL};
-
-static const preproc_ops_t *sPreProcOps[PREPROC_NUM_EFFECTS] = {
- &sAgcOps,
- &sAecOps,
- &sNsOps
-};
-
+static const preproc_ops_t* sPreProcOps[PREPROC_NUM_EFFECTS] = {&sAgcOps,
+ &sAgc2Ops,
+ &sAecOps, &sNsOps};
//------------------------------------------------------------------------------
// Effect functions
//------------------------------------------------------------------------------
-void Session_SetProcEnabled(preproc_session_t *session, uint32_t procId, bool enabled);
+void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled);
extern "C" const struct effect_interface_s sEffectInterface;
extern "C" const struct effect_interface_s sEffectInterfaceReverse;
-#define BAD_STATE_ABORT(from, to) \
- LOG_ALWAYS_FATAL("Bad state transition from %d to %d", from, to);
+#define BAD_STATE_ABORT(from, to) LOG_ALWAYS_FATAL("Bad state transition from %d to %d", from, to);
-int Effect_SetState(preproc_effect_t *effect, uint32_t state)
-{
+int Effect_SetState(preproc_effect_t* effect, uint32_t state) {
int status = 0;
ALOGV("Effect_SetState proc %d, new %d old %d", effect->procId, state, effect->state);
- switch(state) {
- case PREPROC_EFFECT_STATE_INIT:
- switch(effect->state) {
- case PREPROC_EFFECT_STATE_ACTIVE:
- effect->ops->disable(effect);
- Session_SetProcEnabled(effect->session, effect->procId, false);
+ switch (state) {
+ case PREPROC_EFFECT_STATE_INIT:
+ switch (effect->state) {
+ case PREPROC_EFFECT_STATE_ACTIVE:
+ effect->ops->disable(effect);
+ Session_SetProcEnabled(effect->session, effect->procId, false);
+ break;
+ case PREPROC_EFFECT_STATE_CONFIG:
+ case PREPROC_EFFECT_STATE_CREATED:
+ case PREPROC_EFFECT_STATE_INIT:
+ break;
+ default:
+ BAD_STATE_ABORT(effect->state, state);
+ }
+ break;
+ case PREPROC_EFFECT_STATE_CREATED:
+ switch (effect->state) {
+ case PREPROC_EFFECT_STATE_INIT:
+ status = effect->ops->create(effect);
+ break;
+ case PREPROC_EFFECT_STATE_CREATED:
+ case PREPROC_EFFECT_STATE_ACTIVE:
+ case PREPROC_EFFECT_STATE_CONFIG:
+ ALOGE("Effect_SetState invalid transition");
+ status = -ENOSYS;
+ break;
+ default:
+ BAD_STATE_ABORT(effect->state, state);
+ }
break;
case PREPROC_EFFECT_STATE_CONFIG:
- case PREPROC_EFFECT_STATE_CREATED:
- case PREPROC_EFFECT_STATE_INIT:
+ switch (effect->state) {
+ case PREPROC_EFFECT_STATE_INIT:
+ ALOGE("Effect_SetState invalid transition");
+ status = -ENOSYS;
+ break;
+ case PREPROC_EFFECT_STATE_ACTIVE:
+ effect->ops->disable(effect);
+ Session_SetProcEnabled(effect->session, effect->procId, false);
+ break;
+ case PREPROC_EFFECT_STATE_CREATED:
+ case PREPROC_EFFECT_STATE_CONFIG:
+ break;
+ default:
+ BAD_STATE_ABORT(effect->state, state);
+ }
+ break;
+ case PREPROC_EFFECT_STATE_ACTIVE:
+ switch (effect->state) {
+ case PREPROC_EFFECT_STATE_INIT:
+ case PREPROC_EFFECT_STATE_CREATED:
+ ALOGE("Effect_SetState invalid transition");
+ status = -ENOSYS;
+ break;
+ case PREPROC_EFFECT_STATE_ACTIVE:
+ // enabling an already enabled effect is just ignored
+ break;
+ case PREPROC_EFFECT_STATE_CONFIG:
+ effect->ops->enable(effect);
+ Session_SetProcEnabled(effect->session, effect->procId, true);
+ break;
+ default:
+ BAD_STATE_ABORT(effect->state, state);
+ }
break;
default:
BAD_STATE_ABORT(effect->state, state);
- }
- break;
- case PREPROC_EFFECT_STATE_CREATED:
- switch(effect->state) {
- case PREPROC_EFFECT_STATE_INIT:
- status = effect->ops->create(effect);
- break;
- case PREPROC_EFFECT_STATE_CREATED:
- case PREPROC_EFFECT_STATE_ACTIVE:
- case PREPROC_EFFECT_STATE_CONFIG:
- ALOGE("Effect_SetState invalid transition");
- status = -ENOSYS;
- break;
- default:
- BAD_STATE_ABORT(effect->state, state);
- }
- break;
- case PREPROC_EFFECT_STATE_CONFIG:
- switch(effect->state) {
- case PREPROC_EFFECT_STATE_INIT:
- ALOGE("Effect_SetState invalid transition");
- status = -ENOSYS;
- break;
- case PREPROC_EFFECT_STATE_ACTIVE:
- effect->ops->disable(effect);
- Session_SetProcEnabled(effect->session, effect->procId, false);
- break;
- case PREPROC_EFFECT_STATE_CREATED:
- case PREPROC_EFFECT_STATE_CONFIG:
- break;
- default:
- BAD_STATE_ABORT(effect->state, state);
- }
- break;
- case PREPROC_EFFECT_STATE_ACTIVE:
- switch(effect->state) {
- case PREPROC_EFFECT_STATE_INIT:
- case PREPROC_EFFECT_STATE_CREATED:
- ALOGE("Effect_SetState invalid transition");
- status = -ENOSYS;
- break;
- case PREPROC_EFFECT_STATE_ACTIVE:
- // enabling an already enabled effect is just ignored
- break;
- case PREPROC_EFFECT_STATE_CONFIG:
- effect->ops->enable(effect);
- Session_SetProcEnabled(effect->session, effect->procId, true);
- break;
- default:
- BAD_STATE_ABORT(effect->state, state);
- }
- break;
- default:
- BAD_STATE_ABORT(effect->state, state);
}
if (status == 0) {
effect->state = state;
@@ -766,8 +804,7 @@
return status;
}
-int Effect_Init(preproc_effect_t *effect, uint32_t procId)
-{
+int Effect_Init(preproc_effect_t* effect, uint32_t procId) {
if (HasReverseStream(procId)) {
effect->itfe = &sEffectInterfaceReverse;
} else {
@@ -779,21 +816,17 @@
return 0;
}
-int Effect_Create(preproc_effect_t *effect,
- preproc_session_t *session,
- effect_handle_t *interface)
-{
+int Effect_Create(preproc_effect_t* effect, preproc_session_t* session,
+ effect_handle_t* interface) {
effect->session = session;
*interface = (effect_handle_t)&effect->itfe;
return Effect_SetState(effect, PREPROC_EFFECT_STATE_CREATED);
}
-int Effect_Release(preproc_effect_t *effect)
-{
+int Effect_Release(preproc_effect_t* effect) {
return Effect_SetState(effect, PREPROC_EFFECT_STATE_INIT);
}
-
//------------------------------------------------------------------------------
// Session functions
//------------------------------------------------------------------------------
@@ -803,8 +836,7 @@
static const int kPreprocDefaultSr = 16000;
static const int kPreProcDefaultCnl = 1;
-int Session_Init(preproc_session_t *session)
-{
+int Session_Init(preproc_session_t* session) {
size_t i;
int status = 0;
@@ -812,66 +844,45 @@
session->id = 0;
session->io = 0;
session->createdMsk = 0;
- session->apm = NULL;
for (i = 0; i < PREPROC_NUM_EFFECTS && status == 0; i++) {
status = Effect_Init(&session->effects[i], i);
}
return status;
}
-
-extern "C" int Session_CreateEffect(preproc_session_t *session,
- int32_t procId,
- effect_handle_t *interface)
-{
+extern "C" int Session_CreateEffect(preproc_session_t* session, int32_t procId,
+ effect_handle_t* interface) {
int status = -ENOMEM;
ALOGV("Session_CreateEffect procId %d, createdMsk %08x", procId, session->createdMsk);
if (session->createdMsk == 0) {
- session->apm = webrtc::AudioProcessing::Create();
+ session->apm = session->ap_builder.Create();
if (session->apm == NULL) {
ALOGW("Session_CreateEffect could not get apm engine");
goto error;
}
- const webrtc::ProcessingConfig processing_config = {
- {{kPreprocDefaultSr, kPreProcDefaultCnl},
- {kPreprocDefaultSr, kPreProcDefaultCnl},
- {kPreprocDefaultSr, kPreProcDefaultCnl},
- {kPreprocDefaultSr, kPreProcDefaultCnl}}};
- session->apm->Initialize(processing_config);
- session->procFrame = new webrtc::AudioFrame();
- if (session->procFrame == NULL) {
- ALOGW("Session_CreateEffect could not allocate audio frame");
- goto error;
- }
- session->revFrame = new webrtc::AudioFrame();
- if (session->revFrame == NULL) {
- ALOGW("Session_CreateEffect could not allocate reverse audio frame");
- goto error;
- }
session->apmSamplingRate = kPreprocDefaultSr;
session->apmFrameCount = (kPreprocDefaultSr) / 100;
session->frameCount = session->apmFrameCount;
session->samplingRate = kPreprocDefaultSr;
session->inChannelCount = kPreProcDefaultCnl;
session->outChannelCount = kPreProcDefaultCnl;
- session->procFrame->sample_rate_hz_ = kPreprocDefaultSr;
- session->procFrame->num_channels_ = kPreProcDefaultCnl;
+ session->inputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->inputConfig.set_num_channels(kPreProcDefaultCnl);
+ session->outputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->outputConfig.set_num_channels(kPreProcDefaultCnl);
session->revChannelCount = kPreProcDefaultCnl;
- session->revFrame->sample_rate_hz_ = kPreprocDefaultSr;
- session->revFrame->num_channels_ = kPreProcDefaultCnl;
+ session->revConfig.set_sample_rate_hz(kPreprocDefaultSr);
+ session->revConfig.set_num_channels(kPreProcDefaultCnl);
session->enabledMsk = 0;
session->processedMsk = 0;
session->revEnabledMsk = 0;
session->revProcessedMsk = 0;
- session->inResampler = NULL;
session->inBuf = NULL;
session->inBufSize = 0;
- session->outResampler = NULL;
session->outBuf = NULL;
session->outBufSize = 0;
- session->revResampler = NULL;
session->revBuf = NULL;
session->revBufSize = 0;
}
@@ -880,45 +891,23 @@
goto error;
}
ALOGV("Session_CreateEffect OK");
- session->createdMsk |= (1<<procId);
+ session->createdMsk |= (1 << procId);
return status;
error:
if (session->createdMsk == 0) {
- delete session->revFrame;
- session->revFrame = NULL;
- delete session->procFrame;
- session->procFrame = NULL;
delete session->apm;
- session->apm = NULL; // NOLINT(clang-analyzer-cplusplus.NewDelete)
+ session->apm = NULL;
}
return status;
}
-int Session_ReleaseEffect(preproc_session_t *session,
- preproc_effect_t *fx)
-{
+int Session_ReleaseEffect(preproc_session_t* session, preproc_effect_t* fx) {
ALOGW_IF(Effect_Release(fx) != 0, " Effect_Release() failed for proc ID %d", fx->procId);
- session->createdMsk &= ~(1<<fx->procId);
+ session->createdMsk &= ~(1 << fx->procId);
if (session->createdMsk == 0) {
delete session->apm;
session->apm = NULL;
- delete session->procFrame;
- session->procFrame = NULL;
- delete session->revFrame;
- session->revFrame = NULL;
- if (session->inResampler != NULL) {
- speex_resampler_destroy(session->inResampler);
- session->inResampler = NULL;
- }
- if (session->outResampler != NULL) {
- speex_resampler_destroy(session->outResampler);
- session->outResampler = NULL;
- }
- if (session->revResampler != NULL) {
- speex_resampler_destroy(session->revResampler);
- session->revResampler = NULL;
- }
delete session->inBuf;
session->inBuf = NULL;
delete session->outBuf;
@@ -932,9 +921,7 @@
return 0;
}
-
-int Session_SetConfig(preproc_session_t *session, effect_config_t *config)
-{
+int Session_SetConfig(preproc_session_t* session, effect_config_t* config) {
uint32_t inCnl = audio_channel_count_from_in_mask(config->inputCfg.channels);
uint32_t outCnl = audio_channel_count_from_in_mask(config->outputCfg.channels);
@@ -944,46 +931,37 @@
return -EINVAL;
}
- ALOGV("Session_SetConfig sr %d cnl %08x",
- config->inputCfg.samplingRate, config->inputCfg.channels);
- int status;
+ ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
+ config->inputCfg.channels);
// AEC implementation is limited to 16kHz
if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
session->apmSamplingRate = 32000;
- } else
- if (config->inputCfg.samplingRate >= 16000) {
+ } else if (config->inputCfg.samplingRate >= 16000) {
session->apmSamplingRate = 16000;
} else if (config->inputCfg.samplingRate >= 8000) {
session->apmSamplingRate = 8000;
}
- const webrtc::ProcessingConfig processing_config = {
- {{static_cast<int>(session->apmSamplingRate), inCnl},
- {static_cast<int>(session->apmSamplingRate), outCnl},
- {static_cast<int>(session->apmSamplingRate), inCnl},
- {static_cast<int>(session->apmSamplingRate), inCnl}}};
- status = session->apm->Initialize(processing_config);
- if (status < 0) {
- return -EINVAL;
- }
session->samplingRate = config->inputCfg.samplingRate;
session->apmFrameCount = session->apmSamplingRate / 100;
if (session->samplingRate == session->apmSamplingRate) {
session->frameCount = session->apmFrameCount;
} else {
- session->frameCount = (session->apmFrameCount * session->samplingRate) /
- session->apmSamplingRate + 1;
+ session->frameCount =
+ (session->apmFrameCount * session->samplingRate) / session->apmSamplingRate;
}
session->inChannelCount = inCnl;
session->outChannelCount = outCnl;
- session->procFrame->num_channels_ = inCnl;
- session->procFrame->sample_rate_hz_ = session->apmSamplingRate;
+ session->inputConfig.set_sample_rate_hz(session->samplingRate);
+ session->inputConfig.set_num_channels(inCnl);
+ session->outputConfig.set_sample_rate_hz(session->samplingRate);
+ session->outputConfig.set_num_channels(inCnl);
session->revChannelCount = inCnl;
- session->revFrame->num_channels_ = inCnl;
- session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
+ session->revConfig.set_sample_rate_hz(session->samplingRate);
+ session->revConfig.set_num_channels(inCnl);
// force process buffer reallocation
session->inBufSize = 0;
@@ -992,64 +970,11 @@
session->framesOut = 0;
- if (session->inResampler != NULL) {
- speex_resampler_destroy(session->inResampler);
- session->inResampler = NULL;
- }
- if (session->outResampler != NULL) {
- speex_resampler_destroy(session->outResampler);
- session->outResampler = NULL;
- }
- if (session->revResampler != NULL) {
- speex_resampler_destroy(session->revResampler);
- session->revResampler = NULL;
- }
- if (session->samplingRate != session->apmSamplingRate) {
- int error;
- session->inResampler = speex_resampler_init(session->inChannelCount,
- session->samplingRate,
- session->apmSamplingRate,
- RESAMPLER_QUALITY,
- &error);
- if (session->inResampler == NULL) {
- ALOGW("Session_SetConfig Cannot create speex resampler: %s",
- speex_resampler_strerror(error));
- return -EINVAL;
- }
- session->outResampler = speex_resampler_init(session->outChannelCount,
- session->apmSamplingRate,
- session->samplingRate,
- RESAMPLER_QUALITY,
- &error);
- if (session->outResampler == NULL) {
- ALOGW("Session_SetConfig Cannot create speex resampler: %s",
- speex_resampler_strerror(error));
- speex_resampler_destroy(session->inResampler);
- session->inResampler = NULL;
- return -EINVAL;
- }
- session->revResampler = speex_resampler_init(session->inChannelCount,
- session->samplingRate,
- session->apmSamplingRate,
- RESAMPLER_QUALITY,
- &error);
- if (session->revResampler == NULL) {
- ALOGW("Session_SetConfig Cannot create speex resampler: %s",
- speex_resampler_strerror(error));
- speex_resampler_destroy(session->inResampler);
- session->inResampler = NULL;
- speex_resampler_destroy(session->outResampler);
- session->outResampler = NULL;
- return -EINVAL;
- }
- }
-
session->state = PREPROC_SESSION_STATE_CONFIG;
return 0;
}
-void Session_GetConfig(preproc_session_t *session, effect_config_t *config)
-{
+void Session_GetConfig(preproc_session_t* session, effect_config_t* config) {
memset(config, 0, sizeof(effect_config_t));
config->inputCfg.samplingRate = config->outputCfg.samplingRate = session->samplingRate;
config->inputCfg.format = config->outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1060,37 +985,25 @@
(EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS | EFFECT_CONFIG_FORMAT);
}
-int Session_SetReverseConfig(preproc_session_t *session, effect_config_t *config)
-{
+int Session_SetReverseConfig(preproc_session_t* session, effect_config_t* config) {
if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
- config->inputCfg.format != config->outputCfg.format ||
- config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
+ config->inputCfg.format != config->outputCfg.format ||
+ config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
return -EINVAL;
}
- ALOGV("Session_SetReverseConfig sr %d cnl %08x",
- config->inputCfg.samplingRate, config->inputCfg.channels);
+ ALOGV("Session_SetReverseConfig sr %d cnl %08x", config->inputCfg.samplingRate,
+ config->inputCfg.channels);
if (session->state < PREPROC_SESSION_STATE_CONFIG) {
return -ENOSYS;
}
if (config->inputCfg.samplingRate != session->samplingRate ||
- config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
+ config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
return -EINVAL;
}
uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
- const webrtc::ProcessingConfig processing_config = {
- {{static_cast<int>(session->apmSamplingRate), session->inChannelCount},
- {static_cast<int>(session->apmSamplingRate), session->outChannelCount},
- {static_cast<int>(session->apmSamplingRate), inCnl},
- {static_cast<int>(session->apmSamplingRate), inCnl}}};
- int status = session->apm->Initialize(processing_config);
- if (status < 0) {
- return -EINVAL;
- }
session->revChannelCount = inCnl;
- session->revFrame->num_channels_ = inCnl;
- session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
// force process buffer reallocation
session->revBufSize = 0;
session->framesRev = 0;
@@ -1098,8 +1011,7 @@
return 0;
}
-void Session_GetReverseConfig(preproc_session_t *session, effect_config_t *config)
-{
+void Session_GetReverseConfig(preproc_session_t* session, effect_config_t* config) {
memset(config, 0, sizeof(effect_config_t));
config->inputCfg.samplingRate = config->outputCfg.samplingRate = session->samplingRate;
config->inputCfg.format = config->outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1109,25 +1021,14 @@
(EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS | EFFECT_CONFIG_FORMAT);
}
-void Session_SetProcEnabled(preproc_session_t *session, uint32_t procId, bool enabled)
-{
+void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
if (enabled) {
- if(session->enabledMsk == 0) {
+ if (session->enabledMsk == 0) {
session->framesIn = 0;
- if (session->inResampler != NULL) {
- speex_resampler_reset_mem(session->inResampler);
- }
- session->framesOut = 0;
- if (session->outResampler != NULL) {
- speex_resampler_reset_mem(session->outResampler);
- }
}
session->enabledMsk |= (1 << procId);
if (HasReverseStream(procId)) {
session->framesRev = 0;
- if (session->revResampler != NULL) {
- speex_resampler_reset_mem(session->revResampler);
- }
session->revEnabledMsk |= (1 << procId);
}
} else {
@@ -1136,8 +1037,8 @@
session->revEnabledMsk &= ~(1 << procId);
}
}
- ALOGV("Session_SetProcEnabled proc %d, enabled %d enabledMsk %08x revEnabledMsk %08x",
- procId, enabled, session->enabledMsk, session->revEnabledMsk);
+ ALOGV("Session_SetProcEnabled proc %d, enabled %d enabledMsk %08x revEnabledMsk %08x", procId,
+ enabled, session->enabledMsk, session->revEnabledMsk);
session->processedMsk = 0;
if (HasReverseStream(procId)) {
session->revProcessedMsk = 0;
@@ -1151,8 +1052,7 @@
static int sInitStatus = 1;
static preproc_session_t sSessions[PREPROC_NUM_SESSIONS];
-preproc_session_t *PreProc_GetSession(int32_t procId, int32_t sessionId, int32_t ioId)
-{
+preproc_session_t* PreProc_GetSession(int32_t procId, int32_t sessionId, int32_t ioId) {
size_t i;
for (i = 0; i < PREPROC_NUM_SESSIONS; i++) {
if (sSessions[i].id == sessionId) {
@@ -1172,7 +1072,6 @@
return NULL;
}
-
int PreProc_Init() {
size_t i;
int status = 0;
@@ -1187,8 +1086,7 @@
return sInitStatus;
}
-const effect_descriptor_t *PreProc_GetDescriptor(const effect_uuid_t *uuid)
-{
+const effect_descriptor_t* PreProc_GetDescriptor(const effect_uuid_t* uuid) {
size_t i;
for (i = 0; i < PREPROC_NUM_EFFECTS; i++) {
if (memcmp(&sDescriptors[i]->uuid, uuid, sizeof(effect_uuid_t)) == 0) {
@@ -1198,35 +1096,31 @@
return NULL;
}
-
extern "C" {
//------------------------------------------------------------------------------
// Effect Control Interface Implementation
//------------------------------------------------------------------------------
-int PreProcessingFx_Process(effect_handle_t self,
- audio_buffer_t *inBuffer,
- audio_buffer_t *outBuffer)
-{
- preproc_effect_t * effect = (preproc_effect_t *)self;
+int PreProcessingFx_Process(effect_handle_t self, audio_buffer_t* inBuffer,
+ audio_buffer_t* outBuffer) {
+ preproc_effect_t* effect = (preproc_effect_t*)self;
- if (effect == NULL){
+ if (effect == NULL) {
ALOGV("PreProcessingFx_Process() ERROR effect == NULL");
return -EINVAL;
}
- preproc_session_t * session = (preproc_session_t *)effect->session;
+ preproc_session_t* session = (preproc_session_t*)effect->session;
- if (inBuffer == NULL || inBuffer->raw == NULL ||
- outBuffer == NULL || outBuffer->raw == NULL){
+ if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL) {
ALOGW("PreProcessingFx_Process() ERROR bad pointer");
return -EINVAL;
}
- session->processedMsk |= (1<<effect->procId);
+ session->processedMsk |= (1 << effect->procId);
-// ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
-// inBuffer->frameCount, session->enabledMsk, session->processedMsk);
+ // ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
+ // inBuffer->frameCount, session->enabledMsk, session->processedMsk);
if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
effect->session->processedMsk = 0;
@@ -1237,12 +1131,10 @@
if (outBuffer->frameCount < fr) {
fr = outBuffer->frameCount;
}
- memcpy(outBuffer->s16,
- session->outBuf,
- fr * session->outChannelCount * sizeof(int16_t));
- memcpy(session->outBuf,
- session->outBuf + fr * session->outChannelCount,
- (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
+ memcpy(outBuffer->s16, session->outBuf,
+ fr * session->outChannelCount * sizeof(int16_t));
+ memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
+ (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
session->framesOut -= fr;
framesWr += fr;
}
@@ -1252,95 +1144,33 @@
return 0;
}
- if (session->inResampler != NULL) {
- size_t fr = session->frameCount - session->framesIn;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- if (session->inBufSize < session->framesIn + fr) {
- int16_t *buf;
- session->inBufSize = session->framesIn + fr;
- buf = (int16_t *)realloc(session->inBuf,
- session->inBufSize * session->inChannelCount * sizeof(int16_t));
- if (buf == NULL) {
- session->framesIn = 0;
- free(session->inBuf);
- session->inBuf = NULL;
- return -ENOMEM;
- }
- session->inBuf = buf;
- }
- memcpy(session->inBuf + session->framesIn * session->inChannelCount,
- inBuffer->s16,
- fr * session->inChannelCount * sizeof(int16_t));
-#ifdef DUAL_MIC_TEST
- pthread_mutex_lock(&gPcmDumpLock);
- if (gPcmDumpFh != NULL) {
- fwrite(inBuffer->raw,
- fr * session->inChannelCount * sizeof(int16_t), 1, gPcmDumpFh);
- }
- pthread_mutex_unlock(&gPcmDumpLock);
-#endif
-
- session->framesIn += fr;
- inBuffer->frameCount = fr;
- if (session->framesIn < session->frameCount) {
- return 0;
- }
- spx_uint32_t frIn = session->framesIn;
- spx_uint32_t frOut = session->apmFrameCount;
- if (session->inChannelCount == 1) {
- speex_resampler_process_int(session->inResampler,
- 0,
- session->inBuf,
- &frIn,
- session->procFrame->data_,
- &frOut);
- } else {
- speex_resampler_process_interleaved_int(session->inResampler,
- session->inBuf,
- &frIn,
- session->procFrame->data_,
- &frOut);
- }
- memcpy(session->inBuf,
- session->inBuf + frIn * session->inChannelCount,
- (session->framesIn - frIn) * session->inChannelCount * sizeof(int16_t));
- session->framesIn -= frIn;
- } else {
- size_t fr = session->frameCount - session->framesIn;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- memcpy(session->procFrame->data_ + session->framesIn * session->inChannelCount,
- inBuffer->s16,
- fr * session->inChannelCount * sizeof(int16_t));
-
-#ifdef DUAL_MIC_TEST
- pthread_mutex_lock(&gPcmDumpLock);
- if (gPcmDumpFh != NULL) {
- fwrite(inBuffer->raw,
- fr * session->inChannelCount * sizeof(int16_t), 1, gPcmDumpFh);
- }
- pthread_mutex_unlock(&gPcmDumpLock);
-#endif
-
- session->framesIn += fr;
- inBuffer->frameCount = fr;
- if (session->framesIn < session->frameCount) {
- return 0;
- }
- session->framesIn = 0;
+ size_t fr = session->frameCount - session->framesIn;
+ if (inBuffer->frameCount < fr) {
+ fr = inBuffer->frameCount;
}
- session->procFrame->samples_per_channel_ = session->apmFrameCount;
-
- effect->session->apm->ProcessStream(session->procFrame);
+ session->framesIn += fr;
+ inBuffer->frameCount = fr;
+ if (session->framesIn < session->frameCount) {
+ return 0;
+ }
+ session->framesIn = 0;
+ if (int status = effect->session->apm->ProcessStream(
+ (const int16_t* const)inBuffer->s16,
+ (const webrtc::StreamConfig)effect->session->inputConfig,
+ (const webrtc::StreamConfig)effect->session->outputConfig,
+ (int16_t* const)outBuffer->s16);
+ status != 0) {
+ ALOGE("Process Stream failed with error %d\n", status);
+ return status;
+ }
+ outBuffer->frameCount = inBuffer->frameCount;
if (session->outBufSize < session->framesOut + session->frameCount) {
- int16_t *buf;
+ int16_t* buf;
session->outBufSize = session->framesOut + session->frameCount;
- buf = (int16_t *)realloc(session->outBuf,
- session->outBufSize * session->outChannelCount * sizeof(int16_t));
+ buf = (int16_t*)realloc(
+ session->outBuf,
+ session->outBufSize * session->outChannelCount * sizeof(int16_t));
if (buf == NULL) {
session->framesOut = 0;
free(session->outBuf);
@@ -1350,40 +1180,14 @@
session->outBuf = buf;
}
- if (session->outResampler != NULL) {
- spx_uint32_t frIn = session->apmFrameCount;
- spx_uint32_t frOut = session->frameCount;
- if (session->inChannelCount == 1) {
- speex_resampler_process_int(session->outResampler,
- 0,
- session->procFrame->data_,
- &frIn,
- session->outBuf + session->framesOut * session->outChannelCount,
- &frOut);
- } else {
- speex_resampler_process_interleaved_int(session->outResampler,
- session->procFrame->data_,
- &frIn,
- session->outBuf + session->framesOut * session->outChannelCount,
- &frOut);
- }
- session->framesOut += frOut;
- } else {
- memcpy(session->outBuf + session->framesOut * session->outChannelCount,
- session->procFrame->data_,
- session->frameCount * session->outChannelCount * sizeof(int16_t));
- session->framesOut += session->frameCount;
- }
- size_t fr = session->framesOut;
+ fr = session->framesOut;
if (framesRq - framesWr < fr) {
fr = framesRq - framesWr;
}
- memcpy(outBuffer->s16 + framesWr * session->outChannelCount,
- session->outBuf,
- fr * session->outChannelCount * sizeof(int16_t));
- memcpy(session->outBuf,
- session->outBuf + fr * session->outChannelCount,
- (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
+ memcpy(outBuffer->s16 + framesWr * session->outChannelCount, session->outBuf,
+ fr * session->outChannelCount * sizeof(int16_t));
+ memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
+ (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
session->framesOut -= fr;
outBuffer->frameCount += fr;
@@ -1393,39 +1197,32 @@
}
}
-int PreProcessingFx_Command(effect_handle_t self,
- uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData)
-{
- preproc_effect_t * effect = (preproc_effect_t *) self;
+int PreProcessingFx_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+ void* pCmdData, uint32_t* replySize, void* pReplyData) {
+ preproc_effect_t* effect = (preproc_effect_t*)self;
- if (effect == NULL){
+ if (effect == NULL) {
return -EINVAL;
}
- //ALOGV("PreProcessingFx_Command: command %d cmdSize %d",cmdCode, cmdSize);
+ // ALOGV("PreProcessingFx_Command: command %d cmdSize %d",cmdCode, cmdSize);
- switch (cmdCode){
+ switch (cmdCode) {
case EFFECT_CMD_INIT:
- if (pReplyData == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
return -EINVAL;
}
if (effect->ops->init) {
effect->ops->init(effect);
}
- *(int *)pReplyData = 0;
+ *(int*)pReplyData = 0;
break;
case EFFECT_CMD_SET_CONFIG: {
- if (pCmdData == NULL||
- cmdSize != sizeof(effect_config_t)||
- pReplyData == NULL||
- *replySize != sizeof(int)){
+ if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+ *replySize != sizeof(int)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_CONFIG: ERROR");
+ "EFFECT_CMD_SET_CONFIG: ERROR");
return -EINVAL;
}
#ifdef DUAL_MIC_TEST
@@ -1436,55 +1233,51 @@
effect->session->enabledMsk = 0;
}
#endif
- *(int *)pReplyData = Session_SetConfig(effect->session, (effect_config_t *)pCmdData);
+ *(int*)pReplyData = Session_SetConfig(effect->session, (effect_config_t*)pCmdData);
#ifdef DUAL_MIC_TEST
if (gDualMicEnabled) {
effect->session->enabledMsk = enabledMsk;
}
#endif
- if (*(int *)pReplyData != 0) {
+ if (*(int*)pReplyData != 0) {
break;
}
if (effect->state != PREPROC_EFFECT_STATE_ACTIVE) {
- *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+ *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
}
- } break;
+ } break;
case EFFECT_CMD_GET_CONFIG:
- if (pReplyData == NULL ||
- *replySize != sizeof(effect_config_t)) {
+ if (pReplyData == NULL || *replySize != sizeof(effect_config_t)) {
ALOGV("\tLVM_ERROR : PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_GET_CONFIG: ERROR");
+ "EFFECT_CMD_GET_CONFIG: ERROR");
return -EINVAL;
}
- Session_GetConfig(effect->session, (effect_config_t *)pReplyData);
+ Session_GetConfig(effect->session, (effect_config_t*)pReplyData);
break;
case EFFECT_CMD_SET_CONFIG_REVERSE:
- if (pCmdData == NULL ||
- cmdSize != sizeof(effect_config_t) ||
- pReplyData == NULL ||
+ if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
*replySize != sizeof(int)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_CONFIG_REVERSE: ERROR");
+ "EFFECT_CMD_SET_CONFIG_REVERSE: ERROR");
return -EINVAL;
}
- *(int *)pReplyData = Session_SetReverseConfig(effect->session,
- (effect_config_t *)pCmdData);
- if (*(int *)pReplyData != 0) {
+ *(int*)pReplyData =
+ Session_SetReverseConfig(effect->session, (effect_config_t*)pCmdData);
+ if (*(int*)pReplyData != 0) {
break;
}
break;
case EFFECT_CMD_GET_CONFIG_REVERSE:
- if (pReplyData == NULL ||
- *replySize != sizeof(effect_config_t)){
+ if (pReplyData == NULL || *replySize != sizeof(effect_config_t)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_GET_CONFIG_REVERSE: ERROR");
+ "EFFECT_CMD_GET_CONFIG_REVERSE: ERROR");
return -EINVAL;
}
- Session_GetReverseConfig(effect->session, (effect_config_t *)pCmdData);
+ Session_GetReverseConfig(effect->session, (effect_config_t*)pCmdData);
break;
case EFFECT_CMD_RESET:
@@ -1494,80 +1287,74 @@
break;
case EFFECT_CMD_GET_PARAM: {
- effect_param_t *p = (effect_param_t *)pCmdData;
+ effect_param_t* p = (effect_param_t*)pCmdData;
if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) ||
- cmdSize < (sizeof(effect_param_t) + p->psize) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize < (sizeof(effect_param_t) + p->psize)){
+ cmdSize < (sizeof(effect_param_t) + p->psize) || pReplyData == NULL ||
+ replySize == NULL || *replySize < (sizeof(effect_param_t) + p->psize)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_GET_PARAM: ERROR");
+ "EFFECT_CMD_GET_PARAM: ERROR");
return -EINVAL;
}
memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
- p = (effect_param_t *)pReplyData;
+ p = (effect_param_t*)pReplyData;
int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
if (effect->ops->get_parameter) {
- p->status = effect->ops->get_parameter(effect, p->data,
- &p->vsize,
- p->data + voffset);
+ p->status =
+ effect->ops->get_parameter(effect, p->data, &p->vsize, p->data + voffset);
*replySize = sizeof(effect_param_t) + voffset + p->vsize;
}
} break;
- case EFFECT_CMD_SET_PARAM:{
- if (pCmdData == NULL||
- cmdSize < sizeof(effect_param_t) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize != sizeof(int32_t)){
+ case EFFECT_CMD_SET_PARAM: {
+ if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) || pReplyData == NULL ||
+ replySize == NULL || *replySize != sizeof(int32_t)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR");
+ "EFFECT_CMD_SET_PARAM: ERROR");
return -EINVAL;
}
- effect_param_t *p = (effect_param_t *) pCmdData;
+ effect_param_t* p = (effect_param_t*)pCmdData;
- if (p->psize != sizeof(int32_t)){
+ if (p->psize != sizeof(int32_t)) {
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
+ "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
return -EINVAL;
}
if (effect->ops->set_parameter) {
- *(int *)pReplyData = effect->ops->set_parameter(effect,
- (void *)p->data,
- p->data + p->psize);
+ *(int*)pReplyData =
+ effect->ops->set_parameter(effect, (void*)p->data, p->data + p->psize);
}
} break;
case EFFECT_CMD_ENABLE:
- if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_ENABLE: ERROR");
return -EINVAL;
}
- *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_ACTIVE);
+ *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_ACTIVE);
break;
case EFFECT_CMD_DISABLE:
- if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+ if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_DISABLE: ERROR");
return -EINVAL;
}
- *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+ *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
break;
case EFFECT_CMD_SET_DEVICE:
case EFFECT_CMD_SET_INPUT_DEVICE:
- if (pCmdData == NULL ||
- cmdSize != sizeof(uint32_t)) {
+ if (pCmdData == NULL || cmdSize != sizeof(uint32_t)) {
ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR");
return -EINVAL;
}
if (effect->ops->set_device) {
- effect->ops->set_device(effect, *(uint32_t *)pCmdData);
+ effect->ops->set_device(effect, *(uint32_t*)pCmdData);
}
break;
@@ -1578,30 +1365,30 @@
#ifdef DUAL_MIC_TEST
///// test commands start
case PREPROC_CMD_DUAL_MIC_ENABLE: {
- if (pCmdData == NULL|| cmdSize != sizeof(uint32_t) ||
- pReplyData == NULL || replySize == NULL) {
+ if (pCmdData == NULL || cmdSize != sizeof(uint32_t) || pReplyData == NULL ||
+ replySize == NULL) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "PREPROC_CMD_DUAL_MIC_ENABLE: ERROR");
+ "PREPROC_CMD_DUAL_MIC_ENABLE: ERROR");
*replySize = 0;
return -EINVAL;
}
- gDualMicEnabled = *(bool *)pCmdData;
+ gDualMicEnabled = *(bool*)pCmdData;
if (gDualMicEnabled) {
effect->aux_channels_on = sHasAuxChannels[effect->procId];
} else {
effect->aux_channels_on = false;
}
- effect->cur_channel_config = (effect->session->inChannelCount == 1) ?
- CHANNEL_CFG_MONO : CHANNEL_CFG_STEREO;
+ effect->cur_channel_config =
+ (effect->session->inChannelCount == 1) ? CHANNEL_CFG_MONO : CHANNEL_CFG_STEREO;
ALOGV("PREPROC_CMD_DUAL_MIC_ENABLE: %s", gDualMicEnabled ? "enabled" : "disabled");
*replySize = sizeof(int);
- *(int *)pReplyData = 0;
- } break;
+ *(int*)pReplyData = 0;
+ } break;
case PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: {
- if (pCmdData == NULL|| pReplyData == NULL || replySize == NULL) {
+ if (pCmdData == NULL || pReplyData == NULL || replySize == NULL) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: ERROR");
+ "PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: ERROR");
*replySize = 0;
return -EINVAL;
}
@@ -1610,20 +1397,19 @@
fclose(gPcmDumpFh);
gPcmDumpFh = NULL;
}
- char *path = strndup((char *)pCmdData, cmdSize);
- gPcmDumpFh = fopen((char *)path, "wb");
+ char* path = strndup((char*)pCmdData, cmdSize);
+ gPcmDumpFh = fopen((char*)path, "wb");
pthread_mutex_unlock(&gPcmDumpLock);
- ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: path %s gPcmDumpFh %p",
- path, gPcmDumpFh);
+ ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: path %s gPcmDumpFh %p", path, gPcmDumpFh);
ALOGE_IF(gPcmDumpFh <= 0, "gPcmDumpFh open error %d %s", errno, strerror(errno));
free(path);
*replySize = sizeof(int);
- *(int *)pReplyData = 0;
- } break;
+ *(int*)pReplyData = 0;
+ } break;
case PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: {
if (pReplyData == NULL || replySize == NULL) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: ERROR");
+ "PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: ERROR");
*replySize = 0;
return -EINVAL;
}
@@ -1635,118 +1421,116 @@
pthread_mutex_unlock(&gPcmDumpLock);
ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP");
*replySize = sizeof(int);
- *(int *)pReplyData = 0;
- } break;
- ///// test commands end
+ *(int*)pReplyData = 0;
+ } break;
+ ///// test commands end
case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: {
- if(!gDualMicEnabled) {
+ if (!gDualMicEnabled) {
return -EINVAL;
}
- if (pCmdData == NULL|| cmdSize != 2 * sizeof(uint32_t) ||
- pReplyData == NULL || replySize == NULL) {
+ if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL ||
+ replySize == NULL) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: ERROR");
+ "EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: ERROR");
*replySize = 0;
return -EINVAL;
}
- if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS ||
- !effect->aux_channels_on) {
+ if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
ALOGV("PreProcessingFx_Command feature EFFECT_FEATURE_AUX_CHANNELS not supported by"
- " fx %d", effect->procId);
- *(uint32_t *)pReplyData = -ENOSYS;
+ " fx %d",
+ effect->procId);
+ *(uint32_t*)pReplyData = -ENOSYS;
*replySize = sizeof(uint32_t);
break;
}
- size_t num_configs = *((uint32_t *)pCmdData + 1);
- if (*replySize < (2 * sizeof(uint32_t) +
- num_configs * sizeof(channel_config_t))) {
+ size_t num_configs = *((uint32_t*)pCmdData + 1);
+ if (*replySize < (2 * sizeof(uint32_t) + num_configs * sizeof(channel_config_t))) {
*replySize = 0;
return -EINVAL;
}
- *((uint32_t *)pReplyData + 1) = CHANNEL_CFG_CNT;
+ *((uint32_t*)pReplyData + 1) = CHANNEL_CFG_CNT;
if (num_configs < CHANNEL_CFG_CNT ||
- *replySize < (2 * sizeof(uint32_t) +
- CHANNEL_CFG_CNT * sizeof(channel_config_t))) {
- *(uint32_t *)pReplyData = -ENOMEM;
+ *replySize < (2 * sizeof(uint32_t) + CHANNEL_CFG_CNT * sizeof(channel_config_t))) {
+ *(uint32_t*)pReplyData = -ENOMEM;
} else {
num_configs = CHANNEL_CFG_CNT;
- *(uint32_t *)pReplyData = 0;
+ *(uint32_t*)pReplyData = 0;
}
ALOGV("PreProcessingFx_Command EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS num config %d",
num_configs);
*replySize = 2 * sizeof(uint32_t) + num_configs * sizeof(channel_config_t);
- *((uint32_t *)pReplyData + 1) = num_configs;
- memcpy((uint32_t *)pReplyData + 2, &sDualMicConfigs, num_configs * sizeof(channel_config_t));
- } break;
+ *((uint32_t*)pReplyData + 1) = num_configs;
+ memcpy((uint32_t*)pReplyData + 2, &sDualMicConfigs,
+ num_configs * sizeof(channel_config_t));
+ } break;
case EFFECT_CMD_GET_FEATURE_CONFIG:
- if(!gDualMicEnabled) {
+ if (!gDualMicEnabled) {
return -EINVAL;
}
- if (pCmdData == NULL|| cmdSize != sizeof(uint32_t) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize < sizeof(uint32_t) + sizeof(channel_config_t)) {
+ if (pCmdData == NULL || cmdSize != sizeof(uint32_t) || pReplyData == NULL ||
+ replySize == NULL || *replySize < sizeof(uint32_t) + sizeof(channel_config_t)) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_GET_FEATURE_CONFIG: ERROR");
+ "EFFECT_CMD_GET_FEATURE_CONFIG: ERROR");
return -EINVAL;
}
- if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
- *(uint32_t *)pReplyData = -ENOSYS;
+ if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
+ *(uint32_t*)pReplyData = -ENOSYS;
*replySize = sizeof(uint32_t);
break;
}
ALOGV("PreProcessingFx_Command EFFECT_CMD_GET_FEATURE_CONFIG");
- *(uint32_t *)pReplyData = 0;
+ *(uint32_t*)pReplyData = 0;
*replySize = sizeof(uint32_t) + sizeof(channel_config_t);
- memcpy((uint32_t *)pReplyData + 1,
- &sDualMicConfigs[effect->cur_channel_config],
+ memcpy((uint32_t*)pReplyData + 1, &sDualMicConfigs[effect->cur_channel_config],
sizeof(channel_config_t));
break;
case EFFECT_CMD_SET_FEATURE_CONFIG: {
ALOGV("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG: "
- "gDualMicEnabled %d effect->aux_channels_on %d",
+ "gDualMicEnabled %d effect->aux_channels_on %d",
gDualMicEnabled, effect->aux_channels_on);
- if(!gDualMicEnabled) {
+ if (!gDualMicEnabled) {
return -EINVAL;
}
- if (pCmdData == NULL|| cmdSize != (sizeof(uint32_t) + sizeof(channel_config_t)) ||
- pReplyData == NULL || replySize == NULL ||
- *replySize < sizeof(uint32_t)) {
+ if (pCmdData == NULL || cmdSize != (sizeof(uint32_t) + sizeof(channel_config_t)) ||
+ pReplyData == NULL || replySize == NULL || *replySize < sizeof(uint32_t)) {
ALOGE("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
- "pCmdData %p cmdSize %d pReplyData %p replySize %p *replySize %d",
- pCmdData, cmdSize, pReplyData, replySize, replySize ? *replySize : -1);
+ "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
+ "pCmdData %p cmdSize %d pReplyData %p replySize %p *replySize %d",
+ pCmdData, cmdSize, pReplyData, replySize, replySize ? *replySize : -1);
return -EINVAL;
}
*replySize = sizeof(uint32_t);
- if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
- *(uint32_t *)pReplyData = -ENOSYS;
+ if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
+ *(uint32_t*)pReplyData = -ENOSYS;
ALOGV("PreProcessingFx_Command cmdCode Case: "
- "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
- "CmdData %d effect->aux_channels_on %d",
- *(uint32_t *)pCmdData, effect->aux_channels_on);
+ "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
+ "CmdData %d effect->aux_channels_on %d",
+ *(uint32_t*)pCmdData, effect->aux_channels_on);
break;
}
size_t i;
- for (i = 0; i < CHANNEL_CFG_CNT;i++) {
- if (memcmp((uint32_t *)pCmdData + 1,
- &sDualMicConfigs[i], sizeof(channel_config_t)) == 0) {
+ for (i = 0; i < CHANNEL_CFG_CNT; i++) {
+ if (memcmp((uint32_t*)pCmdData + 1, &sDualMicConfigs[i],
+ sizeof(channel_config_t)) == 0) {
break;
}
}
if (i == CHANNEL_CFG_CNT) {
- *(uint32_t *)pReplyData = -EINVAL;
+ *(uint32_t*)pReplyData = -EINVAL;
ALOGW("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG invalid config"
- "[%08x].[%08x]", *((uint32_t *)pCmdData + 1), *((uint32_t *)pCmdData + 2));
+ "[%08x].[%08x]",
+ *((uint32_t*)pCmdData + 1), *((uint32_t*)pCmdData + 2));
} else {
effect->cur_channel_config = i;
- *(uint32_t *)pReplyData = 0;
+ *(uint32_t*)pReplyData = 0;
ALOGV("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG New config"
- "[%08x].[%08x]", sDualMicConfigs[i].main_channels, sDualMicConfigs[i].aux_channels);
+ "[%08x].[%08x]",
+ sDualMicConfigs[i].main_channels, sDualMicConfigs[i].aux_channels);
}
- } break;
+ } break;
#endif
default:
return -EINVAL;
@@ -1754,11 +1538,8 @@
return 0;
}
-
-int PreProcessingFx_GetDescriptor(effect_handle_t self,
- effect_descriptor_t *pDescriptor)
-{
- preproc_effect_t * effect = (preproc_effect_t *) self;
+int PreProcessingFx_GetDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+ preproc_effect_t* effect = (preproc_effect_t*)self;
if (effect == NULL || pDescriptor == NULL) {
return -EINVAL;
@@ -1769,137 +1550,79 @@
return 0;
}
-int PreProcessingFx_ProcessReverse(effect_handle_t self,
- audio_buffer_t *inBuffer,
- audio_buffer_t *outBuffer __unused)
-{
- preproc_effect_t * effect = (preproc_effect_t *)self;
+int PreProcessingFx_ProcessReverse(effect_handle_t self, audio_buffer_t* inBuffer,
+ audio_buffer_t* outBuffer __unused) {
+ preproc_effect_t* effect = (preproc_effect_t*)self;
- if (effect == NULL){
+ if (effect == NULL) {
ALOGW("PreProcessingFx_ProcessReverse() ERROR effect == NULL");
return -EINVAL;
}
- preproc_session_t * session = (preproc_session_t *)effect->session;
+ preproc_session_t* session = (preproc_session_t*)effect->session;
- if (inBuffer == NULL || inBuffer->raw == NULL){
+ if (inBuffer == NULL || inBuffer->raw == NULL) {
ALOGW("PreProcessingFx_ProcessReverse() ERROR bad pointer");
return -EINVAL;
}
- session->revProcessedMsk |= (1<<effect->procId);
+ session->revProcessedMsk |= (1 << effect->procId);
-// ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk %08x",
-// inBuffer->frameCount, session->revEnabledMsk, session->revProcessedMsk);
-
+ // ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk
+ // %08x",
+ // inBuffer->frameCount, session->revEnabledMsk, session->revProcessedMsk);
if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
effect->session->revProcessedMsk = 0;
- if (session->revResampler != NULL) {
- size_t fr = session->frameCount - session->framesRev;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- if (session->revBufSize < session->framesRev + fr) {
- int16_t *buf;
- session->revBufSize = session->framesRev + fr;
- buf = (int16_t *)realloc(session->revBuf,
- session->revBufSize * session->inChannelCount * sizeof(int16_t));
- if (buf == NULL) {
- session->framesRev = 0;
- free(session->revBuf);
- session->revBuf = NULL;
- return -ENOMEM;
- }
- session->revBuf = buf;
- }
- memcpy(session->revBuf + session->framesRev * session->inChannelCount,
- inBuffer->s16,
- fr * session->inChannelCount * sizeof(int16_t));
-
- session->framesRev += fr;
- inBuffer->frameCount = fr;
- if (session->framesRev < session->frameCount) {
- return 0;
- }
- spx_uint32_t frIn = session->framesRev;
- spx_uint32_t frOut = session->apmFrameCount;
- if (session->inChannelCount == 1) {
- speex_resampler_process_int(session->revResampler,
- 0,
- session->revBuf,
- &frIn,
- session->revFrame->data_,
- &frOut);
- } else {
- speex_resampler_process_interleaved_int(session->revResampler,
- session->revBuf,
- &frIn,
- session->revFrame->data_,
- &frOut);
- }
- memcpy(session->revBuf,
- session->revBuf + frIn * session->inChannelCount,
- (session->framesRev - frIn) * session->inChannelCount * sizeof(int16_t));
- session->framesRev -= frIn;
- } else {
- size_t fr = session->frameCount - session->framesRev;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- memcpy(session->revFrame->data_ + session->framesRev * session->inChannelCount,
- inBuffer->s16,
- fr * session->inChannelCount * sizeof(int16_t));
- session->framesRev += fr;
- inBuffer->frameCount = fr;
- if (session->framesRev < session->frameCount) {
- return 0;
- }
- session->framesRev = 0;
+ size_t fr = session->frameCount - session->framesRev;
+ if (inBuffer->frameCount < fr) {
+ fr = inBuffer->frameCount;
}
- session->revFrame->samples_per_channel_ = session->apmFrameCount;
- effect->session->apm->AnalyzeReverseStream(session->revFrame);
+ session->framesRev += fr;
+ inBuffer->frameCount = fr;
+ if (session->framesRev < session->frameCount) {
+ return 0;
+ }
+ session->framesRev = 0;
+ if (int status = effect->session->apm->ProcessReverseStream(
+ (const int16_t* const)inBuffer->s16,
+ (const webrtc::StreamConfig)effect->session->revConfig,
+ (const webrtc::StreamConfig)effect->session->revConfig,
+ (int16_t* const)outBuffer->s16);
+ status != 0) {
+ ALOGE("Process Reverse Stream failed with error %d\n", status);
+ return status;
+ }
return 0;
} else {
return -ENODATA;
}
}
-
// effect_handle_t interface implementation for effect
const struct effect_interface_s sEffectInterface = {
- PreProcessingFx_Process,
- PreProcessingFx_Command,
- PreProcessingFx_GetDescriptor,
- NULL
-};
+ PreProcessingFx_Process, PreProcessingFx_Command, PreProcessingFx_GetDescriptor, NULL};
const struct effect_interface_s sEffectInterfaceReverse = {
- PreProcessingFx_Process,
- PreProcessingFx_Command,
- PreProcessingFx_GetDescriptor,
- PreProcessingFx_ProcessReverse
-};
+ PreProcessingFx_Process, PreProcessingFx_Command, PreProcessingFx_GetDescriptor,
+ PreProcessingFx_ProcessReverse};
//------------------------------------------------------------------------------
// Effect Library Interface Implementation
//------------------------------------------------------------------------------
-int PreProcessingLib_Create(const effect_uuid_t *uuid,
- int32_t sessionId,
- int32_t ioId,
- effect_handle_t *pInterface)
-{
+int PreProcessingLib_Create(const effect_uuid_t* uuid, int32_t sessionId, int32_t ioId,
+ effect_handle_t* pInterface) {
ALOGV("EffectCreate: uuid: %08x session %d IO: %d", uuid->timeLow, sessionId, ioId);
int status;
- const effect_descriptor_t *desc;
- preproc_session_t *session;
+ const effect_descriptor_t* desc;
+ preproc_session_t* session;
uint32_t procId;
if (PreProc_Init() != 0) {
return sInitStatus;
}
- desc = PreProc_GetDescriptor(uuid);
+ desc = PreProc_GetDescriptor(uuid);
if (desc == NULL) {
ALOGW("EffectCreate: fx not found uuid: %08x", uuid->timeLow);
return -EINVAL;
@@ -1920,14 +1643,13 @@
return status;
}
-int PreProcessingLib_Release(effect_handle_t interface)
-{
+int PreProcessingLib_Release(effect_handle_t interface) {
ALOGV("EffectRelease start %p", interface);
if (PreProc_Init() != 0) {
return sInitStatus;
}
- preproc_effect_t *fx = (preproc_effect_t *)interface;
+ preproc_effect_t* fx = (preproc_effect_t*)interface;
if (fx->session->id == 0) {
return -EINVAL;
@@ -1935,17 +1657,15 @@
return Session_ReleaseEffect(fx->session, fx);
}
-int PreProcessingLib_GetDescriptor(const effect_uuid_t *uuid,
- effect_descriptor_t *pDescriptor) {
-
- if (pDescriptor == NULL || uuid == NULL){
+int PreProcessingLib_GetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
+ if (pDescriptor == NULL || uuid == NULL) {
return -EINVAL;
}
- const effect_descriptor_t *desc = PreProc_GetDescriptor(uuid);
+ const effect_descriptor_t* desc = PreProc_GetDescriptor(uuid);
if (desc == NULL) {
ALOGV("PreProcessingLib_GetDescriptor() not found");
- return -EINVAL;
+ return -EINVAL;
}
ALOGV("PreProcessingLib_GetDescriptor() got fx %s", desc->name);
@@ -1955,15 +1675,13 @@
}
// This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
- .tag = AUDIO_EFFECT_LIBRARY_TAG,
- .version = EFFECT_LIBRARY_API_VERSION,
- .name = "Audio Preprocessing Library",
- .implementor = "The Android Open Source Project",
- .create_effect = PreProcessingLib_Create,
- .release_effect = PreProcessingLib_Release,
- .get_descriptor = PreProcessingLib_GetDescriptor
-};
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+ .tag = AUDIO_EFFECT_LIBRARY_TAG,
+ .version = EFFECT_LIBRARY_API_VERSION,
+ .name = "Audio Preprocessing Library",
+ .implementor = "The Android Open Source Project",
+ .create_effect = PreProcessingLib_Create,
+ .release_effect = PreProcessingLib_Release,
+ .get_descriptor = PreProcessingLib_GetDescriptor};
-}; // extern "C"
+}; // extern "C"
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
new file mode 100644
index 0000000..262fd19
--- /dev/null
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -0,0 +1,24 @@
+cc_benchmark {
+ name: "preprocessing_benchmark",
+ vendor: true,
+ relative_install_path: "soundfx",
+ srcs: ["preprocessing_benchmark.cpp"],
+ shared_libs: [
+ "libaudiopreprocessing",
+ "libaudioutils",
+ "liblog",
+ "libutils",
+ ],
+ cflags: [
+ "-DWEBRTC_POSIX",
+ "-fvisibility=default",
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ "libwebrtc_absl_headers",
+ ],
+}
diff --git a/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp b/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp
new file mode 100644
index 0000000..694a6c4
--- /dev/null
+++ b/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp
@@ -0,0 +1,240 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the channel mask index.
+ * The second parameter indicates the effect index.
+ * 0: Automatic Gain Control,
+ * 1: Acoustic Echo Canceler,
+ * 2: Noise Suppressor,
+ * 3: Automatic Gain Control 2
+ * ---------------------------------------------------------------
+ * Benchmark Time CPU Iterations
+ * ---------------------------------------------------------------
+ * BM_PREPROCESSING/1/0 59836 ns 59655 ns 11732
+ * BM_PREPROCESSING/1/1 66848 ns 66642 ns 10554
+ * BM_PREPROCESSING/1/2 20726 ns 20655 ns 33822
+ * BM_PREPROCESSING/1/3 5093 ns 5076 ns 137897
+ * BM_PREPROCESSING/2/0 117040 ns 116670 ns 5996
+ * BM_PREPROCESSING/2/1 120600 ns 120225 ns 5845
+ * BM_PREPROCESSING/2/2 38460 ns 38330 ns 18190
+ * BM_PREPROCESSING/2/3 6294 ns 6274 ns 111488
+ * BM_PREPROCESSING/3/0 232272 ns 231528 ns 3025
+ * BM_PREPROCESSING/3/1 226346 ns 225628 ns 3117
+ * BM_PREPROCESSING/3/2 75442 ns 75227 ns 9104
+ * BM_PREPROCESSING/3/3 9782 ns 9750 ns 71805
+ * BM_PREPROCESSING/4/0 290388 ns 289426 ns 2389
+ * BM_PREPROCESSING/4/1 279394 ns 278498 ns 2522
+ * BM_PREPROCESSING/4/2 94029 ns 93759 ns 7307
+ * BM_PREPROCESSING/4/3 11487 ns 11450 ns 61129
+ * BM_PREPROCESSING/5/0 347736 ns 346580 ns 2020
+ * BM_PREPROCESSING/5/1 331853 ns 330788 ns 2122
+ * BM_PREPROCESSING/5/2 112594 ns 112268 ns 6105
+ * BM_PREPROCESSING/5/3 13254 ns 13212 ns 52972
+ *******************************************************************/
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <sys/stat.h>
+#include <system/audio.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+constexpr int kSampleRate = 16000;
+constexpr float kTenMilliSecVal = 0.01;
+constexpr unsigned int kStreamDelayMs = 0;
+constexpr effect_uuid_t kEffectUuids[] = {
+ // agc uuid
+ {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // aec uuid
+ {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // ns uuid
+ {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // agc2 uuid
+ {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}},
+};
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+constexpr audio_channel_mask_t kChMasks[] = {
+ AUDIO_CHANNEL_IN_MONO, AUDIO_CHANNEL_IN_STEREO, AUDIO_CHANNEL_IN_2POINT0POINT2,
+ AUDIO_CHANNEL_IN_2POINT1POINT2, AUDIO_CHANNEL_IN_6,
+};
+constexpr size_t kNumChMasks = std::size(kChMasks);
+
+// types of pre processing modules
+enum PreProcId {
+ PREPROC_AGC, // Automatic Gain Control
+ PREPROC_AEC, // Acoustic Echo Canceler
+ PREPROC_NS, // Noise Suppressor
+ PREPROC_AGC2, // Automatic Gain Control 2
+ PREPROC_NUM_EFFECTS
+};
+
+int preProcCreateEffect(effect_handle_t* pEffectHandle, uint32_t effectType,
+ effect_config_t* pConfig, int sessionId, int ioId) {
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kEffectUuids[effectType],
+ sessionId, ioId, pEffectHandle);
+ status != 0) {
+ ALOGE("Audio Preprocessing create returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ if (effectType == PREPROC_AEC) {
+ if (int status = (**pEffectHandle)
+ ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE,
+ sizeof(effect_config_t), pConfig, &replySize, &reply);
+ status != 0) {
+ ALOGE("Set config reverse command returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+ if (int status = (**pEffectHandle)
+ ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG,
+ sizeof(effect_config_t), pConfig, &replySize, &reply);
+ status != 0) {
+ ALOGE("Set config command returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ return reply;
+}
+
+int preProcSetConfigParam(effect_handle_t effectHandle, uint32_t paramType, uint32_t paramValue) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ uint32_t paramData[2] = {paramType, paramValue};
+ effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+ memcpy(&effectParam->data[0], ¶mData[0], sizeof(paramData));
+ effectParam->psize = sizeof(paramData[0]);
+ (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_SET_PARAM, sizeof(effect_param_t), effectParam,
+ &replySize, &reply);
+ free(effectParam);
+ return reply;
+}
+
+short preProcGetShortVal(float paramValue) {
+ return static_cast<short>(paramValue * std::numeric_limits<short>::max());
+}
+
+static void BM_PREPROCESSING(benchmark::State& state) {
+ const size_t chMask = kChMasks[state.range(0) - 1];
+ const size_t channelCount = audio_channel_count_from_in_mask(chMask);
+
+ PreProcId effectType = (PreProcId)state.range(1);
+
+ int32_t sessionId = 1;
+ int32_t ioId = 1;
+ effect_handle_t effectHandle = nullptr;
+ effect_config_t config{};
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+ config.inputCfg.channels = config.outputCfg.channels = chMask;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+ if (int status = preProcCreateEffect(&effectHandle, state.range(1), &config, sessionId, ioId);
+ status != 0) {
+ ALOGE("Create effect call returned error %i", status);
+ return;
+ }
+
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ if (int status =
+ (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+ status != 0) {
+ ALOGE("Command enable call returned error %d\n", reply);
+ return;
+ }
+
+ // Initialize input buffer with deterministic pseudo-random values
+ const int frameLength = (int)(kSampleRate * kTenMilliSecVal);
+ std::minstd_rand gen(chMask);
+ std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+ std::vector<short> in(frameLength * channelCount);
+ for (auto& i : in) {
+ i = preProcGetShortVal(dis(gen));
+ }
+ std::vector<short> farIn(frameLength * channelCount);
+ for (auto& i : farIn) {
+ i = preProcGetShortVal(dis(gen));
+ }
+ std::vector<short> out(frameLength * channelCount);
+
+ // Run the test
+ for (auto _ : state) {
+ benchmark::DoNotOptimize(in.data());
+ benchmark::DoNotOptimize(out.data());
+ benchmark::DoNotOptimize(farIn.data());
+
+ audio_buffer_t inBuffer = {.frameCount = (size_t)frameLength, .s16 = in.data()};
+ audio_buffer_t outBuffer = {.frameCount = (size_t)frameLength, .s16 = out.data()};
+ audio_buffer_t farInBuffer = {.frameCount = (size_t)frameLength, .s16 = farIn.data()};
+
+ if (PREPROC_AEC == effectType) {
+ if (int status =
+ preProcSetConfigParam(effectHandle, AEC_PARAM_ECHO_DELAY, kStreamDelayMs);
+ status != 0) {
+ ALOGE("preProcSetConfigParam returned Error %d\n", status);
+ return;
+ }
+ }
+ if (int status = (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+ status != 0) {
+ ALOGE("\nError: Process i = %d returned with error %d\n", (int)state.range(1), status);
+ return;
+ }
+ if (PREPROC_AEC == effectType) {
+ if (int status =
+ (*effectHandle)->process_reverse(effectHandle, &farInBuffer, &outBuffer);
+ status != 0) {
+ ALOGE("\nError: Process reverse i = %d returned with error %d\n",
+ (int)state.range(1), status);
+ return;
+ }
+ }
+ }
+ benchmark::ClobberMemory();
+
+ state.SetComplexityN(state.range(0));
+
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+ ALOGE("release_effect returned an error = %d\n", status);
+ return;
+ }
+}
+
+static void preprocessingArgs(benchmark::internal::Benchmark* b) {
+ for (int i = 1; i <= (int)kNumChMasks; i++) {
+ for (int j = 0; j < (int)kNumEffectUuids; ++j) {
+ b->Args({i, j});
+ }
+ }
+}
+
+BENCHMARK(BM_PREPROCESSING)->Apply(preprocessingArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
new file mode 100644
index 0000000..b439880
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -0,0 +1,21 @@
+// audio preprocessing unit test
+cc_test {
+ name: "AudioPreProcessingTest",
+
+ vendor: true,
+
+ relative_install_path: "soundfx",
+
+ srcs: ["PreProcessingTest.cpp"],
+
+ shared_libs: [
+ "libaudiopreprocessing",
+ "libaudioutils",
+ "liblog",
+ "libutils",
+ ],
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ ],
+}
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
new file mode 100644
index 0000000..5f223c9
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -0,0 +1,479 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/stat.h>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <log/log.h>
+
+// This is the only symbol that needs to be imported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+//------------------------------------------------------------------------------
+// local definitions
+//------------------------------------------------------------------------------
+
+// types of pre processing modules
+enum PreProcId {
+ PREPROC_AGC, // Automatic Gain Control
+ PREPROC_AGC2, // Automatic Gain Control 2
+ PREPROC_AEC, // Acoustic Echo Canceler
+ PREPROC_NS, // Noise Suppressor
+ PREPROC_NUM_EFFECTS
+};
+
+enum PreProcParams {
+ ARG_HELP = 1,
+ ARG_INPUT,
+ ARG_OUTPUT,
+ ARG_FAR,
+ ARG_FS,
+ ARG_CH_MASK,
+ ARG_AGC_TGT_LVL,
+ ARG_AGC_COMP_LVL,
+ ARG_AEC_DELAY,
+ ARG_NS_LVL,
+ ARG_AGC2_GAIN,
+ ARG_AGC2_LVL,
+ ARG_AGC2_SAT_MGN
+};
+
+struct preProcConfigParams_t {
+ int samplingFreq = 16000;
+ audio_channel_mask_t chMask = AUDIO_CHANNEL_IN_MONO;
+ int nsLevel = 0; // a value between 0-3
+ int agcTargetLevel = 3; // in dB
+ int agcCompLevel = 9; // in dB
+ float agc2Gain = 0.f; // in dB
+ float agc2SaturationMargin = 2.f; // in dB
+ int agc2Level = 0; // either kRms(0) or kPeak(1)
+ int aecDelay = 0; // in ms
+};
+
+const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
+ {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // agc uuid
+ {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}}, // agc2 uuid
+ {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // aec uuid
+ {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // ns uuid
+};
+
+constexpr audio_channel_mask_t kPreProcConfigChMask[] = {
+ AUDIO_CHANNEL_IN_MONO,
+ AUDIO_CHANNEL_IN_STEREO,
+ AUDIO_CHANNEL_IN_FRONT_BACK,
+ AUDIO_CHANNEL_IN_6,
+ AUDIO_CHANNEL_IN_2POINT0POINT2,
+ AUDIO_CHANNEL_IN_2POINT1POINT2,
+ AUDIO_CHANNEL_IN_3POINT0POINT2,
+ AUDIO_CHANNEL_IN_3POINT1POINT2,
+ AUDIO_CHANNEL_IN_5POINT1,
+ AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+ AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+ AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+};
+
+constexpr int kPreProcConfigChMaskCount = std::size(kPreProcConfigChMask);
+
+void printUsage() {
+ printf("\nUsage: ");
+ printf("\n <executable> [options]\n");
+ printf("\nwhere options are, ");
+ printf("\n --input <inputfile>");
+ printf("\n path to the input file");
+ printf("\n --output <outputfile>");
+ printf("\n path to the output file");
+ printf("\n --help");
+ printf("\n Prints this usage information");
+ printf("\n --fs <sampling_freq>");
+ printf("\n Sampling frequency in Hz, default 16000.");
+ printf("\n -ch_mask <channel_mask>\n");
+ printf("\n 0 - AUDIO_CHANNEL_IN_MONO");
+ printf("\n 1 - AUDIO_CHANNEL_IN_STEREO");
+ printf("\n 2 - AUDIO_CHANNEL_IN_FRONT_BACK");
+ printf("\n 3 - AUDIO_CHANNEL_IN_6");
+ printf("\n 4 - AUDIO_CHANNEL_IN_2POINT0POINT2");
+ printf("\n 5 - AUDIO_CHANNEL_IN_2POINT1POINT2");
+ printf("\n 6 - AUDIO_CHANNEL_IN_3POINT0POINT2");
+ printf("\n 7 - AUDIO_CHANNEL_IN_3POINT1POINT2");
+ printf("\n 8 - AUDIO_CHANNEL_IN_5POINT1");
+ printf("\n 9 - AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO");
+ printf("\n 10 - AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO ");
+ printf("\n 11 - AUDIO_CHANNEL_IN_VOICE_CALL_MONO ");
+ printf("\n default 0");
+ printf("\n --far <farend_file>");
+ printf("\n Path to far-end file needed for echo cancellation");
+ printf("\n --aec");
+ printf("\n Enable Echo Cancellation, default disabled");
+ printf("\n --ns");
+ printf("\n Enable Noise Suppression, default disabled");
+ printf("\n --agc");
+ printf("\n Enable Gain Control, default disabled");
+ printf("\n --agc2");
+ printf("\n Enable Gain Controller 2, default disabled");
+ printf("\n --ns_lvl <ns_level>");
+ printf("\n Noise Suppression level in dB, default value 0dB");
+ printf("\n --agc_tgt_lvl <target_level>");
+ printf("\n AGC Target Level in dB, default value 3dB");
+ printf("\n --agc_comp_lvl <comp_level>");
+ printf("\n AGC Comp Level in dB, default value 9dB");
+ printf("\n --agc2_gain <fixed_digital_gain>");
+ printf("\n AGC Fixed Digital Gain in dB, default value 0dB");
+ printf("\n --agc2_lvl <level_estimator>");
+ printf("\n AGC Adaptive Digital Level Estimator, default value kRms");
+ printf("\n --agc2_sat_mgn <saturation_margin>");
+ printf("\n AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
+ printf("\n --aec_delay <delay>");
+ printf("\n AEC delay value in ms, default value 0ms");
+ printf("\n");
+}
+
+constexpr float kTenMilliSecVal = 0.01;
+
+int preProcCreateEffect(effect_handle_t* pEffectHandle, uint32_t effectType,
+ effect_config_t* pConfig, int sessionId, int ioId) {
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kPreProcUuids[effectType],
+ sessionId, ioId, pEffectHandle);
+ status != 0) {
+ ALOGE("Audio Preprocessing create returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ if (effectType == PREPROC_AEC) {
+ (**pEffectHandle)
+ ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE, sizeof(effect_config_t),
+ pConfig, &replySize, &reply);
+ }
+ (**pEffectHandle)
+ ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+ &replySize, &reply);
+ return reply;
+}
+
+int preProcSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ uint32_t paramData[2] = {paramType, paramValue};
+ effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+ memcpy(&effectParam->data[0], ¶mData[0], sizeof(paramData));
+ effectParam->psize = sizeof(paramData[0]);
+ (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_SET_PARAM, sizeof(effect_param_t), effectParam,
+ &replySize, &reply);
+ free(effectParam);
+ return reply;
+}
+
+int main(int argc, const char* argv[]) {
+ if (argc == 1) {
+ printUsage();
+ return EXIT_FAILURE;
+ }
+ const char* inputFile = nullptr;
+ const char* outputFile = nullptr;
+ const char* farFile = nullptr;
+ int effectEn[PREPROC_NUM_EFFECTS] = {0};
+
+ const option long_opts[] = {
+ {"help", no_argument, nullptr, ARG_HELP},
+ {"input", required_argument, nullptr, ARG_INPUT},
+ {"output", required_argument, nullptr, ARG_OUTPUT},
+ {"far", required_argument, nullptr, ARG_FAR},
+ {"fs", required_argument, nullptr, ARG_FS},
+ {"ch_mask", required_argument, nullptr, ARG_CH_MASK},
+ {"agc_tgt_lvl", required_argument, nullptr, ARG_AGC_TGT_LVL},
+ {"agc_comp_lvl", required_argument, nullptr, ARG_AGC_COMP_LVL},
+ {"agc2_gain", required_argument, nullptr, ARG_AGC2_GAIN},
+ {"agc2_lvl", required_argument, nullptr, ARG_AGC2_LVL},
+ {"agc2_sat_mgn", required_argument, nullptr, ARG_AGC2_SAT_MGN},
+ {"aec_delay", required_argument, nullptr, ARG_AEC_DELAY},
+ {"ns_lvl", required_argument, nullptr, ARG_NS_LVL},
+ {"aec", no_argument, &effectEn[PREPROC_AEC], 1},
+ {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
+ {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
+ {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+ {nullptr, 0, nullptr, 0},
+ };
+ struct preProcConfigParams_t preProcCfgParams {};
+
+ while (true) {
+ const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
+ if (opt == -1) {
+ break;
+ }
+ switch (opt) {
+ case ARG_HELP:
+ printUsage();
+ return 0;
+ case ARG_INPUT: {
+ inputFile = (char*)optarg;
+ break;
+ }
+ case ARG_OUTPUT: {
+ outputFile = (char*)optarg;
+ break;
+ }
+ case ARG_FAR: {
+ farFile = (char*)optarg;
+ break;
+ }
+ case ARG_FS: {
+ preProcCfgParams.samplingFreq = atoi(optarg);
+ break;
+ }
+ case ARG_CH_MASK: {
+ int chMaskIdx = atoi(optarg);
+ if (chMaskIdx < 0 or chMaskIdx > kPreProcConfigChMaskCount) {
+ ALOGE("Channel Mask index not in correct range\n");
+ printUsage();
+ return EXIT_FAILURE;
+ }
+ preProcCfgParams.chMask = kPreProcConfigChMask[chMaskIdx];
+ break;
+ }
+ case ARG_AGC_TGT_LVL: {
+ preProcCfgParams.agcTargetLevel = atoi(optarg);
+ break;
+ }
+ case ARG_AGC_COMP_LVL: {
+ preProcCfgParams.agcCompLevel = atoi(optarg);
+ break;
+ }
+ case ARG_AGC2_GAIN: {
+ preProcCfgParams.agc2Gain = atof(optarg);
+ break;
+ }
+ case ARG_AGC2_LVL: {
+ preProcCfgParams.agc2Level = atoi(optarg);
+ break;
+ }
+ case ARG_AGC2_SAT_MGN: {
+ preProcCfgParams.agc2SaturationMargin = atof(optarg);
+ break;
+ }
+ case ARG_AEC_DELAY: {
+ preProcCfgParams.aecDelay = atoi(optarg);
+ break;
+ }
+ case ARG_NS_LVL: {
+ preProcCfgParams.nsLevel = atoi(optarg);
+ break;
+ }
+ default:
+ break;
+ }
+ }
+
+ if (inputFile == nullptr) {
+ ALOGE("Error: missing input file\n");
+ printUsage();
+ return EXIT_FAILURE;
+ }
+
+ std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+ if (inputFp == nullptr) {
+ ALOGE("Cannot open input file %s\n", inputFile);
+ return EXIT_FAILURE;
+ }
+
+ std::unique_ptr<FILE, decltype(&fclose)> farFp(fopen(farFile, "rb"), &fclose);
+ std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+ if (effectEn[PREPROC_AEC]) {
+ if (farFile == nullptr) {
+ ALOGE("Far end signal file required for echo cancellation \n");
+ return EXIT_FAILURE;
+ }
+ if (farFp == nullptr) {
+ ALOGE("Cannot open far end stream file %s\n", farFile);
+ return EXIT_FAILURE;
+ }
+ struct stat statInput, statFar;
+ (void)fstat(fileno(inputFp.get()), &statInput);
+ (void)fstat(fileno(farFp.get()), &statFar);
+ if (statInput.st_size != statFar.st_size) {
+ ALOGE("Near and far end signals are of different sizes");
+ return EXIT_FAILURE;
+ }
+ }
+ if (outputFile != nullptr && outputFp == nullptr) {
+ ALOGE("Cannot open output file %s\n", outputFile);
+ return EXIT_FAILURE;
+ }
+
+ int32_t sessionId = 1;
+ int32_t ioId = 1;
+ effect_handle_t effectHandle[PREPROC_NUM_EFFECTS] = {nullptr};
+ effect_config_t config;
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = preProcCfgParams.samplingFreq;
+ config.inputCfg.channels = config.outputCfg.channels = preProcCfgParams.chMask;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+ // Create all the effect handles
+ for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+ if (int status = preProcCreateEffect(&effectHandle[i], i, &config, sessionId, ioId);
+ status != 0) {
+ ALOGE("Create effect call returned error %i", status);
+ return EXIT_FAILURE;
+ }
+ }
+
+ for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+ if (effectEn[i] == 1) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ (*effectHandle[i])
+ ->command(effectHandle[i], EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+ if (reply != 0) {
+ ALOGE("Command enable call returned error %d\n", reply);
+ return EXIT_FAILURE;
+ }
+ }
+ }
+
+ // Set Config Params of the effects
+ if (effectEn[PREPROC_AGC]) {
+ if (int status = preProcSetConfigParam(AGC_PARAM_TARGET_LEVEL,
+ (uint32_t)preProcCfgParams.agcTargetLevel,
+ effectHandle[PREPROC_AGC]);
+ status != 0) {
+ ALOGE("Invalid AGC Target Level. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ if (int status = preProcSetConfigParam(AGC_PARAM_COMP_GAIN,
+ (uint32_t)preProcCfgParams.agcCompLevel,
+ effectHandle[PREPROC_AGC]);
+ status != 0) {
+ ALOGE("Invalid AGC Comp Gain. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+ if (effectEn[PREPROC_AGC2]) {
+ if (int status = preProcSetConfigParam(AGC2_PARAM_FIXED_DIGITAL_GAIN,
+ (float)preProcCfgParams.agc2Gain,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Fixed Digital Gain. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+ (uint32_t)preProcCfgParams.agc2Level,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Level Estimator. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+ (float)preProcCfgParams.agc2SaturationMargin,
+ effectHandle[PREPROC_AGC2]);
+ status != 0) {
+ ALOGE("Invalid AGC2 Saturation Margin. Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+ if (effectEn[PREPROC_NS]) {
+ if (int status = preProcSetConfigParam(NS_PARAM_LEVEL, (uint32_t)preProcCfgParams.nsLevel,
+ effectHandle[PREPROC_NS]);
+ status != 0) {
+ ALOGE("Invalid Noise Suppression level Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+
+ // Process Call
+ const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
+ const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+ const int ioFrameSize = ioChannelCount * sizeof(short);
+ int frameCounter = 0;
+ while (true) {
+ std::vector<short> in(frameLength * ioChannelCount);
+ std::vector<short> out(frameLength * ioChannelCount);
+ std::vector<short> farIn(frameLength * ioChannelCount);
+ size_t samplesRead = fread(in.data(), ioFrameSize, frameLength, inputFp.get());
+ if (samplesRead == 0) {
+ break;
+ }
+ audio_buffer_t inputBuffer, outputBuffer;
+ audio_buffer_t farInBuffer{};
+ inputBuffer.frameCount = samplesRead;
+ outputBuffer.frameCount = samplesRead;
+ inputBuffer.s16 = in.data();
+ outputBuffer.s16 = out.data();
+
+ if (farFp != nullptr) {
+ samplesRead = fread(farIn.data(), ioFrameSize, frameLength, farFp.get());
+ if (samplesRead == 0) {
+ break;
+ }
+ farInBuffer.frameCount = samplesRead;
+ farInBuffer.s16 = farIn.data();
+ }
+
+ for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+ if (effectEn[i] == 1) {
+ if (i == PREPROC_AEC) {
+ if (int status = preProcSetConfigParam(AEC_PARAM_ECHO_DELAY,
+ (uint32_t)preProcCfgParams.aecDelay,
+ effectHandle[PREPROC_AEC]);
+ status != 0) {
+ ALOGE("preProcSetConfigParam returned Error %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+ if (int status = (*effectHandle[i])
+ ->process(effectHandle[i], &inputBuffer, &outputBuffer);
+ status != 0) {
+ ALOGE("\nError: Process i = %d returned with error %d\n", i, status);
+ return EXIT_FAILURE;
+ }
+ if (i == PREPROC_AEC) {
+ if (int status = (*effectHandle[i])
+ ->process_reverse(effectHandle[i], &farInBuffer,
+ &outputBuffer);
+ status != 0) {
+ ALOGE("\nError: Process reverse i = %d returned with error %d\n", i,
+ status);
+ return EXIT_FAILURE;
+ }
+ }
+ }
+ }
+ if (outputFp != nullptr) {
+ size_t samplesWritten =
+ fwrite(out.data(), ioFrameSize, outputBuffer.frameCount, outputFp.get());
+ if (samplesWritten != outputBuffer.frameCount) {
+ ALOGE("\nError: Output file writing failed");
+ break;
+ }
+ }
+ frameCounter += frameLength;
+ }
+ // Release all the effect handles created
+ for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
+ status != 0) {
+ ALOGE("Audio Preprocessing release returned an error = %d\n", status);
+ return EXIT_FAILURE;
+ }
+ }
+ return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index 42e44f0..c010d68 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -30,7 +30,7 @@
#include <media/EffectsFactoryApi.h>
namespace android {
-// This is a dummy proxy descriptor just to return to Factory during the initial
+// This is a stub proxy descriptor just to return to Factory during the initial
// GetDescriptor call. Later in the factory, it is replaced with the
// SW sub effect descriptor
// proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b
diff --git a/media/libeffects/res/raw/sinesweepraw.raw b/media/libeffects/res/raw/sinesweepraw.raw
new file mode 100644
index 0000000..c0d48ce
--- /dev/null
+++ b/media/libeffects/res/raw/sinesweepraw.raw
Binary files differ
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 0fa0120..f68f65d 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,14 +1,18 @@
cc_library_headers {
name: "libmedia_headers",
vendor_available: true,
+ min_sdk_version: "29",
+
export_include_dirs: ["include"],
header_libs: [
+ "av-headers",
"libbase_headers",
"libgui_headers",
"libstagefright_headers",
"media_plugin_headers",
],
export_header_lib_headers: [
+ "av-headers",
"libgui_headers",
"libstagefright_headers",
"media_plugin_headers",
@@ -28,6 +32,7 @@
"//apex_available:platform",
"com.android.media",
],
+ min_sdk_version: "29",
}
filegroup {
@@ -46,20 +51,6 @@
path: "aidl",
}
-aidl_interface {
- name: "resourcemanager_aidl_interface",
- unstable: true,
- local_include_dir: "aidl",
- srcs: [
- "aidl/android/media/IResourceManagerClient.aidl",
- "aidl/android/media/IResourceManagerService.aidl",
- "aidl/android/media/MediaResourceType.aidl",
- "aidl/android/media/MediaResourceSubType.aidl",
- "aidl/android/media/MediaResourceParcel.aidl",
- "aidl/android/media/MediaResourcePolicyParcel.aidl",
- ],
-}
-
cc_library_shared {
name: "libmedia_omx",
vendor_available: true,
@@ -199,6 +190,8 @@
cc_library_static {
name: "libmedia_midiiowrapper",
+ min_sdk_version: "29",
+
srcs: ["MidiIoWrapper.cpp"],
static_libs: [
@@ -206,7 +199,7 @@
],
header_libs: [
- "libmedia_headers",
+ "libmedia_datasource_headers",
"media_ndk_headers",
],
@@ -223,6 +216,14 @@
],
cfi: true,
},
+
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
cc_library_shared {
@@ -323,6 +324,8 @@
shared_libs: [
"android.hidl.token@1.0-utils",
+ "audioclient-types-aidl-unstable-cpp",
+ "av-types-aidl-unstable-cpp",
"liblog",
"libcutils",
"libprocessgroup",
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 39caf53..7ed76d8 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -38,7 +38,8 @@
FLAGS,
SETMEDIACAS,
NAME,
- GETMETRICS
+ GETMETRICS,
+ SETENTRYPOINT
};
class BpMediaExtractor : public BpInterface<IMediaExtractor> {
@@ -142,6 +143,13 @@
}
return nm;
}
+
+ virtual status_t setEntryPoint(EntryPoint entryPoint) {
+ Parcel data, reply;
+ data.writeInterfaceToken(BpMediaExtractor::getInterfaceDescriptor());
+ data.writeInt32(static_cast<int32_t>(entryPoint));
+ return remote()->transact(SETENTRYPOINT, data, &reply);
+ }
};
IMPLEMENT_META_INTERFACE(MediaExtractor, "android.media.IMediaExtractor");
@@ -232,6 +240,16 @@
reply->writeString8(nm);
return NO_ERROR;
}
+ case SETENTRYPOINT: {
+ ALOGV("setEntryPoint");
+ CHECK_INTERFACE(IMediaExtractor, data, reply);
+ int32_t entryPoint;
+ status_t err = data.readInt32(&entryPoint);
+ if (err == OK) {
+ setEntryPoint(EntryPoint(entryPoint));
+ }
+ return err;
+ }
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c08d187..8a4b17c 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -974,7 +974,7 @@
case PREPARE_DRM: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- uint8_t uuid[16];
+ uint8_t uuid[16] = {};
data.read(uuid, sizeof(uuid));
Vector<uint8_t> drmSessionId;
readVector(data, drmSessionId);
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index bd18a40..11005c6 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -62,11 +62,13 @@
}
virtual sp<IMediaPlayer> create(
- const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId) {
+ const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId,
+ const std::string opPackageName) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(client));
data.writeInt32(audioSessionId);
+ data.writeCString(opPackageName.c_str());
remote()->transact(CREATE, data, &reply);
return interface_cast<IMediaPlayer>(reply.readStrongBinder());
@@ -127,7 +129,12 @@
sp<IMediaPlayerClient> client =
interface_cast<IMediaPlayerClient>(data.readStrongBinder());
audio_session_t audioSessionId = (audio_session_t) data.readInt32();
- sp<IMediaPlayer> player = create(client, audioSessionId);
+ const char* opPackageName = data.readCString();
+ if (opPackageName == nullptr) {
+ return FAILED_TRANSACTION;
+ }
+ std::string opPackageNameStr(opPackageName);
+ sp<IMediaPlayer> player = create(client, audioSessionId, opPackageNameStr);
reply->writeStrongBinder(IInterface::asBinder(player));
return NO_ERROR;
} break;
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 637322f..e8839ba 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -63,7 +63,7 @@
searchDirs[1] + fileName,
searchDirs[2] + fileName,
searchDirs[3] + fileName,
- "system/etc/media_profiles_V1_0.xml" // System fallback
+ "system/etc/media_profiles.xml" // System fallback
};
}();
static std::array<char const*, 5> const cPaths = {
@@ -240,7 +240,10 @@
const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createVideoCodec failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::VideoCodec *videoCodec =
new MediaProfiles::VideoCodec(static_cast<video_encoder>(codec),
@@ -262,7 +265,10 @@
!strcmp("channels", atts[6]));
const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createAudioCodec failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::AudioCodec *audioCodec =
new MediaProfiles::AudioCodec(static_cast<audio_encoder>(codec),
@@ -282,7 +288,10 @@
const size_t nMappings = sizeof(sAudioDecoderNameMap)/sizeof(sAudioDecoderNameMap[0]);
const int codec = findTagForName(sAudioDecoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createAudioDecoderCap failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::AudioDecoderCap *cap =
new MediaProfiles::AudioDecoderCap(static_cast<audio_decoder>(codec));
@@ -298,7 +307,10 @@
const size_t nMappings = sizeof(sVideoDecoderNameMap)/sizeof(sVideoDecoderNameMap[0]);
const int codec = findTagForName(sVideoDecoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createVideoDecoderCap failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::VideoDecoderCap *cap =
new MediaProfiles::VideoDecoderCap(static_cast<video_decoder>(codec));
@@ -322,7 +334,10 @@
const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createVideoEncoderCap failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::VideoEncoderCap *cap =
new MediaProfiles::VideoEncoderCap(static_cast<video_encoder>(codec),
@@ -346,7 +361,10 @@
const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
+ if (codec == -1) {
+ ALOGE("MediaProfiles::createAudioEncoderCap failed to locate codec %s", atts[1]);
+ return nullptr;
+ }
MediaProfiles::AudioEncoderCap *cap =
new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]),
@@ -386,11 +404,17 @@
const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/
sizeof(sCamcorderQualityNameMap[0]);
const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]);
- CHECK(quality != -1);
+ if (quality == -1) {
+ ALOGE("MediaProfiles::createCamcorderProfile failed to locate quality %s", atts[1]);
+ return nullptr;
+ }
const size_t nFormatMappings = sizeof(sFileFormatMap)/sizeof(sFileFormatMap[0]);
const int fileFormat = findTagForName(sFileFormatMap, nFormatMappings, atts[3]);
- CHECK(fileFormat != -1);
+ if (fileFormat == -1) {
+ ALOGE("MediaProfiles::createCamcorderProfile failed to locate file format %s", atts[3]);
+ return nullptr;
+ }
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = cameraId;
@@ -462,24 +486,39 @@
createAudioCodec(atts, profiles);
} else if (strcmp("VideoEncoderCap", name) == 0 &&
strcmp("true", atts[3]) == 0) {
- profiles->mVideoEncoders.add(createVideoEncoderCap(atts));
+ MediaProfiles::VideoEncoderCap* cap = createVideoEncoderCap(atts);
+ if (cap != nullptr) {
+ profiles->mVideoEncoders.add(cap);
+ }
} else if (strcmp("AudioEncoderCap", name) == 0 &&
strcmp("true", atts[3]) == 0) {
- profiles->mAudioEncoders.add(createAudioEncoderCap(atts));
+ MediaProfiles::AudioEncoderCap* cap = createAudioEncoderCap(atts);
+ if (cap != nullptr) {
+ profiles->mAudioEncoders.add(cap);
+ }
} else if (strcmp("VideoDecoderCap", name) == 0 &&
strcmp("true", atts[3]) == 0) {
- profiles->mVideoDecoders.add(createVideoDecoderCap(atts));
+ MediaProfiles::VideoDecoderCap* cap = createVideoDecoderCap(atts);
+ if (cap != nullptr) {
+ profiles->mVideoDecoders.add(cap);
+ }
} else if (strcmp("AudioDecoderCap", name) == 0 &&
strcmp("true", atts[3]) == 0) {
- profiles->mAudioDecoders.add(createAudioDecoderCap(atts));
+ MediaProfiles::AudioDecoderCap* cap = createAudioDecoderCap(atts);
+ if (cap != nullptr) {
+ profiles->mAudioDecoders.add(cap);
+ }
} else if (strcmp("EncoderOutputFileFormat", name) == 0) {
profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts));
} else if (strcmp("CamcorderProfiles", name) == 0) {
profiles->mCurrentCameraId = getCameraId(atts);
profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts);
} else if (strcmp("EncoderProfile", name) == 0) {
- profiles->mCamcorderProfiles.add(
- createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
+ MediaProfiles::CamcorderProfile* profile = createCamcorderProfile(
+ profiles->mCurrentCameraId, atts, profiles->mCameraIds);
+ if (profile != nullptr) {
+ profiles->mCamcorderProfiles.add(profile);
+ }
} else if (strcmp("ImageEncoding", name) == 0) {
profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
}
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index fe86d27..ec52a49 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -43,11 +43,11 @@
}
//static
-MediaResource MediaResource::CodecResource(bool secure, bool video) {
+MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
return MediaResource(
secure ? Type::kSecureCodec : Type::kNonSecureCodec,
video ? SubType::kVideoCodec : SubType::kAudioCodec,
- 1);
+ instanceCount);
}
//static
diff --git a/media/libmedia/TEST_MAPPING b/media/libmedia/TEST_MAPPING
new file mode 100644
index 0000000..65390ed
--- /dev/null
+++ b/media/libmedia/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test_mapping for frameworks/av/media/libmedia
+{
+ "presubmit": [
+ { "name": "CodecListTest" }
+ ]
+}
diff --git a/media/libmedia/aidl/android/media/IResourceManagerService.aidl b/media/libmedia/aidl/android/media/IResourceManagerService.aidl
deleted file mode 100644
index 1b2d522..0000000
--- a/media/libmedia/aidl/android/media/IResourceManagerService.aidl
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Copyright (c) 2019, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.IResourceManagerClient;
-import android.media.MediaResourceParcel;
-import android.media.MediaResourcePolicyParcel;
-
-/**
- * ResourceManagerService interface that keeps track of media resource
- * owned by clients, and reclaims resources based on configured policies
- * when necessary.
- *
- * {@hide}
- */
-interface IResourceManagerService {
- const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
- = "supports-multiple-secure-codecs";
- const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
- = "supports-secure-with-non-secure-codec";
-
- /**
- * Configure the ResourceManagerService to adopted particular policies when
- * managing the resources.
- *
- * @param policies an array of policies to be adopted.
- */
- void config(in MediaResourcePolicyParcel[] policies);
-
- /**
- * Add a client to a process with a list of resources.
- *
- * @param pid pid of the client.
- * @param uid uid of the client.
- * @param clientId an identifier that uniquely identifies the client within the pid.
- * @param client interface for the ResourceManagerService to call the client.
- * @param resources an array of resources to be added.
- */
- void addResource(
- int pid,
- int uid,
- long clientId,
- IResourceManagerClient client,
- in MediaResourceParcel[] resources);
-
- /**
- * Remove the listed resources from a client.
- *
- * @param pid pid from which the list of resources will be removed.
- * @param clientId clientId within the pid from which the list of resources will be removed.
- * @param resources an array of resources to be removed from the client.
- */
- void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
-
- /**
- * Remove all resources from a client.
- *
- * @param pid pid from which the client's resources will be removed.
- * @param clientId clientId within the pid that will be removed.
- */
- void removeClient(int pid, long clientId);
-
- /**
- * Tries to reclaim resource from processes with lower priority than the
- * calling process according to the requested resources.
- *
- * @param callingPid pid of the calling process.
- * @param resources an array of resources to be reclaimed.
- *
- * @return true if the reclaim was successful and false otherwise.
- */
- boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
-
- /**
- * Override the pid of original calling process with the pid of the process
- * who actually use the requested resources.
- *
- * @param originalPid pid of the original calling process.
- * @param newPid pid of the actual process who use the resources.
- * remove existing override on originalPid if newPid is -1.
- */
- void overridePid(int originalPid, int newPid);
-
- /**
- * Mark a client for pending removal
- *
- * @param pid pid from which the client's resources will be removed.
- * @param clientId clientId within the pid that will be removed.
- */
- void markClientForPendingRemoval(int pid, long clientId);
-}
diff --git a/media/libmedia/include/android/IMediaExtractor.h b/media/libmedia/include/android/IMediaExtractor.h
index 3e035ad..f9cafde 100644
--- a/media/libmedia/include/android/IMediaExtractor.h
+++ b/media/libmedia/include/android/IMediaExtractor.h
@@ -63,6 +63,15 @@
virtual status_t setMediaCas(const HInterfaceToken &casToken) = 0;
virtual String8 name() = 0;
+
+ enum class EntryPoint {
+ SDK = 1,
+ NDK_WITH_JVM = 2,
+ NDK_NO_JVM = 3,
+ OTHER = 4,
+ };
+
+ virtual status_t setEntryPoint(EntryPoint entryPoint) = 0;
};
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index f2e2060..a4207eb 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -28,6 +28,8 @@
#include <media/IMediaPlayerClient.h>
#include <media/IMediaMetadataRetriever.h>
+#include <string>
+
namespace android {
class IMediaPlayer;
@@ -47,7 +49,8 @@
virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName) = 0;
virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
- audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
+ audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE,
+ const std::string opPackage = "") = 0;
virtual sp<IMediaCodecList> getCodecList() const = 0;
// Connects to a remote display.
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index 4927d28..4712528 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -37,7 +37,7 @@
MediaResource(Type type, SubType subType, int64_t value);
MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
- static MediaResource CodecResource(bool secure, bool video);
+ static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
static MediaResource GraphicMemoryResource(int64_t value);
static MediaResource CpuBoostResource();
static MediaResource VideoBatteryResource();
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 138a014..fba1a30 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -73,6 +73,9 @@
METADATA_KEY_COLOR_RANGE = 37,
METADATA_KEY_SAMPLERATE = 38,
METADATA_KEY_BITS_PER_SAMPLE = 39,
+ METADATA_KEY_VIDEO_CODEC_MIME_TYPE = 40,
+ METADATA_KEY_XMP_OFFSET = 41,
+ METADATA_KEY_XMP_LENGTH = 42,
// Add more here...
};
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index d0a8e38..71c0bc5 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -33,6 +33,8 @@
#include <utils/KeyedVector.h>
#include <utils/String8.h>
+#include <string>
+
struct ANativeWindow;
namespace android {
@@ -178,7 +180,10 @@
KEY_PARAMETER_PLAYBACK_RATE_PERMILLE = 1300, // set only
// Set a Parcel containing the value of a parcelled Java AudioAttribute instance
- KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400 // set only
+ KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400, // set only
+
+ // Set a Parcel containing the values of RTP attribute
+ KEY_PARAMETER_RTP_ATTRIBUTES = 2000 // set only
};
// Keep INVOKE_ID_* in sync with MediaPlayer.java.
@@ -206,7 +211,7 @@
public virtual IMediaDeathNotifier
{
public:
- MediaPlayer();
+ MediaPlayer(const std::string opPackageName = "");
~MediaPlayer();
void died();
void disconnect();
@@ -310,6 +315,7 @@
float mSendLevel;
struct sockaddr_in mRetransmitEndpoint;
bool mRetransmitEndpointValid;
+ const std::string mOpPackageName;
};
}; // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1b89fc7..30c5006 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -41,7 +41,7 @@
using media::VolumeShaper;
-MediaPlayer::MediaPlayer()
+MediaPlayer::MediaPlayer(const std::string opPackageName) : mOpPackageName(opPackageName)
{
ALOGV("constructor");
mListener = NULL;
@@ -152,7 +152,7 @@
if (url != NULL) {
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service != 0) {
- sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+ sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(httpService, url, headers))) {
player.clear();
@@ -169,7 +169,7 @@
status_t err = UNKNOWN_ERROR;
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service != 0) {
- sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+ sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(fd, offset, length))) {
player.clear();
@@ -185,7 +185,7 @@
status_t err = UNKNOWN_ERROR;
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service != 0) {
- sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+ sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(source))) {
player.clear();
@@ -201,7 +201,7 @@
status_t err = UNKNOWN_ERROR;
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service != 0) {
- sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+ sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mOpPackageName));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
(NO_ERROR != player->setDataSource(rtpParams))) {
player.clear();
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
new file mode 100644
index 0000000..a930d6e
--- /dev/null
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "CodecListTest",
+ test_suites: ["device-tests"],
+ gtest: true,
+
+ srcs: [
+ "CodecListTest.cpp",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libmedia_codeclist",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libstagefright_xmlparser",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libmedia/tests/codeclist/CodecListTest.cpp b/media/libmedia/tests/codeclist/CodecListTest.cpp
new file mode 100644
index 0000000..bd2adf7
--- /dev/null
+++ b/media/libmedia/tests/codeclist/CodecListTest.cpp
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecListTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+
+#define kSwCodecXmlPath "/apex/com.android.media.swcodec/etc/"
+
+using namespace android;
+
+struct CddReq {
+ CddReq(const char *type, bool encoder) {
+ mediaType = type;
+ isEncoder = encoder;
+ }
+
+ const char *mediaType;
+ bool isEncoder;
+};
+
+TEST(CodecListTest, CodecListSanityTest) {
+ sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+ EXPECT_GT(list->countCodecs(), 0) << "No codecs in CodecList";
+ for (size_t i = 0; i < list->countCodecs(); ++i) {
+ sp<MediaCodecInfo> info = list->getCodecInfo(i);
+ ASSERT_NE(info, nullptr) << "CodecInfo is null";
+ ssize_t index = list->findCodecByName(info->getCodecName());
+ EXPECT_GE(index, 0) << "Wasn't able to find existing codec: " << info->getCodecName();
+ }
+}
+
+TEST(CodecListTest, CodecListByTypeTest) {
+ sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+
+ std::vector<CddReq> cddReq{
+ // media type, isEncoder
+ CddReq(MIMETYPE_AUDIO_AAC, false),
+ CddReq(MIMETYPE_AUDIO_AAC, true),
+
+ CddReq(MIMETYPE_VIDEO_AVC, false),
+ CddReq(MIMETYPE_VIDEO_HEVC, false),
+ CddReq(MIMETYPE_VIDEO_MPEG4, false),
+ CddReq(MIMETYPE_VIDEO_VP8, false),
+ CddReq(MIMETYPE_VIDEO_VP9, false),
+
+ CddReq(MIMETYPE_VIDEO_AVC, true),
+ CddReq(MIMETYPE_VIDEO_VP8, true),
+ };
+
+ for (CddReq codecReq : cddReq) {
+ ssize_t index = list->findCodecByType(codecReq.mediaType, codecReq.isEncoder);
+ EXPECT_GE(index, 0) << "Wasn't able to find codec for media type: " << codecReq.mediaType
+ << (codecReq.isEncoder ? " encoder" : " decoder");
+ }
+}
+
+TEST(CodecInfoTest, ListInfoTest) {
+ ALOGV("Compare CodecInfo with info in XML");
+ MediaCodecsXmlParser parser;
+ status_t status = parser.parseXmlFilesInSearchDirs();
+ ASSERT_EQ(status, OK) << "XML Parsing failed for default paths";
+
+ const std::vector<std::string> &xmlFiles = MediaCodecsXmlParser::getDefaultXmlNames();
+ const std::vector<std::string> &searchDirsApex{std::string(kSwCodecXmlPath)};
+ status = parser.parseXmlFilesInSearchDirs(xmlFiles, searchDirsApex);
+ ASSERT_EQ(status, OK) << "XML Parsing of " << kSwCodecXmlPath << " failed";
+
+ MediaCodecsXmlParser::CodecMap codecMap = parser.getCodecMap();
+
+ sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+ // Compare CodecMap from XML to CodecList
+ for (auto mapIter : codecMap) {
+ ssize_t index = list->findCodecByName(mapIter.first.c_str());
+ if (index < 0) {
+ std::cout << "[ WARN ] " << mapIter.first << " not found in CodecList \n";
+ continue;
+ }
+
+ sp<MediaCodecInfo> info = list->getCodecInfo(index);
+ ASSERT_NE(info, nullptr) << "CodecInfo is null";
+
+ MediaCodecsXmlParser::CodecProperties codecProperties = mapIter.second;
+ ASSERT_EQ(codecProperties.isEncoder, info->isEncoder()) << "Encoder property mismatch";
+
+ ALOGV("codec name: %s", info->getCodecName());
+ ALOGV("codec rank: %d", info->getRank());
+ ALOGV("codec ownername: %s", info->getOwnerName());
+ ALOGV("codec isEncoder: %d", info->isEncoder());
+
+ ALOGV("attributeFlags: kFlagIsHardwareAccelerated, kFlagIsSoftwareOnly, kFlagIsVendor, "
+ "kFlagIsEncoder");
+ std::bitset<4> attr(info->getAttributes());
+ ALOGV("codec attributes: %s", attr.to_string().c_str());
+
+ Vector<AString> mediaTypes;
+ info->getSupportedMediaTypes(&mediaTypes);
+ ALOGV("supported media types count: %zu", mediaTypes.size());
+ ASSERT_FALSE(mediaTypes.isEmpty())
+ << "no media type supported by codec: " << info->getCodecName();
+
+ MediaCodecsXmlParser::TypeMap typeMap = codecProperties.typeMap;
+ for (auto mediaType : mediaTypes) {
+ ALOGV("codec mediaTypes: %s", mediaType.c_str());
+ auto searchTypeMap = typeMap.find(mediaType.c_str());
+ ASSERT_NE(searchTypeMap, typeMap.end())
+ << "CodecList doesn't contain codec media type: " << mediaType.c_str();
+ MediaCodecsXmlParser::AttributeMap attributeMap = searchTypeMap->second;
+
+ const sp<MediaCodecInfo::Capabilities> &capabilities =
+ info->getCapabilitiesFor(mediaType.c_str());
+
+ Vector<uint32_t> colorFormats;
+ capabilities->getSupportedColorFormats(&colorFormats);
+ for (auto colorFormat : colorFormats) {
+ ALOGV("supported color formats: %d", colorFormat);
+ }
+
+ Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+ capabilities->getSupportedProfileLevels(&profileLevels);
+ if (!profileLevels.empty()) {
+ ALOGV("supported profilelevel for media type: %s", mediaType.c_str());
+ }
+ for (auto profileLevel : profileLevels) {
+ ALOGV("profile: %d, level: %d", profileLevel.mProfile, profileLevel.mLevel);
+ }
+
+ sp<AMessage> details = capabilities->getDetails();
+ ASSERT_NE(details, nullptr) << "Details in codec capabilities is null";
+ ALOGV("no. of entries in details: %zu", details->countEntries());
+
+ for (size_t idxDetail = 0; idxDetail < details->countEntries(); idxDetail++) {
+ AMessage::Type type;
+ const char *name = details->getEntryNameAt(idxDetail, &type);
+ ALOGV("details entry name: %s", name);
+ AMessage::ItemData itemData = details->getEntryAt(idxDetail);
+ switch (type) {
+ case AMessage::kTypeInt32:
+ int32_t val32;
+ if (itemData.find(&val32)) {
+ ALOGV("entry int val: %d", val32);
+ auto searchAttr = attributeMap.find(name);
+ if (searchAttr == attributeMap.end()) {
+ ALOGW("Parser doesn't have key: %s", name);
+ } else if (stoi(searchAttr->second) != val32) {
+ ALOGW("Values didn't match for key: %s", name);
+ ALOGV("Values act/exp: %d / %d", val32, stoi(searchAttr->second));
+ }
+ }
+ break;
+ case AMessage::kTypeString:
+ if (AString valStr; itemData.find(&valStr)) {
+ ALOGV("entry str val: %s", valStr.c_str());
+ auto searchAttr = attributeMap.find(name);
+ if (searchAttr == attributeMap.end()) {
+ ALOGW("Parser doesn't have key: %s", name);
+ } else if (searchAttr->second != valStr.c_str()) {
+ ALOGW("Values didn't match for key: %s", name);
+ ALOGV("Values act/exp: %s / %s", valStr.c_str(),
+ searchAttr->second.c_str());
+ }
+ }
+ break;
+ default:
+ ALOGV("data type: %d shouldn't be present in details", type);
+ break;
+ }
+ }
+ }
+
+ Parcel *codecInfoParcel = new Parcel();
+ ASSERT_NE(codecInfoParcel, nullptr) << "Unable to create parcel";
+
+ status_t status = info->writeToParcel(codecInfoParcel);
+ ASSERT_EQ(status, OK) << "Writing to parcel failed";
+
+ codecInfoParcel->setDataPosition(0);
+ sp<MediaCodecInfo> parcelCodecInfo = info->FromParcel(*codecInfoParcel);
+ ASSERT_NE(parcelCodecInfo, nullptr) << "CodecInfo from parcel is null";
+ delete codecInfoParcel;
+
+ EXPECT_STREQ(info->getCodecName(), parcelCodecInfo->getCodecName())
+ << "Returned codec name in info doesn't match";
+ EXPECT_EQ(info->getRank(), parcelCodecInfo->getRank())
+ << "Returned component rank in info doesn't match";
+ }
+}
+
+TEST(CodecListTest, CodecListGlobalSettingsTest) {
+ sp<IMediaCodecList> list = MediaCodecList::getInstance();
+ ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+ sp<AMessage> globalSettings = list->getGlobalSettings();
+ ASSERT_NE(globalSettings, nullptr) << "GlobalSettings AMessage is null";
+ ALOGV("global settings: %s", globalSettings->debugString(0).c_str());
+}
diff --git a/media/libmedia/xsd/vts/Android.mk b/media/libmedia/xsd/vts/Android.mk
deleted file mode 100644
index 52c3779..0000000
--- a/media/libmedia/xsd/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaProfiles
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libmedia/xsd/vts/AndroidTest.xml b/media/libmedia/xsd/vts/AndroidTest.xml
deleted file mode 100644
index e68721b..0000000
--- a/media/libmedia/xsd/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaProfiles.">
- <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
- <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
- <option name="abort-on-push-failure" value="false"/>
- <option name="push-group" value="HostDrivenTest.push"/>
- <option name="push" value="DATA/etc/media_profiles.xsd->/data/local/tmp/media_profiles.xsd"/>
- </target_preparer>
- <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
- <option name="test-module-name" value="VtsValidateMediaProfiles"/>
- <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
- <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
- <option name="binary-test-type" value="gtest"/>
- <option name="test-timeout" value="30s"/>
- </test>
-</configuration>
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index ae135af..849debf 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -1,6 +1,7 @@
cc_library_headers {
name: "libmedia_helper_headers",
vendor_available: true,
+ min_sdk_version: "29",
export_include_dirs: ["include"],
host_supported: true,
target: {
@@ -8,6 +9,12 @@
enabled: false,
},
},
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth.updatable",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
}
cc_library {
@@ -17,7 +24,11 @@
enabled: true,
},
double_loadable: true,
- srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
+ srcs: [
+ "AudioParameter.cpp",
+ "AudioValidator.cpp",
+ "TypeConverter.cpp",
+ ],
cflags: [
"-Werror",
"-Wextra",
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
new file mode 100644
index 0000000..e2fd8ae
--- /dev/null
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/AudioValidator.h>
+
+namespace android {
+
+/** Returns true if the string overflows, i.e. is not null-terminated within its buffer. */
+template <size_t size>
+bool checkStringOverflow(const char (&s)[size]) {
+ return strnlen(s, size) >= size;
+}
+
+status_t safetyNetLog(status_t status, std::string_view bugNumber) {
+ if (status != NO_ERROR && !bugNumber.empty()) {
+ android_errorWriteLog(0x534e4554, bugNumber.data()); // SafetyNet logging
+ }
+ return status;
+}
+
+status_t AudioValidator::validateAudioAttributes(
+ const audio_attributes_t& attr, std::string_view bugNumber)
+{
+ status_t status = NO_ERROR;
+ const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+ if (strnlen(attr.tags, tagsMaxSize) >= tagsMaxSize) {
+ status = BAD_VALUE;
+ }
+ return safetyNetLog(status, bugNumber);
+}
+
+status_t AudioValidator::validateEffectDescriptor(
+ const effect_descriptor_t& desc, std::string_view bugNumber)
+{
+ status_t status = NO_ERROR;
+ if (checkStringOverflow(desc.name)
+ | /* always */ checkStringOverflow(desc.implementor)) {
+ status = BAD_VALUE;
+ }
+ return safetyNetLog(status, bugNumber);
+}
+
+status_t AudioValidator::validateAudioPortConfig(
+ const struct audio_port_config& config, std::string_view bugNumber)
+{
+ status_t status = NO_ERROR;
+ if (config.type == AUDIO_PORT_TYPE_DEVICE &&
+ checkStringOverflow(config.ext.device.address)) {
+ status = BAD_VALUE;
+ }
+ return safetyNetLog(status, bugNumber);
+}
+
+namespace {
+
+template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+ || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+static status_t validateAudioPortInternal(const T& port, std::string_view bugNumber = {}) {
+ status_t status = NO_ERROR;
+ if (checkStringOverflow(port.name)) {
+ status = BAD_VALUE;
+ }
+ if (AudioValidator::validateAudioPortConfig(port.active_config) != NO_ERROR) {
+ status = BAD_VALUE;
+ }
+ if (port.type == AUDIO_PORT_TYPE_DEVICE &&
+ checkStringOverflow(port.ext.device.address)) {
+ status = BAD_VALUE;
+ }
+ return safetyNetLog(status, bugNumber);
+}
+
+} // namespace
+
+status_t AudioValidator::validateAudioPort(
+ const struct audio_port& port, std::string_view bugNumber)
+{
+ return validateAudioPortInternal(port, bugNumber);
+}
+
+status_t AudioValidator::validateAudioPort(
+ const struct audio_port_v7& port, std::string_view bugNumber)
+{
+ return validateAudioPortInternal(port, bugNumber);
+}
+
+/** Returns BAD_VALUE if the patch contains invalid port counts or port configs. */
+status_t AudioValidator::validateAudioPatch(
+ const struct audio_patch& patch, std::string_view bugNumber)
+{
+ status_t status = NO_ERROR;
+ if (patch.num_sources > AUDIO_PATCH_PORTS_MAX) {
+ status = BAD_VALUE;
+ }
+ if (patch.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+ status = BAD_VALUE;
+ }
+    for (size_t i = 0; i < patch.num_sources && i < AUDIO_PATCH_PORTS_MAX; i++) {
+ if (validateAudioPortConfig(patch.sources[i]) != NO_ERROR) {
+ status = BAD_VALUE;
+ }
+ }
+    for (size_t i = 0; i < patch.num_sinks && i < AUDIO_PATCH_PORTS_MAX; i++) {
+ if (validateAudioPortConfig(patch.sinks[i]) != NO_ERROR) {
+ status = BAD_VALUE;
+ }
+ }
+ return safetyNetLog(status, bugNumber);
+}
+
+}; // namespace android
diff --git a/media/libmediahelper/TEST_MAPPING b/media/libmediahelper/TEST_MAPPING
new file mode 100644
index 0000000..f9594bd
--- /dev/null
+++ b/media/libmediahelper/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "libmedia_helper_tests"
+ }
+ ]
+}
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index 6382ce4..d3a517f 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -18,307 +18,9 @@
namespace android {
-#define MAKE_STRING_FROM_ENUM(string) { #string, string }
+#define MAKE_STRING_FROM_ENUM(enumval) { #enumval, enumval }
#define TERMINATOR { .literal = nullptr }
-template <>
-const OutputDeviceConverter::Table OutputDeviceConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_USB),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_PROXY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HEARING_AID),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ECHO_CANCELLER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DEFAULT),
- // STUB must be after DEFAULT, so the latter is picked up by toString first.
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
- TERMINATOR
-};
-
-template <>
-const InputDeviceConverter::Table InputDeviceConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI_ARC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_USB),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_PROXY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_BLE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ECHO_REFERENCE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DEFAULT),
- // STUB must be after DEFAULT, so the latter is picked up by toString first.
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
- TERMINATOR
-};
-
-
-template <>
-const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT_PCM),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_VOIP_RX),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_INCALL_MUSIC),
- TERMINATOR
-};
-
-
-template <>
-const InputFlagConverter::Table InputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_MMAP_NOIRQ),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_VOIP_TX),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_AV_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_DIRECT),
- TERMINATOR
-};
-
-
-template <>
-const FormatConverter::Table FormatConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_NB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_XHE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_MAIN),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SSR),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LTP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SCALABLE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ERLC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ELD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_XHE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCWB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCNW),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADIF),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA_PRO),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB_PLUS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_QCELP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DSD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_FLAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_ALAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_SBC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_HD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC4),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LDAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3_JOC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_1_0),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_0),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_CELT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_TWSP),
- TERMINATOR
-};
-
-
-template <>
-const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_SIDE),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_SURROUND),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_PENTA),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_SIDE),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT4),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_6POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT4),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_AB),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB),
- TERMINATOR
-};
-
-
-template <>
-const InputChannelConverter::Table InputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_6),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_5POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_CALL_MONO),
- TERMINATOR
-};
-
-template <>
-const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
- {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
- {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
- {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
- {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
- {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
- {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
- {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
- {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
- TERMINATOR
-};
-
-
-template <>
-const GainModeConverter::Table GainModeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
- TERMINATOR
-};
-
-
-template <>
-const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DEFAULT),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO ),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_CALL_ASSISTANT),
- TERMINATOR
-};
-
template<>
const AudioModeConverter::Table AudioModeConverter::mTable[] = {
MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID),
@@ -331,62 +33,6 @@
TERMINATOR
};
-template<>
-const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_UNKNOWN),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SPEECH),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MUSIC),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MOVIE),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SONIFICATION),
- TERMINATOR
-};
-
-template <>
-const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MEDIA),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ALARM),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_EVENT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_GAME),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
- TERMINATOR
-};
-
-template <>
-const SourceTypeConverter::Table SourceTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_DEFAULT),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_UPLINK),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_DOWNLINK),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CAMCORDER),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_RECOGNITION),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_UNPROCESSED),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_PERFORMANCE),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_ECHO_REFERENCE),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_FM_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_HOTWORD),
- TERMINATOR
-};
-
template <>
const AudioFlagConverter::Table AudioFlagConverter::mTable[] = {
MAKE_STRING_FROM_ENUM(AUDIO_FLAG_NONE),
@@ -409,6 +55,7 @@
template class TypeConverter<OutputDeviceTraits>;
template class TypeConverter<InputDeviceTraits>;
+template class TypeConverter<DeviceTraits>;
template class TypeConverter<OutputFlagTraits>;
template class TypeConverter<InputFlagTraits>;
template class TypeConverter<FormatTraits>;
@@ -422,11 +69,6 @@
template class TypeConverter<SourceTraits>;
template class TypeConverter<AudioFlagTraits>;
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
- return InputDeviceConverter::fromString(literalDevice, device) ||
- OutputDeviceConverter::fromString(literalDevice, device);
-}
-
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del)
{
@@ -446,21 +88,20 @@
audio_format_t formatFromString(const std::string &literalFormat, audio_format_t defaultFormat)
{
audio_format_t format;
- if (literalFormat.empty()) {
- return defaultFormat;
+ if (!literalFormat.empty() && FormatConverter::fromString(literalFormat, format)) {
+ return format;
}
- FormatConverter::fromString(literalFormat, format);
- return format;
+ return defaultFormat;
}
audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
{
audio_channel_mask_t channels;
- if (!OutputChannelConverter::fromString(literalChannels, channels) &&
- !InputChannelConverter::fromString(literalChannels, channels)) {
- return AUDIO_CHANNEL_INVALID;
+ if (!literalChannels.empty() &&
+ audio_channel_mask_from_string(literalChannels.c_str(), &channels)) {
+ return channels;
}
- return channels;
+ return AUDIO_CHANNEL_INVALID;
}
ChannelTraits::Collection channelMasksFromString(
diff --git a/media/libmediahelper/include/media/AudioValidator.h b/media/libmediahelper/include/media/AudioValidator.h
new file mode 100644
index 0000000..008868e
--- /dev/null
+++ b/media/libmediahelper/include/media/AudioValidator.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_VALIDATOR_H_
+#define ANDROID_AUDIO_VALIDATOR_H_
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+
+#include <string_view>
+
+namespace android {
+
+/**
+ * AudioValidator is a class to validate audio data in binder call. NO_ERROR will be returned only
+ * when there is no error with the data.
+ */
+class AudioValidator {
+public:
+ /**
+ * Return NO_ERROR only when there is no error with the given audio attributes.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateAudioAttributes(
+ const audio_attributes_t& attr, std::string_view bugNumber = {});
+
+ /**
+ * Return NO_ERROR only when there is no error with the given effect descriptor.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateEffectDescriptor(
+ const effect_descriptor_t& desc, std::string_view bugNumber = {});
+
+ /**
+ * Return NO_ERROR only when there is no error with the given audio port config.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateAudioPortConfig(
+ const struct audio_port_config& config, std::string_view bugNumber = {});
+
+ /**
+ * Return NO_ERROR only when there is no error with the given audio port.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateAudioPort(
+ const struct audio_port& port, std::string_view bugNumber = {});
+
+ /**
+ * Return NO_ERROR only when there is no error with the given audio_port_v7.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateAudioPort(
+            const struct audio_port_v7& port, std::string_view bugNumber = {});
+
+ /**
+ * Return NO_ERROR only when there is no error with the given audio patch.
+ * Otherwise, return BAD_VALUE.
+ */
+ static status_t validateAudioPatch(
+ const struct audio_patch& patch, std::string_view bugNumber = {});
+};
+
+}; // namespace android
+
+#endif /*ANDROID_AUDIO_VALIDATOR_H_*/
diff --git a/media/libmediahelper/include/media/TypeConverter.h b/media/libmediahelper/include/media/TypeConverter.h
index 011498a..42ccb5f 100644
--- a/media/libmediahelper/include/media/TypeConverter.h
+++ b/media/libmediahelper/include/media/TypeConverter.h
@@ -24,8 +24,6 @@
#include <system/audio.h>
#include <utils/Log.h>
-#include <utils/Vector.h>
-#include <utils/SortedVector.h>
#include <media/AudioParameter.h>
#include "convert.h"
@@ -43,16 +41,6 @@
}
};
template <typename T>
-struct SortedVectorTraits
-{
- typedef T Type;
- typedef SortedVector<Type> Collection;
- static void add(Collection &collection, Type value)
- {
- collection.add(value);
- }
-};
-template <typename T>
struct SetTraits
{
typedef T Type;
@@ -108,13 +96,20 @@
typename Traits::Collection &collection,
const char *del = AudioParameter::valueListSeparator);
- static uint32_t maskFromString(
+ static typename Traits::Type maskFromString(
const std::string &str, const char *del = AudioParameter::valueListSeparator);
static void maskToString(
- uint32_t mask, std::string &str, const char *del = AudioParameter::valueListSeparator);
+ typename Traits::Type mask, std::string &str,
+ const char *del = AudioParameter::valueListSeparator);
protected:
+ // Default implementations use mTable for to/from string conversions
+ // of each individual enum value.
+ // These functions may be specialized to use external converters instead.
+ static bool toStringImpl(const typename Traits::Type &value, std::string &str);
+ static bool fromStringImpl(const std::string &str, typename Traits::Type &result);
+
struct Table {
const char *literal;
typename Traits::Type value;
@@ -124,26 +119,22 @@
};
template <class Traits>
-inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
-{
+inline bool TypeConverter<Traits>::toStringImpl(
+ const typename Traits::Type &value, std::string &str) {
for (size_t i = 0; mTable[i].literal; i++) {
if (mTable[i].value == value) {
str = mTable[i].literal;
return true;
}
}
- char result[64];
- snprintf(result, sizeof(result), "Unknown enum value %d", value);
- str = result;
return false;
}
template <class Traits>
-inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
-{
+inline bool TypeConverter<Traits>::fromStringImpl(
+ const std::string &str, typename Traits::Type &result) {
for (size_t i = 0; mTable[i].literal; i++) {
if (strcmp(mTable[i].literal, str.c_str()) == 0) {
- ALOGV("stringToEnum() found %s", mTable[i].literal);
result = mTable[i].value;
return true;
}
@@ -152,6 +143,26 @@
}
template <class Traits>
+inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
+{
+ const bool success = toStringImpl(value, str);
+ if (!success) {
+ char result[64];
+ snprintf(result, sizeof(result), "Unknown enum value %d", value);
+ str = result;
+ }
+ return success;
+}
+
+template <class Traits>
+inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
+{
+ const bool success = fromStringImpl(str, result);
+ ALOGV_IF(success, "stringToEnum() found %s", str.c_str());
+ return success;
+}
+
+template <class Traits>
inline void TypeConverter<Traits>::collectionFromString(const std::string &str,
typename Traits::Collection &collection,
const char *del)
@@ -168,7 +179,8 @@
}
template <class Traits>
-inline uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
+inline typename Traits::Type TypeConverter<Traits>::maskFromString(
+ const std::string &str, const char *del)
{
char *literal = strdup(str.c_str());
uint32_t value = 0;
@@ -179,20 +191,24 @@
}
}
free(literal);
- return value;
+ return static_cast<typename Traits::Type>(value);
}
template <class Traits>
-inline void TypeConverter<Traits>::maskToString(uint32_t mask, std::string &str, const char *del)
+inline void TypeConverter<Traits>::maskToString(
+ typename Traits::Type mask, std::string &str, const char *del)
{
if (mask != 0) {
bool first_flag = true;
- for (size_t i = 0; mTable[i].literal; i++) {
- uint32_t value = static_cast<uint32_t>(mTable[i].value);
- if (mTable[i].value != 0 && ((mask & value) == value)) {
- if (!first_flag) str += del;
- first_flag = false;
- str += mTable[i].literal;
+ for (size_t bit = 0; bit < sizeof(uint32_t) * 8; ++bit) {
+ uint32_t flag = 1u << bit;
+ if ((flag & mask) == flag) {
+ std::string flag_str;
+ if (toString(static_cast<typename Traits::Type>(flag), flag_str)) {
+ if (!first_flag) str += del;
+ first_flag = false;
+ str += flag_str;
+ }
}
}
} else {
@@ -200,6 +216,7 @@
}
}
+typedef TypeConverter<DeviceTraits> DeviceConverter;
typedef TypeConverter<OutputDeviceTraits> OutputDeviceConverter;
typedef TypeConverter<InputDeviceTraits> InputDeviceConverter;
typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
@@ -216,23 +233,227 @@
typedef TypeConverter<SourceTraits> SourceTypeConverter;
typedef TypeConverter<AudioFlagTraits> AudioFlagConverter;
-template<> const OutputDeviceConverter::Table OutputDeviceConverter::mTable[];
-template<> const InputDeviceConverter::Table InputDeviceConverter::mTable[];
-template<> const OutputFlagConverter::Table OutputFlagConverter::mTable[];
-template<> const InputFlagConverter::Table InputFlagConverter::mTable[];
-template<> const FormatConverter::Table FormatConverter::mTable[];
-template<> const OutputChannelConverter::Table OutputChannelConverter::mTable[];
-template<> const InputChannelConverter::Table InputChannelConverter::mTable[];
-template<> const ChannelIndexConverter::Table ChannelIndexConverter::mTable[];
-template<> const GainModeConverter::Table GainModeConverter::mTable[];
-template<> const StreamTypeConverter::Table StreamTypeConverter::mTable[];
template<> const AudioModeConverter::Table AudioModeConverter::mTable[];
-template<> const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[];
-template<> const UsageTypeConverter::Table UsageTypeConverter::mTable[];
-template<> const SourceTypeConverter::Table SourceTypeConverter::mTable[];
template<> const AudioFlagConverter::Table AudioFlagConverter::mTable[];
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
+template <>
+inline bool TypeConverter<DeviceTraits>::toStringImpl(
+ const DeviceTraits::Type &value, std::string &str) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<DeviceTraits>::fromStringImpl(
+ const std::string &str, DeviceTraits::Type &result) {
+ return audio_device_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::toStringImpl(
+ const OutputDeviceTraits::Type &value, std::string &str) {
+ if (audio_is_output_device(value)) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::fromStringImpl(
+ const std::string &str, OutputDeviceTraits::Type &result) {
+ OutputDeviceTraits::Type temp;
+ if (audio_device_from_string(str.c_str(), &temp) &&
+ audio_is_output_device(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::toStringImpl(
+ const InputDeviceTraits::Type &value, std::string &str) {
+ if (audio_is_input_device(value)) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::fromStringImpl(
+ const std::string &str, InputDeviceTraits::Type &result) {
+ InputDeviceTraits::Type temp;
+ if (audio_device_from_string(str.c_str(), &temp) &&
+ audio_is_input_device(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::toStringImpl(
+ const audio_input_flags_t &value, std::string &str) {
+ str = audio_input_flag_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::fromStringImpl(
+ const std::string &str, audio_input_flags_t &result) {
+ return audio_input_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::toStringImpl(
+ const audio_output_flags_t &value, std::string &str) {
+ str = audio_output_flag_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::fromStringImpl(
+ const std::string &str, audio_output_flags_t &result) {
+ return audio_output_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::toStringImpl(
+ const audio_format_t &value, std::string &str) {
+ str = audio_format_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::fromStringImpl(
+ const std::string &str, audio_format_t &result) {
+ return audio_format_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_out_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ OutputChannelTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_is_output_channel(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_in_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ InputChannelTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_is_input_channel(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_index_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ ChannelIndexTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_channel_mask_get_representation(temp) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::toStringImpl(
+ const audio_stream_type_t &value, std::string &str) {
+ str = audio_stream_type_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::fromStringImpl(
+ const std::string &str, audio_stream_type_t &result)
+{
+ return audio_stream_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::toStringImpl(
+ const audio_gain_mode_t &value, std::string &str) {
+ str = audio_gain_mode_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::fromStringImpl(
+ const std::string &str, audio_gain_mode_t &result) {
+ return audio_gain_mode_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::toStringImpl(
+ const audio_content_type_t &value, std::string &str) {
+ str = audio_content_type_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::fromStringImpl(
+ const std::string &str, audio_content_type_t &result) {
+ return audio_content_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::toStringImpl(const audio_usage_t &value, std::string &str)
+{
+ str = audio_usage_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::fromStringImpl(
+ const std::string &str, audio_usage_t &result) {
+ return audio_usage_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::toStringImpl(const audio_source_t &value, std::string &str)
+{
+ str = audio_source_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::fromStringImpl(
+ const std::string &str, audio_source_t &result) {
+ return audio_source_from_string(str.c_str(), &result);
+}
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
@@ -256,6 +477,7 @@
// counting enumerations
template <typename T, std::enable_if_t<std::is_same<T, audio_content_type_t>::value
+ || std::is_same<T, audio_devices_t>::value
|| std::is_same<T, audio_mode_t>::value
|| std::is_same<T, audio_source_t>::value
|| std::is_same<T, audio_stream_type_t>::value
@@ -282,17 +504,6 @@
return result;
}
-static inline std::string toString(const audio_devices_t& devices)
-{
- std::string result;
- if ((devices & AUDIO_DEVICE_BIT_IN) != 0) {
- InputDeviceConverter::maskToString(devices, result);
- } else {
- OutputDeviceConverter::maskToString(devices, result);
- }
- return result;
-}
-
static inline std::string toString(const audio_attributes_t& attributes)
{
std::ostringstream result;
diff --git a/media/libmediahelper/tests/Android.bp b/media/libmediahelper/tests/Android.bp
new file mode 100644
index 0000000..c5ba122
--- /dev/null
+++ b/media/libmediahelper/tests/Android.bp
@@ -0,0 +1,22 @@
+cc_test {
+ name: "libmedia_helper_tests",
+
+ generated_headers: ["audio_policy_configuration_V7_0"],
+ generated_sources: ["audio_policy_configuration_V7_0"],
+ header_libs: ["libxsdc-utils"],
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libmedia_helper",
+ "libxml2",
+ ],
+
+ srcs: ["typeconverter_tests.cpp"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/media/libmediahelper/tests/typeconverter_tests.cpp b/media/libmediahelper/tests/typeconverter_tests.cpp
new file mode 100644
index 0000000..d7bfb89
--- /dev/null
+++ b/media/libmediahelper/tests/typeconverter_tests.cpp
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "TypeConverter_Test"
+#include <log/log.h>
+
+#include <android_audio_policy_configuration_V7_0.h>
+#include <media/TypeConverter.h>
+#include <system/audio.h>
+#include <xsdc/XsdcSupport.h>
+
+using namespace android;
+namespace xsd {
+using namespace android::audio::policy::configuration::V7_0;
+}
+
+TEST(TypeConverter, ParseChannelMasks) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_channel_mask_t channelMask = channelMaskFromString(stringVal);
+ EXPECT_EQ(enumVal != xsd::AudioChannelMask::AUDIO_CHANNEL_NONE,
+ audio_channel_mask_is_valid(channelMask))
+ << "Validity of \"" << stringVal << "\" is not as expected";
+ }
+}
+
+TEST(TypeConverter, ParseInputOutputIndexChannelMask) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_channel_mask_t channelMask, channelMaskBack;
+ std::string stringValBack;
+ if (stringVal.find("_CHANNEL_IN_") != std::string::npos) {
+ EXPECT_TRUE(InputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as input channel mask)";
+ EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of input channel mask " << channelMask << " failed";
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ EXPECT_TRUE(InputChannelConverter::fromString(stringValBack, channelMaskBack))
+ << "Conversion of \"" << stringValBack << "\" failed (as input channel mask)";
+ EXPECT_EQ(channelMask, channelMaskBack);
+ } else if (stringVal.find("_CHANNEL_OUT_") != std::string::npos) {
+ EXPECT_TRUE(OutputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as output channel mask)";
+ EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of output channel mask " << channelMask << " failed";
+ EXPECT_TRUE(OutputChannelConverter::fromString(stringValBack, channelMaskBack))
+ << "Conversion of \"" << stringValBack << "\" failed (as output channel mask)";
+ EXPECT_EQ(channelMask, channelMaskBack);
+ } else if (stringVal.find("_CHANNEL_INDEX_") != std::string::npos) {
+ EXPECT_TRUE(ChannelIndexConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as indexed channel mask)";
+ EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+ << "Conversion of indexed channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ } else if (stringVal == toString(xsd::AudioChannelMask::AUDIO_CHANNEL_NONE)) {
+ EXPECT_FALSE(InputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as input channel mask)";
+ EXPECT_FALSE(OutputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as output channel mask)";
+ EXPECT_FALSE(ChannelIndexConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as index channel mask)";
+ // None of the converters could parse this because 'NONE' isn't a 'valid' channel mask.
+ channelMask = AUDIO_CHANNEL_NONE;
+ // However they all must succeed in converting it back.
+ EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of input channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of output channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+ << "Conversion of indexed channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ } else {
+ FAIL() << "Unrecognized channel mask \"" << stringVal << "\"";
+ }
+ }
+}
+
+TEST(TypeConverter, ParseContentTypes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioContentType>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_content_type_t contentType;
+ EXPECT_TRUE(AudioContentTypeConverter::fromString(stringVal, contentType))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(contentType));
+ }
+}
+
+TEST(TypeConverter, ParseDevices) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_devices_t device, deviceBack;
+ std::string stringValBack;
+ EXPECT_TRUE(DeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed";
+ if (enumVal != xsd::AudioDevice::AUDIO_DEVICE_NONE) {
+ EXPECT_TRUE(audio_is_input_device(device) || audio_is_output_device(device))
+ << "Device \"" << stringVal << "\" is neither input, nor output device";
+ } else {
+ EXPECT_FALSE(audio_is_input_device(device));
+ EXPECT_FALSE(audio_is_output_device(device));
+ }
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ stringValBack = toString(device);
+ EXPECT_TRUE(DeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ }
+}
+
+TEST(TypeConverter, ParseInOutDevices) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_devices_t device, deviceBack;
+ std::string stringValBack;
+ if (stringVal.find("_DEVICE_IN_") != std::string::npos) {
+ EXPECT_TRUE(InputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed (as input device)";
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ stringValBack = toString(device);
+ EXPECT_TRUE(InputDeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ } else if (stringVal.find("_DEVICE_OUT_") != std::string::npos) {
+ EXPECT_TRUE(OutputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed (as output device)";
+ stringValBack = toString(device);
+ EXPECT_TRUE(OutputDeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ } else if (stringVal == toString(xsd::AudioDevice::AUDIO_DEVICE_NONE)) {
+ EXPECT_FALSE(InputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" succeeded (as input device)";
+ EXPECT_FALSE(OutputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" succeeded (as output device)";
+ EXPECT_EQ(stringVal, toString(device));
+ } else {
+ FAIL() << "Unrecognized audio device \"" << stringVal << "\"";
+ }
+ }
+}
+
+TEST(TypeConverter, ParseInOutFlags) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioInOutFlag>{}) {
+ const std::string stringVal = toString(enumVal);
+ if (stringVal.find("_INPUT_FLAG_") != std::string::npos) {
+ audio_input_flags_t flag;
+ EXPECT_TRUE(InputFlagConverter::fromString(stringVal, flag))
+ << "Conversion of \"" << stringVal << "\" failed (as input flag)";
+ EXPECT_EQ(stringVal, toString(flag));
+ } else {
+ audio_output_flags_t flag;
+ EXPECT_TRUE(OutputFlagConverter::fromString(stringVal, flag))
+ << "Conversion of \"" << stringVal << "\" failed (as output flag)";
+ EXPECT_EQ(stringVal, toString(flag));
+ }
+ }
+}
+
+TEST(TypeConverter, ParseFormats) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_format_t format;
+ EXPECT_TRUE(FormatConverter::fromString(stringVal, format))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_TRUE(audio_is_valid_format(format))
+ << "Converted format \"" << stringVal << "\" is invalid";
+ EXPECT_EQ(stringVal, toString(format));
+ }
+}
+
+TEST(TypeConverter, ParseGainModes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioGainMode>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_gain_mode_t gainMode;
+ EXPECT_TRUE(GainModeConverter::fromString(stringVal, gainMode))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(gainMode));
+ }
+}
+
+TEST(TypeConverter, ParseSources) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_source_t source;
+ EXPECT_TRUE(SourceTypeConverter::fromString(stringVal, source))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(source != AUDIO_SOURCE_DEFAULT, audio_is_valid_audio_source(source))
+ << "Validity of \"" << stringVal << "\" is not as expected";
+ EXPECT_EQ(stringVal, toString(source));
+ }
+}
+
+TEST(TypeConverter, ParseStreamTypes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioStreamType>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_stream_type_t streamType;
+ EXPECT_TRUE(StreamTypeConverter::fromString(stringVal, streamType))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(streamType));
+ }
+}
+
+TEST(TypeConverter, ParseUsages) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioUsage>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_usage_t usage;
+ EXPECT_TRUE(UsageTypeConverter::fromString(stringVal, usage))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(usage));
+ }
+}
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 03068c7..c2e1dc9 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -3,7 +3,7 @@
export_include_dirs: ["include"],
}
-cc_library_shared {
+cc_library {
name: "libmediametrics",
srcs: [
@@ -53,6 +53,7 @@
visibility: [
"//cts/tests/tests/nativemedia/mediametrics",
"//frameworks/av:__subpackages__",
+ "//frameworks/base/apex/media/framework",
"//frameworks/base/core/jni",
"//frameworks/base/media/jni",
],
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index b916a78..84388c9 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -147,6 +147,7 @@
#define AMEDIAMETRICS_PROP_THREADID "threadId" // int32 value io handle
#define AMEDIAMETRICS_PROP_THROTTLEMS "throttleMs" // double
#define AMEDIAMETRICS_PROP_TRACKID "trackId" // int32 port id of track/record
+#define AMEDIAMETRICS_PROP_TRAITS "traits" // string
#define AMEDIAMETRICS_PROP_TYPE "type" // string (thread type)
#define AMEDIAMETRICS_PROP_UNDERRUN "underrun" // int32
#define AMEDIAMETRICS_PROP_UNDERRUNFRAMES "underrunFrames" // int64_t from Thread
@@ -175,10 +176,12 @@
#define AMEDIAMETRICS_PROP_EVENT_VALUE_OPEN "open"
#define AMEDIAMETRICS_PROP_EVENT_VALUE_PAUSE "pause" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_READPARAMETERS "readParameters" // Thread
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_RELEASE "release"
#define AMEDIAMETRICS_PROP_EVENT_VALUE_RESTORE "restore"
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE "setMode" // AudioFlinger
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE "setBufferSize" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
-#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME "setVoiceVolume" // AudioFlinger
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME "setVoiceVolume" // AudioFlinger
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME "setVolume" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_START "start" // AudioTrack, AudioRecord
#define AMEDIAMETRICS_PROP_EVENT_VALUE_STOP "stop" // AudioTrack, AudioRecord
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 324f4ae..b62317a 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -15,6 +15,7 @@
shared_libs: [
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
+ "av-types-aidl-unstable-cpp",
"libbase",
"libandroid_net",
"libaudioclient",
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 555f459..4d90d98 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -480,14 +480,14 @@
}
sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
- audio_session_t audioSessionId)
+ audio_session_t audioSessionId, std::string opPackageName)
{
pid_t pid = IPCThreadState::self()->getCallingPid();
int32_t connId = android_atomic_inc(&mNextConnId);
sp<Client> c = new Client(
this, pid, connId, client, audioSessionId,
- IPCThreadState::self()->getCallingUid());
+ IPCThreadState::self()->getCallingUid(), opPackageName);
ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, pid,
IPCThreadState::self()->getCallingUid());
@@ -733,7 +733,8 @@
MediaPlayerService::Client::Client(
const sp<MediaPlayerService>& service, pid_t pid,
int32_t connId, const sp<IMediaPlayerClient>& client,
- audio_session_t audioSessionId, uid_t uid)
+ audio_session_t audioSessionId, uid_t uid, const std::string& opPackageName)
+ : mOpPackageName(opPackageName)
{
ALOGV("Client(%d) constructor", connId);
mPid = pid;
@@ -922,7 +923,7 @@
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
- mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
+ mPid, mAudioAttributes, mAudioDeviceUpdatedListener, mOpPackageName);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
@@ -1772,7 +1773,8 @@
#undef LOG_TAG
#define LOG_TAG "AudioSink"
MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
- const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
+ const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
+ const std::string& opPackageName)
: mCallback(NULL),
mCallbackCookie(NULL),
mCallbackData(NULL),
@@ -1793,7 +1795,8 @@
mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
mDeviceCallbackEnabled(false),
- mDeviceCallback(deviceCallback)
+ mDeviceCallback(deviceCallback),
+ mOpPackageName(opPackageName)
{
ALOGV("AudioOutput(%d)", sessionId);
if (attr != NULL) {
@@ -2187,7 +2190,8 @@
mAttributes,
doNotReconnect,
1.0f, // default value for maxRequiredSpeed
- mSelectedDeviceId);
+ mSelectedDeviceId,
+ mOpPackageName);
} else {
// TODO: Due to buffer memory concerns, we use a max target playback speed
// based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
@@ -2215,7 +2219,8 @@
mAttributes,
doNotReconnect,
targetSpeed,
- mSelectedDeviceId);
+ mSelectedDeviceId,
+ mOpPackageName);
}
// Set caller name so it can be logged in destructor.
// MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 3d596a5..b2f1b9b 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -19,6 +19,7 @@
#define ANDROID_MEDIAPLAYERSERVICE_H
#include <arpa/inet.h>
+#include <string>
#include <utils/threads.h>
#include <utils/Errors.h>
@@ -81,7 +82,8 @@
uid_t uid,
int pid,
const audio_attributes_t * attr,
- const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
+ const sp<AudioSystem::AudioDeviceCallback>& deviceCallback,
+ const std::string& opPackageName);
virtual ~AudioOutput();
virtual bool ready() const { return mTrack != 0; }
@@ -178,6 +180,7 @@
bool mDeviceCallbackEnabled;
wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
mutable Mutex mLock;
+ const std::string mOpPackageName;
// static variables below not protected by mutex
static bool mIsOnEmulator;
@@ -235,7 +238,8 @@
virtual sp<IMediaMetadataRetriever> createMetadataRetriever();
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
- audio_session_t audioSessionId);
+ audio_session_t audioSessionId,
+ const std::string opPackageName);
virtual sp<IMediaCodecList> getCodecList() const;
@@ -411,7 +415,8 @@
int32_t connId,
const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId,
- uid_t uid);
+ uid_t uid,
+ const std::string& opPackageName);
Client();
virtual ~Client();
@@ -468,6 +473,7 @@
bool mRetransmitEndpointValid;
sp<Client> mNextClient;
sp<MediaPlayerBase::Listener> mListener;
+ const std::string mOpPackageName;
// Metadata filters.
media::Metadata::Filter mMetadataAllow; // protected by mLock
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 9b1974b..89c7032 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -28,6 +28,7 @@
#include <binder/IServiceManager.h>
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>
+#include <camera/CameraUtils.h>
#include <codec2/hidl/client.h>
#include <cutils/atomic.h>
#include <cutils/properties.h> // for property_get
@@ -126,7 +127,8 @@
pid_t pid = IPCThreadState::self()->getCallingPid();
uid_t uid = IPCThreadState::self()->getCallingUid();
- if ((as == AUDIO_SOURCE_FM_TUNER && !captureAudioOutputAllowed(pid, uid))
+ if ((as == AUDIO_SOURCE_FM_TUNER
+ && !(captureAudioOutputAllowed(pid, uid) || captureTunerAudioInputAllowed(pid, uid)))
|| !recordingAllowed(String16(""), pid, uid)) {
return PERMISSION_DENIED;
}
@@ -423,30 +425,35 @@
sp<IServiceManager> sm = defaultServiceManager();
- // WORKAROUND: We don't know if camera exists here and getService might block for 5 seconds.
- // Use checkService for camera if we don't know it exists.
- static std::atomic<bool> sCameraChecked(false); // once true never becomes false.
- static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
- sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
- ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
- // If the device does not have a camera, do not create a death listener for it.
- if (binder != NULL) {
- sCameraVerified = true;
- mDeathNotifiers.emplace_back(
- binder, [l = wp<IMediaRecorderClient>(listener)](){
- sp<IMediaRecorderClient> listener = l.promote();
- if (listener) {
- ALOGV("media.camera service died. "
- "Sending death notification.");
- listener->notify(
- MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
- MediaPlayerService::CAMERA_PROCESS_DEATH);
- } else {
- ALOGW("media.camera service died without a death handler.");
- }
- });
+ static const bool sCameraDisabled = CameraUtils::isCameraServiceDisabled();
+
+ if (!sCameraDisabled) {
+ // WORKAROUND: We don't know if camera exists here and getService might block for 5 seconds.
+ // Use checkService for camera if we don't know it exists.
+ static std::atomic<bool> sCameraChecked(false); // once true never becomes false.
+ static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
+
+ sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
+ ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
+ // If the device does not have a camera, do not create a death listener for it.
+ if (binder != NULL) {
+ sCameraVerified = true;
+ mDeathNotifiers.emplace_back(
+ binder, [l = wp<IMediaRecorderClient>(listener)](){
+ sp<IMediaRecorderClient> listener = l.promote();
+ if (listener) {
+ ALOGV("media.camera service died. "
+ "Sending death notification.");
+ listener->notify(
+ MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
+ MediaPlayerService::CAMERA_PROCESS_DEATH);
+ } else {
+ ALOGW("media.camera service died without a death handler.");
+ }
+ });
+ }
+ sCameraChecked = true;
}
- sCameraChecked = true;
{
using ::android::hidl::base::V1_0::IBase;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 41b6f72..93e03ee 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -174,9 +174,7 @@
ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strncasecmp(mime, "image/", 6)) {
+ if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "image/", 6)) {
int32_t isPrimary;
if ((index < 0 && meta->findInt32(
kKeyTrackIsDefault, &isPrimary) && isPrimary)
@@ -208,12 +206,19 @@
}
const char *mime;
- CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+ if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+ ALOGE("image track has no mime type");
+ return NULL;
+ }
ALOGV("extracting from %s track", mime);
if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
mime = MEDIA_MIMETYPE_VIDEO_HEVC;
trackMeta = new MetaData(*trackMeta);
trackMeta->setCString(kKeyMIMEType, mime);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+ mime = MEDIA_MIMETYPE_VIDEO_AV1;
+ trackMeta = new MetaData(*trackMeta);
+ trackMeta->setCString(kKeyMIMEType, mime);
}
bool preferhw = property_get_bool(
@@ -299,9 +304,7 @@
}
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strncasecmp(mime, "video/", 6)) {
+ if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "video/", 6)) {
break;
}
}
@@ -337,7 +340,10 @@
}
const char *mime;
- CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+ if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+ ALOGE("video track has no mime information.");
+ return NULL;
+ }
bool preferhw = property_get_bool(
"media.stagefright.thumbnail.prefer_hw_codecs", false);
@@ -523,6 +529,15 @@
mMetaData.add(METADATA_KEY_EXIF_LENGTH, String8(tmp));
}
+ int64_t xmpOffset, xmpSize;
+ if (meta->findInt64(kKeyXmpOffset, &xmpOffset)
+ && meta->findInt64(kKeyXmpSize, &xmpSize)) {
+ sprintf(tmp, "%lld", (long long)xmpOffset);
+ mMetaData.add(METADATA_KEY_XMP_OFFSET, String8(tmp));
+ sprintf(tmp, "%lld", (long long)xmpSize);
+ mMetaData.add(METADATA_KEY_XMP_LENGTH, String8(tmp));
+ }
+
bool hasAudio = false;
bool hasVideo = false;
int32_t videoWidth = -1;
@@ -531,14 +546,14 @@
int32_t audioBitrate = -1;
int32_t rotationAngle = -1;
int32_t imageCount = 0;
- int32_t imagePrimary = 0;
+ int32_t imagePrimary = -1;
int32_t imageWidth = -1;
int32_t imageHeight = -1;
int32_t imageRotation = -1;
// The overall duration is the duration of the longest track.
int64_t maxDurationUs = 0;
- String8 timedTextLang;
+ String8 timedTextLang, videoMime;
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
if (!trackMeta) {
@@ -574,28 +589,33 @@
mMetaData.add(METADATA_KEY_SAMPLERATE, String8(tmp));
}
} else if (!hasVideo && !strncasecmp("video/", mime, 6)) {
- hasVideo = true;
-
- CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
- CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
rotationAngle = 0;
}
if (!trackMeta->findInt32(kKeyFrameCount, &videoFrameCount)) {
videoFrameCount = 0;
}
-
- parseColorAspects(trackMeta);
+ if (trackMeta->findInt32(kKeyWidth, &videoWidth)
+ && trackMeta->findInt32(kKeyHeight, &videoHeight)) {
+ hasVideo = true;
+ videoMime = String8(mime);
+ parseColorAspects(trackMeta);
+ } else {
+ ALOGE("video track ignored for missing dimensions");
+ }
} else if (!strncasecmp("image/", mime, 6)) {
int32_t isPrimary;
if (trackMeta->findInt32(
kKeyTrackIsDefault, &isPrimary) && isPrimary) {
- imagePrimary = imageCount;
- CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
- CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
if (!trackMeta->findInt32(kKeyRotation, &imageRotation)) {
imageRotation = 0;
}
+ if (trackMeta->findInt32(kKeyWidth, &imageWidth)
+ && trackMeta->findInt32(kKeyHeight, &imageHeight)) {
+ imagePrimary = imageCount;
+ } else {
+ ALOGE("primary image track ignored for missing dimensions");
+ }
}
imageCount++;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
@@ -628,22 +648,27 @@
if (hasVideo) {
mMetaData.add(METADATA_KEY_HAS_VIDEO, String8("yes"));
+ CHECK(videoWidth >= 0);
sprintf(tmp, "%d", videoWidth);
mMetaData.add(METADATA_KEY_VIDEO_WIDTH, String8(tmp));
+ CHECK(videoHeight >= 0);
sprintf(tmp, "%d", videoHeight);
mMetaData.add(METADATA_KEY_VIDEO_HEIGHT, String8(tmp));
sprintf(tmp, "%d", rotationAngle);
mMetaData.add(METADATA_KEY_VIDEO_ROTATION, String8(tmp));
+ mMetaData.add(METADATA_KEY_VIDEO_CODEC_MIME_TYPE, videoMime);
+
if (videoFrameCount > 0) {
sprintf(tmp, "%d", videoFrameCount);
mMetaData.add(METADATA_KEY_VIDEO_FRAME_COUNT, String8(tmp));
}
}
- if (imageCount > 0) {
+ // only if we have a primary image
+ if (imageCount > 0 && imagePrimary >= 0) {
mMetaData.add(METADATA_KEY_HAS_IMAGE, String8("yes"));
sprintf(tmp, "%d", imageCount);
@@ -652,9 +677,11 @@
sprintf(tmp, "%d", imagePrimary);
mMetaData.add(METADATA_KEY_IMAGE_PRIMARY, String8(tmp));
+ CHECK(imageWidth >= 0);
sprintf(tmp, "%d", imageWidth);
mMetaData.add(METADATA_KEY_IMAGE_WIDTH, String8(tmp));
+ CHECK(imageHeight >= 0);
sprintf(tmp, "%d", imageHeight);
mMetaData.add(METADATA_KEY_IMAGE_HEIGHT, String8(tmp));
@@ -682,10 +709,9 @@
!strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
- if (trackMeta != nullptr) {
- CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
- }
- if (!strncasecmp("audio/", trackMIME, 6)) {
+ if (trackMeta != nullptr
+ && trackMeta->findCString(kKeyMIMEType, &trackMIME)
+ && !strncasecmp("audio/", trackMIME, 6)) {
// The matroska file only contains a single audio track,
// rewrite its mime type.
mMetaData.add(
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index c6272c8..b2f6407 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -17,6 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "StagefrightRecorder"
#include <inttypes.h>
+// TODO/workaround: including base logging now as it conflicts with ADebug.h
+// and it must be included first.
+#include <android-base/logging.h>
#include <utils/Log.h>
#include "WebmWriter.h"
@@ -131,6 +134,7 @@
ALOGV("Constructor");
+ mMetricsItem = NULL;
mAnalyticsDirty = false;
reset();
}
@@ -205,10 +209,12 @@
void StagefrightRecorder::flushAndResetMetrics(bool reinitialize) {
ALOGV("flushAndResetMetrics");
// flush anything we have, maybe setup a new record
- if (mAnalyticsDirty && mMetricsItem != NULL) {
- updateMetrics();
- if (mMetricsItem->count() > 0) {
- mMetricsItem->selfrecord();
+ if (mMetricsItem != NULL) {
+ if (mAnalyticsDirty) {
+ updateMetrics();
+ if (mMetricsItem->count() > 0) {
+ mMetricsItem->selfrecord();
+ }
}
delete mMetricsItem;
mMetricsItem = NULL;
@@ -575,12 +581,14 @@
mVideoBitRate = bitRate;
// A new bitrate(TMMBR) should be applied on runtime as well if OutputFormat is RTP_AVP
- if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP && mStarted && mPauseStartTimeUs == 0) {
+ if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
// Regular I frames may overload the network so we reduce the bitrate to allow
// margins for the I frame overruns.
// Still send requested bitrate (TMMBR) in the reply (TMMBN).
const float coefficient = 0.8f;
mVideoBitRate = (bitRate * coefficient) / 1000 * 1000;
+ }
+ if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP && mStarted && mPauseStartTimeUs == 0) {
mVideoEncoderSource->setEncodingBitrate(mVideoBitRate);
ARTPWriter* rtpWriter = static_cast<ARTPWriter*>(mWriter.get());
rtpWriter->setTMMBNInfo(mOpponentID, bitRate);
@@ -1313,7 +1321,7 @@
if (mPrivacySensitive == PRIVACY_SENSITIVE_DEFAULT) {
if (attr.source == AUDIO_SOURCE_VOICE_COMMUNICATION
|| attr.source == AUDIO_SOURCE_CAMCORDER) {
- attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+ attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
mPrivacySensitive = PRIVACY_SENSITIVE_ENABLED;
} else {
mPrivacySensitive = PRIVACY_SENSITIVE_DISABLED;
@@ -1329,7 +1337,7 @@
return NULL;
}
if (mPrivacySensitive == PRIVACY_SENSITIVE_ENABLED) {
- attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+ attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
}
}
@@ -1967,10 +1975,6 @@
format->setInt32("stride", stride);
format->setInt32("slice-height", sliceHeight);
format->setInt32("color-format", colorFormat);
- if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
- // This indicates that a raw image provided to encoder needs to be rotated.
- format->setInt32("rotation-degrees", mRotationDegrees);
- }
} else {
format->setInt32("width", mVideoWidth);
format->setInt32("height", mVideoHeight);
@@ -1988,6 +1992,11 @@
}
}
+ if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+ // This indicates that a raw image provided to encoder needs to be rotated.
+ format->setInt32("rotation-degrees", mRotationDegrees);
+ }
+
format->setInt32("bitrate", mVideoBitRate);
format->setInt32("bitrate-mode", mVideoBitRateMode);
format->setInt32("frame-rate", mFrameRate);
@@ -2216,6 +2225,7 @@
}
if (mOutputFormat == OUTPUT_FORMAT_MPEG_4 || mOutputFormat == OUTPUT_FORMAT_THREE_GPP) {
(*meta)->setInt32(kKeyEmptyTrackMalFormed, true);
+ (*meta)->setInt32(kKey4BitTrackIds, true);
}
}
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 1b5cb4b..8d94698 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -60,7 +60,7 @@
#define DEFAULT_AUDIOSINK_SAMPLERATE 44100
// when the channel mask isn't known, use the channel count to derive a mask in AudioSink::open()
-#define CHANNEL_MASK_USE_CHANNEL_ORDER 0
+#define CHANNEL_MASK_USE_CHANNEL_ORDER AUDIO_CHANNEL_NONE
// duration below which we do not allow deep audio buffering
#define AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US 5000000
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 0eaa503..439dbe8 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -432,7 +432,7 @@
}
if (mDataSource == nullptr) {
ALOGD("FileSource local");
- mDataSource = new PlayerServiceFileSource(mFd.get(), mOffset, mLength);
+ mDataSource = new PlayerServiceFileSource(dup(mFd.get()), mOffset, mLength);
}
}
@@ -1159,7 +1159,7 @@
readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, mode, &actualTimeUs);
if (mode != MediaPlayerSeekMode::SEEK_CLOSEST) {
- seekTimeUs = actualTimeUs;
+ seekTimeUs = std::max<int64_t>(0, actualTimeUs);
}
mVideoLastDequeueTimeUs = actualTimeUs;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 4e7daa5..47362ef 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1702,6 +1702,12 @@
updateRebufferingTimer(false /* stopping */, false /* exiting */);
}
+void NuPlayer::setTargetBitrate(int bitrate) {
+ if (mSource != NULL) {
+ mSource->setTargetBitrate(bitrate);
+ }
+}
+
void NuPlayer::onPause() {
updatePlaybackTimer(true /* stopping */, "onPause");
@@ -2868,6 +2874,27 @@
}
break;
}
+ case NuPlayer::RTPSource::RTP_QUALITY:
+ {
+ int32_t feedbackType, bitrate;
+ int32_t highestSeqNum, baseSeqNum, prevExpected;
+ int32_t numBufRecv, prevNumBufRecv;
+ CHECK(msg->findInt32("feedback-type", &feedbackType));
+ CHECK(msg->findInt32("bit-rate", &bitrate));
+ CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
+ CHECK(msg->findInt32("base-seq-num", &baseSeqNum));
+ CHECK(msg->findInt32("prev-expected", &prevExpected));
+ CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
+ CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+ in.writeInt32(feedbackType);
+ in.writeInt32(bitrate);
+ in.writeInt32(highestSeqNum);
+ in.writeInt32(baseSeqNum);
+ in.writeInt32(prevExpected);
+ in.writeInt32(numBufRecv);
+ in.writeInt32(prevNumBufRecv);
+ break;
+ }
case NuPlayer::RTPSource::RTP_CVO:
{
int32_t cvo;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 0105248..adb7075 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -102,6 +102,8 @@
void updateInternalTimers();
+ void setTargetBitrate(int bitrate /* bps */);
+
protected:
virtual ~NuPlayer();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 2d82944..2a50fc2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -817,7 +817,11 @@
}
status_t NuPlayerDriver::setParameter(
- int /* key */, const Parcel & /* request */) {
+ int key, const Parcel &request ) {
+ if (key == KEY_PARAMETER_RTP_ATTRIBUTES) {
+ mPlayer->setTargetBitrate(request.readInt32());
+ return OK;
+ }
return INVALID_OPERATION;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index c30f048..6a8c708 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -922,6 +922,11 @@
firstEntry = false;
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ if (mediaTimeUs < 0) {
+ ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
+ mediaTimeUs / 1E6);
+ mediaTimeUs = 0;
+ }
ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
}
@@ -1928,11 +1933,12 @@
int32_t numChannels;
CHECK(format->findInt32("channel-count", &numChannels));
- int32_t channelMask;
- if (!format->findInt32("channel-mask", &channelMask)) {
- // signal to the AudioSink to derive the mask from count.
- channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
- }
+ int32_t rawChannelMask;
+ audio_channel_mask_t channelMask =
+ format->findInt32("channel-mask", &rawChannelMask) ?
+ static_cast<audio_channel_mask_t>(rawChannelMask)
+ // signal to the AudioSink to derive the mask from count.
+ : CHANNEL_MASK_USE_CHANNEL_ORDER;
int32_t sampleRate;
CHECK(format->findInt32("sample-rate", &sampleRate));
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index eb39870..bf6b539 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -132,6 +132,8 @@
virtual void setOffloadAudio(bool /* offload */) {}
+ virtual void setTargetBitrate(int32_t) {}
+
// Modular DRM
virtual status_t prepareDrm(
const uint8_t /*uuid*/[16], const Vector<uint8_t> &/*drmSessionId*/,
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
index b5142ed..a532603 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
@@ -34,8 +34,6 @@
mTargetHandler(targetHandler),
mEOS(false),
mSendDataNotification(true) {
- mSource->setListener(this);
-
mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize);
for (size_t i = 0; i < kNumBuffers; ++i) {
sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize);
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index a6601cd..b1901e8 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -114,7 +114,8 @@
// index(i) should be started from 1. 0 is reserved for [root]
mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
mRTPConn->setSelfID(info->mSelfID);
- mRTPConn->setMinMaxBitrate(kMinVideoBitrate, info->mAS * 1000 /* kbps */);
+ mRTPConn->setJbTime(
+ (info->mJbTimeMs <= 3000 && info->mJbTimeMs >= 40) ? info->mJbTimeMs : 300);
info->mRTPSocket = sockRtp;
info->mRTCPSocket = sockRtcp;
@@ -135,11 +136,16 @@
if (info->mIsAudio) {
mAudioTrack = source;
+ info->mTimeScale = 16000;
} else {
mVideoTrack = source;
+ info->mTimeScale = 90000;
}
info->mSource = source;
+ info->mRTPTime = 0;
+ info->mNormalPlaytimeUs = 0;
+ info->mNPTMappingValid = false;
}
if (mInPreparationPhase) {
@@ -280,20 +286,19 @@
}
int32_t cvo;
- if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo)) {
- if (cvo != mLastCVOUpdated) {
- sp<AMessage> msg = new AMessage();
- msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
- msg->setInt32("cvo", cvo);
+ if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
+ cvo != mLastCVOUpdated) {
+ sp<AMessage> msg = new AMessage();
+ msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
+ msg->setInt32("cvo", cvo);
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatIMSRxNotice);
- notify->setMessage("message", msg);
- notify->post();
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatIMSRxNotice);
+ notify->setMessage("message", msg);
+ notify->post();
- ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
- mLastCVOUpdated = cvo;
- }
+ ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
+ mLastCVOUpdated = cvo;
}
return finalResult;
@@ -347,6 +352,11 @@
schedulePollBuffering();
}
+bool NuPlayer::RTPSource::isRealTime() const {
+ ALOGD("RTPSource::isRealTime=%d", true);
+ return true;
+}
+
void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("onMessageReceived =%d", msg->what());
@@ -429,7 +439,6 @@
source->queueAccessUnit(accessUnit);
break;
}
- */
int64_t nptUs =
((double)rtpTime - (double)info->mRTPTime)
@@ -437,7 +446,8 @@
* 1000000ll
+ info->mNormalPlaytimeUs;
- accessUnit->meta()->setInt64("timeUs", nptUs);
+ */
+ accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());
source->queueAccessUnit(accessUnit);
}
@@ -490,6 +500,10 @@
}
}
+void NuPlayer::RTPSource::setTargetBitrate(int32_t bitrate) {
+ mRTPConn->setTargetBitrate(bitrate);
+}
+
void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
trackIndex, rtpTime, (long long)ntpTime);
@@ -656,6 +670,7 @@
newTrackInfo.mIsAudio = isAudioKey;
mTracks.push(newTrackInfo);
info = &mTracks.editTop();
+ info->mJbTimeMs = 300;
}
if (key == "rtp-param-mime-type") {
@@ -698,6 +713,8 @@
} else if (key == "rtp-param-set-socket-network") {
int64_t networkHandle = atoll(value);
setSocketNetwork(networkHandle);
+ } else if (key == "rtp-param-jitter-buffer-time") {
+ info->mJbTimeMs = atoi(value);
}
return OK;
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
index 5085a7e..fb2d3b9 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -52,6 +52,9 @@
const String8& rtpParams);
enum {
+ RTP_FIRST_PACKET = 100,
+ RTCP_FIRST_PACKET = 101,
+ RTP_QUALITY = 102,
RTCP_TSFB = 205,
RTCP_PSFB = 206,
RTP_CVO = 300,
@@ -77,8 +80,12 @@
int64_t seekTimeUs,
MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
+ virtual bool isRealTime() const;
+
void onMessageReceived(const sp<AMessage> &msg);
+ virtual void setTargetBitrate(int32_t bitrate) override;
+
protected:
virtual ~RTPSource();
@@ -95,7 +102,6 @@
};
const int64_t kBufferingPollIntervalUs = 1000000ll;
- const int32_t kMinVideoBitrate = 192000; /* bps */
enum State {
DISCONNECTED,
@@ -123,6 +129,8 @@
int32_t mTimeScale;
int32_t mAS;
+ /* RTP jitter buffer time in milliseconds */
+ uint32_t mJbTimeMs;
/* Unique ID indicates itself */
uint32_t mSelfID;
/* extmap:<value> for CVO will be set to here */
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 83da092..9533ae5 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -146,7 +146,9 @@
}
// Close socket before posting message to RTSPSource message handler.
- close(mHandler->getARTSPConnection()->getSocket());
+ if (mHandler != NULL) {
+ close(mHandler->getARTSPConnection()->getSocket());
+ }
sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 14f1323..bec27d3 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -79,6 +79,7 @@
void NuPlayer::StreamingSource::start() {
mStreamListener = new NuPlayerStreamListener(mSource, NULL);
+ mSource->setListener(mStreamListener);
uint32_t sourceFlags = mSource->flags();
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index f114046..c81a659 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -65,6 +65,14 @@
return true;
}
+ virtual bool overrideProcessInfo(
+ int /* pid */, int /* procState */, int /* oomScore */) {
+ return true;
+ }
+
+ virtual void removeProcessInfoOverride(int /* pid */) {
+ }
+
private:
DISALLOW_EVIL_CONSTRUCTORS(FakeProcessInfo);
};
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
new file mode 100644
index 0000000..5a52ea5
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "StagefrightRecorderTest",
+ gtest: true,
+
+ srcs: [
+ "StagefrightRecorderTest.cpp",
+ ],
+
+ include_dirs: [
+ "system/media/audio/include",
+ "frameworks/av/include",
+ "frameworks/av/camera/include",
+ "frameworks/av/media/libmediaplayerservice",
+ "frameworks/av/media/libmediametrics/include",
+ "frameworks/av/media/ndk/include",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libmedia",
+ "libbinder",
+ "libutils",
+ "libmediaplayerservice",
+ "libstagefright",
+ "libmediandk",
+ ],
+
+ compile_multilib: "32",
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
new file mode 100644
index 0000000..5751631
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "StagefrightRecorderTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <chrono>
+#include <ctime>
+#include <iostream>
+#include <string>
+#include <thread>
+
+#include <MediaPlayerService.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/stagefright/MediaCodec.h>
+#include <system/audio.h>
+
+#include "StagefrightRecorder.h"
+
+#define OUTPUT_INFO_FILE_NAME "/data/local/tmp/stfrecorder_audio.info"
+#define OUTPUT_FILE_NAME_AUDIO "/data/local/tmp/stfrecorder_audio.raw"
+
+const bool kDebug = false;
+constexpr int32_t kMaxLoopCount = 10;
+constexpr int32_t kClipDurationInSec = 4;
+constexpr int32_t kPauseTimeInSec = 2;
+// Tolerance value for extracted clipduration is maximum 10% of total clipduration
+constexpr int32_t kToleranceValueInUs = kClipDurationInSec * 100000;
+
+using namespace android;
+
+class StagefrightRecorderTest
+ : public ::testing::TestWithParam<std::pair<output_format, audio_encoder>> {
+ public:
+ StagefrightRecorderTest() : mStfRecorder(nullptr), mOutputAudioFp(nullptr) {
+ mExpectedDurationInMs = 0;
+ mExpectedPauseInMs = 0;
+ }
+
+ ~StagefrightRecorderTest() {
+ if (mStfRecorder) free(mStfRecorder);
+ if (mOutputAudioFp) fclose(mOutputAudioFp);
+ }
+
+ void SetUp() override {
+ mStfRecorder = new StagefrightRecorder(String16(LOG_TAG));
+ ASSERT_NE(mStfRecorder, nullptr) << "Failed to create the instance of recorder";
+
+ mOutputAudioFp = fopen(OUTPUT_FILE_NAME_AUDIO, "wb");
+ ASSERT_NE(mOutputAudioFp, nullptr) << "Failed to open output file "
+ << OUTPUT_FILE_NAME_AUDIO << " for stagefright recorder";
+
+ int32_t fd = fileno(mOutputAudioFp);
+ ASSERT_GE(fd, 0) << "Failed to get the file descriptor of the output file for "
+ << OUTPUT_FILE_NAME_AUDIO;
+
+ status_t status = mStfRecorder->setOutputFile(fd);
+ ASSERT_EQ(status, OK) << "Failed to set the output file " << OUTPUT_FILE_NAME_AUDIO
+ << " for stagefright recorder";
+ }
+
+ void TearDown() override {
+ if (mOutputAudioFp) {
+ fclose(mOutputAudioFp);
+ mOutputAudioFp = nullptr;
+ }
+ if (!kDebug) {
+ int32_t status = remove(OUTPUT_FILE_NAME_AUDIO);
+ ASSERT_EQ(status, 0) << "Unable to delete the output file " << OUTPUT_FILE_NAME_AUDIO;
+ }
+ }
+
+ void setAudioRecorderFormat(output_format outputFormat, audio_encoder encoder,
+ audio_source_t audioSource = AUDIO_SOURCE_DEFAULT);
+ void recordMedia(bool isPaused = false, int32_t numStart = 0, int32_t numPause = 0);
+ void dumpInfo();
+ void setupExtractor(AMediaExtractor *extractor, int32_t &trackCount);
+ void validateOutput();
+
+ MediaRecorderBase *mStfRecorder;
+ FILE *mOutputAudioFp;
+ double mExpectedDurationInMs;
+ double mExpectedPauseInMs;
+};
+
+void StagefrightRecorderTest::setAudioRecorderFormat(output_format outputFormat,
+ audio_encoder encoder,
+ audio_source_t audioSource) {
+ status_t status = mStfRecorder->setAudioSource(audioSource);
+ ASSERT_EQ(status, OK) << "Failed to set the audio source: " << audioSource;
+
+ status = mStfRecorder->setOutputFormat(outputFormat);
+ ASSERT_EQ(status, OK) << "Failed to set the output format: " << outputFormat;
+
+ status = mStfRecorder->setAudioEncoder(encoder);
+ ASSERT_EQ(status, OK) << "Failed to set the audio encoder: " << encoder;
+}
+
+void StagefrightRecorderTest::recordMedia(bool isPause, int32_t numStart, int32_t numPause) {
+ status_t status = mStfRecorder->init();
+ ASSERT_EQ(status, OK) << "Failed to initialize stagefright recorder";
+
+ status = mStfRecorder->prepare();
+ ASSERT_EQ(status, OK) << "Failed to preapre the reorder";
+
+ // first start should succeed.
+ status = mStfRecorder->start();
+ ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+ for (int32_t count = 0; count < numStart; count++) {
+ status = mStfRecorder->start();
+ }
+
+ auto tStart = std::chrono::high_resolution_clock::now();
+ // Recording media for 4 secs
+ std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+ auto tEnd = std::chrono::high_resolution_clock::now();
+ mExpectedDurationInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+ if (isPause) {
+ // first pause should succeed.
+ status = mStfRecorder->pause();
+ ASSERT_EQ(status, OK) << "Failed to pause the recorder";
+
+ tStart = std::chrono::high_resolution_clock::now();
+ // Paused recorder for 2 secs
+ std::this_thread::sleep_for(std::chrono::seconds(kPauseTimeInSec));
+
+ for (int32_t count = 0; count < numPause; count++) {
+ status = mStfRecorder->pause();
+ }
+
+ tEnd = std::chrono::high_resolution_clock::now();
+ mExpectedPauseInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+ status = mStfRecorder->resume();
+ ASSERT_EQ(status, OK) << "Failed to resume the recorder";
+
+ auto tStart = std::chrono::high_resolution_clock::now();
+ // Recording media for 4 secs
+ std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+ auto tEnd = std::chrono::high_resolution_clock::now();
+ mExpectedDurationInMs += std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+ }
+ status = mStfRecorder->stop();
+ ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+}
+
+void StagefrightRecorderTest::dumpInfo() {
+ FILE *dumpOutput = fopen(OUTPUT_INFO_FILE_NAME, "wb");
+ int32_t dumpFd = fileno(dumpOutput);
+ Vector<String16> args;
+ status_t status = mStfRecorder->dump(dumpFd, args);
+ ASSERT_EQ(status, OK) << "Failed to dump the info for the recorder";
+ fclose(dumpOutput);
+}
+
+void StagefrightRecorderTest::setupExtractor(AMediaExtractor *extractor, int32_t &trackCount) {
+ int32_t fd = open(OUTPUT_FILE_NAME_AUDIO, O_RDONLY);
+ ASSERT_GE(fd, 0) << "Failed to open recorder's output file " << OUTPUT_FILE_NAME_AUDIO
+ << " to validate";
+
+ struct stat buf;
+ int32_t status = fstat(fd, &buf);
+ ASSERT_EQ(status, 0) << "Failed to get properties of input file " << OUTPUT_FILE_NAME_AUDIO
+ << " for extractor";
+
+ size_t fileSize = buf.st_size;
+ ASSERT_GT(fileSize, 0) << "Size of input file " << OUTPUT_FILE_NAME_AUDIO
+ << " to extractor cannot be zero";
+ ALOGV("Size of input file to extractor: %zu", fileSize);
+
+ status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+ ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+ trackCount = AMediaExtractor_getTrackCount(extractor);
+ ALOGV("Number of tracks reported by extractor : %d", trackCount);
+}
+
+// Validate recoder's output using extractor
+void StagefrightRecorderTest::validateOutput() {
+ int32_t trackCount = -1;
+ AMediaExtractor *extractor = AMediaExtractor_new();
+ ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+ ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, trackCount));
+ ASSERT_EQ(trackCount, 1) << "Expected 1 track, saw " << trackCount;
+
+ for (int32_t idx = 0; idx < trackCount; idx++) {
+ AMediaExtractor_selectTrack(extractor, idx);
+ AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+ ASSERT_NE(format, nullptr) << "Track format is NULL";
+ ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+ int64_t clipDurationUs;
+ AMediaFormat_getInt64(format, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs);
+ int32_t diff = abs((mExpectedDurationInMs * 1000) - clipDurationUs);
+ ASSERT_LE(diff, kToleranceValueInUs)
+ << "Expected duration: " << (mExpectedDurationInMs * 1000)
+ << " Actual duration: " << clipDurationUs << " Difference: " << diff
+ << " Difference is expected to be less than tolerance value: " << kToleranceValueInUs;
+
+ const char *mime = nullptr;
+ AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+ ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+ ALOGI("Track mime = %s", mime);
+
+ int32_t sampleRate, channelCount, bitRate;
+ AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+ ALOGI("Channel count reported by extractor: %d", channelCount);
+ AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+ ALOGI("Sample Rate reported by extractor: %d", sampleRate);
+ AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &bitRate);
+ ALOGI("Bit Rate reported by extractor: %d", bitRate);
+ }
+}
+
+TEST_F(StagefrightRecorderTest, RecordingAudioSanityTest) {
+ ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
+
+ int32_t maxAmplitude = -1;
+ status_t status = mStfRecorder->getMaxAmplitude(&maxAmplitude);
+ ASSERT_EQ(maxAmplitude, 0) << "Invalid value of max amplitude";
+
+ ASSERT_NO_FATAL_FAILURE(recordMedia());
+
+ // Verify getMetrics() behavior
+ Parcel parcel;
+ status = mStfRecorder->getMetrics(&parcel);
+ ASSERT_EQ(status, OK) << "Failed to get the parcel from getMetrics";
+ ALOGV("Size of the Parcel returned by getMetrics: %zu", parcel.dataSize());
+ ASSERT_GT(parcel.dataSize(), 0) << "Parcel size reports empty record";
+ ASSERT_NO_FATAL_FAILURE(validateOutput());
+ if (kDebug) {
+ ASSERT_NO_FATAL_FAILURE(dumpInfo());
+ }
+}
+
+TEST_P(StagefrightRecorderTest, MultiFormatAudioRecordTest) {
+ output_format outputFormat = GetParam().first;
+ audio_encoder audioEncoder = GetParam().second;
+ ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(outputFormat, audioEncoder));
+ ASSERT_NO_FATAL_FAILURE(recordMedia());
+ // TODO(b/161687761)
+ // Skip for AMR-NB/WB output format
+ if (!(outputFormat == OUTPUT_FORMAT_AMR_NB || outputFormat == OUTPUT_FORMAT_AMR_WB)) {
+ ASSERT_NO_FATAL_FAILURE(validateOutput());
+ }
+ if (kDebug) {
+ ASSERT_NO_FATAL_FAILURE(dumpInfo());
+ }
+}
+
+TEST_F(StagefrightRecorderTest, GetActiveMicrophonesTest) {
+ ASSERT_NO_FATAL_FAILURE(
+ setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT, AUDIO_SOURCE_MIC));
+
+ status_t status = mStfRecorder->init();
+ ASSERT_EQ(status, OK) << "Init failed for stagefright recorder";
+
+ status = mStfRecorder->prepare();
+ ASSERT_EQ(status, OK) << "Failed to preapre the reorder";
+
+ status = mStfRecorder->start();
+ ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+ // Record media for 4 secs
+ std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+
+ std::vector<media::MicrophoneInfo> activeMicrophones{};
+ status = mStfRecorder->getActiveMicrophones(&activeMicrophones);
+ ASSERT_EQ(status, OK) << "Failed to get Active Microphones";
+ ASSERT_GT(activeMicrophones.size(), 0) << "No active microphones are found";
+
+ status = mStfRecorder->stop();
+ ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+ if (kDebug) {
+ ASSERT_NO_FATAL_FAILURE(dumpInfo());
+ }
+}
+
+TEST_F(StagefrightRecorderTest, MultiStartPauseTest) {
+ ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
+ ASSERT_NO_FATAL_FAILURE(recordMedia(true, kMaxLoopCount, kMaxLoopCount));
+ ASSERT_NO_FATAL_FAILURE(validateOutput());
+ if (kDebug) {
+ ASSERT_NO_FATAL_FAILURE(dumpInfo());
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ StagefrightRecorderTestAll, StagefrightRecorderTest,
+ ::testing::Values(std::make_pair(OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB),
+ std::make_pair(OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB),
+ std::make_pair(OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC),
+ std::make_pair(OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS)));
+
+int main(int argc, char **argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ return status;
+}
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 431f0be..1934820 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -14,23 +14,32 @@
* limitations under the License.
*/
-// AIDL interfaces of MediaTranscoding.
-aidl_interface {
- name: "mediatranscoding_aidl_interface",
- unstable: true,
- local_include_dir: "aidl",
+filegroup {
+ name: "libmediatranscoding_aidl",
srcs: [
"aidl/android/media/IMediaTranscodingService.aidl",
"aidl/android/media/ITranscodingClient.aidl",
"aidl/android/media/ITranscodingClientCallback.aidl",
"aidl/android/media/TranscodingErrorCode.aidl",
- "aidl/android/media/TranscodingJobPriority.aidl",
+ "aidl/android/media/TranscodingSessionPriority.aidl",
+ "aidl/android/media/TranscodingSessionStats.aidl",
"aidl/android/media/TranscodingType.aidl",
"aidl/android/media/TranscodingVideoCodecType.aidl",
- "aidl/android/media/TranscodingJobParcel.aidl",
+ "aidl/android/media/TranscodingVideoTrackFormat.aidl",
+ "aidl/android/media/TranscodingSessionParcel.aidl",
"aidl/android/media/TranscodingRequestParcel.aidl",
"aidl/android/media/TranscodingResultParcel.aidl",
+ "aidl/android/media/TranscodingTestConfig.aidl",
],
+ path: "aidl",
+}
+
+// AIDL interfaces of MediaTranscoding.
+aidl_interface {
+ name: "mediatranscoding_aidl_interface",
+ unstable: true,
+ local_include_dir: "aidl",
+ srcs: [":libmediatranscoding_aidl"],
backend:
{
java: {
@@ -39,28 +48,35 @@
},
}
-cc_library_shared {
+cc_library {
name: "libmediatranscoding",
srcs: [
"TranscodingClientManager.cpp",
- "TranscodingJobScheduler.cpp",
+ "TranscodingSessionController.cpp",
+ "TranscodingResourcePolicy.cpp",
"TranscodingUidPolicy.cpp",
+ "TranscoderWrapper.cpp",
],
shared_libs: [
+ "libandroid",
"libbinder_ndk",
"libcutils",
"liblog",
"libutils",
"libmediatranscoder",
- "libbinder",
+ "libmediandk",
+ ],
+ export_shared_lib_headers: [
+ "libmediandk",
],
export_include_dirs: ["include"],
static_libs: [
"mediatranscoding_aidl_interface-ndk_platform",
+ "resourceobserver_aidl_interface-ndk_platform",
],
cflags: [
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/libmediatranscoding/TEST_MAPPING
new file mode 100644
index 0000000..f8a9db9
--- /dev/null
+++ b/media/libmediatranscoding/TEST_MAPPING
@@ -0,0 +1,32 @@
+{
+ "presubmit": [
+ {
+ "name": "MediaSampleQueueTests"
+ },
+ {
+ "name": "MediaSampleReaderNDKTests"
+ },
+ {
+ "name": "MediaSampleWriterTests"
+ },
+ {
+ "name": "MediaTrackTranscoderTests"
+ },
+ {
+ "name": "MediaTranscoderTests"
+ },
+ {
+ "name": "PassthroughTrackTranscoderTests"
+ },
+ {
+ "name": "TranscodingClientManager_tests"
+ },
+ {
+ "name": "TranscodingSessionController_tests"
+ },
+ {
+ "name": "VideoTrackTranscoderTests"
+ }
+ ]
+}
+
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
new file mode 100644
index 0000000..da86187
--- /dev/null
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -0,0 +1,508 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscoderWrapper"
+
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/TranscoderWrapper.h>
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingVideoCodecType;
+using ::aidl::android::media::TranscodingVideoTrackFormat;
+
+static TranscodingErrorCode toTranscodingError(media_status_t status) {
+ switch (status) {
+ case AMEDIA_OK:
+ return TranscodingErrorCode::kNoError;
+ case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE: // FALLTHRU
+ case AMEDIACODEC_ERROR_RECLAIMED:
+ return TranscodingErrorCode::kInsufficientResources;
+ case AMEDIA_ERROR_MALFORMED:
+ return TranscodingErrorCode::kMalformed;
+ case AMEDIA_ERROR_UNSUPPORTED:
+ return TranscodingErrorCode::kUnsupported;
+ case AMEDIA_ERROR_INVALID_OBJECT: // FALLTHRU
+ case AMEDIA_ERROR_INVALID_PARAMETER:
+ return TranscodingErrorCode::kInvalidParameter;
+ case AMEDIA_ERROR_INVALID_OPERATION:
+ return TranscodingErrorCode::kInvalidOperation;
+ case AMEDIA_ERROR_IO:
+ return TranscodingErrorCode::kErrorIO;
+ case AMEDIA_ERROR_UNKNOWN: // FALLTHRU
+ default:
+ return TranscodingErrorCode::kUnknown;
+ }
+}
+
+static AMediaFormat* getVideoFormat(
+ const char* originalMime,
+ const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
+ if (requestedFormat == std::nullopt) {
+ return nullptr;
+ }
+
+ AMediaFormat* format = AMediaFormat_new();
+ bool changed = false;
+ if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
+ strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+ changed = true;
+ } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
+ strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+ changed = true;
+ }
+ if (requestedFormat->bitrateBps > 0) {
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+ changed = true;
+ }
+ // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
+ // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
+ if (!changed) {
+ AMediaFormat_delete(format);
+ // Use null format for passthru.
+ format = nullptr;
+ }
+ return format;
+}
+
+//static
+std::string TranscoderWrapper::toString(const Event& event) {
+ std::string typeStr;
+ switch (event.type) {
+ case Event::Start:
+ typeStr = "Start";
+ break;
+ case Event::Pause:
+ typeStr = "Pause";
+ break;
+ case Event::Resume:
+ typeStr = "Resume";
+ break;
+ case Event::Stop:
+ typeStr = "Stop";
+ break;
+ case Event::Finish:
+ typeStr = "Finish";
+ break;
+ case Event::Error:
+ typeStr = "Error";
+ break;
+ case Event::Progress:
+ typeStr = "Progress";
+ break;
+ default:
+ return "(unknown)";
+ }
+ std::string result;
+ result = "session {" + std::to_string(event.clientId) + "," + std::to_string(event.sessionId) +
+ "}: " + typeStr;
+ if (event.type == Event::Error || event.type == Event::Progress) {
+ result += " " + std::to_string(event.arg);
+ }
+ return result;
+}
+
+class TranscoderWrapper::CallbackImpl : public MediaTranscoder::CallbackInterface {
+public:
+ CallbackImpl(const std::shared_ptr<TranscoderWrapper>& owner, ClientIdType clientId,
+ SessionIdType sessionId)
+ : mOwner(owner), mClientId(clientId), mSessionId(sessionId) {}
+
+ virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+ auto owner = mOwner.lock();
+ if (owner != nullptr) {
+ owner->onFinish(mClientId, mSessionId);
+ }
+ }
+
+ virtual void onError(const MediaTranscoder* transcoder __unused,
+ media_status_t error) override {
+ auto owner = mOwner.lock();
+ if (owner != nullptr) {
+ owner->onError(mClientId, mSessionId, error);
+ }
+ }
+
+ virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+ int32_t progress) override {
+ auto owner = mOwner.lock();
+ if (owner != nullptr) {
+ owner->onProgress(mClientId, mSessionId, progress);
+ }
+ }
+
+ virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+ __unused) override {
+ ALOGV("%s: session {%lld, %d}", __FUNCTION__, (long long)mClientId, mSessionId);
+ }
+
+private:
+ std::weak_ptr<TranscoderWrapper> mOwner;
+ ClientIdType mClientId;
+ SessionIdType mSessionId;
+};
+
+TranscoderWrapper::TranscoderWrapper() : mCurrentClientId(0), mCurrentSessionId(-1) {
+ std::thread(&TranscoderWrapper::threadLoop, this).detach();
+}
+
+void TranscoderWrapper::setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) {
+ mCallback = cb;
+}
+
+static bool isResourceError(media_status_t err) {
+ return err == AMEDIACODEC_ERROR_RECLAIMED || err == AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+}
+
+void TranscoderWrapper::reportError(ClientIdType clientId, SessionIdType sessionId,
+ media_status_t err) {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ if (isResourceError(err)) {
+ // Add a placeholder pause state to mPausedStateMap. This is required when resuming.
+ // TODO: remove this when transcoder pause/resume logic is ready. New logic will
+ // no longer use the pause states.
+ auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+ if (it == mPausedStateMap.end()) {
+ mPausedStateMap.emplace(SessionKeyType(clientId, sessionId),
+ new ndk::ScopedAParcel());
+ }
+
+ callback->onResourceLost(clientId, sessionId);
+ } else {
+ callback->onError(clientId, sessionId, toTranscodingError(err));
+ }
+ }
+}
+
+void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ queueEvent(Event::Start, clientId, sessionId, [=] {
+ media_status_t err = handleStart(clientId, sessionId, request, clientCb);
+
+ if (err != AMEDIA_OK) {
+ cleanup();
+ reportError(clientId, sessionId, err);
+ } else {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onStarted(clientId, sessionId);
+ }
+ }
+ });
+}
+
+void TranscoderWrapper::pause(ClientIdType clientId, SessionIdType sessionId) {
+ queueEvent(Event::Pause, clientId, sessionId, [=] {
+ media_status_t err = handlePause(clientId, sessionId);
+
+ cleanup();
+
+ if (err != AMEDIA_OK) {
+ reportError(clientId, sessionId, err);
+ } else {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onPaused(clientId, sessionId);
+ }
+ }
+ });
+}
+
+void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ queueEvent(Event::Resume, clientId, sessionId, [=] {
+ media_status_t err = handleResume(clientId, sessionId, request, clientCb);
+
+ if (err != AMEDIA_OK) {
+ cleanup();
+ reportError(clientId, sessionId, err);
+ } else {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onResumed(clientId, sessionId);
+ }
+ }
+ });
+}
+
+void TranscoderWrapper::stop(ClientIdType clientId, SessionIdType sessionId) {
+ queueEvent(Event::Stop, clientId, sessionId, [=] {
+ if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+ sessionId == mCurrentSessionId) {
+ // Cancelling the currently running session.
+ media_status_t err = mTranscoder->cancel();
+ if (err != AMEDIA_OK) {
+ ALOGW("failed to stop transcoder: %d", err);
+ } else {
+ ALOGI("transcoder stopped");
+ }
+ cleanup();
+ } else {
+ // For sessions that are not currently running, release any pausedState for the session.
+ mPausedStateMap.erase(SessionKeyType(clientId, sessionId));
+ }
+ // No callback needed for stop.
+ });
+}
+
+void TranscoderWrapper::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+ queueEvent(Event::Finish, clientId, sessionId, [=] {
+ if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+ sessionId == mCurrentSessionId) {
+ cleanup();
+ }
+
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onFinish(clientId, sessionId);
+ }
+ });
+}
+
+void TranscoderWrapper::onError(ClientIdType clientId, SessionIdType sessionId,
+ media_status_t error) {
+ queueEvent(
+ Event::Error, clientId, sessionId,
+ [=] {
+ if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+ sessionId == mCurrentSessionId) {
+ cleanup();
+ }
+ reportError(clientId, sessionId, error);
+ },
+ error);
+}
+
+void TranscoderWrapper::onProgress(ClientIdType clientId, SessionIdType sessionId,
+ int32_t progress) {
+ queueEvent(
+ Event::Progress, clientId, sessionId,
+ [=] {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onProgressUpdate(clientId, sessionId, progress);
+ }
+ },
+ progress);
+}
+
+media_status_t TranscoderWrapper::setupTranscoder(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+ if (clientCb == nullptr) {
+ ALOGE("client callback is null");
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (mTranscoder != nullptr) {
+ ALOGE("transcoder already running");
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ }
+
+ Status status;
+ ::ndk::ScopedFileDescriptor srcFd, dstFd;
+ status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
+ if (!status.isOk() || srcFd.get() < 0) {
+ ALOGE("failed to open source");
+ return AMEDIA_ERROR_IO;
+ }
+
+ // Open dest file with "rw", as the transcoder could potentially reuse part of it
+ // for resume case. We might want the further differentiate and open with "w" only
+ // for start.
+ status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
+ if (!status.isOk() || dstFd.get() < 0) {
+ ALOGE("failed to open destination");
+ return AMEDIA_ERROR_IO;
+ }
+
+ mCurrentClientId = clientId;
+ mCurrentSessionId = sessionId;
+ mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, sessionId);
+ mTranscoder = MediaTranscoder::create(mTranscoderCb, request.clientPid, request.clientUid,
+ pausedState);
+ if (mTranscoder == nullptr) {
+ ALOGE("failed to create transcoder");
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ media_status_t err = mTranscoder->configureSource(srcFd.get());
+ if (err != AMEDIA_OK) {
+ ALOGE("failed to configure source: %d", err);
+ return err;
+ }
+
+ std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
+ if (trackFormats.size() == 0) {
+ ALOGE("failed to get track formats!");
+ return AMEDIA_ERROR_MALFORMED;
+ }
+
+ for (int i = 0; i < trackFormats.size(); ++i) {
+ AMediaFormat* format = nullptr;
+ const char* mime = nullptr;
+ AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+ if (!strncmp(mime, "video/", 6)) {
+ format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+ }
+
+ err = mTranscoder->configureTrackFormat(i, format);
+ if (format != nullptr) {
+ AMediaFormat_delete(format);
+ }
+ if (err != AMEDIA_OK) {
+ ALOGE("failed to configure track format for track %d: %d", i, err);
+ return err;
+ }
+ }
+
+ err = mTranscoder->configureDestination(dstFd.get());
+ if (err != AMEDIA_OK) {
+ ALOGE("failed to configure dest: %d", err);
+ return err;
+ }
+
+ return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleStart(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ ALOGI("%s: setting up transcoder for start", __FUNCTION__);
+ media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb);
+ if (err != AMEDIA_OK) {
+ ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+ return err;
+ }
+
+ err = mTranscoder->start();
+ if (err != AMEDIA_OK) {
+ ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+ return err;
+ }
+
+ ALOGI("%s: transcoder started", __FUNCTION__);
+ return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handlePause(ClientIdType clientId, SessionIdType sessionId) {
+ if (mTranscoder == nullptr) {
+ ALOGE("%s: transcoder is not running", __FUNCTION__);
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ }
+
+ if (clientId != mCurrentClientId || sessionId != mCurrentSessionId) {
+ ALOGW("%s: pausing session {%lld, %d} that's not current session {%lld, %d}", __FUNCTION__,
+ (long long)clientId, sessionId, (long long)mCurrentClientId, mCurrentSessionId);
+ }
+
+ ALOGI("%s: pausing transcoder", __FUNCTION__);
+
+ std::shared_ptr<ndk::ScopedAParcel> pauseStates;
+ media_status_t err = mTranscoder->pause(&pauseStates);
+ if (err != AMEDIA_OK) {
+ ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
+ return err;
+ }
+ mPausedStateMap[SessionKeyType(clientId, sessionId)] = pauseStates;
+
+ ALOGI("%s: transcoder paused", __FUNCTION__);
+ return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleResume(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ std::shared_ptr<ndk::ScopedAParcel> pausedState;
+ auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+ if (it != mPausedStateMap.end()) {
+ pausedState = it->second;
+ mPausedStateMap.erase(it);
+ } else {
+ ALOGE("%s: can't find paused state", __FUNCTION__);
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ }
+
+ ALOGI("%s: setting up transcoder for resume", __FUNCTION__);
+ media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb, pausedState);
+ if (err != AMEDIA_OK) {
+ ALOGE("%s: failed to setup transcoder: %d", __FUNCTION__, err);
+ return err;
+ }
+
+ err = mTranscoder->resume();
+ if (err != AMEDIA_OK) {
+ ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+ return err;
+ }
+
+ ALOGI("%s: transcoder resumed", __FUNCTION__);
+ return AMEDIA_OK;
+}
+
+void TranscoderWrapper::cleanup() {
+ mCurrentClientId = 0;
+ mCurrentSessionId = -1;
+ mTranscoderCb = nullptr;
+ mTranscoder = nullptr;
+}
+
+void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+ const std::function<void()> runnable, int32_t arg) {
+ std::scoped_lock lock{mLock};
+
+ mQueue.push_back({type, clientId, sessionId, runnable, arg});
+ mCondition.notify_one();
+}
+
+void TranscoderWrapper::threadLoop() {
+ std::unique_lock<std::mutex> lock{mLock};
+ // TranscoderWrapper currently lives in the transcoding service, as long as
+ // MediaTranscodingService itself.
+ while (true) {
+ // Wait for the next event.
+ while (mQueue.empty()) {
+ mCondition.wait(lock);
+ }
+
+ Event event = *mQueue.begin();
+ mQueue.pop_front();
+
+ ALOGD("%s: %s", __FUNCTION__, toString(event).c_str());
+
+ lock.unlock();
+ event.runnable();
+ lock.lock();
+ }
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index de9dd76..c0cc862 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -20,18 +20,23 @@
#include <aidl/android/media/BnTranscodingClient.h>
#include <aidl/android/media/IMediaTranscodingService.h>
#include <android/binder_ibinder.h>
+#include <android/permission_manager.h>
#include <inttypes.h>
#include <media/TranscodingClientManager.h>
#include <media/TranscodingRequest.h>
+#include <media/TranscodingUidPolicy.h>
+#include <private/android_filesystem_config.h>
#include <utils/Log.h>
+#include <utils/String16.h>
+
namespace android {
static_assert(sizeof(ClientIdType) == sizeof(void*), "ClientIdType should be pointer-sized");
using ::aidl::android::media::BnTranscodingClient;
using ::aidl::android::media::IMediaTranscodingService; // For service error codes
-using ::aidl::android::media::TranscodingJobParcel;
using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
using Status = ::ndk::ScopedAStatus;
using ::ndk::SpAIBinder;
@@ -44,6 +49,12 @@
TranscodingClientManager::sCookie2Client;
///////////////////////////////////////////////////////////////////////////////
+// Convenience methods for constructing binder::Status objects for error returns
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+ Status::fromServiceSpecificErrorWithMessage( \
+ errorCode, \
+ String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
+
/**
* ClientImpl implements a single client and contains all its information.
*/
@@ -60,50 +71,46 @@
* (casted to int64t_t) as the client id.
*/
ClientIdType mClientId;
- pid_t mClientPid;
- uid_t mClientUid;
std::string mClientName;
std::string mClientOpPackageName;
- // Next jobId to assign.
- std::atomic<int32_t> mNextJobId;
+ // Next sessionId to assign.
+ std::atomic<int32_t> mNextSessionId;
// Whether this client has been unregistered already.
std::atomic<bool> mAbandoned;
// Weak pointer to the client manager for this client.
std::weak_ptr<TranscodingClientManager> mOwner;
- ClientImpl(const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
+ ClientImpl(const std::shared_ptr<ITranscodingClientCallback>& callback,
const std::string& clientName, const std::string& opPackageName,
const std::weak_ptr<TranscodingClientManager>& owner);
Status submitRequest(const TranscodingRequestParcel& /*in_request*/,
- TranscodingJobParcel* /*out_job*/, bool* /*_aidl_return*/) override;
+ TranscodingSessionParcel* /*out_session*/,
+ bool* /*_aidl_return*/) override;
- Status cancelJob(int32_t /*in_jobId*/, bool* /*_aidl_return*/) override;
+ Status cancelSession(int32_t /*in_sessionId*/, bool* /*_aidl_return*/) override;
- Status getJobWithId(int32_t /*in_jobId*/, TranscodingJobParcel* /*out_job*/,
- bool* /*_aidl_return*/) override;
+ Status getSessionWithId(int32_t /*in_sessionId*/, TranscodingSessionParcel* /*out_session*/,
+ bool* /*_aidl_return*/) override;
Status unregister() override;
};
TranscodingClientManager::ClientImpl::ClientImpl(
- const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
- const std::string& clientName, const std::string& opPackageName,
- const std::weak_ptr<TranscodingClientManager>& owner)
+ const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+ const std::string& opPackageName, const std::weak_ptr<TranscodingClientManager>& owner)
: mClientBinder((callback != nullptr) ? callback->asBinder() : nullptr),
mClientCallback(callback),
mClientId(sCookieCounter.fetch_add(1, std::memory_order_relaxed)),
- mClientPid(pid),
- mClientUid(uid),
mClientName(clientName),
mClientOpPackageName(opPackageName),
- mNextJobId(0),
+ mNextSessionId(0),
mAbandoned(false),
mOwner(owner) {}
Status TranscodingClientManager::ClientImpl::submitRequest(
- const TranscodingRequestParcel& in_request, TranscodingJobParcel* out_job,
+ const TranscodingRequestParcel& in_request, TranscodingSessionParcel* out_session,
bool* _aidl_return) {
*_aidl_return = false;
@@ -112,62 +119,102 @@
return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
}
- if (in_request.fileName.empty()) {
- // This is the only error we check for now.
+ if (in_request.sourceFilePath.empty() || in_request.destinationFilePath.empty()) {
return Status::ok();
}
- int32_t jobId = mNextJobId.fetch_add(1);
+ int32_t callingPid = AIBinder_getCallingPid();
+ int32_t callingUid = AIBinder_getCallingUid();
+ int32_t in_clientUid = in_request.clientUid;
+ int32_t in_clientPid = in_request.clientPid;
+
+ // Check if we can trust clientUid. Only privilege caller could forward the
+ // uid on app client's behalf.
+ if (in_clientUid == IMediaTranscodingService::USE_CALLING_UID) {
+ in_clientUid = callingUid;
+ } else if (in_clientUid < 0) {
+ return Status::ok();
+ } else if (in_clientUid != callingUid && !owner->isTrustedCaller(callingPid, callingUid)) {
+ ALOGE("MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+ "(don't trust callingUid %d)",
+ in_clientPid, in_clientUid, callingUid);
+ return STATUS_ERROR_FMT(
+ IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+ "MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+ "(don't trust callingUid %d)",
+ in_clientPid, in_clientUid, callingUid);
+ }
+
+ // Check if we can trust clientPid. Only privilege caller could forward the
+ // pid on app client's behalf.
+ if (in_clientPid == IMediaTranscodingService::USE_CALLING_PID) {
+ in_clientPid = callingPid;
+ } else if (in_clientPid < 0) {
+ return Status::ok();
+ } else if (in_clientPid != callingPid && !owner->isTrustedCaller(callingPid, callingUid)) {
+ ALOGE("MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+ "(don't trust callingUid %d)",
+ in_clientPid, in_clientUid, callingUid);
+ return STATUS_ERROR_FMT(
+ IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+ "MediaTranscodingService::registerClient rejected (clientPid %d, clientUid %d) "
+ "(don't trust callingUid %d)",
+ in_clientPid, in_clientUid, callingUid);
+ }
+
+ int32_t sessionId = mNextSessionId.fetch_add(1);
+
+ *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, in_clientUid,
+ in_request, mClientCallback);
+
+ if (*_aidl_return) {
+ out_session->sessionId = sessionId;
+
+ // TODO(chz): is some of this coming from SessionController?
+ *(TranscodingRequest*)&out_session->request = in_request;
+ out_session->awaitNumberOfSessions = 0;
+ }
+
+ return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::cancelSession(int32_t in_sessionId,
+ bool* _aidl_return) {
+ *_aidl_return = false;
+
+ std::shared_ptr<TranscodingClientManager> owner;
+ if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+ return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+ }
+
+ if (in_sessionId < 0) {
+ return Status::ok();
+ }
+
+ *_aidl_return = owner->mSessionController->cancel(mClientId, in_sessionId);
+ return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getSessionWithId(int32_t in_sessionId,
+ TranscodingSessionParcel* out_session,
+ bool* _aidl_return) {
+ *_aidl_return = false;
+
+ std::shared_ptr<TranscodingClientManager> owner;
+ if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+ return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+ }
+
+ if (in_sessionId < 0) {
+ return Status::ok();
+ }
*_aidl_return =
- owner->mJobScheduler->submit(mClientId, jobId, mClientUid, in_request, mClientCallback);
+ owner->mSessionController->getSession(mClientId, in_sessionId, &out_session->request);
if (*_aidl_return) {
- out_job->jobId = jobId;
-
- // TODO(chz): is some of this coming from JobScheduler?
- *(TranscodingRequest*)&out_job->request = in_request;
- out_job->awaitNumberOfJobs = 0;
- }
-
- return Status::ok();
-}
-
-Status TranscodingClientManager::ClientImpl::cancelJob(int32_t in_jobId, bool* _aidl_return) {
- *_aidl_return = false;
-
- std::shared_ptr<TranscodingClientManager> owner;
- if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
- return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
- }
-
- if (in_jobId < 0) {
- return Status::ok();
- }
-
- *_aidl_return = owner->mJobScheduler->cancel(mClientId, in_jobId);
- return Status::ok();
-}
-
-Status TranscodingClientManager::ClientImpl::getJobWithId(int32_t in_jobId,
- TranscodingJobParcel* out_job,
- bool* _aidl_return) {
- *_aidl_return = false;
-
- std::shared_ptr<TranscodingClientManager> owner;
- if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
- return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
- }
-
- if (in_jobId < 0) {
- return Status::ok();
- }
-
- *_aidl_return = owner->mJobScheduler->getJob(mClientId, in_jobId, &out_job->request);
-
- if (*_aidl_return) {
- out_job->jobId = in_jobId;
- out_job->awaitNumberOfJobs = 0;
+ out_session->sessionId = in_sessionId;
+ out_session->awaitNumberOfSessions = 0;
}
return Status::ok();
}
@@ -180,8 +227,8 @@
return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
}
- // Use jobId == -1 to cancel all realtime jobs for this client with the scheduler.
- owner->mJobScheduler->cancel(mClientId, -1);
+ // Use sessionId == -1 to cancel all realtime sessions for this client with the controller.
+ owner->mSessionController->cancel(mClientId, -1);
owner->removeClient(mClientId);
return Status::ok();
@@ -212,9 +259,13 @@
}
TranscodingClientManager::TranscodingClientManager(
- const std::shared_ptr<SchedulerClientInterface>& scheduler)
- : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)), mJobScheduler(scheduler) {
+ const std::shared_ptr<ControllerClientInterface>& controller)
+ : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)),
+ mSessionController(controller) {
ALOGD("TranscodingClientManager started");
+ for (uid_t uid : {AID_ROOT, AID_SYSTEM, AID_SHELL, AID_MEDIA}) {
+ mTrustedUids.insert(uid);
+ }
}
TranscodingClientManager::~TranscodingClientManager() {
@@ -228,16 +279,16 @@
char buffer[SIZE];
std::scoped_lock lock{mLock};
- snprintf(buffer, SIZE, " Total num of Clients: %zu\n", mClientIdToClientMap.size());
- result.append(buffer);
-
if (mClientIdToClientMap.size() > 0) {
- snprintf(buffer, SIZE, "========== Dumping all clients =========\n");
+ snprintf(buffer, SIZE, "\n========== Dumping all clients =========\n");
result.append(buffer);
}
+ snprintf(buffer, SIZE, " Total num of Clients: %zu\n", mClientIdToClientMap.size());
+ result.append(buffer);
+
for (const auto& iter : mClientIdToClientMap) {
- snprintf(buffer, SIZE, " -- Client id: %lld name: %s\n", (long long)iter.first,
+ snprintf(buffer, SIZE, " Client %lld: pkg: %s\n", (long long)iter.first,
iter.second->mClientName.c_str());
result.append(buffer);
}
@@ -245,12 +296,27 @@
write(fd, result.string(), result.size());
}
+bool TranscodingClientManager::isTrustedCaller(pid_t pid, uid_t uid) {
+ if (uid > 0 && mTrustedUids.count(uid) > 0) {
+ return true;
+ }
+
+ int32_t result;
+ if (APermissionManager_checkPermission("android.permission.WRITE_MEDIA_STORAGE", pid, uid,
+ &result) == PERMISSION_MANAGER_STATUS_OK &&
+ result == PERMISSION_MANAGER_PERMISSION_GRANTED) {
+ mTrustedUids.insert(uid);
+ return true;
+ }
+
+ return false;
+}
+
status_t TranscodingClientManager::addClient(
- const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid, uid_t uid,
- const std::string& clientName, const std::string& opPackageName,
- std::shared_ptr<ITranscodingClient>* outClient) {
+ const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+ const std::string& opPackageName, std::shared_ptr<ITranscodingClient>* outClient) {
// Validate the client.
- if (callback == nullptr || pid < 0 || clientName.empty() || opPackageName.empty()) {
+ if (callback == nullptr || clientName.empty() || opPackageName.empty()) {
ALOGE("Invalid client");
return IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT;
}
@@ -264,12 +330,11 @@
return IMediaTranscodingService::ERROR_ALREADY_EXISTS;
}
- // Creates the client and uses its process id as client id.
+ // Creates the client (with the id assigned by ClientImpl).
std::shared_ptr<ClientImpl> client = ::ndk::SharedRefBase::make<ClientImpl>(
- callback, pid, uid, clientName, opPackageName, shared_from_this());
+ callback, clientName, opPackageName, shared_from_this());
- ALOGD("Adding client id %lld, pid %d, uid %d, name %s, package %s",
- (long long)client->mClientId, client->mClientPid, client->mClientUid,
+ ALOGD("Adding client id %lld, name %s, package %s", (long long)client->mClientId,
client->mClientName.c_str(), client->mClientOpPackageName.c_str());
{
diff --git a/media/libmediatranscoding/TranscodingJobScheduler.cpp b/media/libmediatranscoding/TranscodingJobScheduler.cpp
deleted file mode 100644
index ea07c5f..0000000
--- a/media/libmediatranscoding/TranscodingJobScheduler.cpp
+++ /dev/null
@@ -1,473 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "TranscodingJobScheduler"
-
-#define VALIDATE_STATE 1
-
-#include <inttypes.h>
-#include <media/TranscodingJobScheduler.h>
-#include <utils/Log.h>
-
-#include <utility>
-
-namespace android {
-
-static_assert((JobIdType)-1 < 0, "JobIdType should be signed");
-
-constexpr static uid_t OFFLINE_UID = -1;
-
-//static
-String8 TranscodingJobScheduler::jobToString(const JobKeyType& jobKey) {
- return String8::format("{client:%lld, job:%d}", (long long)jobKey.first, jobKey.second);
-}
-
-TranscodingJobScheduler::TranscodingJobScheduler(
- const std::shared_ptr<TranscoderInterface>& transcoder,
- const std::shared_ptr<UidPolicyInterface>& uidPolicy)
- : mTranscoder(transcoder), mUidPolicy(uidPolicy), mCurrentJob(nullptr), mResourceLost(false) {
- // Only push empty offline queue initially. Realtime queues are added when requests come in.
- mUidSortedList.push_back(OFFLINE_UID);
- mOfflineUidIterator = mUidSortedList.begin();
- mJobQueues.emplace(OFFLINE_UID, JobQueueType());
-}
-
-TranscodingJobScheduler::~TranscodingJobScheduler() {}
-
-TranscodingJobScheduler::Job* TranscodingJobScheduler::getTopJob_l() {
- if (mJobMap.empty()) {
- return nullptr;
- }
- uid_t topUid = *mUidSortedList.begin();
- JobKeyType topJobKey = *mJobQueues[topUid].begin();
- return &mJobMap[topJobKey];
-}
-
-void TranscodingJobScheduler::updateCurrentJob_l() {
- Job* topJob = getTopJob_l();
- Job* curJob = mCurrentJob;
- ALOGV("updateCurrentJob: topJob is %s, curJob is %s",
- topJob == nullptr ? "null" : jobToString(topJob->key).c_str(),
- curJob == nullptr ? "null" : jobToString(curJob->key).c_str());
-
- // If we found a topJob that should be run, and it's not already running,
- // take some actions to ensure it's running.
- if (topJob != nullptr && (topJob != curJob || topJob->state != Job::RUNNING)) {
- // If another job is currently running, pause it first.
- if (curJob != nullptr && curJob->state == Job::RUNNING) {
- mTranscoder->pause(curJob->key.first, curJob->key.second);
- curJob->state = Job::PAUSED;
- }
- // If we are not experiencing resource loss, we can start or resume
- // the topJob now.
- if (!mResourceLost) {
- if (topJob->state == Job::NOT_STARTED) {
- mTranscoder->start(topJob->key.first, topJob->key.second, curJob->request);
- } else if (topJob->state == Job::PAUSED) {
- mTranscoder->resume(topJob->key.first, topJob->key.second);
- }
- topJob->state = Job::RUNNING;
- }
- }
- mCurrentJob = topJob;
-}
-
-void TranscodingJobScheduler::removeJob_l(const JobKeyType& jobKey) {
- ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
-
- if (mJobMap.count(jobKey) == 0) {
- ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
- return;
- }
-
- // Remove job from uid's queue.
- const uid_t uid = mJobMap[jobKey].uid;
- JobQueueType& jobQueue = mJobQueues[uid];
- auto it = std::find(jobQueue.begin(), jobQueue.end(), jobKey);
- if (it == jobQueue.end()) {
- ALOGE("couldn't find job %s in queue for uid %d", jobToString(jobKey).c_str(), uid);
- return;
- }
- jobQueue.erase(it);
-
- // If this is the last job in a real-time queue, remove this uid's queue.
- if (uid != OFFLINE_UID && jobQueue.empty()) {
- mUidSortedList.remove(uid);
- mJobQueues.erase(uid);
- mUidPolicy->unregisterMonitorUid(uid);
-
- std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
- moveUidsToTop_l(topUids, false /*preserveTopUid*/);
- }
-
- // Clear current job.
- if (mCurrentJob == &mJobMap[jobKey]) {
- mCurrentJob = nullptr;
- }
-
- // Remove job from job map.
- mJobMap.erase(jobKey);
-}
-
-/**
- * Moves the set of uids to the front of mUidSortedList (which is used to pick
- * the next job to run).
- *
- * This is called when 1) we received a onTopUidsChanged() callbcak from UidPolicy,
- * or 2) we removed the job queue for a uid because it becomes empty.
- *
- * In case of 1), if there are multiple uids in the set, and the current front
- * uid in mUidSortedList is still in the set, we try to keep that uid at front
- * so that current job run is not interrupted. (This is not a concern for case 2)
- * because the queue for a uid was just removed entirely.)
- */
-void TranscodingJobScheduler::moveUidsToTop_l(const std::unordered_set<uid_t>& uids,
- bool preserveTopUid) {
- // If uid set is empty, nothing to do. Do not change the queue status.
- if (uids.empty()) {
- return;
- }
-
- // Save the current top uid.
- uid_t curTopUid = *mUidSortedList.begin();
- bool pushCurTopToFront = false;
- int32_t numUidsMoved = 0;
-
- // Go through the sorted uid list once, and move the ones in top set to front.
- for (auto it = mUidSortedList.begin(); it != mUidSortedList.end();) {
- uid_t uid = *it;
-
- if (uid != OFFLINE_UID && uids.count(uid) > 0) {
- it = mUidSortedList.erase(it);
-
- // If this is the top we're preserving, don't push it here, push
- // it after the for-loop.
- if (uid == curTopUid && preserveTopUid) {
- pushCurTopToFront = true;
- } else {
- mUidSortedList.push_front(uid);
- }
-
- // If we found all uids in the set, break out.
- if (++numUidsMoved == uids.size()) {
- break;
- }
- } else {
- ++it;
- }
- }
-
- if (pushCurTopToFront) {
- mUidSortedList.push_front(curTopUid);
- }
-}
-
-bool TranscodingJobScheduler::submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
- const TranscodingRequestParcel& request,
- const std::weak_ptr<ITranscodingClientCallback>& callback) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- ALOGV("%s: job %s, uid %d, prioirty %d", __FUNCTION__, jobToString(jobKey).c_str(), uid,
- (int32_t)request.priority);
-
- std::scoped_lock lock{mLock};
-
- if (mJobMap.count(jobKey) > 0) {
- ALOGE("job %s already exists", jobToString(jobKey).c_str());
- return false;
- }
-
- // TODO(chz): only support offline vs real-time for now. All kUnspecified jobs
- // go to offline queue.
- if (request.priority == TranscodingJobPriority::kUnspecified) {
- uid = OFFLINE_UID;
- }
-
- // Add job to job map.
- mJobMap[jobKey].key = jobKey;
- mJobMap[jobKey].uid = uid;
- mJobMap[jobKey].state = Job::NOT_STARTED;
- mJobMap[jobKey].request = request;
- mJobMap[jobKey].callback = callback;
-
- // If it's an offline job, the queue was already added in constructor.
- // If it's a real-time jobs, check if a queue is already present for the uid,
- // and add a new queue if needed.
- if (uid != OFFLINE_UID) {
- if (mJobQueues.count(uid) == 0) {
- mUidPolicy->registerMonitorUid(uid);
- if (mUidPolicy->isUidOnTop(uid)) {
- mUidSortedList.push_front(uid);
- } else {
- // Shouldn't be submitting real-time requests from non-top app,
- // put it in front of the offline queue.
- mUidSortedList.insert(mOfflineUidIterator, uid);
- }
- } else if (uid != *mUidSortedList.begin()) {
- if (mUidPolicy->isUidOnTop(uid)) {
- mUidSortedList.remove(uid);
- mUidSortedList.push_front(uid);
- }
- }
- }
- // Append this job to the uid's queue.
- mJobQueues[uid].push_back(jobKey);
-
- updateCurrentJob_l();
-
- validateState_l();
- return true;
-}
-
-bool TranscodingJobScheduler::cancel(ClientIdType clientId, JobIdType jobId) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
-
- std::list<JobKeyType> jobsToRemove;
-
- std::scoped_lock lock{mLock};
-
- if (jobId < 0) {
- for (auto it = mJobMap.begin(); it != mJobMap.end(); ++it) {
- if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
- jobsToRemove.push_back(it->first);
- }
- }
- } else {
- if (mJobMap.count(jobKey) == 0) {
- ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
- return false;
- }
- jobsToRemove.push_back(jobKey);
- }
-
- for (auto it = jobsToRemove.begin(); it != jobsToRemove.end(); ++it) {
- // If the job has ever been started, stop it now.
- // Note that stop() is needed even if the job is currently paused. This instructs
- // the transcoder to discard any states for the job, otherwise the states may
- // never be discarded.
- if (mJobMap[*it].state != Job::NOT_STARTED) {
- mTranscoder->stop(it->first, it->second);
- }
-
- // Remove the job.
- removeJob_l(*it);
- }
-
- // Start next job.
- updateCurrentJob_l();
-
- validateState_l();
- return true;
-}
-
-bool TranscodingJobScheduler::getJob(ClientIdType clientId, JobIdType jobId,
- TranscodingRequestParcel* request) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- std::scoped_lock lock{mLock};
-
- if (mJobMap.count(jobKey) == 0) {
- ALOGE("job %s doesn't exist", jobToString(jobKey).c_str());
- return false;
- }
-
- *(TranscodingRequest*)request = mJobMap[jobKey].request;
- return true;
-}
-
-void TranscodingJobScheduler::onFinish(ClientIdType clientId, JobIdType jobId) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- ALOGV("%s: job %s", __FUNCTION__, jobToString(jobKey).c_str());
-
- std::scoped_lock lock{mLock};
-
- if (mJobMap.count(jobKey) == 0) {
- ALOGW("ignoring finish for non-existent job");
- return;
- }
-
- // Only ignore if job was never started. In particular, propagate the status
- // to client if the job is paused. Transcoder could have posted finish when
- // we're pausing it, and the finish arrived after we changed current job.
- if (mJobMap[jobKey].state == Job::NOT_STARTED) {
- ALOGW("ignoring finish for job that was never started");
- return;
- }
-
- {
- auto clientCallback = mJobMap[jobKey].callback.lock();
- if (clientCallback != nullptr) {
- clientCallback->onTranscodingFinished(jobId, TranscodingResultParcel({jobId, 0}));
- }
- }
-
- // Remove the job.
- removeJob_l(jobKey);
-
- // Start next job.
- updateCurrentJob_l();
-
- validateState_l();
-}
-
-void TranscodingJobScheduler::onError(ClientIdType clientId, JobIdType jobId,
- TranscodingErrorCode err) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- ALOGV("%s: job %s, err %d", __FUNCTION__, jobToString(jobKey).c_str(), (int32_t)err);
-
- std::scoped_lock lock{mLock};
-
- if (mJobMap.count(jobKey) == 0) {
- ALOGW("ignoring error for non-existent job");
- return;
- }
-
- // Only ignore if job was never started. In particular, propagate the status
- // to client if the job is paused. Transcoder could have posted finish when
- // we're pausing it, and the finish arrived after we changed current job.
- if (mJobMap[jobKey].state == Job::NOT_STARTED) {
- ALOGW("ignoring error for job that was never started");
- return;
- }
-
- {
- auto clientCallback = mJobMap[jobKey].callback.lock();
- if (clientCallback != nullptr) {
- clientCallback->onTranscodingFailed(jobId, err);
- }
- }
-
- // Remove the job.
- removeJob_l(jobKey);
-
- // Start next job.
- updateCurrentJob_l();
-
- validateState_l();
-}
-
-void TranscodingJobScheduler::onProgressUpdate(ClientIdType clientId, JobIdType jobId,
- int32_t progress) {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
-
- ALOGV("%s: job %s, progress %d", __FUNCTION__, jobToString(jobKey).c_str(), progress);
-
- std::scoped_lock lock{mLock};
-
- if (mJobMap.count(jobKey) == 0) {
- ALOGW("ignoring progress for non-existent job");
- return;
- }
-
- // Only ignore if job was never started. In particular, propagate the status
- // to client if the job is paused. Transcoder could have posted finish when
- // we're pausing it, and the finish arrived after we changed current job.
- if (mJobMap[jobKey].state == Job::NOT_STARTED) {
- ALOGW("ignoring progress for job that was never started");
- return;
- }
-
- {
- auto clientCallback = mJobMap[jobKey].callback.lock();
- if (clientCallback != nullptr) {
- clientCallback->onProgressUpdate(jobId, progress);
- }
- }
-}
-
-void TranscodingJobScheduler::onResourceLost() {
- ALOGV("%s", __FUNCTION__);
-
- std::scoped_lock lock{mLock};
-
- // If we receive a resource loss event, the TranscoderLibrary already paused
- // the transcoding, so we don't need to call onPaused to notify it to pause.
- // Only need to update the job state here.
- if (mCurrentJob != nullptr && mCurrentJob->state == Job::RUNNING) {
- mCurrentJob->state = Job::PAUSED;
- }
- mResourceLost = true;
-
- validateState_l();
-}
-
-void TranscodingJobScheduler::onTopUidsChanged(const std::unordered_set<uid_t>& uids) {
- if (uids.empty()) {
- ALOGW("%s: ignoring empty uids", __FUNCTION__);
- return;
- }
-
- std::string uidStr;
- for (auto it = uids.begin(); it != uids.end(); it++) {
- if (!uidStr.empty()) {
- uidStr += ", ";
- }
- uidStr += std::to_string(*it);
- }
-
- ALOGD("%s: topUids: size %zu, uids: %s", __FUNCTION__, uids.size(), uidStr.c_str());
-
- std::scoped_lock lock{mLock};
-
- moveUidsToTop_l(uids, true /*preserveTopUid*/);
-
- updateCurrentJob_l();
-
- validateState_l();
-}
-
-void TranscodingJobScheduler::onResourceAvailable() {
- ALOGV("%s", __FUNCTION__);
-
- std::scoped_lock lock{mLock};
-
- mResourceLost = false;
- updateCurrentJob_l();
-
- validateState_l();
-}
-
-void TranscodingJobScheduler::validateState_l() {
-#ifdef VALIDATE_STATE
- LOG_ALWAYS_FATAL_IF(mJobQueues.count(OFFLINE_UID) != 1,
- "mJobQueues offline queue number is not 1");
- LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
- "mOfflineUidIterator not pointing to offline uid");
- LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mJobQueues.size(),
- "mUidList and mJobQueues size mismatch");
-
- int32_t totalJobs = 0;
- for (auto uidIt = mUidSortedList.begin(); uidIt != mUidSortedList.end(); uidIt++) {
- LOG_ALWAYS_FATAL_IF(mJobQueues.count(*uidIt) != 1, "mJobQueues count for uid %d is not 1",
- *uidIt);
- for (auto jobIt = mJobQueues[*uidIt].begin(); jobIt != mJobQueues[*uidIt].end(); jobIt++) {
- LOG_ALWAYS_FATAL_IF(mJobMap.count(*jobIt) != 1, "mJobs count for job %s is not 1",
- jobToString(*jobIt).c_str());
- }
-
- totalJobs += mJobQueues[*uidIt].size();
- }
- LOG_ALWAYS_FATAL_IF(mJobMap.size() != totalJobs,
- "mJobs size doesn't match total jobs counted from uid queues");
-#endif // VALIDATE_STATE
-}
-
-} // namespace android
diff --git a/media/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
new file mode 100644
index 0000000..af53f64
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingResourcePolicy"
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <aidl/android/media/IResourceObserverService.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::IResourceObserverService;
+using ::aidl::android::media::MediaObservableEvent;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+static std::string toString(const MediaObservableParcel& observable) {
+ return "{" + ::aidl::android::media::toString(observable.type) + ", " +
+ std::to_string(observable.value) + "}";
+}
+
+struct TranscodingResourcePolicy::ResourceObserver : public BnResourceObserver {
+ explicit ResourceObserver(TranscodingResourcePolicy* owner) : mOwner(owner) {}
+
+ // IResourceObserver
+ ::ndk::ScopedAStatus onStatusChanged(
+ MediaObservableEvent event, int32_t uid, int32_t pid,
+ const std::vector<MediaObservableParcel>& observables) override {
+ ALOGD("%s: %s, uid %d, pid %d, %s", __FUNCTION__,
+ ::aidl::android::media::toString(event).c_str(), uid, pid,
+ toString(observables[0]).c_str());
+
+ // Only report kIdle event.
+ if (((uint64_t)event & (uint64_t)MediaObservableEvent::kIdle) != 0) {
+ for (auto& observable : observables) {
+ if (observable.type == MediaObservableType::kVideoSecureCodec ||
+ observable.type == MediaObservableType::kVideoNonSecureCodec) {
+ mOwner->onResourceAvailable(pid);
+ break;
+ }
+ }
+ }
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ TranscodingResourcePolicy* mOwner;
+};
+
+// static
+void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
+ TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
+ if (owner != nullptr) {
+ owner->unregisterSelf();
+ }
+ // TODO(chz): retry connecting to IResourceObserverService after failure.
+ // Also need to have back-up logic if IResourceObserverService is offline for
+ // prolonged periods of time. A possible alternative could be, during periods where
+ // IResourceObserverService is not available, trigger onResourceAvailable() every time
+ // when top uid changes (in hope that'll free up some codec instances that we could
+ // reclaim).
+}
+
+TranscodingResourcePolicy::TranscodingResourcePolicy()
+ : mRegistered(false),
+ mResourceLostPid(-1),
+ mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+ registerSelf();
+}
+
+TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+ unregisterSelf();
+}
+
+void TranscodingResourcePolicy::registerSelf() {
+ ALOGI("TranscodingResourcePolicy: registerSelf");
+
+ ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_observer"));
+
+ std::scoped_lock lock{mRegisteredLock};
+
+ if (mRegistered) {
+ return;
+ }
+
+ // TODO(chz): retry connecting to IResourceObserverService after failure.
+ mService = IResourceObserverService::fromBinder(binder);
+ if (mService == nullptr) {
+ ALOGE("Failed to get IResourceObserverService");
+ return;
+ }
+
+ // Only register filters for codec resource availability.
+ mObserver = ::ndk::SharedRefBase::make<ResourceObserver>(this);
+ std::vector<MediaObservableFilter> filters = {
+ {MediaObservableType::kVideoSecureCodec, MediaObservableEvent::kIdle},
+ {MediaObservableType::kVideoNonSecureCodec, MediaObservableEvent::kIdle}};
+
+ Status status = mService->registerObserver(mObserver, filters);
+ if (!status.isOk()) {
+ ALOGE("failed to register: error %d", status.getServiceSpecificError());
+ mService = nullptr;
+ mObserver = nullptr;
+ return;
+ }
+
+ AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+
+ ALOGD("@@@ registered observer");
+ mRegistered = true;
+}
+
+void TranscodingResourcePolicy::unregisterSelf() {
+ ALOGI("TranscodingResourcePolicy: unregisterSelf");
+
+ std::scoped_lock lock{mRegisteredLock};
+
+ if (!mRegistered) {
+ return;
+ }
+
+ ::ndk::SpAIBinder binder = mService->asBinder();
+ if (binder.get() != nullptr) {
+ Status status = mService->unregisterObserver(mObserver);
+ AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+ }
+
+ mService = nullptr;
+ mObserver = nullptr;
+ mRegistered = false;
+}
+
+void TranscodingResourcePolicy::setCallback(
+ const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) {
+ std::scoped_lock lock{mCallbackLock};
+ mResourcePolicyCallback = cb;
+}
+
+void TranscodingResourcePolicy::setPidResourceLost(pid_t pid) {
+ std::scoped_lock lock{mCallbackLock};
+ mResourceLostPid = pid;
+}
+
+void TranscodingResourcePolicy::onResourceAvailable(pid_t pid) {
+ std::shared_ptr<ResourcePolicyCallbackInterface> cb;
+ {
+ std::scoped_lock lock{mCallbackLock};
+ // Only callback if codec resource is released from other processes.
+ if (mResourceLostPid != -1 && mResourceLostPid != pid) {
+ cb = mResourcePolicyCallback.lock();
+ mResourceLostPid = -1;
+ }
+ }
+
+ if (cb != nullptr) {
+ cb->onResourceAvailable();
+ }
+}
+} // namespace android
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
new file mode 100644
index 0000000..b77a3a4
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -0,0 +1,642 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionController"
+
+#define VALIDATE_STATE 1
+
+#include <inttypes.h>
+#include <media/TranscodingSessionController.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+static_assert((SessionIdType)-1 < 0, "SessionIdType should be signed");
+
+constexpr static uid_t OFFLINE_UID = -1;
+constexpr static size_t kSessionHistoryMax = 100;
+
+//static
+String8 TranscodingSessionController::sessionToString(const SessionKeyType& sessionKey) {
+ return String8::format("{client:%lld, session:%d}", (long long)sessionKey.first,
+ sessionKey.second);
+}
+
+//static
+const char* TranscodingSessionController::sessionStateToString(const Session::State sessionState) {
+ switch (sessionState) {
+ case Session::State::NOT_STARTED:
+ return "NOT_STARTED";
+ case Session::State::RUNNING:
+ return "RUNNING";
+ case Session::State::PAUSED:
+ return "PAUSED";
+ case Session::State::FINISHED:
+ return "FINISHED";
+ case Session::State::CANCELED:
+ return "CANCELED";
+ case Session::State::ERROR:
+ return "ERROR";
+ default:
+ break;
+ }
+ return "(unknown)";
+}
+
+TranscodingSessionController::TranscodingSessionController(
+ const std::shared_ptr<TranscoderInterface>& transcoder,
+ const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+ const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy)
+ : mTranscoder(transcoder),
+ mUidPolicy(uidPolicy),
+ mResourcePolicy(resourcePolicy),
+ mCurrentSession(nullptr),
+ mResourceLost(false) {
+ // Only push empty offline queue initially. Realtime queues are added when requests come in.
+ mUidSortedList.push_back(OFFLINE_UID);
+ mOfflineUidIterator = mUidSortedList.begin();
+ mSessionQueues.emplace(OFFLINE_UID, SessionQueueType());
+ mUidPackageNames[OFFLINE_UID] = "(offline)";
+}
+
+TranscodingSessionController::~TranscodingSessionController() {}
+
+void TranscodingSessionController::dumpSession_l(const Session& session, String8& result,
+ bool closedSession) {
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ const TranscodingRequestParcel& request = session.request;
+ snprintf(buffer, SIZE, " Session: %s, %s, %d%%\n", sessionToString(session.key).c_str(),
+ sessionStateToString(session.getState()), session.lastProgress);
+ result.append(buffer);
+ snprintf(buffer, SIZE, " pkg: %s\n", request.clientPackageName.c_str());
+ result.append(buffer);
+ snprintf(buffer, SIZE, " src: %s\n", request.sourceFilePath.c_str());
+ result.append(buffer);
+ snprintf(buffer, SIZE, " dst: %s\n", request.destinationFilePath.c_str());
+ result.append(buffer);
+
+ if (closedSession) {
+ snprintf(buffer, SIZE,
+ " waiting: %.1fs, running: %.1fs, paused: %.1fs, paused count: %d\n",
+ session.waitingTime.count() / 1000000.0f, session.runningTime.count() / 1000000.0f,
+ session.pausedTime.count() / 1000000.0f, session.pauseCount);
+ result.append(buffer);
+ }
+}
+
+void TranscodingSessionController::dumpAllSessions(int fd, const Vector<String16>& args __unused) {
+ String8 result;
+
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ std::scoped_lock lock{mLock};
+
+ snprintf(buffer, SIZE, "\n========== Dumping live sessions queues =========\n");
+ result.append(buffer);
+ snprintf(buffer, SIZE, " Total num of Sessions: %zu\n", mSessionMap.size());
+ result.append(buffer);
+
+ std::vector<int32_t> uids(mUidSortedList.begin(), mUidSortedList.end());
+
+ for (int32_t i = 0; i < uids.size(); i++) {
+ const uid_t uid = uids[i];
+
+ if (mSessionQueues[uid].empty()) {
+ continue;
+ }
+ snprintf(buffer, SIZE, " uid: %d, pkg: %s\n", uid,
+ mUidPackageNames.count(uid) > 0 ? mUidPackageNames[uid].c_str() : "(unknown)");
+ result.append(buffer);
+ snprintf(buffer, SIZE, " Num of sessions: %zu\n", mSessionQueues[uid].size());
+ result.append(buffer);
+ for (auto& sessionKey : mSessionQueues[uid]) {
+ auto sessionIt = mSessionMap.find(sessionKey);
+ if (sessionIt == mSessionMap.end()) {
+ snprintf(buffer, SIZE, "Failed to look up Session %s \n",
+ sessionToString(sessionKey).c_str());
+ result.append(buffer);
+ continue;
+ }
+ dumpSession_l(sessionIt->second, result);
+ }
+ }
+
+ snprintf(buffer, SIZE, "\n========== Dumping past sessions =========\n");
+ result.append(buffer);
+ for (auto &session : mSessionHistory) {
+ dumpSession_l(session, result, true /*closedSession*/);
+ }
+
+ write(fd, result.string(), result.size());
+}
+
+TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
+ if (mSessionMap.empty()) {
+ return nullptr;
+ }
+ uid_t topUid = *mUidSortedList.begin();
+ SessionKeyType topSessionKey = *mSessionQueues[topUid].begin();
+ return &mSessionMap[topSessionKey];
+}
+
+void TranscodingSessionController::Session::setState(Session::State newState) {
+ if (state == newState) {
+ return;
+ }
+ auto nowTime = std::chrono::system_clock::now();
+ if (state != INVALID) {
+ std::chrono::microseconds elapsedTime = (nowTime - stateEnterTime);
+ switch (state) {
+ case PAUSED:
+ pausedTime = pausedTime + elapsedTime;
+ break;
+ case RUNNING:
+ runningTime = runningTime + elapsedTime;
+ break;
+ case NOT_STARTED:
+ waitingTime = waitingTime + elapsedTime;
+ break;
+ default:
+ break;
+ }
+ }
+ if (newState == PAUSED) {
+ pauseCount++;
+ }
+ stateEnterTime = nowTime;
+ state = newState;
+}
+
+void TranscodingSessionController::updateCurrentSession_l() {
+ Session* topSession = getTopSession_l();
+ Session* curSession = mCurrentSession;
+ ALOGV("updateCurrentSession: topSession is %s, curSession is %s",
+ topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
+ curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+
+ // If we found a topSession that should be run, and it's not already running,
+ // take some actions to ensure it's running.
+ if (topSession != nullptr &&
+ (topSession != curSession || topSession->getState() != Session::RUNNING)) {
+ // If another session is currently running, pause it first.
+ if (curSession != nullptr && curSession->getState() == Session::RUNNING) {
+ mTranscoder->pause(curSession->key.first, curSession->key.second);
+ curSession->setState(Session::PAUSED);
+ }
+ // If we are not experiencing resource loss, we can start or resume
+ // the topSession now.
+ if (!mResourceLost) {
+ if (topSession->getState() == Session::NOT_STARTED) {
+ mTranscoder->start(topSession->key.first, topSession->key.second,
+ topSession->request, topSession->callback.lock());
+ } else if (topSession->getState() == Session::PAUSED) {
+ mTranscoder->resume(topSession->key.first, topSession->key.second,
+ topSession->request, topSession->callback.lock());
+ }
+ topSession->setState(Session::RUNNING);
+ }
+ }
+ mCurrentSession = topSession;
+}
+
+void TranscodingSessionController::removeSession_l(const SessionKeyType& sessionKey,
+ Session::State finalState) {
+ ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+ if (mSessionMap.count(sessionKey) == 0) {
+ ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+ return;
+ }
+
+ // Remove session from uid's queue.
+ const uid_t uid = mSessionMap[sessionKey].uid;
+ SessionQueueType& sessionQueue = mSessionQueues[uid];
+ auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
+ if (it == sessionQueue.end()) {
+ ALOGE("couldn't find session %s in queue for uid %d", sessionToString(sessionKey).c_str(),
+ uid);
+ return;
+ }
+ sessionQueue.erase(it);
+
+ // If this is the last session in a real-time queue, remove this uid's queue.
+ if (uid != OFFLINE_UID && sessionQueue.empty()) {
+ mUidSortedList.remove(uid);
+ mSessionQueues.erase(uid);
+ mUidPolicy->unregisterMonitorUid(uid);
+
+ std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
+ moveUidsToTop_l(topUids, false /*preserveTopUid*/);
+ }
+
+ // Clear current session.
+ if (mCurrentSession == &mSessionMap[sessionKey]) {
+ mCurrentSession = nullptr;
+ }
+
+ mSessionMap[sessionKey].setState(finalState);
+ mSessionHistory.push_back(mSessionMap[sessionKey]);
+ if (mSessionHistory.size() > kSessionHistoryMax) {
+ mSessionHistory.erase(mSessionHistory.begin());
+ }
+
+ // Remove session from session map.
+ mSessionMap.erase(sessionKey);
+}
+
+/**
+ * Moves the set of uids to the front of mUidSortedList (which is used to pick
+ * the next session to run).
+ *
+ * This is called when 1) we received an onTopUidsChanged() callback from UidPolicy,
+ * or 2) we removed the session queue for a uid because it becomes empty.
+ *
+ * In case of 1), if there are multiple uids in the set, and the current front
+ * uid in mUidSortedList is still in the set, we try to keep that uid at front
+ * so that current session run is not interrupted. (This is not a concern for case 2)
+ * because the queue for a uid was just removed entirely.)
+ */
+void TranscodingSessionController::moveUidsToTop_l(const std::unordered_set<uid_t>& uids,
+ bool preserveTopUid) {
+ // If uid set is empty, nothing to do. Do not change the queue status.
+ if (uids.empty()) {
+ return;
+ }
+
+ // Save the current top uid.
+ uid_t curTopUid = *mUidSortedList.begin();
+ bool pushCurTopToFront = false;
+ int32_t numUidsMoved = 0;
+
+ // Go through the sorted uid list once, and move the ones in top set to front.
+ for (auto it = mUidSortedList.begin(); it != mUidSortedList.end();) {
+ uid_t uid = *it;
+
+ if (uid != OFFLINE_UID && uids.count(uid) > 0) {
+ it = mUidSortedList.erase(it);
+
+ // If this is the top we're preserving, don't push it here, push
+ // it after the for-loop.
+ if (uid == curTopUid && preserveTopUid) {
+ pushCurTopToFront = true;
+ } else {
+ mUidSortedList.push_front(uid);
+ }
+
+ // If we found all uids in the set, break out.
+ if (++numUidsMoved == uids.size()) {
+ break;
+ }
+ } else {
+ ++it;
+ }
+ }
+
+ if (pushCurTopToFront) {
+ mUidSortedList.push_front(curTopUid);
+ }
+}
+
+bool TranscodingSessionController::submit(
+ ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+ const TranscodingRequestParcel& request,
+ const std::weak_ptr<ITranscodingClientCallback>& callback) {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+ ALOGV("%s: session %s, uid %d, prioirty %d", __FUNCTION__, sessionToString(sessionKey).c_str(),
+ uid, (int32_t)request.priority);
+
+ std::scoped_lock lock{mLock};
+
+ if (mSessionMap.count(sessionKey) > 0) {
+ ALOGE("session %s already exists", sessionToString(sessionKey).c_str());
+ return false;
+ }
+
+ // Add the uid package name to the store of package names we already know.
+ if (mUidPackageNames.count(uid) == 0) {
+ mUidPackageNames.emplace(uid, request.clientPackageName);
+ }
+
+ // TODO(chz): only support offline vs real-time for now. All kUnspecified sessions
+ // go to offline queue.
+ if (request.priority == TranscodingSessionPriority::kUnspecified) {
+ uid = OFFLINE_UID;
+ }
+
+ // Add session to session map.
+ mSessionMap[sessionKey].key = sessionKey;
+ mSessionMap[sessionKey].uid = uid;
+ mSessionMap[sessionKey].lastProgress = 0;
+ mSessionMap[sessionKey].pauseCount = 0;
+ mSessionMap[sessionKey].request = request;
+ mSessionMap[sessionKey].callback = callback;
+ mSessionMap[sessionKey].setState(Session::NOT_STARTED);
+
+ // If it's an offline session, the queue was already added in constructor.
+ // If it's a real-time sessions, check if a queue is already present for the uid,
+ // and add a new queue if needed.
+ if (uid != OFFLINE_UID) {
+ if (mSessionQueues.count(uid) == 0) {
+ mUidPolicy->registerMonitorUid(uid);
+ if (mUidPolicy->isUidOnTop(uid)) {
+ mUidSortedList.push_front(uid);
+ } else {
+ // Shouldn't be submitting real-time requests from non-top app,
+ // put it in front of the offline queue.
+ mUidSortedList.insert(mOfflineUidIterator, uid);
+ }
+ } else if (uid != *mUidSortedList.begin()) {
+ if (mUidPolicy->isUidOnTop(uid)) {
+ mUidSortedList.remove(uid);
+ mUidSortedList.push_front(uid);
+ }
+ }
+ }
+ // Append this session to the uid's queue.
+ mSessionQueues[uid].push_back(sessionKey);
+
+ updateCurrentSession_l();
+
+ validateState_l();
+ return true;
+}
+
+bool TranscodingSessionController::cancel(ClientIdType clientId, SessionIdType sessionId) {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+ ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+ std::list<SessionKeyType> sessionsToRemove;
+
+ std::scoped_lock lock{mLock};
+
+ if (sessionId < 0) {
+ for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
+ if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
+ sessionsToRemove.push_back(it->first);
+ }
+ }
+ } else {
+ if (mSessionMap.count(sessionKey) == 0) {
+ ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+ return false;
+ }
+ sessionsToRemove.push_back(sessionKey);
+ }
+
+ for (auto it = sessionsToRemove.begin(); it != sessionsToRemove.end(); ++it) {
+ // If the session has ever been started, stop it now.
+ // Note that stop() is needed even if the session is currently paused. This instructs
+ // the transcoder to discard any states for the session, otherwise the states may
+ // never be discarded.
+ if (mSessionMap[*it].getState() != Session::NOT_STARTED) {
+ mTranscoder->stop(it->first, it->second);
+ }
+
+ // Remove the session.
+ removeSession_l(*it, Session::CANCELED);
+ }
+
+ // Start next session.
+ updateCurrentSession_l();
+
+ validateState_l();
+ return true;
+}
+
+bool TranscodingSessionController::getSession(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingRequestParcel* request) {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+ std::scoped_lock lock{mLock};
+
+ if (mSessionMap.count(sessionKey) == 0) {
+ ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+ return false;
+ }
+
+ *(TranscodingRequest*)request = mSessionMap[sessionKey].request;
+ return true;
+}
+
+void TranscodingSessionController::notifyClient(ClientIdType clientId, SessionIdType sessionId,
+ const char* reason,
+ std::function<void(const SessionKeyType&)> func) {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+ std::scoped_lock lock{mLock};
+
+ if (mSessionMap.count(sessionKey) == 0) {
+ ALOGW("%s: ignoring %s for session %s that doesn't exist", __FUNCTION__, reason,
+ sessionToString(sessionKey).c_str());
+ return;
+ }
+
+ // Only ignore if session was never started. In particular, propagate the status
+ // to client if the session is paused. Transcoder could have posted finish when
+ // we're pausing it, and the finish arrived after we changed current session.
+ if (mSessionMap[sessionKey].getState() == Session::NOT_STARTED) {
+ ALOGW("%s: ignoring %s for session %s that was never started", __FUNCTION__, reason,
+ sessionToString(sessionKey).c_str());
+ return;
+ }
+
+ ALOGV("%s: session %s %s", __FUNCTION__, sessionToString(sessionKey).c_str(), reason);
+ func(sessionKey);
+}
+
+void TranscodingSessionController::onStarted(ClientIdType clientId, SessionIdType sessionId) {
+ notifyClient(clientId, sessionId, "started", [=](const SessionKeyType& sessionKey) {
+ auto callback = mSessionMap[sessionKey].callback.lock();
+ if (callback != nullptr) {
+ callback->onTranscodingStarted(sessionId);
+ }
+ });
+}
+
+void TranscodingSessionController::onPaused(ClientIdType clientId, SessionIdType sessionId) {
+ notifyClient(clientId, sessionId, "paused", [=](const SessionKeyType& sessionKey) {
+ auto callback = mSessionMap[sessionKey].callback.lock();
+ if (callback != nullptr) {
+ callback->onTranscodingPaused(sessionId);
+ }
+ });
+}
+
+void TranscodingSessionController::onResumed(ClientIdType clientId, SessionIdType sessionId) {
+ notifyClient(clientId, sessionId, "resumed", [=](const SessionKeyType& sessionKey) {
+ auto callback = mSessionMap[sessionKey].callback.lock();
+ if (callback != nullptr) {
+ callback->onTranscodingResumed(sessionId);
+ }
+ });
+}
+
+void TranscodingSessionController::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+ notifyClient(clientId, sessionId, "finish", [=](const SessionKeyType& sessionKey) {
+ {
+ auto clientCallback = mSessionMap[sessionKey].callback.lock();
+ if (clientCallback != nullptr) {
+ clientCallback->onTranscodingFinished(
+ sessionId, TranscodingResultParcel({sessionId, -1 /*actualBitrateBps*/,
+ std::nullopt /*sessionStats*/}));
+ }
+ }
+
+ // Remove the session.
+ removeSession_l(sessionKey, Session::FINISHED);
+
+ // Start next session.
+ updateCurrentSession_l();
+
+ validateState_l();
+ });
+}
+
+void TranscodingSessionController::onError(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingErrorCode err) {
+ notifyClient(clientId, sessionId, "error", [=](const SessionKeyType& sessionKey) {
+ {
+ auto clientCallback = mSessionMap[sessionKey].callback.lock();
+ if (clientCallback != nullptr) {
+ clientCallback->onTranscodingFailed(sessionId, err);
+ }
+ }
+
+ // Remove the session.
+ removeSession_l(sessionKey, Session::ERROR);
+
+ // Start next session.
+ updateCurrentSession_l();
+
+ validateState_l();
+ });
+}
+
+void TranscodingSessionController::onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+ int32_t progress) {
+ notifyClient(clientId, sessionId, "progress", [=](const SessionKeyType& sessionKey) {
+ auto callback = mSessionMap[sessionKey].callback.lock();
+ if (callback != nullptr) {
+ callback->onProgressUpdate(sessionId, progress);
+ }
+ mSessionMap[sessionKey].lastProgress = progress;
+ });
+}
+
+void TranscodingSessionController::onResourceLost(ClientIdType clientId, SessionIdType sessionId) {
+ ALOGI("%s", __FUNCTION__);
+
+ notifyClient(clientId, sessionId, "resource_lost", [=](const SessionKeyType& sessionKey) {
+ if (mResourceLost) {
+ return;
+ }
+
+ Session* resourceLostSession = &mSessionMap[sessionKey];
+ if (resourceLostSession->getState() != Session::RUNNING) {
+ ALOGW("session %s lost resource but is no longer running",
+ sessionToString(sessionKey).c_str());
+ return;
+ }
+ // If we receive a resource loss event, the transcoder already paused the transcoding,
+ // so we don't need to call onPaused() to pause it. However, we still need to notify
+ // the client and update the session state here.
+ resourceLostSession->setState(Session::PAUSED);
+ // Notify the client as a paused event.
+ auto clientCallback = resourceLostSession->callback.lock();
+ if (clientCallback != nullptr) {
+ clientCallback->onTranscodingPaused(sessionKey.second);
+ }
+ mResourcePolicy->setPidResourceLost(resourceLostSession->request.clientPid);
+ mResourceLost = true;
+
+ validateState_l();
+ });
+}
+
+void TranscodingSessionController::onTopUidsChanged(const std::unordered_set<uid_t>& uids) {
+ if (uids.empty()) {
+ ALOGW("%s: ignoring empty uids", __FUNCTION__);
+ return;
+ }
+
+ std::string uidStr;
+ for (auto it = uids.begin(); it != uids.end(); it++) {
+ if (!uidStr.empty()) {
+ uidStr += ", ";
+ }
+ uidStr += std::to_string(*it);
+ }
+
+ ALOGD("%s: topUids: size %zu, uids: %s", __FUNCTION__, uids.size(), uidStr.c_str());
+
+ std::scoped_lock lock{mLock};
+
+ moveUidsToTop_l(uids, true /*preserveTopUid*/);
+
+ updateCurrentSession_l();
+
+ validateState_l();
+}
+
+void TranscodingSessionController::onResourceAvailable() {
+ std::scoped_lock lock{mLock};
+
+ if (!mResourceLost) {
+ return;
+ }
+
+ ALOGI("%s", __FUNCTION__);
+
+ mResourceLost = false;
+ updateCurrentSession_l();
+
+ validateState_l();
+}
+
+void TranscodingSessionController::validateState_l() {
+#ifdef VALIDATE_STATE
+ LOG_ALWAYS_FATAL_IF(mSessionQueues.count(OFFLINE_UID) != 1,
+ "mSessionQueues offline queue number is not 1");
+ LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
+ "mOfflineUidIterator not pointing to offline uid");
+ LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mSessionQueues.size(),
+ "mUidList and mSessionQueues size mismatch");
+
+ int32_t totalSessions = 0;
+ for (auto uid : mUidSortedList) {
+ LOG_ALWAYS_FATAL_IF(mSessionQueues.count(uid) != 1,
+ "mSessionQueues count for uid %d is not 1", uid);
+ for (auto& sessionKey : mSessionQueues[uid]) {
+ LOG_ALWAYS_FATAL_IF(mSessionMap.count(sessionKey) != 1,
+ "mSessions count for session %s is not 1",
+ sessionToString(sessionKey).c_str());
+ }
+
+ totalSessions += mSessionQueues[uid].size();
+ }
+ LOG_ALWAYS_FATAL_IF(mSessionMap.size() != totalSessions,
+ "mSessions size doesn't match total sessions counted from uid queues");
+#endif // VALIDATE_STATE
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/libmediatranscoding/TranscodingUidPolicy.cpp
index b72a2b9..a725387 100644
--- a/media/libmediatranscoding/TranscodingUidPolicy.cpp
+++ b/media/libmediatranscoding/TranscodingUidPolicy.cpp
@@ -17,8 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "TranscodingUidPolicy"
-#include <binder/ActivityManager.h>
-#include <cutils/misc.h> // FIRST_APPLICATION_UID
+#include <android/activity_manager.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
#include <inttypes.h>
#include <media/TranscodingUidPolicy.h>
#include <utils/Log.h>
@@ -28,51 +29,12 @@
namespace android {
constexpr static uid_t OFFLINE_UID = -1;
-constexpr static const char* kTranscodingTag = "transcoding";
-
-struct TranscodingUidPolicy::UidObserver : public BnUidObserver,
- public virtual IBinder::DeathRecipient {
- explicit UidObserver(TranscodingUidPolicy* owner) : mOwner(owner) {}
-
- // IUidObserver
- void onUidGone(uid_t uid, bool disabled) override;
- void onUidActive(uid_t uid) override;
- void onUidIdle(uid_t uid, bool disabled) override;
- void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
- int32_t capability) override;
-
- // IBinder::DeathRecipient implementation
- void binderDied(const wp<IBinder>& who) override;
-
- TranscodingUidPolicy* mOwner;
-};
-
-void TranscodingUidPolicy::UidObserver::onUidGone(uid_t uid __unused, bool disabled __unused) {}
-
-void TranscodingUidPolicy::UidObserver::onUidActive(uid_t uid __unused) {}
-
-void TranscodingUidPolicy::UidObserver::onUidIdle(uid_t uid __unused, bool disabled __unused) {}
-
-void TranscodingUidPolicy::UidObserver::onUidStateChanged(uid_t uid, int32_t procState,
- int64_t procStateSeq __unused,
- int32_t capability __unused) {
- mOwner->onUidStateChanged(uid, procState);
-}
-
-void TranscodingUidPolicy::UidObserver::binderDied(const wp<IBinder>& /*who*/) {
- ALOGW("TranscodingUidPolicy: ActivityManager has died");
- // TODO(chz): this is a rare event (since if the AMS is dead, the system is
- // probably dead as well). But we should try to reconnect.
- mOwner->setUidObserverRegistered(false);
-}
-
-////////////////////////////////////////////////////////////////////////////
+constexpr static int32_t IMPORTANCE_UNKNOWN = INT32_MAX;
TranscodingUidPolicy::TranscodingUidPolicy()
- : mAm(std::make_shared<ActivityManager>()),
- mUidObserver(new UidObserver(this)),
+ : mUidObserver(nullptr),
mRegistered(false),
- mTopUidState(ActivityManager::PROCESS_STATE_UNKNOWN) {
+ mTopUidState(IMPORTANCE_UNKNOWN) {
registerSelf();
}
@@ -80,39 +42,32 @@
unregisterSelf();
}
+void TranscodingUidPolicy::OnUidImportance(uid_t uid, int32_t uidImportance, void* cookie) {
+ TranscodingUidPolicy* owner = reinterpret_cast<TranscodingUidPolicy*>(cookie);
+ owner->onUidStateChanged(uid, uidImportance);
+}
+
void TranscodingUidPolicy::registerSelf() {
- status_t res = mAm->linkToDeath(mUidObserver.get());
- mAm->registerUidObserver(
- mUidObserver.get(),
- ActivityManager::UID_OBSERVER_GONE | ActivityManager::UID_OBSERVER_IDLE |
- ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
- ActivityManager::PROCESS_STATE_UNKNOWN, String16(kTranscodingTag));
+ mUidObserver = AActivityManager_addUidImportanceListener(
+ &OnUidImportance, -1, (void*)this);
- if (res == OK) {
- Mutex::Autolock _l(mUidLock);
-
- mRegistered = true;
- ALOGI("TranscodingUidPolicy: Registered with ActivityManager");
- } else {
- mAm->unregisterUidObserver(mUidObserver.get());
+ if (mUidObserver == nullptr) {
+ ALOGE("Failed to register uid observer");
+ return;
}
+
+ Mutex::Autolock _l(mUidLock);
+ mRegistered = true;
+ ALOGI("Registered uid observer");
}
void TranscodingUidPolicy::unregisterSelf() {
- mAm->unregisterUidObserver(mUidObserver.get());
- mAm->unlinkToDeath(mUidObserver.get());
+ AActivityManager_removeUidImportanceListener(mUidObserver);
+ mUidObserver = nullptr;
Mutex::Autolock _l(mUidLock);
-
mRegistered = false;
-
- ALOGI("TranscodingUidPolicy: Unregistered with ActivityManager");
-}
-
-void TranscodingUidPolicy::setUidObserverRegistered(bool registered) {
- Mutex::Autolock _l(mUidLock);
-
- mRegistered = registered;
+ ALOGI("Unregistered uid observer");
}
void TranscodingUidPolicy::setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) {
@@ -130,9 +85,9 @@
return;
}
- int32_t state = ActivityManager::PROCESS_STATE_UNKNOWN;
- if (mRegistered && mAm->isUidActive(uid, String16(kTranscodingTag))) {
- state = mAm->getUidProcessState(uid, String16(kTranscodingTag));
+ int32_t state = IMPORTANCE_UNKNOWN;
+ if (mRegistered && AActivityManager_isUidActive(uid)) {
+ state = AActivityManager_getUidImportance(uid);
}
ALOGV("%s: inserting new uid: %u, procState %d", __FUNCTION__, uid, state);
@@ -167,14 +122,14 @@
bool TranscodingUidPolicy::isUidOnTop(uid_t uid) {
Mutex::Autolock _l(mUidLock);
- return mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
+ return mTopUidState != IMPORTANCE_UNKNOWN &&
mTopUidState == getProcState_l(uid);
}
std::unordered_set<uid_t> TranscodingUidPolicy::getTopUids() const {
Mutex::Autolock _l(mUidLock);
- if (mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN) {
+ if (mTopUidState == IMPORTANCE_UNKNOWN) {
return std::unordered_set<uid_t>();
}
@@ -192,11 +147,13 @@
if (it != mUidStateMap.end() && it->second != procState) {
// Top set changed if 1) the uid is in the current top uid set, or 2) the
// new procState is at least the same priority as the current top uid state.
- bool isUidCurrentTop = mTopUidState != ActivityManager::PROCESS_STATE_UNKNOWN &&
- mStateUidMap[mTopUidState].count(uid) > 0;
- bool isNewStateHigherThanTop = procState != ActivityManager::PROCESS_STATE_UNKNOWN &&
- (procState <= mTopUidState ||
- mTopUidState == ActivityManager::PROCESS_STATE_UNKNOWN);
+ bool isUidCurrentTop =
+ mTopUidState != IMPORTANCE_UNKNOWN &&
+ mStateUidMap[mTopUidState].count(uid) > 0;
+ bool isNewStateHigherThanTop =
+ procState != IMPORTANCE_UNKNOWN &&
+ (procState <= mTopUidState ||
+ mTopUidState == IMPORTANCE_UNKNOWN);
topUidSetChanged = (isUidCurrentTop || isNewStateHigherThanTop);
// Move uid to the new procState.
@@ -224,11 +181,12 @@
}
void TranscodingUidPolicy::updateTopUid_l() {
- mTopUidState = ActivityManager::PROCESS_STATE_UNKNOWN;
+ mTopUidState = IMPORTANCE_UNKNOWN;
// Find the lowest uid state (ignoring PROCESS_STATE_UNKNOWN) with some monitored uids.
for (auto stateIt = mStateUidMap.begin(); stateIt != mStateUidMap.end(); stateIt++) {
- if (stateIt->first != ActivityManager::PROCESS_STATE_UNKNOWN && !stateIt->second.empty()) {
+ if (stateIt->first != IMPORTANCE_UNKNOWN &&
+ !stateIt->second.empty()) {
mTopUidState = stateIt->first;
break;
}
@@ -242,7 +200,7 @@
if (it != mUidStateMap.end()) {
return it->second;
}
- return ActivityManager::PROCESS_STATE_UNKNOWN;
+ return IMPORTANCE_UNKNOWN;
}
} // namespace android
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index 40ca2c2..ad2358e 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -18,7 +18,7 @@
import android.media.ITranscodingClient;
import android.media.ITranscodingClientCallback;
-import android.media.TranscodingJobParcel;
+import android.media.TranscodingSessionParcel;
import android.media.TranscodingRequestParcel;
/**
@@ -58,17 +58,13 @@
* the client.
* @param clientName name of the client.
* @param opPackageName op package name of the client.
- * @param clientUid user id of the client.
- * @param clientPid process id of the client.
* @return an ITranscodingClient interface object, with nullptr indicating
* failure to register.
*/
ITranscodingClient registerClient(
in ITranscodingClientCallback callback,
in String clientName,
- in String opPackageName,
- in int clientUid,
- in int clientPid);
+ in String opPackageName);
/**
* Returns the number of clients. This is used for debugging.
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
index 37b5147..151e3d0 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -16,7 +16,7 @@
package android.media;
-import android.media.TranscodingJobParcel;
+import android.media.TranscodingSessionParcel;
import android.media.TranscodingRequestParcel;
/**
@@ -31,28 +31,28 @@
* Submits a transcoding request to MediaTranscodingService.
*
* @param request a TranscodingRequest contains transcoding configuration.
- * @param job(output variable) a TranscodingJob generated by the MediaTranscodingService.
+ * @param session(output variable) a TranscodingSession generated by MediaTranscodingService.
* @return true if success, false otherwise.
*/
boolean submitRequest(in TranscodingRequestParcel request,
- out TranscodingJobParcel job);
+ out TranscodingSessionParcel session);
/**
- * Cancels a transcoding job.
+ * Cancels a transcoding session.
*
- * @param jobId a TranscodingJob generated by the MediaTranscodingService.
+ * @param sessionId a TranscodingSession generated by the MediaTranscodingService.
* @return true if succeeds, false otherwise.
*/
- boolean cancelJob(in int jobId);
+ boolean cancelSession(in int sessionId);
/**
- * Queries the job detail associated with a jobId.
+ * Queries the session detail associated with a sessionId.
*
- * @param jobId a TranscodingJob generated by the MediaTranscodingService.
- * @param job(output variable) the TranscodingJob associated with the jobId.
+ * @param sessionId a TranscodingSession generated by the MediaTranscodingService.
+ * @param session(output variable) the TranscodingSession associated with the sessionId.
* @return true if succeeds, false otherwise.
*/
- boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
+ boolean getSessionWithId(in int sessionId, out TranscodingSessionParcel session);
/**
* Unregister the client with the MediaTranscodingService.
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
index e810f1e..d7d9b6f 100644
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
@@ -17,8 +17,9 @@
package android.media;
import android.media.TranscodingErrorCode;
-import android.media.TranscodingJobParcel;
+import android.media.TranscodingSessionParcel;
import android.media.TranscodingResultParcel;
+import android.os.ParcelFileDescriptor;
/**
* ITranscodingClientCallback
@@ -28,46 +29,79 @@
* {@hide}
*/
interface ITranscodingClientCallback {
+ /**
+ * Called to open a raw file descriptor to access data under a URI
+ *
+ * @param fileUri The path of the filename.
+ * @param mode The file mode to use. Must be one of ("r", "w", "rw")
+ * @return ParcelFileDescriptor if open the file successfully, null otherwise.
+ */
+ ParcelFileDescriptor openFileDescriptor(in @utf8InCpp String fileUri,
+ in @utf8InCpp String mode);
/**
- * Called when the transcoding associated with the jobId finished.
+ * Called when the transcoding associated with the sessionId finished.
+ * This will only be called if client request to get all the status of the session.
*
- * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+ */
+ oneway void onTranscodingStarted(in int sessionId);
+
+ /**
+ * Called when the transcoding associated with the sessionId is paused.
+ * This will only be called if client request to get all the status of the session.
+ *
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+ */
+ oneway void onTranscodingPaused(in int sessionId);
+
+ /**
+ * Called when the transcoding associated with the sessionId is resumed.
+ * This will only be called if client request to get all the status of the session.
+ *
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+ */
+ oneway void onTranscodingResumed(in int sessionId);
+
+ /**
+ * Called when the transcoding associated with the sessionId finished.
+ *
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
* @param result contains the transcoded file stats and other transcoding metrics if requested.
*/
- oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
+ oneway void onTranscodingFinished(in int sessionId, in TranscodingResultParcel result);
/**
- * Called when the transcoding associated with the jobId failed.
+ * Called when the transcoding associated with the sessionId failed.
*
- * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
* @param errorCode error code that indicates the error.
*/
- oneway void onTranscodingFailed(in int jobId, in TranscodingErrorCode errorCode);
+ oneway void onTranscodingFailed(in int sessionId, in TranscodingErrorCode errorCode);
/**
- * Called when the transcoding configuration associated with the jobId gets updated, i.e. wait
- * number in the job queue.
+ * Called when the transcoding configuration associated with the sessionId gets updated, i.e. wait
+ * number in the session queue.
*
* <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
* submitted to the MediaTranscodingService.
*
- * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
- * @param oldAwaitNumber previous number of jobs ahead of current job.
- * @param newAwaitNumber updated number of jobs ahead of current job.
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+ * @param oldAwaitNumber previous number of sessions ahead of current session.
+ * @param newAwaitNumber updated number of sessions ahead of current session.
*/
- oneway void onAwaitNumberOfJobsChanged(in int jobId,
+ oneway void onAwaitNumberOfSessionsChanged(in int sessionId,
in int oldAwaitNumber,
in int newAwaitNumber);
/**
- * Called when there is an update on the progress of the TranscodingJob.
+ * Called when there is an update on the progress of the TranscodingSession.
*
* <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
* submitted to the MediaTranscodingService.
*
- * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
+ * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
* @param progress an integer number ranging from 0 ~ 100 inclusive.
*/
- oneway void onProgressUpdate(in int jobId, in int progress);
+ oneway void onProgressUpdate(in int sessionId, in int progress);
}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 7f47fdc..b044d41 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,11 +23,12 @@
*/
@Backing(type = "int")
enum TranscodingErrorCode {
- kUnknown = 0,
- kUnsupported = 1,
- kDecoderError = 2,
- kEncoderError = 3,
- kExtractorError = 4,
- kMuxerError = 5,
- kInvalidBitstream = 6
+ kNoError = 0,
+ kUnknown = 1,
+ kMalformed = 2,
+ kUnsupported = 3,
+ kInvalidParameter = 4,
+ kInvalidOperation = 5,
+ kErrorIO = 6,
+ kInsufficientResources = 7,
}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
deleted file mode 100644
index d912c38..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.TranscodingRequestParcel;
-
-/**
- * TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
- * It contains all the necessary configuration generated by the MediaTranscodingService for the
- * TranscodingRequest.
- *
- * {@hide}
- */
-//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJobParcel {
- /**
- * A unique positive Id generated by the MediaTranscodingService.
- */
- int jobId;
-
- /**
- * The request associated with the TranscodingJob.
- */
- TranscodingRequestParcel request;
-
- /**
- * Current number of jobs ahead of this job. The service schedules the job based on the priority
- * passed from the client. Client could specify whether to receive updates when the
- * awaitNumberOfJobs changes through setting requestProgressUpdate in the TranscodingRequest.
- */
- int awaitNumberOfJobs;
-}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
deleted file mode 100644
index 1a5d81a..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * Priority of a transcoding job.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum TranscodingJobPriority {
- // TODO(hkuang): define what each priority level actually mean.
- kUnspecified = 0,
- kLow = 1,
- /**
- * 2 ~ 20 is reserved for future use.
- */
- kNormal = 21,
- /**
- * 22 ~ 30 is reserved for future use.
- */
- kHigh = 31,
-}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 5857482..4b19f6a 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,8 +16,10 @@
package android.media;
-import android.media.TranscodingJobPriority;
+import android.media.TranscodingSessionPriority;
+import android.media.TranscodingTestConfig;
import android.media.TranscodingType;
+import android.media.TranscodingVideoTrackFormat;
/**
* TranscodingRequest contains the desired configuration for the transcoding.
@@ -27,9 +29,33 @@
//TODO(hkuang): Implement the parcelable.
parcelable TranscodingRequestParcel {
/**
- * Name of file to be transcoded.
+ * The absolute file path of the source file.
*/
- @utf8InCpp String fileName;
+ @utf8InCpp String sourceFilePath;
+
+ /**
+ * The absolute file path of the destination file.
+ */
+ @utf8InCpp String destinationFilePath;
+
+ /**
+ * The UID of the client that this transcoding request is for. Only privileged caller could
+ * set this Uid as only they could do the transcoding on behalf of the client.
+ * -1 means not available.
+ */
+ int clientUid = -1;
+
+ /**
+ * The PID of the client that this transcoding request is for. Only privileged caller could
+ * set this Pid as only they could do the transcoding on behalf of the client.
+ * -1 means not available.
+ */
+ int clientPid = -1;
+
+ /**
+ * The package name of the client whom this transcoding request is for.
+ */
+ @utf8InCpp String clientPackageName;
/**
* Type of the transcoding.
@@ -37,22 +63,44 @@
TranscodingType transcodingType;
/**
- * Input source file descriptor.
+ * Requested video track format for the transcoding.
+ * Note that the transcoding service will try to fulfill the requested format as much as
+ * possible, while subject to hardware and software limitation. The final video track format
+ * will be available in the TranscodingSessionParcel when the session is finished.
*/
- @nullable ParcelFileDescriptor inFd;
-
- /**
- * Output transcoded file descriptor.
- */
- @nullable ParcelFileDescriptor outFd;
+ @nullable TranscodingVideoTrackFormat requestedVideoTrackFormat;
/**
* Priority of this transcoding. Service will schedule the transcoding based on the priority.
*/
- TranscodingJobPriority priority;
+ TranscodingSessionPriority priority;
/**
- * Whether to receive update on progress and change of awaitNumJobs.
+ * Whether to receive update on progress and change of awaitNumSessions.
+ * Default to false.
*/
- boolean requestUpdate;
+ boolean requestProgressUpdate = false;
+
+ /**
+ * Whether to receive update on session's start/stop/pause/resume.
+ * Default to false.
+ */
+ boolean requestSessionEventUpdate = false;
+
+ /**
+ * Whether this request is for testing.
+ */
+ boolean isForTesting = false;
+
+ /**
+ * Test configuration. This will be available only when isForTesting is set to true.
+ */
+ @nullable TranscodingTestConfig testConfig;
+
+ /**
+ * Whether to get the stats of the transcoding.
+ * If this is enabled, the TranscodingSessionStats will be returned in TranscodingResultParcel
+ * upon transcoding finishes.
+ */
+ boolean enableStats = false;
}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index 65c49e7..7826e25 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,6 +16,8 @@
package android.media;
+import android.media.TranscodingSessionStats;
+
/**
* Result of the transcoding.
*
@@ -24,9 +26,9 @@
//TODO(hkuang): Implement the parcelable.
parcelable TranscodingResultParcel {
/**
- * The jobId associated with the TranscodingResult.
+ * The sessionId associated with the TranscodingResult.
*/
- int jobId;
+ int sessionId;
/**
* Actual bitrate of the transcoded video in bits per second. This will only present for video
@@ -34,5 +36,9 @@
*/
int actualBitrateBps;
- // TODO(hkuang): Add more fields.
+ /**
+ * Stats of the transcoding session. This will only be available when the client requests the
+ * stats in TranscodingRequestParcel.
+ */
+ @nullable TranscodingSessionStats sessionStats;
}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
new file mode 100644
index 0000000..3a4a500
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingRequestParcel;
+import android.media.TranscodingVideoTrackFormat;
+
+/**
+ * TranscodingSession is generated by the MediaTranscodingService upon receiving a
+ * TranscodingRequest. It contains all the necessary configuration generated by the
+ * MediaTranscodingService for the TranscodingRequest.
+ *
+ * {@hide}
+ */
+//TODO(hkuang): Implement the parcelable.
+parcelable TranscodingSessionParcel {
+ /**
+ * A unique positive Id generated by the MediaTranscodingService.
+ */
+ int sessionId;
+
+ /**
+ * The request associated with the TranscodingSession.
+ */
+ TranscodingRequestParcel request;
+
+ /**
+ * Output video track's format. This will only be available for video transcoding and it will
+ * be available when the session is finished.
+ */
+ @nullable TranscodingVideoTrackFormat videoTrackFormat;
+
+ /**
+ * Current number of sessions ahead of this session. The service schedules the session based on
+ * the priority passed from the client. Client could specify whether to receive updates when the
+ * awaitNumberOfSessions changes through setting requestProgressUpdate in the TranscodingRequest.
+ */
+ int awaitNumberOfSessions;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
new file mode 100644
index 0000000..f001484
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding session.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingSessionPriority {
+ // TODO(hkuang): define what each priority level actually mean.
+ kUnspecified = 0,
+ kLow = 1,
+ /**
+ * 2 ~ 20 is reserved for future use.
+ */
+ kNormal = 21,
+ /**
+ * 22 ~ 30 is reserved for future use.
+ */
+ kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
new file mode 100644
index 0000000..b3e7eea
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingSessionStats encapsulates the stats of a TranscodingSession.
+ *
+ * {@hide}
+ */
+parcelable TranscodingSessionStats {
+ /**
+ * System time of when the session is created.
+ */
+ long sessionCreatedTimeUs;
+
+ /**
+ * System time of when the session is finished.
+ */
+ long sessionFinishedTimeUs;
+
+ /**
+ * Total time spent on transcoding, excluding the time in pause.
+ */
+ long totalProcessingTimeUs;
+
+ /**
+ * Total time spent on handling the session, including the time in pause.
+ * The totalTimeUs is actually the same as sessionFinishedTimeUs - sessionCreatedTimeUs.
+ */
+ long totalTimeUs;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
new file mode 100644
index 0000000..12e0e94
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ package android.media;
+
+ /**
+ * TranscodingTestConfig contains the test configuration used in testing.
+ *
+ * {@hide}
+ */
+parcelable TranscodingTestConfig {
+ /**
+ * Whether to use SimulatedTranscoder for testing. Note that SimulatedTranscoder does not send
+ * transcoding sessions to real MediaTranscoder.
+ */
+ boolean useSimulatedTranscoder = false;
+
+ /**
+ * Passthrough mode used for testing. The transcoding service will assume the destination
+ * path already contains the transcoding of the source file and return it to client directly.
+ */
+ boolean passThroughMode = false;
+
+ /**
+ * Time of processing the session in milliseconds. Service will return the session result at
+ * least after processingTotalTimeMs from the time it starts to process the session. Note that
+ * if service uses real MediaTranscoder to do transcoding, the time spent on transcoding may be
+ * more than that.
+ */
+ int processingTotalTimeMs = 0;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
new file mode 100644
index 0000000..8ed241a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
@@ -0,0 +1,84 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingVideoCodecType;
+
+/**
+ * TranscodingVideoTrackFormat contains the video track format of a video.
+ *
+ * TODO(hkuang): Switch to PersistableBundle when b/156428735 is fixed or after we remove
+ * aidl_interface
+ *
+ * Note that TranscodingVideoTrackFormat is used in TranscodingRequestParcel for the client to
+ * specify the desired transcoded video format, and is also used in TranscodingSessionParcel for the
+ * service to notify client of the final video format for transcoding.
+ * When used as input in TranscodingRequestParcel, the client only needs to specify the config that
+ * they want to change, e.g. codec or resolution, and all the missing configs will be extracted
+ * from the source video and applied to the destination video.
+ * When used as output in TranscodingSessionParcel, all the configs will be populated to indicate
+ * the final encoder configs used for transcoding.
+ *
+ * {@hide}
+ */
+parcelable TranscodingVideoTrackFormat {
+ /**
+ * Video Codec type.
+ */
+ TranscodingVideoCodecType codecType; // TranscodingVideoCodecType::kUnspecified;
+
+ /**
+ * Width of the video in pixels. -1 means unavailable.
+ */
+ int width = -1;
+
+ /**
+ * Height of the video in pixels. -1 means unavailable.
+ */
+ int height = -1;
+
+ /**
+ * Bitrate in bits per second. -1 means unavailable.
+ */
+ int bitrateBps = -1;
+
+ /**
+ * Codec profile. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+ * -1 means unavailable.
+ */
+ int profile = -1;
+
+ /**
+ * Codec level. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+ * -1 means unavailable.
+ */
+ int level = -1;
+
+ /**
+ * Decoder operating rate. This is used to work around the fact that vendor does not boost the
+ * hardware to maximum speed in transcoding usage case. This operating rate will be applied
+ * to decoder inside MediaTranscoder. -1 means unavailable.
+ */
+ int decoderOperatingRate = -1;
+
+ /**
+ * Encoder operating rate. This is used to work around the fact that vendor does not boost the
+ * hardware to maximum speed in transcoding usage case. This operating rate will be applied
+ * to encoder inside MediaTranscoder. -1 means unavailable.
+ */
+ int encoderOperatingRate = -1;
+}
diff --git a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
index 9ca2ee9..5ba1ee2 100644
--- a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
+++ b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
@@ -26,7 +26,7 @@
namespace android {
/*
- * AdjustableMaxPriorityQueue is a custom max priority queue that helps managing jobs for
+ * AdjustableMaxPriorityQueue is a custom max priority queue that helps managing sessions for
* MediaTranscodingService.
*
* AdjustableMaxPriorityQueue is a wrapper template around the STL's *_heap() functions.
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
new file mode 100644
index 0000000..3fd4f0c
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+#define ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Interface for a client to call the controller to schedule or retrieve
+// the status of a session.
+class ControllerClientInterface {
+public:
+ /**
+ * Submits one request to the controller.
+ *
+ * Returns true on success and false on failure. This call will fail if a session identified
+ * by <clientId, sessionId> already exists.
+ */
+ virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+ const TranscodingRequestParcel& request,
+ const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+
+ /**
+ * Cancels a session identified by <clientId, sessionId>.
+ *
+ * If sessionId is negative (<0), all sessions with a specified priority (that's not
+ * TranscodingSessionPriority::kUnspecified) will be cancelled. Otherwise, only the single
+ * session <clientId, sessionId> will be cancelled.
+ *
+ * Returns false if a single session is being cancelled but it doesn't exist. Returns
+ * true otherwise.
+ */
+ virtual bool cancel(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+ /**
+ * Retrieves information about a session.
+ *
+ * Returns true and the session if it exists, and false otherwise.
+ */
+ virtual bool getSession(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingRequestParcel* request) = 0;
+
+protected:
+ virtual ~ControllerClientInterface() = default;
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
diff --git a/media/libmediatranscoding/include/media/ResourcePolicyInterface.h b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
new file mode 100644
index 0000000..ecce252
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#include <memory>
+namespace android {
+
+class ResourcePolicyCallbackInterface;
+
+// Interface for the SessionController to control the resource status updates.
+class ResourcePolicyInterface {
+public:
+ // Set the associated callback interface to send the events when resource
+ // status changes. (Set to nullptr will stop the updates.)
+ virtual void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) = 0;
+ virtual void setPidResourceLost(pid_t pid) = 0;
+
+protected:
+ virtual ~ResourcePolicyInterface() = default;
+};
+
+// Interface for notifying the SessionController of a change in resource status.
+class ResourcePolicyCallbackInterface {
+public:
+ // Called when codec resources become available. The controller may use this
+ // as a signal to attempt to restart transcoding sessions that were previously
+ // paused due to temporary resource loss.
+ virtual void onResourceAvailable() = 0;
+
+protected:
+ virtual ~ResourcePolicyCallbackInterface() = default;
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
diff --git a/media/libmediatranscoding/include/media/SchedulerClientInterface.h b/media/libmediatranscoding/include/media/SchedulerClientInterface.h
deleted file mode 100644
index e00cfb2..0000000
--- a/media/libmediatranscoding/include/media/SchedulerClientInterface.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
-#define ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
-
-#include <aidl/android/media/ITranscodingClientCallback.h>
-#include <aidl/android/media/TranscodingRequestParcel.h>
-#include <media/TranscodingDefs.h>
-
-namespace android {
-
-using ::aidl::android::media::ITranscodingClientCallback;
-using ::aidl::android::media::TranscodingRequestParcel;
-
-// Interface for a client to call the scheduler to schedule or retrieve
-// the status of a job.
-class SchedulerClientInterface {
-public:
- /**
- * Submits one request to the scheduler.
- *
- * Returns true on success and false on failure. This call will fail is a job identified
- * by <clientId, jobId> already exists.
- */
- virtual bool submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
- const TranscodingRequestParcel& request,
- const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
-
- /**
- * Cancels a job identified by <clientId, jobId>.
- *
- * If jobId is negative (<0), all jobs with a specified priority (that's not
- * TranscodingJobPriority::kUnspecified) will be cancelled. Otherwise, only the single job
- * <clientId, jobId> will be cancelled.
- *
- * Returns false if a single job is being cancelled but it doesn't exist. Returns
- * true otherwise.
- */
- virtual bool cancel(ClientIdType clientId, JobIdType jobId) = 0;
-
- /**
- * Retrieves information about a job.
- *
- * Returns true and the job if it exists, and false otherwise.
- */
- virtual bool getJob(ClientIdType clientId, JobIdType jobId,
- TranscodingRequestParcel* request) = 0;
-
-protected:
- virtual ~SchedulerClientInterface() = default;
-};
-
-} // namespace android
-#endif // ANDROID_MEDIA_SCHEDULER_CLIENT_INTERFACE_H
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
index ef51f65..6268aa5 100644
--- a/media/libmediatranscoding/include/media/TranscoderInterface.h
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -17,46 +17,54 @@
#ifndef ANDROID_MEDIA_TRANSCODER_INTERFACE_H
#define ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+#include <aidl/android/media/ITranscodingClientCallback.h>
#include <aidl/android/media/TranscodingErrorCode.h>
#include <aidl/android/media/TranscodingRequestParcel.h>
#include <media/TranscodingDefs.h>
namespace android {
+using ::aidl::android::media::ITranscodingClientCallback;
using ::aidl::android::media::TranscodingErrorCode;
using ::aidl::android::media::TranscodingRequestParcel;
class TranscoderCallbackInterface;
-// Interface for the scheduler to call the transcoder to take actions.
+// Interface for the controller to call the transcoder to take actions.
class TranscoderInterface {
public:
- // TODO(chz): determine what parameters are needed here.
- // For now, always pass in clientId&jobId.
virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) = 0;
- virtual void start(ClientIdType clientId, JobIdType jobId,
- const TranscodingRequestParcel& request) = 0;
- virtual void pause(ClientIdType clientId, JobIdType jobId) = 0;
- virtual void resume(ClientIdType clientId, JobIdType jobId) = 0;
- virtual void stop(ClientIdType clientId, JobIdType jobId) = 0;
+ virtual void start(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+ virtual void pause(ClientIdType clientId, SessionIdType sessionId) = 0;
+ virtual void resume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+ virtual void stop(ClientIdType clientId, SessionIdType sessionId) = 0;
protected:
virtual ~TranscoderInterface() = default;
};
-// Interface for the transcoder to notify the scheduler of the status of
-// the currently running job, or temporary loss of transcoding resources.
+// Interface for the transcoder to notify the controller of the status of
+// the currently running session, or temporary loss of transcoding resources.
class TranscoderCallbackInterface {
public:
// TODO(chz): determine what parameters are needed here.
- virtual void onFinish(ClientIdType clientId, JobIdType jobId) = 0;
- virtual void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) = 0;
- virtual void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) = 0;
+ virtual void onStarted(ClientIdType clientId, SessionIdType sessionId) = 0;
+ virtual void onPaused(ClientIdType clientId, SessionIdType sessionId) = 0;
+ virtual void onResumed(ClientIdType clientId, SessionIdType sessionId) = 0;
+ virtual void onFinish(ClientIdType clientId, SessionIdType sessionId) = 0;
+ virtual void onError(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingErrorCode err) = 0;
+ virtual void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+ int32_t progress) = 0;
// Called when transcoding becomes temporarily inaccessible due to loss of resource.
- // If there is any job currently running, it will be paused. When resource contention
- // is solved, the scheduler should call TranscoderInterface's to either start a new job,
- // or resume a paused job.
- virtual void onResourceLost() = 0;
+ // If there is any session currently running, it will be paused. When resource contention
+ // is solved, the controller should call TranscoderInterface's to either start a new session,
+ // or resume a paused session.
+ virtual void onResourceLost(ClientIdType clientId, SessionIdType sessionId) = 0;
protected:
virtual ~TranscoderCallbackInterface() = default;
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
new file mode 100644
index 0000000..9ec32d7
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TRANSCODER_WRAPPER_H
+#define ANDROID_TRANSCODER_WRAPPER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/NdkMediaError.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+class MediaTranscoder;
+class Parcelable;
+
+/*
+ * Wrapper class around MediaTranscoder.
+ * Implements TranscoderInterface for TranscodingSessionController to use.
+ */
+class TranscoderWrapper : public TranscoderInterface,
+ public std::enable_shared_from_this<TranscoderWrapper> {
+public:
+ TranscoderWrapper();
+
+ virtual void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
+ virtual void start(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+ virtual void pause(ClientIdType clientId, SessionIdType sessionId) override;
+ virtual void resume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+ virtual void stop(ClientIdType clientId, SessionIdType sessionId) override;
+
+private:
+ class CallbackImpl;
+ struct Event {
+ enum Type { NoEvent, Start, Pause, Resume, Stop, Finish, Error, Progress } type;
+ ClientIdType clientId;
+ SessionIdType sessionId;
+ std::function<void()> runnable;
+ int32_t arg;
+ };
+ using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+
+ std::shared_ptr<CallbackImpl> mTranscoderCb;
+ std::shared_ptr<MediaTranscoder> mTranscoder;
+ std::weak_ptr<TranscoderCallbackInterface> mCallback;
+ std::mutex mLock;
+ std::condition_variable mCondition;
+ std::list<Event> mQueue; // GUARDED_BY(mLock);
+ std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
+ ClientIdType mCurrentClientId;
+ SessionIdType mCurrentSessionId;
+
+ static std::string toString(const Event& event);
+ void onFinish(ClientIdType clientId, SessionIdType sessionId);
+ void onError(ClientIdType clientId, SessionIdType sessionId, media_status_t status);
+ void onProgress(ClientIdType clientId, SessionIdType sessionId, int32_t progress);
+
+ media_status_t handleStart(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& callback);
+ media_status_t handlePause(ClientIdType clientId, SessionIdType sessionId);
+ media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& callback);
+ media_status_t setupTranscoder(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& callback,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+ void cleanup();
+ void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
+ void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+ const std::function<void()> runnable, int32_t arg = 0);
+ void threadLoop();
+};
+
+} // namespace android
+#endif // ANDROID_TRANSCODER_WRAPPER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/libmediatranscoding/include/media/TranscodingClientManager.h
index a62ad8c..be55c78 100644
--- a/media/libmediatranscoding/include/media/TranscodingClientManager.h
+++ b/media/libmediatranscoding/include/media/TranscodingClientManager.h
@@ -29,7 +29,7 @@
#include <unordered_map>
#include <unordered_set>
-#include "SchedulerClientInterface.h"
+#include "ControllerClientInterface.h"
namespace android {
@@ -58,16 +58,14 @@
* already been added, it will also return non-zero errorcode.
*
* @param callback client callback for the service to call this client.
- * @param pid client's process id.
- * @param uid client's user id.
* @param clientName client's name.
* @param opPackageName client's package name.
* @param client output holding the ITranscodingClient interface for the client
* to use for subsequent communications with the service.
* @return 0 if client is added successfully, non-zero errorcode otherwise.
*/
- status_t addClient(const std::shared_ptr<ITranscodingClientCallback>& callback, pid_t pid,
- uid_t uid, const std::string& clientName, const std::string& opPackageName,
+ status_t addClient(const std::shared_ptr<ITranscodingClientCallback>& callback,
+ const std::string& clientName, const std::string& opPackageName,
std::shared_ptr<ITranscodingClient>* client);
/**
@@ -86,7 +84,10 @@
struct ClientImpl;
// Only allow MediaTranscodingService and unit tests to instantiate.
- TranscodingClientManager(const std::shared_ptr<SchedulerClientInterface>& scheduler);
+ TranscodingClientManager(const std::shared_ptr<ControllerClientInterface>& controller);
+
+ // Checks if a user is trusted (and allowed to submit sessions on behalf of other uids)
+ bool isTrustedCaller(pid_t pid, uid_t uid);
/**
* Removes an existing client from the manager.
@@ -107,7 +108,8 @@
::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
- std::shared_ptr<SchedulerClientInterface> mJobScheduler;
+ std::shared_ptr<ControllerClientInterface> mSessionController;
+ std::unordered_set<uid_t> mTrustedUids;
static std::atomic<ClientIdType> sCookieCounter;
static std::mutex sCookie2ClientLock;
diff --git a/media/libmediatranscoding/include/media/TranscodingDefs.h b/media/libmediatranscoding/include/media/TranscodingDefs.h
index 31d83ac..8e02dd2 100644
--- a/media/libmediatranscoding/include/media/TranscodingDefs.h
+++ b/media/libmediatranscoding/include/media/TranscodingDefs.h
@@ -23,7 +23,7 @@
namespace android {
using ClientIdType = uintptr_t;
-using JobIdType = int32_t;
+using SessionIdType = int32_t;
} // namespace android
#endif // ANDROID_MEDIA_TRANSCODING_DEFS_H
diff --git a/media/libmediatranscoding/include/media/TranscodingJobScheduler.h b/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
deleted file mode 100644
index 63001c3..0000000
--- a/media/libmediatranscoding/include/media/TranscodingJobScheduler.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
-#define ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
-
-#include <aidl/android/media/TranscodingJobPriority.h>
-#include <media/SchedulerClientInterface.h>
-#include <media/TranscoderInterface.h>
-#include <media/TranscodingRequest.h>
-#include <media/UidPolicyInterface.h>
-#include <utils/String8.h>
-
-#include <list>
-#include <map>
-#include <mutex>
-
-namespace android {
-using ::aidl::android::media::TranscodingJobPriority;
-using ::aidl::android::media::TranscodingResultParcel;
-
-class TranscodingJobScheduler : public UidPolicyCallbackInterface,
- public SchedulerClientInterface,
- public TranscoderCallbackInterface {
-public:
- virtual ~TranscodingJobScheduler();
-
- // SchedulerClientInterface
- bool submit(ClientIdType clientId, JobIdType jobId, uid_t uid,
- const TranscodingRequestParcel& request,
- const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
- bool cancel(ClientIdType clientId, JobIdType jobId) override;
- bool getJob(ClientIdType clientId, JobIdType jobId, TranscodingRequestParcel* request) override;
- // ~SchedulerClientInterface
-
- // TranscoderCallbackInterface
- void onFinish(ClientIdType clientId, JobIdType jobId) override;
- void onError(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) override;
- void onProgressUpdate(ClientIdType clientId, JobIdType jobId, int32_t progress) override;
- void onResourceLost() override;
- // ~TranscoderCallbackInterface
-
- // UidPolicyCallbackInterface
- void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
- void onResourceAvailable() override;
- // ~UidPolicyCallbackInterface
-
-private:
- friend class MediaTranscodingService;
- friend class TranscodingJobSchedulerTest;
-
- using JobKeyType = std::pair<ClientIdType, JobIdType>;
- using JobQueueType = std::list<JobKeyType>;
-
- struct Job {
- JobKeyType key;
- uid_t uid;
- enum JobState {
- NOT_STARTED,
- RUNNING,
- PAUSED,
- } state;
- TranscodingRequest request;
- std::weak_ptr<ITranscodingClientCallback> callback;
- };
-
- // TODO(chz): call transcoder without global lock.
- // Use mLock for all entrypoints for now.
- mutable std::mutex mLock;
-
- std::map<JobKeyType, Job> mJobMap;
-
- // uid->JobQueue map (uid == -1: offline queue)
- std::map<uid_t, JobQueueType> mJobQueues;
-
- // uids, with the head being the most-recently-top app, 2nd item is the
- // previous top app, etc.
- std::list<uid_t> mUidSortedList;
- std::list<uid_t>::iterator mOfflineUidIterator;
-
- std::shared_ptr<TranscoderInterface> mTranscoder;
- std::shared_ptr<UidPolicyInterface> mUidPolicy;
-
- Job* mCurrentJob;
- bool mResourceLost;
-
- // Only allow MediaTranscodingService and unit tests to instantiate.
- TranscodingJobScheduler(const std::shared_ptr<TranscoderInterface>& transcoder,
- const std::shared_ptr<UidPolicyInterface>& uidPolicy);
-
- Job* getTopJob_l();
- void updateCurrentJob_l();
- void removeJob_l(const JobKeyType& jobKey);
- void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
-
- // Internal state verifier (debug only)
- void validateState_l();
-
- static String8 jobToString(const JobKeyType& jobKey);
-};
-
-} // namespace android
-#endif // ANDROID_MEDIA_TRANSCODING_JOB_SCHEDULER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
index 1337af3..485403f 100644
--- a/media/libmediatranscoding/include/media/TranscodingRequest.h
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -35,13 +35,18 @@
private:
void setTo(const TranscodingRequestParcel& parcel) {
- fileName = parcel.fileName;
+ sourceFilePath = parcel.sourceFilePath;
+ destinationFilePath = parcel.destinationFilePath;
+ clientUid = parcel.clientUid;
+ clientPid = parcel.clientPid;
+ clientPackageName = parcel.clientPackageName;
transcodingType = parcel.transcodingType;
- // TODO: determine if the fds need dup
- inFd.set(dup(parcel.inFd.get()));
- outFd.set(dup(parcel.outFd.get()));
+ requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
priority = parcel.priority;
- requestUpdate = parcel.requestUpdate;
+ requestProgressUpdate = parcel.requestProgressUpdate;
+ requestSessionEventUpdate = parcel.requestSessionEventUpdate;
+ isForTesting = parcel.isForTesting;
+ testConfig = parcel.testConfig;
}
};
diff --git a/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
new file mode 100644
index 0000000..ee232e7
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+
+#include <android/binder_auto_utils.h>
+#include <media/ResourcePolicyInterface.h>
+#include <utils/Condition.h>
+
+#include <mutex>
+namespace aidl {
+namespace android {
+namespace media {
+class IResourceObserverService;
+}
+} // namespace android
+} // namespace aidl
+
+namespace android {
+
+using ::aidl::android::media::IResourceObserverService;
+
+class TranscodingResourcePolicy : public ResourcePolicyInterface {
+public:
+ explicit TranscodingResourcePolicy();
+ ~TranscodingResourcePolicy();
+
+ void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) override;
+ void setPidResourceLost(pid_t pid) override;
+
+private:
+ struct ResourceObserver;
+ mutable std::mutex mRegisteredLock;
+ bool mRegistered GUARDED_BY(mRegisteredLock);
+ std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
+ std::shared_ptr<ResourceObserver> mObserver;
+
+ mutable std::mutex mCallbackLock;
+ std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
+ GUARDED_BY(mCallbackLock);
+ pid_t mResourceLostPid GUARDED_BY(mCallbackLock);
+
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+ static void BinderDiedCallback(void* cookie);
+
+ void registerSelf();
+ void unregisterSelf();
+ void onResourceAvailable(pid_t pid);
+};  // class TranscodingResourcePolicy
+
+} // namespace android
+#endif // ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/libmediatranscoding/include/media/TranscodingSessionController.h
new file mode 100644
index 0000000..a443265
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingSessionController.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+#define ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <media/ControllerClientInterface.h>
+#include <media/ResourcePolicyInterface.h>
+#include <media/TranscoderInterface.h>
+#include <media/TranscodingRequest.h>
+#include <media/UidPolicyInterface.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <chrono>
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
+
+class TranscodingSessionController : public UidPolicyCallbackInterface,
+ public ControllerClientInterface,
+ public TranscoderCallbackInterface,
+ public ResourcePolicyCallbackInterface {
+public:
+ virtual ~TranscodingSessionController();
+
+ // ControllerClientInterface
+ bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+ const TranscodingRequestParcel& request,
+ const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
+ bool cancel(ClientIdType clientId, SessionIdType sessionId) override;
+ bool getSession(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingRequestParcel* request) override;
+ // ~ControllerClientInterface
+
+ // TranscoderCallbackInterface
+ void onStarted(ClientIdType clientId, SessionIdType sessionId) override;
+ void onPaused(ClientIdType clientId, SessionIdType sessionId) override;
+ void onResumed(ClientIdType clientId, SessionIdType sessionId) override;
+ void onFinish(ClientIdType clientId, SessionIdType sessionId) override;
+ void onError(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) override;
+ void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+ int32_t progress) override;
+ void onResourceLost(ClientIdType clientId, SessionIdType sessionId) override;
+ // ~TranscoderCallbackInterface
+
+ // UidPolicyCallbackInterface
+ void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
+ // ~UidPolicyCallbackInterface
+
+ // ResourcePolicyCallbackInterface
+ void onResourceAvailable() override;
+ // ~ResourcePolicyCallbackInterface
+
+ /**
+ * Dump all the session information to the fd.
+ */
+ void dumpAllSessions(int fd, const Vector<String16>& args);
+
+private:
+ friend class MediaTranscodingService;
+ friend class TranscodingSessionControllerTest;
+
+ using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+ using SessionQueueType = std::list<SessionKeyType>;
+
+ struct Session {
+ enum State {
+ INVALID = -1,
+ NOT_STARTED = 0,
+ RUNNING,
+ PAUSED,
+ FINISHED,
+ CANCELED,
+ ERROR,
+ };
+ SessionKeyType key;
+ uid_t uid;
+ int32_t lastProgress;
+ int32_t pauseCount;
+ std::chrono::time_point<std::chrono::system_clock> stateEnterTime;
+ std::chrono::microseconds waitingTime;
+ std::chrono::microseconds runningTime;
+ std::chrono::microseconds pausedTime;
+
+ TranscodingRequest request;
+ std::weak_ptr<ITranscodingClientCallback> callback;
+
+ // Must use setState to change state.
+ void setState(Session::State state);
+ State getState() const { return state; }
+
+ private:
+ State state = INVALID;
+ };
+
+ // TODO(chz): call transcoder without global lock.
+ // Use mLock for all entrypoints for now.
+ mutable std::mutex mLock;
+
+ std::map<SessionKeyType, Session> mSessionMap;
+
+ // uid->SessionQueue map (uid == -1: offline queue)
+ std::map<uid_t, SessionQueueType> mSessionQueues;
+
+ // uids, with the head being the most-recently-top app, 2nd item is the
+ // previous top app, etc.
+ std::list<uid_t> mUidSortedList;
+ std::list<uid_t>::iterator mOfflineUidIterator;
+ std::map<uid_t, std::string> mUidPackageNames;
+
+ std::shared_ptr<TranscoderInterface> mTranscoder;
+ std::shared_ptr<UidPolicyInterface> mUidPolicy;
+ std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+
+ Session* mCurrentSession;
+ bool mResourceLost;
+ std::list<Session> mSessionHistory;
+
+ // Only allow MediaTranscodingService and unit tests to instantiate.
+ TranscodingSessionController(const std::shared_ptr<TranscoderInterface>& transcoder,
+ const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+ const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy);
+
+ void dumpSession_l(const Session& session, String8& result, bool closedSession = false);
+ Session* getTopSession_l();
+ void updateCurrentSession_l();
+ void removeSession_l(const SessionKeyType& sessionKey, Session::State finalState);
+ void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
+ void notifyClient(ClientIdType clientId, SessionIdType sessionId, const char* reason,
+ std::function<void(const SessionKeyType&)> func);
+ // Internal state verifier (debug only)
+ void validateState_l();
+
+ static String8 sessionToString(const SessionKeyType& sessionKey);
+ static const char* sessionStateToString(const Session::State sessionState);
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
index 27dadd2..4dde5a6 100644
--- a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
+++ b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
@@ -22,20 +22,18 @@
#include <media/UidPolicyInterface.h>
#include <sys/types.h>
#include <utils/Condition.h>
-#include <utils/RefBase.h>
-#include <utils/String8.h>
-#include <utils/Vector.h>
#include <map>
#include <mutex>
#include <unordered_map>
#include <unordered_set>
+struct AActivityManager_UidImportanceListener;
+
namespace android {
-class ActivityManager;
// Observer for UID lifecycle and provide information about the uid's app
-// priority used by the job scheduler.
+// priority used by the session controller.
class TranscodingUidPolicy : public UidPolicyInterface {
public:
explicit TranscodingUidPolicy();
@@ -47,19 +45,21 @@
void unregisterMonitorUid(uid_t uid) override;
std::unordered_set<uid_t> getTopUids() const override;
void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override;
+ // ~UidPolicyInterface
private:
void onUidStateChanged(uid_t uid, int32_t procState);
- void setUidObserverRegistered(bool registerd);
void registerSelf();
void unregisterSelf();
int32_t getProcState_l(uid_t uid) NO_THREAD_SAFETY_ANALYSIS;
void updateTopUid_l() NO_THREAD_SAFETY_ANALYSIS;
- struct UidObserver;
+ static void OnUidImportance(uid_t uid, int32_t uidImportance, void* cookie);
+
+ struct ResourceManagerClient;
mutable Mutex mUidLock;
- std::shared_ptr<ActivityManager> mAm;
- sp<UidObserver> mUidObserver;
+ AActivityManager_UidImportanceListener* mUidObserver;
+
bool mRegistered GUARDED_BY(mUidLock);
int32_t mTopUidState GUARDED_BY(mUidLock);
std::unordered_map<uid_t, int32_t> mUidStateMap GUARDED_BY(mUidLock);
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/libmediatranscoding/include/media/UidPolicyInterface.h
index dc28027..05d8db0 100644
--- a/media/libmediatranscoding/include/media/UidPolicyInterface.h
+++ b/media/libmediatranscoding/include/media/UidPolicyInterface.h
@@ -23,7 +23,7 @@
class UidPolicyCallbackInterface;
-// Interface for the scheduler to query a uid's info.
+// Interface for the controller to query a uid's info.
class UidPolicyInterface {
public:
// Instruct the uid policy to start monitoring a uid.
@@ -41,19 +41,13 @@
virtual ~UidPolicyInterface() = default;
};
-// Interface for notifying the scheduler of a change in uid states or
-// transcoding resource availability.
+// Interface for notifying the controller of a change in uid states.
class UidPolicyCallbackInterface {
public:
// Called when the set of uids that's top priority among the uids of interest
// has changed. The receiver of this callback should adjust accordingly.
virtual void onTopUidsChanged(const std::unordered_set<uid_t>& uids) = 0;
- // Called when resources become available for transcoding use. The scheduler
- // may use this as a signal to attempt restart transcoding activity that
- // were previously paused due to temporary resource loss.
- virtual void onResourceAvailable() = 0;
-
protected:
virtual ~UidPolicyCallbackInterface() = default;
};
diff --git a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
index 2e49f32..a35ca53 100644
--- a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
+++ b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
@@ -223,19 +223,19 @@
}
// Test the heap property and make sure it is the same as std::priority_queue.
-TEST(AdjustableMaxPriorityQueueTest, TranscodingJobTest) {
- // Test data structure that mimics the Transcoding job.
- struct TranscodingJob {
+TEST(AdjustableMaxPriorityQueueTest, TranscodingSessionTest) {
+ // Test data structure that mimics the Transcoding session.
+ struct TranscodingSession {
int32_t priority;
int64_t createTimeUs;
};
- // The job is arranging according to priority with highest priority comes first.
- // For the job with the same priority, the job with early createTime will come first.
- class TranscodingJobComp {
+ // The session queue is arranged according to priority, with the highest priority coming first.
+ // For the session with the same priority, the session with early createTime will come first.
+ class TranscodingSessionComp {
public:
- bool operator()(const std::unique_ptr<TranscodingJob>& lhs,
- const std::unique_ptr<TranscodingJob>& rhs) const {
+ bool operator()(const std::unique_ptr<TranscodingSession>& lhs,
+ const std::unique_ptr<TranscodingSession>& rhs) const {
if (lhs->priority != rhs->priority) {
return lhs->priority < rhs->priority;
}
@@ -244,46 +244,47 @@
};
// Map to save each value's position in the heap.
- std::unordered_map<int, TranscodingJob*> jobIdToJobMap;
+ std::unordered_map<int, TranscodingSession*> sessionIdToSessionMap;
- TranscodingJob testJobs[] = {
- {1 /*priority*/, 66 /*createTimeUs*/}, // First job,
- {2 /*priority*/, 67 /*createTimeUs*/}, // Second job,
- {2 /*priority*/, 66 /*createTimeUs*/}, // Third job,
- {3 /*priority*/, 68 /*createTimeUs*/}, // Fourth job.
+ TranscodingSession testSessions[] = {
+ {1 /*priority*/, 66 /*createTimeUs*/}, // First session,
+ {2 /*priority*/, 67 /*createTimeUs*/}, // Second session,
+ {2 /*priority*/, 66 /*createTimeUs*/}, // Third session,
+ {3 /*priority*/, 68 /*createTimeUs*/}, // Fourth session.
};
- AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingJob>, TranscodingJobComp> jobQueue;
+ AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingSession>, TranscodingSessionComp>
+ sessionQueue;
- // Pushes all the jobs into the heap.
- for (int jobId = 0; jobId < 4; ++jobId) {
- auto newJob = std::make_unique<TranscodingJob>(testJobs[jobId]);
- jobIdToJobMap[jobId] = newJob.get();
- EXPECT_TRUE(jobQueue.push(std::move(newJob)));
+ // Pushes all the sessions into the heap.
+ for (int sessionId = 0; sessionId < 4; ++sessionId) {
+ auto newSession = std::make_unique<TranscodingSession>(testSessions[sessionId]);
+ sessionIdToSessionMap[sessionId] = newSession.get();
+ EXPECT_TRUE(sessionQueue.push(std::move(newSession)));
}
- // Check the job queue size.
- EXPECT_EQ(4, jobQueue.size());
+ // Check the session queue size.
+ EXPECT_EQ(4, sessionQueue.size());
- // Check the top and it should be Forth job: (3, 68)
- const std::unique_ptr<TranscodingJob>& topJob = jobQueue.top();
- EXPECT_EQ(3, topJob->priority);
- EXPECT_EQ(68, topJob->createTimeUs);
+ // Check the top and it should be Fourth session: (3, 68)
+ const std::unique_ptr<TranscodingSession>& topSession = sessionQueue.top();
+ EXPECT_EQ(3, topSession->priority);
+ EXPECT_EQ(68, topSession->createTimeUs);
// Consume the top.
- std::unique_ptr<TranscodingJob> consumeJob = jobQueue.consume_top();
+ std::unique_ptr<TranscodingSession> consumeSession = sessionQueue.consume_top();
- // Check the top and it should be Third Job (2, 66)
- const std::unique_ptr<TranscodingJob>& topJob2 = jobQueue.top();
- EXPECT_EQ(2, topJob2->priority);
- EXPECT_EQ(66, topJob2->createTimeUs);
+ // Check the top and it should be Third Session (2, 66)
+ const std::unique_ptr<TranscodingSession>& topSession2 = sessionQueue.top();
+ EXPECT_EQ(2, topSession2->priority);
+ EXPECT_EQ(66, topSession2->createTimeUs);
- // Change the Second job's priority to 4 from (2, 67) -> (4, 67). It should becomes top of the
- // queue.
- jobIdToJobMap[1]->priority = 4;
- jobQueue.rebuild();
- const std::unique_ptr<TranscodingJob>& topJob3 = jobQueue.top();
- EXPECT_EQ(4, topJob3->priority);
- EXPECT_EQ(67, topJob3->createTimeUs);
+ // Change the Second session's priority to 4 from (2, 67) -> (4, 67). It should become
+ // top of the queue.
+ sessionIdToSessionMap[1]->priority = 4;
+ sessionQueue.rebuild();
+ const std::unique_ptr<TranscodingSession>& topSession3 = sessionQueue.top();
+ EXPECT_EQ(4, topSession3->priority);
+ EXPECT_EQ(67, topSession3->createTimeUs);
}
} // namespace android
\ No newline at end of file
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index b54022a..8bff10a 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -1,4 +1,10 @@
// Build the unit tests for libmediatranscoding.
+filegroup {
+ name: "test_assets",
+ path: "assets",
+ srcs: ["assets/**/*"],
+}
+
cc_defaults {
name: "libmediatranscoding_test_defaults",
@@ -8,15 +14,16 @@
],
shared_libs: [
+ "libandroid",
"libbinder_ndk",
"libcutils",
"liblog",
"libutils",
- "libmediatranscoding"
],
static_libs: [
"mediatranscoding_aidl_interface-ndk_platform",
+ "libmediatranscoding",
],
cflags: [
@@ -38,13 +45,13 @@
}
//
-// TranscodingJobScheduler unit test
+// TranscodingSessionController unit test
//
cc_test {
- name: "TranscodingJobScheduler_tests",
+ name: "TranscodingSessionController_tests",
defaults: ["libmediatranscoding_test_defaults"],
- srcs: ["TranscodingJobScheduler_tests.cpp"],
+ srcs: ["TranscodingSessionController_tests.cpp"],
}
//
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index d9504ca..1a50923 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -25,7 +25,7 @@
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <gtest/gtest.h>
-#include <media/SchedulerClientInterface.h>
+#include <media/ControllerClientInterface.h>
#include <media/TranscodingClientManager.h>
#include <media/TranscodingRequest.h>
#include <utils/Log.h>
@@ -38,45 +38,57 @@
using ::aidl::android::media::BnTranscodingClientCallback;
using ::aidl::android::media::IMediaTranscodingService;
using ::aidl::android::media::TranscodingErrorCode;
-using ::aidl::android::media::TranscodingJobParcel;
-using ::aidl::android::media::TranscodingJobPriority;
using ::aidl::android::media::TranscodingRequestParcel;
using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
-constexpr pid_t kInvalidClientPid = -1;
+constexpr pid_t kInvalidClientPid = -5;
+constexpr pid_t kInvalidClientUid = -10;
constexpr const char* kInvalidClientName = "";
constexpr const char* kInvalidClientPackage = "";
-constexpr pid_t kClientPid = 2;
-constexpr uid_t kClientUid = 3;
constexpr const char* kClientName = "TestClientName";
constexpr const char* kClientPackage = "TestClientPackage";
-#define JOB(n) (n)
+#define SESSION(n) (n)
struct TestClientCallback : public BnTranscodingClientCallback {
TestClientCallback() { ALOGI("TestClientCallback Created"); }
virtual ~TestClientCallback() { ALOGI("TestClientCallback destroyed"); };
- Status onTranscodingFinished(int32_t in_jobId,
+ Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+ ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
+ return Status::ok();
+ }
+
+ Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingFinished(int32_t in_sessionId,
const TranscodingResultParcel& in_result) override {
- EXPECT_EQ(in_jobId, in_result.jobId);
- mEventQueue.push_back(Finished(in_jobId));
+ EXPECT_EQ(in_sessionId, in_result.sessionId);
+ mEventQueue.push_back(Finished(in_sessionId));
return Status::ok();
}
- Status onTranscodingFailed(int32_t in_jobId, TranscodingErrorCode /*in_errorCode */) override {
- mEventQueue.push_back(Failed(in_jobId));
+ Status onTranscodingFailed(int32_t in_sessionId,
+ TranscodingErrorCode /*in_errorCode */) override {
+ mEventQueue.push_back(Failed(in_sessionId));
return Status::ok();
}
- Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
- int32_t /* in_newAwaitNumber */) override {
+ Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+ int32_t /* in_oldAwaitNumber */,
+ int32_t /* in_newAwaitNumber */) override {
return Status::ok();
}
- Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
+ Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
return Status::ok();
}
@@ -86,12 +98,12 @@
Finished,
Failed,
} type;
- JobIdType jobId;
+ SessionIdType sessionId;
};
static constexpr Event NoEvent = {Event::NoEvent, 0};
#define DECLARE_EVENT(action) \
- static Event action(JobIdType jobId) { return {Event::action, jobId}; }
+ static Event action(SessionIdType sessionId) { return {Event::action, sessionId}; }
DECLARE_EVENT(Finished);
DECLARE_EVENT(Failed);
@@ -115,101 +127,102 @@
};
bool operator==(const TestClientCallback::Event& lhs, const TestClientCallback::Event& rhs) {
- return lhs.type == rhs.type && lhs.jobId == rhs.jobId;
+ return lhs.type == rhs.type && lhs.sessionId == rhs.sessionId;
}
-struct TestScheduler : public SchedulerClientInterface {
- TestScheduler() { ALOGI("TestScheduler Created"); }
+struct TestController : public ControllerClientInterface {
+ TestController() { ALOGI("TestController Created"); }
- virtual ~TestScheduler() { ALOGI("TestScheduler Destroyed"); }
+ virtual ~TestController() { ALOGI("TestController Destroyed"); }
- bool submit(ClientIdType clientId, JobIdType jobId, uid_t /*uid*/,
+ bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*uid*/,
const TranscodingRequestParcel& request,
const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
- if (mJobs.count(jobKey) > 0) {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+ if (mSessions.count(sessionKey) > 0) {
return false;
}
// This is the secret name we'll check, to test error propagation from
- // the scheduler back to client.
- if (request.fileName == "bad_file") {
+ // the controller back to client.
+ if (request.sourceFilePath == "bad_source_file") {
return false;
}
- mJobs[jobKey].request = request;
- mJobs[jobKey].callback = clientCallback;
+ mSessions[sessionKey].request = request;
+ mSessions[sessionKey].callback = clientCallback;
- mLastJob = jobKey;
+ mLastSession = sessionKey;
return true;
}
- bool cancel(ClientIdType clientId, JobIdType jobId) override {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
+ bool cancel(ClientIdType clientId, SessionIdType sessionId) override {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
- if (mJobs.count(jobKey) == 0) {
+ if (mSessions.count(sessionKey) == 0) {
return false;
}
- mJobs.erase(jobKey);
+ mSessions.erase(sessionKey);
return true;
}
- bool getJob(ClientIdType clientId, JobIdType jobId,
- TranscodingRequestParcel* request) override {
- JobKeyType jobKey = std::make_pair(clientId, jobId);
- if (mJobs.count(jobKey) == 0) {
+ bool getSession(ClientIdType clientId, SessionIdType sessionId,
+ TranscodingRequestParcel* request) override {
+ SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+ if (mSessions.count(sessionKey) == 0) {
return false;
}
- *(TranscodingRequest*)request = mJobs[jobKey].request;
+ *(TranscodingRequest*)request = mSessions[sessionKey].request;
return true;
}
- void finishLastJob() {
- auto it = mJobs.find(mLastJob);
- if (it == mJobs.end()) {
+ void finishLastSession() {
+ auto it = mSessions.find(mLastSession);
+ if (it == mSessions.end()) {
return;
}
{
auto clientCallback = it->second.callback.lock();
if (clientCallback != nullptr) {
clientCallback->onTranscodingFinished(
- mLastJob.second, TranscodingResultParcel({mLastJob.second, 0}));
+ mLastSession.second,
+ TranscodingResultParcel({mLastSession.second, 0, std::nullopt}));
}
}
- mJobs.erase(it);
+ mSessions.erase(it);
}
- void abortLastJob() {
- auto it = mJobs.find(mLastJob);
- if (it == mJobs.end()) {
+ void abortLastSession() {
+ auto it = mSessions.find(mLastSession);
+ if (it == mSessions.end()) {
return;
}
{
auto clientCallback = it->second.callback.lock();
if (clientCallback != nullptr) {
- clientCallback->onTranscodingFailed(mLastJob.second,
+ clientCallback->onTranscodingFailed(mLastSession.second,
TranscodingErrorCode::kUnknown);
}
}
- mJobs.erase(it);
+ mSessions.erase(it);
}
- struct Job {
+ struct Session {
TranscodingRequest request;
std::weak_ptr<ITranscodingClientCallback> callback;
};
- typedef std::pair<ClientIdType, JobIdType> JobKeyType;
- std::map<JobKeyType, Job> mJobs;
- JobKeyType mLastJob;
+ typedef std::pair<ClientIdType, SessionIdType> SessionKeyType;
+ std::map<SessionKeyType, Session> mSessions;
+ SessionKeyType mLastSession;
};
class TranscodingClientManagerTest : public ::testing::Test {
public:
TranscodingClientManagerTest()
- : mScheduler(new TestScheduler()),
- mClientManager(new TranscodingClientManager(mScheduler)) {
+ : mController(new TestController()),
+ mClientManager(new TranscodingClientManager(mController)) {
ALOGD("TranscodingClientManagerTest created");
}
@@ -224,19 +237,19 @@
~TranscodingClientManagerTest() { ALOGD("TranscodingClientManagerTest destroyed"); }
void addMultipleClients() {
- EXPECT_EQ(mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
- kClientPackage, &mClient1),
- OK);
+ EXPECT_EQ(
+ mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &mClient1),
+ OK);
EXPECT_NE(mClient1, nullptr);
- EXPECT_EQ(mClientManager->addClient(mClientCallback2, kClientPid, kClientUid, kClientName,
- kClientPackage, &mClient2),
- OK);
+ EXPECT_EQ(
+ mClientManager->addClient(mClientCallback2, kClientName, kClientPackage, &mClient2),
+ OK);
EXPECT_NE(mClient2, nullptr);
- EXPECT_EQ(mClientManager->addClient(mClientCallback3, kClientPid, kClientUid, kClientName,
- kClientPackage, &mClient3),
- OK);
+ EXPECT_EQ(
+ mClientManager->addClient(mClientCallback3, kClientName, kClientPackage, &mClient3),
+ OK);
EXPECT_NE(mClient3, nullptr);
EXPECT_EQ(mClientManager->getNumOfClients(), 3);
@@ -249,7 +262,7 @@
EXPECT_EQ(mClientManager->getNumOfClients(), 0);
}
- std::shared_ptr<TestScheduler> mScheduler;
+ std::shared_ptr<TestController> mController;
std::shared_ptr<TranscodingClientManager> mClientManager;
std::shared_ptr<ITranscodingClient> mClient1;
std::shared_ptr<ITranscodingClient> mClient2;
@@ -262,40 +275,39 @@
TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientCallback) {
// Add a client with null callback and expect failure.
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(nullptr, kClientPid, kClientUid, kClientName,
- kClientPackage, &client);
+ status_t err = mClientManager->addClient(nullptr, kClientName, kClientPackage, &client);
EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
}
-
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
- // Add a client with invalid Pid and expect failure.
- std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kInvalidClientPid, kClientUid,
- kClientName, kClientPackage, &client);
- EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
-}
+//
+//TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
+// // Add a client with invalid Pid and expect failure.
+// std::shared_ptr<ITranscodingClient> client;
+// status_t err = mClientManager->addClient(mClientCallback1,
+// kClientName, kClientPackage, &client);
+// EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
+//}
TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientName) {
// Add a client with invalid name and expect failure.
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid,
- kInvalidClientName, kClientPackage, &client);
+ status_t err = mClientManager->addClient(mClientCallback1, kInvalidClientName, kClientPackage,
+ &client);
EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
}
TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPackageName) {
// Add a client with invalid packagename and expect failure.
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
- kInvalidClientPackage, &client);
+ status_t err = mClientManager->addClient(mClientCallback1, kClientName, kInvalidClientPackage,
+ &client);
EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
}
TEST_F(TranscodingClientManagerTest, TestAddingValidClient) {
// Add a valid client, should succeed.
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
- kClientPackage, &client);
+ status_t err =
+ mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
EXPECT_EQ(err, OK);
EXPECT_NE(client.get(), nullptr);
EXPECT_EQ(mClientManager->getNumOfClients(), 1);
@@ -308,15 +320,14 @@
TEST_F(TranscodingClientManagerTest, TestAddingDupliacteClient) {
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
- kClientPackage, &client);
+ status_t err =
+ mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
EXPECT_EQ(err, OK);
EXPECT_NE(client.get(), nullptr);
EXPECT_EQ(mClientManager->getNumOfClients(), 1);
std::shared_ptr<ITranscodingClient> dupClient;
- err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, "dupClient",
- "dupPackage", &dupClient);
+ err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
EXPECT_EQ(err, IMediaTranscodingService::ERROR_ALREADY_EXISTS);
EXPECT_EQ(dupClient.get(), nullptr);
EXPECT_EQ(mClientManager->getNumOfClients(), 1);
@@ -325,8 +336,7 @@
EXPECT_TRUE(status.isOk());
EXPECT_EQ(mClientManager->getNumOfClients(), 0);
- err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, "dupClient",
- "dupPackage", &dupClient);
+ err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
EXPECT_EQ(err, OK);
EXPECT_NE(dupClient.get(), nullptr);
EXPECT_EQ(mClientManager->getNumOfClients(), 1);
@@ -341,74 +351,86 @@
unregisterMultipleClients();
}
-TEST_F(TranscodingClientManagerTest, TestSubmitCancelGetJobs) {
+TEST_F(TranscodingClientManagerTest, TestSubmitCancelGetSessions) {
addMultipleClients();
- // Test jobId assignment.
+ // Test sessionId assignment.
TranscodingRequestParcel request;
- request.fileName = "test_file_0";
- TranscodingJobParcel job;
+ request.sourceFilePath = "test_source_file_0";
+ request.destinationFilePath = "test_desintaion_file_0";
+ TranscodingSessionParcel session;
bool result;
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(0));
+ EXPECT_EQ(session.sessionId, SESSION(0));
- request.fileName = "test_file_1";
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ request.sourceFilePath = "test_source_file_1";
+ request.destinationFilePath = "test_desintaion_file_1";
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(1));
+ EXPECT_EQ(session.sessionId, SESSION(1));
- request.fileName = "test_file_2";
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ request.sourceFilePath = "test_source_file_2";
+ request.destinationFilePath = "test_desintaion_file_2";
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(2));
+ EXPECT_EQ(session.sessionId, SESSION(2));
- // Test submit bad request (no valid fileName) fails.
+ // Test submit bad request (no valid sourceFilePath) fails.
TranscodingRequestParcel badRequest;
- badRequest.fileName = "bad_file";
- EXPECT_TRUE(mClient1->submitRequest(badRequest, &job, &result).isOk());
+ badRequest.sourceFilePath = "bad_source_file";
+ badRequest.destinationFilePath = "bad_destination_file";
+ EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
EXPECT_FALSE(result);
- // Test get jobs by id.
- EXPECT_TRUE(mClient1->getJobWithId(JOB(2), &job, &result).isOk());
- EXPECT_EQ(job.jobId, JOB(2));
- EXPECT_EQ(job.request.fileName, "test_file_2");
- EXPECT_TRUE(result);
-
- // Test get jobs by invalid id fails.
- EXPECT_TRUE(mClient1->getJobWithId(JOB(100), &job, &result).isOk());
+ // Test submit with bad pid/uid.
+ badRequest.sourceFilePath = "test_source_file_3";
+ badRequest.destinationFilePath = "test_desintaion_file_3";
+ badRequest.clientPid = kInvalidClientPid;
+ badRequest.clientUid = kInvalidClientUid;
+ EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
EXPECT_FALSE(result);
- // Test cancel non-existent job fail.
- EXPECT_TRUE(mClient2->cancelJob(JOB(100), &result).isOk());
+ // Test get sessions by id.
+ EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+ EXPECT_EQ(session.sessionId, SESSION(2));
+ EXPECT_EQ(session.request.sourceFilePath, "test_source_file_2");
+ EXPECT_TRUE(result);
+
+ // Test get sessions by invalid id fails.
+ EXPECT_TRUE(mClient1->getSessionWithId(SESSION(100), &session, &result).isOk());
EXPECT_FALSE(result);
- // Test cancel valid jobId in arbitrary order.
- EXPECT_TRUE(mClient1->cancelJob(JOB(2), &result).isOk());
- EXPECT_TRUE(result);
-
- EXPECT_TRUE(mClient1->cancelJob(JOB(0), &result).isOk());
- EXPECT_TRUE(result);
-
- EXPECT_TRUE(mClient1->cancelJob(JOB(1), &result).isOk());
- EXPECT_TRUE(result);
-
- // Test cancel job again fails.
- EXPECT_TRUE(mClient1->cancelJob(JOB(1), &result).isOk());
+ // Test cancel non-existent session fails.
+ EXPECT_TRUE(mClient2->cancelSession(SESSION(100), &result).isOk());
EXPECT_FALSE(result);
- // Test get job after cancel fails.
- EXPECT_TRUE(mClient1->getJobWithId(JOB(2), &job, &result).isOk());
+ // Test cancel valid sessionId in arbitrary order.
+ EXPECT_TRUE(mClient1->cancelSession(SESSION(2), &result).isOk());
+ EXPECT_TRUE(result);
+
+ EXPECT_TRUE(mClient1->cancelSession(SESSION(0), &result).isOk());
+ EXPECT_TRUE(result);
+
+ EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
+ EXPECT_TRUE(result);
+
+ // Test cancel session again fails.
+ EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
EXPECT_FALSE(result);
- // Test jobId independence for each client.
- EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
- EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(0));
+ // Test get session after cancel fails.
+ EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+ EXPECT_FALSE(result);
- EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
+ // Test sessionId independence for each client.
+ EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(1));
+ EXPECT_EQ(session.sessionId, SESSION(0));
+
+ EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+ EXPECT_TRUE(result);
+ EXPECT_EQ(session.sessionId, SESSION(1));
unregisterMultipleClients();
}
@@ -417,33 +439,34 @@
addMultipleClients();
TranscodingRequestParcel request;
- request.fileName = "test_file_name";
- TranscodingJobParcel job;
+ request.sourceFilePath = "test_source_file_name";
+ request.destinationFilePath = "test_destination_file_name";
+ TranscodingSessionParcel session;
bool result;
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(0));
+ EXPECT_EQ(session.sessionId, SESSION(0));
- mScheduler->finishLastJob();
- EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(job.jobId));
+ mController->finishLastSession();
+ EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(session.sessionId));
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(1));
+ EXPECT_EQ(session.sessionId, SESSION(1));
- mScheduler->abortLastJob();
- EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Failed(job.jobId));
+ mController->abortLastSession();
+ EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Failed(session.sessionId));
- EXPECT_TRUE(mClient1->submitRequest(request, &job, &result).isOk());
+ EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(2));
+ EXPECT_EQ(session.sessionId, SESSION(2));
- EXPECT_TRUE(mClient2->submitRequest(request, &job, &result).isOk());
+ EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
EXPECT_TRUE(result);
- EXPECT_EQ(job.jobId, JOB(0));
+ EXPECT_EQ(session.sessionId, SESSION(0));
- mScheduler->finishLastJob();
- EXPECT_EQ(mClientCallback2->popEvent(), TestClientCallback::Finished(job.jobId));
+ mController->finishLastSession();
+ EXPECT_EQ(mClientCallback2->popEvent(), TestClientCallback::Finished(session.sessionId));
unregisterMultipleClients();
}
@@ -451,62 +474,65 @@
TEST_F(TranscodingClientManagerTest, TestUseAfterUnregister) {
// Add a client.
std::shared_ptr<ITranscodingClient> client;
- status_t err = mClientManager->addClient(mClientCallback1, kClientPid, kClientUid, kClientName,
- kClientPackage, &client);
+ status_t err =
+ mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
EXPECT_EQ(err, OK);
EXPECT_NE(client.get(), nullptr);
// Submit 2 requests, 1 offline and 1 realtime.
TranscodingRequestParcel request;
- TranscodingJobParcel job;
+ TranscodingSessionParcel session;
bool result;
- request.fileName = "test_file_0";
- request.priority = TranscodingJobPriority::kUnspecified;
- EXPECT_TRUE(client->submitRequest(request, &job, &result).isOk() && result);
- EXPECT_EQ(job.jobId, JOB(0));
+ request.sourceFilePath = "test_source_file_0";
+ request.destinationFilePath = "test_destination_file_0";
+ request.priority = TranscodingSessionPriority::kUnspecified;
+ EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+ EXPECT_EQ(session.sessionId, SESSION(0));
- request.fileName = "test_file_1";
- request.priority = TranscodingJobPriority::kNormal;
- EXPECT_TRUE(client->submitRequest(request, &job, &result).isOk() && result);
- EXPECT_EQ(job.jobId, JOB(1));
+ request.sourceFilePath = "test_source_file_1";
+ request.destinationFilePath = "test_destination_file_1";
+ request.priority = TranscodingSessionPriority::kNormal;
+ EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+ EXPECT_EQ(session.sessionId, SESSION(1));
// Unregister client, should succeed.
Status status = client->unregister();
EXPECT_TRUE(status.isOk());
// Test submit new request after unregister, should fail with ERROR_DISCONNECTED.
- request.fileName = "test_file_2";
- request.priority = TranscodingJobPriority::kNormal;
- status = client->submitRequest(request, &job, &result);
+ request.sourceFilePath = "test_source_file_2";
+ request.destinationFilePath = "test_destination_file_2";
+ request.priority = TranscodingSessionPriority::kNormal;
+ status = client->submitRequest(request, &session, &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- // Test cancel jobs after unregister, should fail with ERROR_DISCONNECTED
- // regardless of realtime or offline job, or whether the jobId is valid.
- status = client->cancelJob(JOB(0), &result);
+ // Test cancel sessions after unregister, should fail with ERROR_DISCONNECTED
+ // regardless of realtime or offline session, or whether the sessionId is valid.
+ status = client->cancelSession(SESSION(0), &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- status = client->cancelJob(JOB(1), &result);
+ status = client->cancelSession(SESSION(1), &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- status = client->cancelJob(JOB(2), &result);
+ status = client->cancelSession(SESSION(2), &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- // Test get jobs, should fail with ERROR_DISCONNECTED regardless of realtime
- // or offline job, or whether the jobId is valid.
- status = client->getJobWithId(JOB(0), &job, &result);
+ // Test get sessions, should fail with ERROR_DISCONNECTED regardless of realtime
+ // or offline session, or whether the sessionId is valid.
+ status = client->getSessionWithId(SESSION(0), &session, &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- status = client->getJobWithId(JOB(1), &job, &result);
+ status = client->getSessionWithId(SESSION(1), &session, &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
- status = client->getJobWithId(JOB(2), &job, &result);
+ status = client->getSessionWithId(SESSION(2), &session, &result);
EXPECT_FALSE(status.isOk());
EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
}
diff --git a/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp b/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
deleted file mode 100644
index 25321e3..0000000
--- a/media/libmediatranscoding/tests/TranscodingJobScheduler_tests.cpp
+++ /dev/null
@@ -1,590 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for TranscodingJobScheduler
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "TranscodingJobSchedulerTest"
-
-#include <aidl/android/media/BnTranscodingClientCallback.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingClient.h>
-#include <aidl/android/media/ITranscodingClientCallback.h>
-#include <android-base/logging.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <gtest/gtest.h>
-#include <media/TranscodingClientManager.h>
-#include <media/TranscodingJobScheduler.h>
-#include <utils/Log.h>
-
-#include <unordered_set>
-
-namespace android {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingClientCallback;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingClient;
-using aidl::android::media::TranscodingRequestParcel;
-
-constexpr ClientIdType kClientId = 1000;
-constexpr JobIdType kClientJobId = 0;
-constexpr uid_t kClientUid = 5000;
-constexpr uid_t kInvalidUid = (uid_t)-1;
-
-#define CLIENT(n) (kClientId + (n))
-#define JOB(n) (kClientJobId + (n))
-#define UID(n) (kClientUid + (n))
-
-class TestUidPolicy : public UidPolicyInterface {
-public:
- TestUidPolicy() = default;
- virtual ~TestUidPolicy() = default;
-
- // UidPolicyInterface
- void registerMonitorUid(uid_t /*uid*/) override {}
- void unregisterMonitorUid(uid_t /*uid*/) override {}
- bool isUidOnTop(uid_t uid) override { return mTopUids.count(uid) > 0; }
- std::unordered_set<uid_t> getTopUids() const override { return mTopUids; }
- void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override {
- mUidPolicyCallback = cb;
- }
- void setTop(uid_t uid) {
- std::unordered_set<uid_t> uids = {uid};
- setTop(uids);
- }
- void setTop(const std::unordered_set<uid_t>& uids) {
- mTopUids = uids;
- auto uidPolicyCb = mUidPolicyCallback.lock();
- if (uidPolicyCb != nullptr) {
- uidPolicyCb->onTopUidsChanged(mTopUids);
- }
- }
-
- std::unordered_set<uid_t> mTopUids;
- std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
-};
-
-class TestTranscoder : public TranscoderInterface {
-public:
- TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown) {}
- virtual ~TestTranscoder() {}
-
- // TranscoderInterface
- void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
-
- void start(ClientIdType clientId, JobIdType jobId,
- const TranscodingRequestParcel& /*request*/) override {
- mEventQueue.push_back(Start(clientId, jobId));
- }
- void pause(ClientIdType clientId, JobIdType jobId) override {
- mEventQueue.push_back(Pause(clientId, jobId));
- }
- void resume(ClientIdType clientId, JobIdType jobId) override {
- mEventQueue.push_back(Resume(clientId, jobId));
- }
- void stop(ClientIdType clientId, JobIdType jobId) override {
- mEventQueue.push_back(Stop(clientId, jobId));
- }
-
- void onFinished(ClientIdType clientId, JobIdType jobId) {
- mEventQueue.push_back(Finished(clientId, jobId));
- }
-
- void onFailed(ClientIdType clientId, JobIdType jobId, TranscodingErrorCode err) {
- mLastError = err;
- mEventQueue.push_back(Failed(clientId, jobId));
- }
-
- TranscodingErrorCode getLastError() {
- TranscodingErrorCode result = mLastError;
- mLastError = TranscodingErrorCode::kUnknown;
- return result;
- }
-
- struct Event {
- enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
- ClientIdType clientId;
- JobIdType jobId;
- };
-
- static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
-
-#define DECLARE_EVENT(action) \
- static Event action(ClientIdType clientId, JobIdType jobId) { \
- return {Event::action, clientId, jobId}; \
- }
-
- DECLARE_EVENT(Start);
- DECLARE_EVENT(Pause);
- DECLARE_EVENT(Resume);
- DECLARE_EVENT(Stop);
- DECLARE_EVENT(Finished);
- DECLARE_EVENT(Failed);
-
- const Event& popEvent() {
- if (mEventQueue.empty()) {
- mPoppedEvent = NoEvent;
- } else {
- mPoppedEvent = *mEventQueue.begin();
- mEventQueue.pop_front();
- }
- return mPoppedEvent;
- }
-
-private:
- Event mPoppedEvent;
- std::list<Event> mEventQueue;
- TranscodingErrorCode mLastError;
-};
-
-bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
- return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
-}
-
-struct TestClientCallback : public BnTranscodingClientCallback {
- TestClientCallback(TestTranscoder* owner, int64_t clientId)
- : mOwner(owner), mClientId(clientId) {
- ALOGD("TestClient Created");
- }
-
- Status onTranscodingFinished(int32_t in_jobId,
- const TranscodingResultParcel& in_result) override {
- EXPECT_EQ(in_jobId, in_result.jobId);
- ALOGD("TestClientCallback: received onTranscodingFinished");
- mOwner->onFinished(mClientId, in_jobId);
- return Status::ok();
- }
-
- Status onTranscodingFailed(int32_t in_jobId, TranscodingErrorCode in_errorCode) override {
- mOwner->onFailed(mClientId, in_jobId, in_errorCode);
- return Status::ok();
- }
-
- Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
- int32_t /* in_newAwaitNumber */) override {
- return Status::ok();
- }
-
- Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
- return Status::ok();
- }
-
- virtual ~TestClientCallback() { ALOGI("TestClient destroyed"); };
-
-private:
- TestTranscoder* mOwner;
- int64_t mClientId;
- TestClientCallback(const TestClientCallback&) = delete;
- TestClientCallback& operator=(const TestClientCallback&) = delete;
-};
-
-class TranscodingJobSchedulerTest : public ::testing::Test {
-public:
- TranscodingJobSchedulerTest() { ALOGI("TranscodingJobSchedulerTest created"); }
-
- void SetUp() override {
- ALOGI("TranscodingJobSchedulerTest set up");
- mTranscoder.reset(new TestTranscoder());
- mUidPolicy.reset(new TestUidPolicy());
- mScheduler.reset(new TranscodingJobScheduler(mTranscoder, mUidPolicy));
- mUidPolicy->setCallback(mScheduler);
-
- // Set priority only, ignore other fields for now.
- mOfflineRequest.priority = TranscodingJobPriority::kUnspecified;
- mRealtimeRequest.priority = TranscodingJobPriority::kHigh;
- mClientCallback0 =
- ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(0));
- mClientCallback1 =
- ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(1));
- mClientCallback2 =
- ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(2));
- mClientCallback3 =
- ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(3));
- }
-
- void TearDown() override { ALOGI("TranscodingJobSchedulerTest tear down"); }
-
- ~TranscodingJobSchedulerTest() { ALOGD("TranscodingJobSchedulerTest destroyed"); }
-
- std::shared_ptr<TestTranscoder> mTranscoder;
- std::shared_ptr<TestUidPolicy> mUidPolicy;
- std::shared_ptr<TranscodingJobScheduler> mScheduler;
- TranscodingRequestParcel mOfflineRequest;
- TranscodingRequestParcel mRealtimeRequest;
- std::shared_ptr<TestClientCallback> mClientCallback0;
- std::shared_ptr<TestClientCallback> mClientCallback1;
- std::shared_ptr<TestClientCallback> mClientCallback2;
- std::shared_ptr<TestClientCallback> mClientCallback3;
-};
-
-TEST_F(TranscodingJobSchedulerTest, TestSubmitJob) {
- ALOGD("TestSubmitJob");
-
- // Start with UID(1) on top.
- mUidPolicy->setTop(UID(1));
-
- // Submit offline job to CLIENT(0) in UID(0).
- // Should start immediately (because this is the only job).
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
-
- // Submit real-time job to CLIENT(0).
- // Should pause offline job and start new job, even if UID(0) is not on top.
- mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
-
- // Submit real-time job to CLIENT(0), should be queued after the previous job.
- mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit real-time job to CLIENT(1) in same uid, should be queued after the previous job.
- mScheduler->submit(CLIENT(1), JOB(0), UID(0), mRealtimeRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit real-time job to CLIENT(2) in UID(1).
- // Should pause previous job and start new job, because UID(1) is (has been) top.
- mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
-
- // Submit offline job, shouldn't generate any event.
- mScheduler->submit(CLIENT(2), JOB(1), UID(1), mOfflineRequest, mClientCallback2);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Bring UID(0) to top.
- mUidPolicy->setTop(UID(0));
- // Should pause current job, and resume last job in UID(0).
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(1)));
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestCancelJob) {
- ALOGD("TestCancelJob");
-
- // Submit real-time job JOB(0), should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit real-time job JOB(1), should not start.
- mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit offline job JOB(2), should not start.
- mScheduler->submit(CLIENT(0), JOB(2), UID(0), mOfflineRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Cancel queued real-time job.
- // Cancel real-time job JOB(1), should be cancelled.
- EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(1)));
-
- // Cancel queued offline job.
- // Cancel offline job JOB(2), should be cancelled.
- EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(2)));
-
- // Submit offline job JOB(3), shouldn't cause any event.
- mScheduler->submit(CLIENT(0), JOB(3), UID(0), mOfflineRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Cancel running real-time job JOB(0).
- // - Should be stopped first then cancelled.
- // - Should also start offline job JOB(2) because real-time queue is empty.
- EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(3)));
-
- // Submit real-time job JOB(4), offline JOB(3) should pause and JOB(4) should start.
- mScheduler->submit(CLIENT(0), JOB(4), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(3)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(4)));
-
- // Cancel paused JOB(3). JOB(3) should be stopped.
- EXPECT_TRUE(mScheduler->cancel(CLIENT(0), JOB(3)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), JOB(3)));
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestFinishJob) {
- ALOGD("TestFinishJob");
-
- // Start with unspecified top UID.
- // Finish without any jobs submitted, should be ignored.
- mScheduler->onFinish(CLIENT(0), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit offline job JOB(0), should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit real-time job JOB(1), should pause offline job and start immediately.
- mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
-
- // Submit real-time job JOB(2), should not start.
- mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Finish when the job never started, should be ignored.
- mScheduler->onFinish(CLIENT(0), JOB(2));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // UID(1) moves to top.
- mUidPolicy->setTop(UID(1));
- // Submit real-time job to CLIENT(1) in UID(1), should pause previous job and start new job.
- mScheduler->submit(CLIENT(1), JOB(0), UID(1), mRealtimeRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
-
- // Simulate Finish that arrived late, after pause issued by scheduler.
- // Should still be propagated to client, but shouldn't trigger any new start.
- mScheduler->onFinish(CLIENT(0), JOB(1));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(1)));
-
- // Finish running real-time job, should start next real-time job in queue.
- mScheduler->onFinish(CLIENT(1), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(1), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(2)));
-
- // Finish running real-time job, should resume next job (offline job) in queue.
- mScheduler->onFinish(CLIENT(0), JOB(2));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(2)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
-
- // Finish running offline job.
- mScheduler->onFinish(CLIENT(0), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
-
- // Duplicate finish for last job, should be ignored.
- mScheduler->onFinish(CLIENT(0), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestFailJob) {
- ALOGD("TestFailJob");
-
- // Start with unspecified top UID.
- // Fail without any jobs submitted, should be ignored.
- mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit offline job JOB(0), should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mOfflineRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit real-time job JOB(1), should pause offline job and start immediately.
- mScheduler->submit(CLIENT(0), JOB(1), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(1)));
-
- // Submit real-time job JOB(2), should not start.
- mScheduler->submit(CLIENT(0), JOB(2), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Fail when the job never started, should be ignored.
- mScheduler->onError(CLIENT(0), JOB(2), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // UID(1) moves to top.
- mUidPolicy->setTop(UID(1));
- // Submit real-time job to CLIENT(1) in UID(1), should pause previous job and start new job.
- mScheduler->submit(CLIENT(1), JOB(0), UID(1), mRealtimeRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(1)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
-
- // Simulate Fail that arrived late, after pause issued by scheduler.
- // Should still be propagated to client, but shouldn't trigger any new start.
- mScheduler->onError(CLIENT(0), JOB(1), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(1)));
-
- // Fail running real-time job, should start next real-time job in queue.
- mScheduler->onError(CLIENT(1), JOB(0), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(2)));
-
- // Fail running real-time job, should resume next job (offline job) in queue.
- mScheduler->onError(CLIENT(0), JOB(2), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(2)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
-
- // Fail running offline job, and test error code propagation.
- mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kInvalidBitstream);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidBitstream);
-
- // Duplicate fail for last job, should be ignored.
- mScheduler->onError(CLIENT(0), JOB(0), TranscodingErrorCode::kUnknown);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestTopUidChanged) {
- ALOGD("TestTopUidChanged");
-
- // Start with unspecified top UID.
- // Submit real-time job to CLIENT(0), job should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit offline job to CLIENT(0), should not start.
- mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Move UID(1) to top.
- mUidPolicy->setTop(UID(1));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit real-time job to CLIENT(2) in different uid UID(1).
- // Should pause previous job and start new job.
- mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
-
- // Bring UID(0) back to top.
- mUidPolicy->setTop(UID(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
-
- // Bring invalid uid to top.
- mUidPolicy->setTop(kInvalidUid);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Finish job, next real-time job should resume.
- mScheduler->onFinish(CLIENT(0), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
-
- // Finish job, offline job should start.
- mScheduler->onFinish(CLIENT(2), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestTopUidSetChanged) {
- ALOGD("TestTopUidChanged_MultipleUids");
-
- // Start with unspecified top UID.
- // Submit real-time job to CLIENT(0), job should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit offline job to CLIENT(0), should not start.
- mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Set UID(0), UID(1) to top set.
- // UID(0) should continue to run.
- mUidPolicy->setTop({UID(0), UID(1)});
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit real-time job to CLIENT(2) in different uid UID(1).
- // UID(0) should pause and UID(1) should start.
- mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
-
- // Remove UID(0) from top set, and only leave UID(1) in the set.
- // UID(1) should continue to run.
- mUidPolicy->setTop(UID(1));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Set UID(0), UID(2) to top set.
- // UID(1) should continue to run.
- mUidPolicy->setTop({UID(1), UID(2)});
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Bring UID(0) back to top.
- mUidPolicy->setTop(UID(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
-
- // Bring invalid uid to top.
- mUidPolicy->setTop(kInvalidUid);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Finish job, next real-time job from UID(1) should resume, even if UID(1) no longer top.
- mScheduler->onFinish(CLIENT(0), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
-
- // Finish job, offline job should start.
- mScheduler->onFinish(CLIENT(2), JOB(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), JOB(0)));
-}
-
-TEST_F(TranscodingJobSchedulerTest, TestResourceLost) {
- ALOGD("TestResourceLost");
-
- // Start with unspecified top UID.
- // Submit real-time job to CLIENT(0), job should start immediately.
- mScheduler->submit(CLIENT(0), JOB(0), UID(0), mRealtimeRequest, mClientCallback0);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), JOB(0)));
-
- // Submit offline job to CLIENT(0), should not start.
- mScheduler->submit(CLIENT(1), JOB(0), UID(0), mOfflineRequest, mClientCallback1);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Move UID(1) to top.
- mUidPolicy->setTop(UID(1));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Submit real-time job to CLIENT(2) in different uid UID(1).
- // Should pause previous job and start new job.
- mScheduler->submit(CLIENT(2), JOB(0), UID(1), mRealtimeRequest, mClientCallback2);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), JOB(0)));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), JOB(0)));
-
- // Test 1: No queue change during resource loss.
- // Signal resource lost.
- mScheduler->onResourceLost();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Signal resource available, CLIENT(2) should resume.
- mScheduler->onResourceAvailable();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), JOB(0)));
-
- // Test 2: Change of queue order during resource loss.
- // Signal resource lost.
- mScheduler->onResourceLost();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Move UID(0) back to top, should have no resume due to no resource.
- mUidPolicy->setTop(UID(0));
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Signal resource available, CLIENT(0) should resume.
- mScheduler->onResourceAvailable();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), JOB(0)));
-
- // Test 3: Adding new queue during resource loss.
- // Signal resource lost.
- mScheduler->onResourceLost();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Move UID(2) to top.
- mUidPolicy->setTop(UID(2));
-
- // Submit real-time job to CLIENT(3) in UID(2), job shouldn't start due to no resource.
- mScheduler->submit(CLIENT(3), JOB(0), UID(2), mRealtimeRequest, mClientCallback3);
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
-
- // Signal resource available, CLIENT(3)'s job should start.
- mScheduler->onResourceAvailable();
- EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), JOB(0)));
-}
-
-} // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
new file mode 100644
index 0000000..fa52f63
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -0,0 +1,655 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingSessionController
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionControllerTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gtest/gtest.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingSessionController.h>
+#include <utils/Log.h>
+
+#include <unordered_set>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::TranscodingRequestParcel;
+
+constexpr ClientIdType kClientId = 1000;
+constexpr SessionIdType kClientSessionId = 0;
+constexpr uid_t kClientUid = 5000;
+constexpr pid_t kClientPid = 10000;
+constexpr uid_t kInvalidUid = (uid_t)-1;
+constexpr pid_t kInvalidPid = (pid_t)-1;
+
+#define CLIENT(n) (kClientId + (n))
+#define SESSION(n) (kClientSessionId + (n))
+#define UID(n) (kClientUid + (n))
+#define PID(n) (kClientPid + (n))
+
+class TestUidPolicy : public UidPolicyInterface {
+public:
+ TestUidPolicy() = default;
+ virtual ~TestUidPolicy() = default;
+
+ // UidPolicyInterface
+ void registerMonitorUid(uid_t /*uid*/) override {}
+ void unregisterMonitorUid(uid_t /*uid*/) override {}
+ bool isUidOnTop(uid_t uid) override { return mTopUids.count(uid) > 0; }
+ std::unordered_set<uid_t> getTopUids() const override { return mTopUids; }
+ void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override {
+ mUidPolicyCallback = cb;
+ }
+ void setTop(uid_t uid) {
+ std::unordered_set<uid_t> uids = {uid};
+ setTop(uids);
+ }
+ void setTop(const std::unordered_set<uid_t>& uids) {
+ mTopUids = uids;
+ auto uidPolicyCb = mUidPolicyCallback.lock();
+ if (uidPolicyCb != nullptr) {
+ uidPolicyCb->onTopUidsChanged(mTopUids);
+ }
+ }
+
+ std::unordered_set<uid_t> mTopUids;
+ std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};
+
+class TestResourcePolicy : public ResourcePolicyInterface {
+public:
+ TestResourcePolicy() { reset(); }
+ virtual ~TestResourcePolicy() = default;
+
+ // ResourcePolicyInterface
+ void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& /*cb*/) override {}
+ void setPidResourceLost(pid_t pid) override {
+ mResourceLostPid = pid;
+ }
+ // ~ResourcePolicyInterface
+
+ pid_t getPid() {
+ pid_t result = mResourceLostPid;
+ reset();
+ return result;
+ }
+
+private:
+ void reset() {
+ mResourceLostPid = kInvalidPid;
+ }
+ pid_t mResourceLostPid;
+};
+
+class TestTranscoder : public TranscoderInterface {
+public:
+ TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown) {}
+ virtual ~TestTranscoder() {}
+
+ // TranscoderInterface
+ void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) override {}
+
+ void start(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& /*request*/,
+ const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+ mEventQueue.push_back(Start(clientId, sessionId));
+ }
+ void pause(ClientIdType clientId, SessionIdType sessionId) override {
+ mEventQueue.push_back(Pause(clientId, sessionId));
+ }
+ void resume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& /*request*/,
+ const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+ mEventQueue.push_back(Resume(clientId, sessionId));
+ }
+ void stop(ClientIdType clientId, SessionIdType sessionId) override {
+ mEventQueue.push_back(Stop(clientId, sessionId));
+ }
+
+ void onFinished(ClientIdType clientId, SessionIdType sessionId) {
+ mEventQueue.push_back(Finished(clientId, sessionId));
+ }
+
+ void onFailed(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) {
+ mLastError = err;
+ mEventQueue.push_back(Failed(clientId, sessionId));
+ }
+
+ TranscodingErrorCode getLastError() {
+ TranscodingErrorCode result = mLastError;
+ mLastError = TranscodingErrorCode::kUnknown;
+ return result;
+ }
+
+ struct Event {
+ enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
+ ClientIdType clientId;
+ SessionIdType sessionId;
+ };
+
+ static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+#define DECLARE_EVENT(action) \
+ static Event action(ClientIdType clientId, SessionIdType sessionId) { \
+ return {Event::action, clientId, sessionId}; \
+ }
+
+ DECLARE_EVENT(Start);
+ DECLARE_EVENT(Pause);
+ DECLARE_EVENT(Resume);
+ DECLARE_EVENT(Stop);
+ DECLARE_EVENT(Finished);
+ DECLARE_EVENT(Failed);
+
+ const Event& popEvent() {
+ if (mEventQueue.empty()) {
+ mPoppedEvent = NoEvent;
+ } else {
+ mPoppedEvent = *mEventQueue.begin();
+ mEventQueue.pop_front();
+ }
+ return mPoppedEvent;
+ }
+
+private:
+ Event mPoppedEvent;
+ std::list<Event> mEventQueue;
+ TranscodingErrorCode mLastError;
+};
+
+bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
+ return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
+struct TestClientCallback : public BnTranscodingClientCallback {
+ TestClientCallback(TestTranscoder* owner, int64_t clientId)
+ : mOwner(owner), mClientId(clientId) {
+ ALOGD("TestClient Created");
+ }
+
+ Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+ ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
+ return Status::ok();
+ }
+
+ Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+ Status onTranscodingFinished(int32_t in_sessionId,
+ const TranscodingResultParcel& in_result) override {
+ EXPECT_EQ(in_sessionId, in_result.sessionId);
+ ALOGD("TestClientCallback: received onTranscodingFinished");
+ mOwner->onFinished(mClientId, in_sessionId);
+ return Status::ok();
+ }
+
+ Status onTranscodingFailed(int32_t in_sessionId, TranscodingErrorCode in_errorCode) override {
+ mOwner->onFailed(mClientId, in_sessionId, in_errorCode);
+ return Status::ok();
+ }
+
+ Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+ int32_t /* in_oldAwaitNumber */,
+ int32_t /* in_newAwaitNumber */) override {
+ return Status::ok();
+ }
+
+ Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
+ return Status::ok();
+ }
+
+ virtual ~TestClientCallback() { ALOGI("TestClient destroyed"); };
+
+private:
+ TestTranscoder* mOwner;
+ int64_t mClientId;
+ TestClientCallback(const TestClientCallback&) = delete;
+ TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+class TranscodingSessionControllerTest : public ::testing::Test {
+public:
+ TranscodingSessionControllerTest() { ALOGI("TranscodingSessionControllerTest created"); }
+
+ void SetUp() override {
+ ALOGI("TranscodingSessionControllerTest set up");
+ mTranscoder.reset(new TestTranscoder());
+ mUidPolicy.reset(new TestUidPolicy());
+ mResourcePolicy.reset(new TestResourcePolicy());
+ mController.reset(
+ new TranscodingSessionController(mTranscoder, mUidPolicy, mResourcePolicy));
+ mUidPolicy->setCallback(mController);
+
+ // Set priority only, ignore other fields for now.
+ mOfflineRequest.priority = TranscodingSessionPriority::kUnspecified;
+ mRealtimeRequest.priority = TranscodingSessionPriority::kHigh;
+ mClientCallback0 =
+ ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(0));
+ mClientCallback1 =
+ ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(1));
+ mClientCallback2 =
+ ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(2));
+ mClientCallback3 =
+ ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(), CLIENT(3));
+ }
+
+ void TearDown() override { ALOGI("TranscodingSessionControllerTest tear down"); }
+
+ ~TranscodingSessionControllerTest() { ALOGD("TranscodingSessionControllerTest destroyed"); }
+
+ std::shared_ptr<TestTranscoder> mTranscoder;
+ std::shared_ptr<TestUidPolicy> mUidPolicy;
+ std::shared_ptr<TestResourcePolicy> mResourcePolicy;
+ std::shared_ptr<TranscodingSessionController> mController;
+ TranscodingRequestParcel mOfflineRequest;
+ TranscodingRequestParcel mRealtimeRequest;
+ std::shared_ptr<TestClientCallback> mClientCallback0;
+ std::shared_ptr<TestClientCallback> mClientCallback1;
+ std::shared_ptr<TestClientCallback> mClientCallback2;
+ std::shared_ptr<TestClientCallback> mClientCallback3;
+};
+
+TEST_F(TranscodingSessionControllerTest, TestSubmitSession) {
+ ALOGD("TestSubmitSession");
+
+ // Start with UID(1) on top.
+ mUidPolicy->setTop(UID(1));
+
+ // Submit offline session to CLIENT(0) in UID(0).
+ // Should start immediately (because this is the only session).
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
+
+ // Submit real-time session to CLIENT(0).
+ // Should pause offline session and start new session, even if UID(0) is not on top.
+ mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+ // Submit real-time session to CLIENT(0), should be queued after the previous session.
+ mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit real-time session to CLIENT(1) in same uid, should be queued after the previous
+ // session.
+ mController->submit(CLIENT(1), SESSION(0), UID(0), mRealtimeRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit real-time session to CLIENT(2) in UID(1).
+ // Should pause previous session and start new session, because UID(1) is (has been) top.
+ mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+ // Submit offline session, shouldn't generate any event.
+ mController->submit(CLIENT(2), SESSION(1), UID(1), mOfflineRequest, mClientCallback2);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Bring UID(0) to top.
+ mUidPolicy->setTop(UID(0));
+ // Should pause current session, and resume last session in UID(0).
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(1)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelSession) {
+ ALOGD("TestCancelSession");
+
+ // Submit real-time session SESSION(0), should start immediately.
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit real-time session SESSION(1), should not start.
+ mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit offline session SESSION(2), should not start.
+ mController->submit(CLIENT(0), SESSION(2), UID(0), mOfflineRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Cancel queued real-time session.
+ // Cancel real-time session SESSION(1), should be cancelled.
+ EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(1)));
+
+ // Cancel queued offline session.
+ // Cancel offline session SESSION(2), should be cancelled.
+ EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
+
+ // Submit offline session SESSION(3), shouldn't cause any event.
+ mController->submit(CLIENT(0), SESSION(3), UID(0), mOfflineRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Cancel running real-time session SESSION(0).
+ // - Should be stopped first then cancelled.
+ // - Should also start offline session SESSION(3) because real-time queue is empty.
+ EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
+
+ // Submit real-time session SESSION(4), offline SESSION(3) should pause and SESSION(4)
+ // should start.
+ mController->submit(CLIENT(0), SESSION(4), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
+
+ // Cancel paused SESSION(3). SESSION(3) should be stopped.
+ EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(3)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(3)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFinishSession) {
+ ALOGD("TestFinishSession");
+
+ // Start with unspecified top UID.
+ // Finish without any sessions submitted, should be ignored.
+ mController->onFinish(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit offline session SESSION(0), should start immediately.
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit real-time session SESSION(1), should pause offline session and start immediately.
+ mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+ // Submit real-time session SESSION(2), should not start.
+ mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Finish when the session never started, should be ignored.
+ mController->onFinish(CLIENT(0), SESSION(2));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // UID(1) moves to top.
+ mUidPolicy->setTop(UID(1));
+ // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+ // new session.
+ mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+ // Simulate Finish that arrived late, after pause issued by controller.
+ // Should still be propagated to client, but shouldn't trigger any new start.
+ mController->onFinish(CLIENT(0), SESSION(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(1)));
+
+ // Finish running real-time session, should start next real-time session in queue.
+ mController->onFinish(CLIENT(1), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(1), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+ // Finish running real-time session, should resume next session (offline session) in queue.
+ mController->onFinish(CLIENT(0), SESSION(2));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(2)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+ // Finish running offline session.
+ mController->onFinish(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+
+ // Duplicate finish for last session, should be ignored.
+ mController->onFinish(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFailSession) {
+ ALOGD("TestFailSession");
+
+ // Start with unspecified top UID.
+ // Fail without any sessions submitted, should be ignored.
+ mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit offline session SESSION(0), should start immediately.
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit real-time session SESSION(1), should pause offline session and start immediately.
+ mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+ // Submit real-time session SESSION(2), should not start.
+ mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Fail when the session never started, should be ignored.
+ mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // UID(1) moves to top.
+ mUidPolicy->setTop(UID(1));
+ // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+ // new session.
+ mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+ // Simulate Fail that arrived late, after pause issued by controller.
+ // Should still be propagated to client, but shouldn't trigger any new start.
+ mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+
+ // Fail running real-time session, should start next real-time session in queue.
+ mController->onError(CLIENT(1), SESSION(0), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+ // Fail running real-time session, should resume next session (offline session) in queue.
+ mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+ // Fail running offline session, and test error code propagation.
+ mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kInvalidOperation);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+
+ // Duplicate fail for last session, should be ignored.
+ mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidChanged) {
+ ALOGD("TestTopUidChanged");
+
+ // Start with unspecified top UID.
+ // Submit real-time session to CLIENT(0), session should start immediately.
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit offline session to CLIENT(1), should not start.
+ mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Move UID(1) to top.
+ mUidPolicy->setTop(UID(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit real-time session to CLIENT(2) in different uid UID(1).
+ // Should pause previous session and start new session.
+ mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+ // Bring UID(0) back to top.
+ mUidPolicy->setTop(UID(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+ // Bring invalid uid to top.
+ mUidPolicy->setTop(kInvalidUid);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Finish session, next real-time session should resume.
+ mController->onFinish(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+ // Finish session, offline session should start.
+ mController->onFinish(CLIENT(2), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidSetChanged) {
+ ALOGD("TestTopUidChanged_MultipleUids");
+
+ // Start with unspecified top UID.
+ // Submit real-time session to CLIENT(0), session should start immediately.
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit offline session to CLIENT(1), should not start.
+ mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Set UID(0), UID(1) to top set.
+ // UID(0) should continue to run.
+ mUidPolicy->setTop({UID(0), UID(1)});
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit real-time session to CLIENT(2) in different uid UID(1).
+ // UID(0) should pause and UID(1) should start.
+ mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+ // Remove UID(0) from top set, and only leave UID(1) in the set.
+ // UID(1) should continue to run.
+ mUidPolicy->setTop(UID(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Set UID(1), UID(2) to top set.
+ // UID(1) should continue to run.
+ mUidPolicy->setTop({UID(1), UID(2)});
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Bring UID(0) back to top.
+ mUidPolicy->setTop(UID(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+ // Bring invalid uid to top.
+ mUidPolicy->setTop(kInvalidUid);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Finish session, next real-time session from UID(1) should resume, even if UID(1)
+ // no longer top.
+ mController->onFinish(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+ // Finish session, offline session should start.
+ mController->onFinish(CLIENT(2), SESSION(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestResourceLost) {
+ ALOGD("TestResourceLost");
+
+ // Start with unspecified top UID.
+ // Submit real-time session to CLIENT(0), session should start immediately.
+ mRealtimeRequest.clientPid = PID(0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+ // Submit offline session to CLIENT(1) (in UID(0)), should not start.
+ mOfflineRequest.clientPid = PID(0);
+ mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Move UID(1) to top.
+ mUidPolicy->setTop(UID(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Submit real-time session to CLIENT(2) in different uid UID(1).
+ // Should pause previous session and start new session.
+ mRealtimeRequest.clientPid = PID(1);
+ mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+ // Test 0: No call into ResourcePolicy if resource lost is from a non-running
+ // or non-existent session.
+ mController->onResourceLost(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mResourcePolicy->getPid(), kInvalidPid);
+ mController->onResourceLost(CLIENT(3), SESSION(0));
+ EXPECT_EQ(mResourcePolicy->getPid(), kInvalidPid);
+
+ // Test 1: No queue change during resource loss.
+ // Signal resource lost.
+ mController->onResourceLost(CLIENT(2), SESSION(0));
+ EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Signal resource available, CLIENT(2) should resume.
+ mController->onResourceAvailable();
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+ // Test 2: Change of queue order during resource loss.
+ // Signal resource lost.
+ mController->onResourceLost(CLIENT(2), SESSION(0));
+ EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Move UID(0) back to top, should have no resume due to no resource.
+ mUidPolicy->setTop(UID(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Signal resource available, CLIENT(0) should resume.
+ mController->onResourceAvailable();
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+ // Test 3: Adding new queue during resource loss.
+ // Signal resource lost.
+ mController->onResourceLost(CLIENT(0), SESSION(0));
+ EXPECT_EQ(mResourcePolicy->getPid(), PID(0));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Move UID(2) to top.
+ mUidPolicy->setTop(UID(2));
+
+ // Submit real-time session to CLIENT(3) in UID(2), session shouldn't start due to no resource.
+ mRealtimeRequest.clientPid = PID(2);
+ mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+ // Signal resource available, CLIENT(3)'s session should start.
+ mController->onResourceAvailable();
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), SESSION(0)));
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
new file mode 100644
index 0000000..80d1ec3
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/transcoder/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/assets/cubicle_avc_480x240_aac_24KHz.mp4
rename to media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
new file mode 100644
index 0000000..df42a15
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
new file mode 100644
index 0000000..7794b99
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
new file mode 100644
index 0000000..b50d8e4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
new file mode 100644
index 0000000..92dda3b
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
new file mode 100644
index 0000000..2fe37bd
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index ff6df2c..5db9258 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -29,6 +29,6 @@
#adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
adb shell /data/nativetest/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
-echo "testing TranscodingJobScheduler"
-#adb shell /data/nativetest64/TranscodingJobScheduler_tests/TranscodingJobScheduler_tests
-adb shell /data/nativetest/TranscodingJobScheduler_tests/TranscodingJobScheduler_tests
+echo "testing TranscodingSessionController"
+#adb shell /data/nativetest64/TranscodingSessionController_tests/TranscodingSessionController_tests
+adb shell /data/nativetest/TranscodingSessionController_tests/TranscodingSessionController_tests
diff --git a/media/libmediatranscoding/tests/push_assets.sh b/media/libmediatranscoding/tests/push_assets.sh
new file mode 100755
index 0000000..cc71514
--- /dev/null
+++ b/media/libmediatranscoding/tests/push_assets.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Pushes the assets to the /data/local/tmp.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+ if [ -z "$ANDROID_BUILD_TOP" ]; then
+ echo "Android build environment not set"
+ exit -1
+ fi
+
+ # ensure we have mm
+ . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+ mm
+
+ echo "waiting for device"
+
+ adb root && adb wait-for-device remount
+fi
+
+echo "Copying files to device"
+
+adb shell mkdir -p /data/local/tmp/TranscodingTestAssets
+
+FILES=$ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/TranscodingTestAssets/*
+for file in $FILES
+do
+adb push --sync "$file" /data/local/tmp/TranscodingTestAssets
+done
+
+echo "Copy done"
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
index 7f6630f..aa7cdde 100644
--- a/media/libmediatranscoding/transcoder/Android.bp
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -14,13 +14,16 @@
* limitations under the License.
*/
-cc_library_shared {
- name: "libmediatranscoder",
+cc_defaults {
+ name: "mediatranscoder_defaults",
srcs: [
"MediaSampleQueue.cpp",
"MediaSampleReaderNDK.cpp",
+ "MediaSampleWriter.cpp",
"MediaTrackTranscoder.cpp",
+ "MediaTranscoder.cpp",
+ "NdkCommon.cpp",
"PassthroughTrackTranscoder.cpp",
"VideoTrackTranscoder.cpp",
],
@@ -31,6 +34,7 @@
"libmediandk",
"libnativewindow",
"libutils",
+ "libbinder_ndk",
],
export_include_dirs: [
@@ -55,3 +59,9 @@
cfi: true,
},
}
+
+cc_library {
+ name: "libmediatranscoder",
+ defaults: ["mediatranscoder_defaults"],
+}
+
diff --git a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
index 691ee1c..b085c98 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
@@ -47,6 +47,11 @@
return mAborted;
}
+bool MediaSampleQueue::isEmpty() {
+ std::scoped_lock<std::mutex> lock(mMutex);
+ return mSampleQueue.empty();
+}
+
void MediaSampleQueue::abort() {
std::scoped_lock<std::mutex> lock(mMutex);
// Clear the queue and notify consumers.
diff --git a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
index a0096c7..1a6e7ed 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
@@ -21,7 +21,7 @@
#include <media/MediaSampleReaderNDK.h>
#include <algorithm>
-#include <vector>
+#include <cmath>
namespace android {
@@ -46,12 +46,6 @@
}
auto sampleReader = std::shared_ptr<MediaSampleReaderNDK>(new MediaSampleReaderNDK(extractor));
- status = sampleReader->init();
- if (status != AMEDIA_OK) {
- LOG(ERROR) << "MediaSampleReaderNDK::init returned error: " << status;
- return nullptr;
- }
-
return sampleReader;
}
@@ -59,39 +53,42 @@
: mExtractor(extractor), mTrackCount(AMediaExtractor_getTrackCount(mExtractor)) {
if (mTrackCount > 0) {
mTrackCursors.resize(mTrackCount);
- mTrackCursors.resize(mTrackCount);
}
}
-media_status_t MediaSampleReaderNDK::init() {
- for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
- media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
- if (status != AMEDIA_OK) {
- LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
- return status;
- }
- }
-
- mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
- if (mExtractorTrackIndex >= 0) {
- mTrackCursors[mExtractorTrackIndex].current.set(mExtractorSampleIndex,
- AMediaExtractor_getSampleTime(mExtractor));
- } else if (mTrackCount > 0) {
- // The extractor track index is only allowed to be invalid if there are no tracks.
- LOG(ERROR) << "Track index " << mExtractorTrackIndex << " is invalid for track count "
- << mTrackCount;
- return AMEDIA_ERROR_MALFORMED;
- }
-
- return AMEDIA_OK;
-}
-
MediaSampleReaderNDK::~MediaSampleReaderNDK() {
if (mExtractor != nullptr) {
AMediaExtractor_delete(mExtractor);
}
}
+void MediaSampleReaderNDK::advanceTrack_l(int trackIndex) {
+ if (!mEnforceSequentialAccess) {
+ // Note: Positioning the extractor before advancing the track is needed for two reasons:
+ // 1. To enable multiple advances without explicitly letting the extractor catch up.
+ // 2. To prevent the extractor from being farther than "next".
+ (void)moveToTrack_l(trackIndex);
+ }
+
+ SampleCursor& cursor = mTrackCursors[trackIndex];
+ cursor.previous = cursor.current;
+ cursor.current = cursor.next;
+ cursor.next.reset();
+
+ if (mEnforceSequentialAccess && trackIndex == mExtractorTrackIndex) {
+ while (advanceExtractor_l()) {
+ SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+ if (cursor.current.isSet && cursor.current.index == mExtractorSampleIndex) {
+ if (mExtractorTrackIndex != trackIndex) {
+ mTrackSignals[mExtractorTrackIndex].notify_all();
+ }
+ break;
+ }
+ }
+ }
+ return;
+}
+
bool MediaSampleReaderNDK::advanceExtractor_l() {
// Reset the "next" sample time whenever the extractor advances past a sample that is current,
// to ensure that "next" is appropriately updated when the extractor advances over the next
@@ -102,6 +99,11 @@
}
if (!AMediaExtractor_advance(mExtractor)) {
+ LOG(DEBUG) << " EOS in advanceExtractor_l";
+ mEosReached = true;
+ for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+ it->second.notify_all();
+ }
return false;
}
@@ -116,6 +118,7 @@
cursor.next.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
}
}
+
return true;
}
@@ -135,6 +138,8 @@
LOG(ERROR) << "Unable to seek to " << seekToTimeUs << ", target " << targetTimeUs;
return status;
}
+
+ mEosReached = false;
mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
int64_t sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
@@ -149,38 +154,15 @@
return AMEDIA_OK;
}
-void MediaSampleReaderNDK::advanceTrack(int trackIndex) {
- std::scoped_lock lock(mExtractorMutex);
-
- if (trackIndex < 0 || trackIndex >= mTrackCount) {
- LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
- return;
- }
-
- // Note: Positioning the extractor before advancing the track is needed for two reasons:
- // 1. To enable multiple advances without explicitly letting the extractor catch up.
- // 2. To prevent the extractor from being farther than "next".
- (void)positionExtractorForTrack_l(trackIndex);
-
- SampleCursor& cursor = mTrackCursors[trackIndex];
- cursor.previous = cursor.current;
- cursor.current = cursor.next;
- cursor.next.reset();
-}
-
-media_status_t MediaSampleReaderNDK::positionExtractorForTrack_l(int trackIndex) {
- media_status_t status = AMEDIA_OK;
- const SampleCursor& cursor = mTrackCursors[trackIndex];
-
- // Seek backwards if the extractor is ahead of the current time.
- if (cursor.current.isSet && mExtractorSampleIndex > cursor.current.index) {
- status = seekExtractorBackwards_l(cursor.current.timeStampUs, trackIndex,
- cursor.current.index);
+media_status_t MediaSampleReaderNDK::moveToSample_l(SamplePosition& pos, int trackIndex) {
+ // Seek backwards if the extractor is ahead of the sample.
+ if (pos.isSet && mExtractorSampleIndex > pos.index) {
+ media_status_t status = seekExtractorBackwards_l(pos.timeStampUs, trackIndex, pos.index);
if (status != AMEDIA_OK) return status;
}
- // Advance until extractor points to the current sample.
- while (!(cursor.current.isSet && cursor.current.index == mExtractorSampleIndex)) {
+ // Advance until extractor points to the sample.
+ while (!(pos.isSet && pos.index == mExtractorSampleIndex)) {
if (!advanceExtractor_l()) {
return AMEDIA_ERROR_END_OF_STREAM;
}
@@ -189,18 +171,233 @@
return AMEDIA_OK;
}
-media_status_t MediaSampleReaderNDK::getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) {
+media_status_t MediaSampleReaderNDK::moveToTrack_l(int trackIndex) {
+ return moveToSample_l(mTrackCursors[trackIndex].current, trackIndex);
+}
+
+media_status_t MediaSampleReaderNDK::waitForTrack_l(int trackIndex,
+ std::unique_lock<std::mutex>& lockHeld) {
+ while (trackIndex != mExtractorTrackIndex && !mEosReached && mEnforceSequentialAccess) {
+ mTrackSignals[trackIndex].wait(lockHeld);
+ }
+
+ if (mEosReached) {
+ return AMEDIA_ERROR_END_OF_STREAM;
+ }
+
+ if (!mEnforceSequentialAccess) {
+ return moveToTrack_l(trackIndex);
+ }
+
+ return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::primeExtractorForTrack_l(
+ int trackIndex, std::unique_lock<std::mutex>& lockHeld) {
+ if (mExtractorTrackIndex < 0) {
+ mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+ if (mExtractorTrackIndex < 0) {
+ return AMEDIA_ERROR_END_OF_STREAM;
+ }
+ mTrackCursors[mExtractorTrackIndex].current.set(mExtractorSampleIndex,
+ AMediaExtractor_getSampleTime(mExtractor));
+ }
+
+ if (mEnforceSequentialAccess) {
+ return waitForTrack_l(trackIndex, lockHeld);
+ } else {
+ return moveToTrack_l(trackIndex);
+ }
+}
+
+media_status_t MediaSampleReaderNDK::selectTrack(int trackIndex) {
std::scoped_lock lock(mExtractorMutex);
if (trackIndex < 0 || trackIndex >= mTrackCount) {
LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
return AMEDIA_ERROR_INVALID_PARAMETER;
+ } else if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+ LOG(ERROR) << "TrackIndex " << trackIndex << " already selected";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ } else if (mExtractorTrackIndex >= 0) {
+ LOG(ERROR) << "Tracks must be selected before sample reading begins.";
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+ return status;
+ }
+
+ mTrackSignals.emplace(std::piecewise_construct, std::forward_as_tuple(trackIndex),
+ std::forward_as_tuple());
+ return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::unselectTrack(int trackIndex) {
+ std::scoped_lock lock(mExtractorMutex);
+
+ if (trackIndex < 0 || trackIndex >= mTrackCount) {
+ LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ } else if (mExtractorTrackIndex >= 0) {
+ LOG(ERROR) << "unselectTrack must be called before sample reading begins.";
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ auto it = mTrackSignals.find(trackIndex);
+ if (it == mTrackSignals.end()) {
+ LOG(ERROR) << "TrackIndex " << trackIndex << " is not selected";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ mTrackSignals.erase(it);
+
+ media_status_t status = AMediaExtractor_unselectTrack(mExtractor, trackIndex);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+ return status;
+ }
+
+ return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::setEnforceSequentialAccess(bool enforce) {
+ LOG(DEBUG) << "setEnforceSequentialAccess( " << enforce << " )";
+
+ std::scoped_lock lock(mExtractorMutex);
+
+ if (mEnforceSequentialAccess && !enforce) {
+ // If switching from enforcing to not enforcing sequential access there may be threads
+ // waiting that need to be woken up.
+ for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+ it->second.notify_all();
+ }
+ } else if (!mEnforceSequentialAccess && enforce && mExtractorTrackIndex >= 0) {
+ // If switching from not enforcing to enforcing sequential access the extractor needs to be
+ // positioned for the track farthest behind so that it won't get stuck waiting.
+ struct {
+ SamplePosition* pos = nullptr;
+ int trackIndex = -1;
+ } earliestSample;
+
+ for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+ SamplePosition& lastKnownTrackPosition = mTrackCursors[trackIndex].current.isSet
+ ? mTrackCursors[trackIndex].current
+ : mTrackCursors[trackIndex].previous;
+
+ if (lastKnownTrackPosition.isSet) {
+ if (earliestSample.pos == nullptr ||
+ earliestSample.pos->index > lastKnownTrackPosition.index) {
+ earliestSample.pos = &lastKnownTrackPosition;
+ earliestSample.trackIndex = trackIndex;
+ }
+ }
+ }
+
+ if (earliestSample.pos == nullptr) {
+ LOG(ERROR) << "No known sample position found";
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ media_status_t status = moveToSample_l(*earliestSample.pos, earliestSample.trackIndex);
+ if (status != AMEDIA_OK) return status;
+
+ while (!(mTrackCursors[mExtractorTrackIndex].current.isSet &&
+ mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex)) {
+ if (!advanceExtractor_l()) {
+ return AMEDIA_ERROR_END_OF_STREAM;
+ }
+ }
+ }
+
+ mEnforceSequentialAccess = enforce;
+ return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) {
+ std::scoped_lock lock(mExtractorMutex);
+ media_status_t status = AMEDIA_OK;
+
+ if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+ LOG(ERROR) << "Track is not selected.";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ } else if (bitrate == nullptr) {
+ LOG(ERROR) << "bitrate pointer is NULL.";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ } else if (mExtractorTrackIndex >= 0) {
+ LOG(ERROR) << "getEstimatedBitrateForTrack must be called before sample reading begins.";
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ // Sample the track.
+ static constexpr int64_t kSamplingDurationUs = 10 * 1000 * 1000; // 10 seconds
+ size_t lastSampleSize = 0;
+ size_t totalSampleSize = 0;
+ int64_t firstSampleTimeUs = 0;
+ int64_t lastSampleTimeUs = 0;
+
+ do {
+ if (AMediaExtractor_getSampleTrackIndex(mExtractor) == trackIndex) {
+ lastSampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+ if (totalSampleSize == 0) {
+ firstSampleTimeUs = lastSampleTimeUs;
+ }
+
+ lastSampleSize = AMediaExtractor_getSampleSize(mExtractor);
+ totalSampleSize += lastSampleSize;
+ }
+ } while ((lastSampleTimeUs - firstSampleTimeUs) < kSamplingDurationUs &&
+ AMediaExtractor_advance(mExtractor));
+
+ // Reset the extractor to the beginning.
+ status = AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to reset extractor: " << status;
+ return status;
+ }
+
+ int64_t durationUs = 0;
+ const int64_t sampledDurationUs = lastSampleTimeUs - firstSampleTimeUs;
+
+ if (sampledDurationUs < kSamplingDurationUs) {
+ // Track is shorter than the sampling duration so use the full track duration to get better
+ // accuracy (i.e. don't skip the last sample).
+ AMediaFormat* trackFormat = getTrackFormat(trackIndex);
+ if (!AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+ durationUs = 0;
+ }
+ AMediaFormat_delete(trackFormat);
+ }
+
+ if (durationUs == 0) {
+ // The sampled duration does not account for the last sample's duration so its size should
+ // not be included either.
+ totalSampleSize -= lastSampleSize;
+ durationUs = sampledDurationUs;
+ }
+
+ if (totalSampleSize == 0 || durationUs <= 0) {
+ LOG(ERROR) << "Unable to estimate track bitrate";
+ return AMEDIA_ERROR_MALFORMED;
+ }
+
+ *bitrate = roundf((float)totalSampleSize * 8 * 1000000 / durationUs);
+ return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) {
+ std::unique_lock<std::mutex> lock(mExtractorMutex);
+
+ if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+ LOG(ERROR) << "Track not selected.";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
} else if (info == nullptr) {
LOG(ERROR) << "MediaSampleInfo pointer is NULL.";
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- media_status_t status = positionExtractorForTrack_l(trackIndex);
+ media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
if (status == AMEDIA_OK) {
info->presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
info->flags = AMediaExtractor_getSampleFlags(mExtractor);
@@ -209,6 +406,9 @@
info->presentationTimeUs = 0;
info->flags = SAMPLE_FLAG_END_OF_STREAM;
info->size = 0;
+ LOG(DEBUG) << " getSampleInfoForTrack #" << trackIndex << ": End Of Stream";
+ } else {
+ LOG(ERROR) << " getSampleInfoForTrack #" << trackIndex << ": Error " << status;
}
return status;
@@ -216,18 +416,20 @@
media_status_t MediaSampleReaderNDK::readSampleDataForTrack(int trackIndex, uint8_t* buffer,
size_t bufferSize) {
- std::scoped_lock lock(mExtractorMutex);
+ std::unique_lock<std::mutex> lock(mExtractorMutex);
- if (trackIndex < 0 || trackIndex >= mTrackCount) {
- LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+ if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+ LOG(ERROR) << "Track not selected.";
return AMEDIA_ERROR_INVALID_PARAMETER;
} else if (buffer == nullptr) {
LOG(ERROR) << "buffer pointer is NULL";
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- media_status_t status = positionExtractorForTrack_l(trackIndex);
- if (status != AMEDIA_OK) return status;
+ media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
+ if (status != AMEDIA_OK) {
+ return status;
+ }
ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
if (bufferSize < sampleSize) {
@@ -241,9 +443,21 @@
return AMEDIA_ERROR_INVALID_PARAMETER;
}
+ advanceTrack_l(trackIndex);
+
return AMEDIA_OK;
}
+void MediaSampleReaderNDK::advanceTrack(int trackIndex) {
+ std::scoped_lock lock(mExtractorMutex);
+
+ if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+ advanceTrack_l(trackIndex);
+ } else {
+ LOG(ERROR) << "Trying to advance a track that is not selected (#" << trackIndex << ")";
+ }
+}
+
AMediaFormat* MediaSampleReaderNDK::getFileFormat() {
return AMediaExtractor_getFileFormat(mExtractor);
}
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
new file mode 100644
index 0000000..389b941
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -0,0 +1,314 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriter"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaMuxer.h>
+
+namespace android {
+
+class DefaultMuxer : public MediaSampleWriterMuxerInterface {
+public:
+ // MediaSampleWriterMuxerInterface
+ ssize_t addTrack(AMediaFormat* trackFormat) override {
+ // If the track format has rotation, need to call AMediaMuxer_setOrientationHint
+ // to set the rotation. Muxer doesn't take rotation specified on the track.
+ const char* mime;
+ if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) &&
+ strncmp(mime, "video/", 6) == 0) {
+ int32_t rotation;
+ if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+ (rotation != 0)) {
+ AMediaMuxer_setOrientationHint(mMuxer, rotation);
+ }
+ }
+
+ return AMediaMuxer_addTrack(mMuxer, trackFormat);
+ }
+ media_status_t start() override { return AMediaMuxer_start(mMuxer); }
+ media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+ const AMediaCodecBufferInfo* info) override {
+ return AMediaMuxer_writeSampleData(mMuxer, trackIndex, data, info);
+ }
+ media_status_t stop() override { return AMediaMuxer_stop(mMuxer); }
+ // ~MediaSampleWriterMuxerInterface
+
+ static std::shared_ptr<DefaultMuxer> create(int fd) {
+ AMediaMuxer* ndkMuxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+ if (ndkMuxer == nullptr) {
+ LOG(ERROR) << "Unable to create AMediaMuxer";
+ return nullptr;
+ }
+
+ return std::make_shared<DefaultMuxer>(ndkMuxer);
+ }
+
+ ~DefaultMuxer() {
+ if (mMuxer != nullptr) {
+ AMediaMuxer_delete(mMuxer);
+ }
+ }
+
+ DefaultMuxer(AMediaMuxer* muxer) : mMuxer(muxer){};
+ DefaultMuxer() = delete;
+
+private:
+ AMediaMuxer* mMuxer;
+};
+
+// static
+std::shared_ptr<MediaSampleWriter> MediaSampleWriter::Create() {
+ return std::shared_ptr<MediaSampleWriter>(new MediaSampleWriter());
+}
+
+MediaSampleWriter::~MediaSampleWriter() {
+ if (mState == STARTED) {
+ stop();
+ }
+}
+
+bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks) {
+ return init(DefaultMuxer::create(fd), callbacks);
+}
+
+bool MediaSampleWriter::init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer,
+ const std::weak_ptr<CallbackInterface>& callbacks) {
+ if (callbacks.lock() == nullptr) {
+ LOG(ERROR) << "Callback object cannot be null";
+ return false;
+ } else if (muxer == nullptr) {
+ LOG(ERROR) << "Muxer cannot be null";
+ return false;
+ }
+
+ std::scoped_lock lock(mMutex);
+ if (mState != UNINITIALIZED) {
+ LOG(ERROR) << "Sample writer is already initialized";
+ return false;
+ }
+
+ mState = INITIALIZED;
+ mMuxer = muxer;
+ mCallbacks = callbacks;
+ return true;
+}
+
+MediaSampleWriter::MediaSampleConsumerFunction MediaSampleWriter::addTrack(
+ const std::shared_ptr<AMediaFormat>& trackFormat) {
+ if (trackFormat == nullptr) {
+ LOG(ERROR) << "Track format must be non-null";
+ return nullptr;
+ }
+
+ std::scoped_lock lock(mMutex);
+ if (mState != INITIALIZED) {
+ LOG(ERROR) << "Muxer needs to be initialized when adding tracks.";
+ return nullptr;
+ }
+ ssize_t trackIndexOrError = mMuxer->addTrack(trackFormat.get());
+ if (trackIndexOrError < 0) {
+ LOG(ERROR) << "Failed to add media track to muxer: " << trackIndexOrError;
+ return nullptr;
+ }
+ const size_t trackIndex = static_cast<size_t>(trackIndexOrError);
+
+ int64_t durationUs;
+ if (!AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+ durationUs = 0;
+ }
+
+ mTracks.emplace(trackIndex, durationUs);
+ // The returned consumer captures shared_from_this() to keep this writer alive.
+
+ return [self = shared_from_this(), trackIndex](const std::shared_ptr<MediaSample>& sample) {
+ self->addSampleToTrack(trackIndex, sample);
+ };
+}
+
+void MediaSampleWriter::addSampleToTrack(size_t trackIndex,
+ const std::shared_ptr<MediaSample>& sample) {
+ if (sample == nullptr) return;
+
+ bool wasEmpty;
+ {
+ std::scoped_lock lock(mMutex);
+ wasEmpty = mSampleQueue.empty();
+ mSampleQueue.push(std::make_pair(trackIndex, sample));
+ }
+
+ if (wasEmpty) {
+ mSampleSignal.notify_one();
+ }
+}
+
+bool MediaSampleWriter::start() {
+ std::scoped_lock lock(mMutex);
+
+ if (mTracks.size() == 0) {
+ LOG(ERROR) << "No tracks to write.";
+ return false;
+ } else if (mState != INITIALIZED) {
+ LOG(ERROR) << "Sample writer is not initialized";
+ return false;
+ }
+
+ mState = STARTED;
+ std::thread([this] {
+ bool wasStopped = false;
+ media_status_t status = writeSamples(&wasStopped);
+ if (auto callbacks = mCallbacks.lock()) {
+ if (wasStopped && status == AMEDIA_OK) {
+ callbacks->onStopped(this);
+ } else {
+ callbacks->onFinished(this, status);
+ }
+ }
+ }).detach();
+ return true;
+}
+
+void MediaSampleWriter::stop() {
+ {
+ std::scoped_lock lock(mMutex);
+ if (mState != STARTED) {
+ LOG(ERROR) << "Sample writer is not started.";
+ return;
+ }
+ mState = STOPPED;
+ }
+
+ mSampleSignal.notify_all();
+}
+
+media_status_t MediaSampleWriter::writeSamples(bool* wasStopped) {
+ media_status_t muxerStatus = mMuxer->start();
+ if (muxerStatus != AMEDIA_OK) {
+ LOG(ERROR) << "Error starting muxer: " << muxerStatus;
+ return muxerStatus;
+ }
+
+ media_status_t writeStatus = runWriterLoop(wasStopped);
+ if (writeStatus != AMEDIA_OK) {
+ LOG(ERROR) << "Error writing samples: " << writeStatus;
+ }
+
+ muxerStatus = mMuxer->stop();
+ if (muxerStatus != AMEDIA_OK) {
+ LOG(ERROR) << "Error stopping muxer: " << muxerStatus;
+ }
+
+ return writeStatus != AMEDIA_OK ? writeStatus : muxerStatus;
+}
+
+media_status_t MediaSampleWriter::runWriterLoop(bool* wasStopped) NO_THREAD_SAFETY_ANALYSIS {
+ AMediaCodecBufferInfo bufferInfo;
+ int32_t lastProgressUpdate = 0;
+ int trackEosCount = 0;
+
+ // Set the "primary" track that will be used to determine progress to the track with longest
+ // duration.
+ int primaryTrackIndex = -1;
+ int64_t longestDurationUs = 0;
+ for (auto it = mTracks.begin(); it != mTracks.end(); ++it) {
+ if (it->second.mDurationUs > longestDurationUs) {
+ primaryTrackIndex = it->first;
+ longestDurationUs = it->second.mDurationUs;
+ }
+ }
+
+ while (true) {
+ if (trackEosCount >= mTracks.size()) {
+ break;
+ }
+
+ size_t trackIndex;
+ std::shared_ptr<MediaSample> sample;
+ {
+ std::unique_lock lock(mMutex);
+ while (mSampleQueue.empty() && mState == STARTED) {
+ mSampleSignal.wait(lock);
+ }
+
+ if (mState == STOPPED) {
+ *wasStopped = true;
+ return AMEDIA_OK;
+ }
+
+ auto& topEntry = mSampleQueue.top();
+ trackIndex = topEntry.first;
+ sample = topEntry.second;
+ mSampleQueue.pop();
+ }
+
+ TrackRecord& track = mTracks[trackIndex];
+
+ if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+ if (track.mReachedEos) {
+ continue;
+ }
+
+ // Track reached end of stream.
+ track.mReachedEos = true;
+ trackEosCount++;
+
+ // Preserve source track duration by setting the appropriate timestamp on the
+ // empty End-Of-Stream sample.
+ if (track.mDurationUs > 0 && track.mFirstSampleTimeSet) {
+ sample->info.presentationTimeUs = track.mDurationUs + track.mFirstSampleTimeUs;
+ }
+ }
+
+ track.mPrevSampleTimeUs = sample->info.presentationTimeUs;
+ if (!track.mFirstSampleTimeSet) {
+ // Record the first sample's timestamp in order to translate duration to EOS
+ // time for tracks that do not start at 0.
+ track.mFirstSampleTimeUs = sample->info.presentationTimeUs;
+ track.mFirstSampleTimeSet = true;
+ }
+
+ bufferInfo.offset = sample->dataOffset;
+ bufferInfo.size = sample->info.size;
+ bufferInfo.flags = sample->info.flags;
+ bufferInfo.presentationTimeUs = sample->info.presentationTimeUs;
+
+ media_status_t status = mMuxer->writeSampleData(trackIndex, sample->buffer, &bufferInfo);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "writeSampleData returned " << status;
+ return status;
+ }
+ sample.reset();
+
+ // TODO(lnilsson): Add option to toggle progress reporting on/off.
+ if (trackIndex == primaryTrackIndex) {
+ const int64_t elapsed = track.mPrevSampleTimeUs - track.mFirstSampleTimeUs;
+ int32_t progress = (elapsed * 100) / track.mDurationUs;
+ progress = std::clamp(progress, 0, 100);
+
+ if (progress > lastProgressUpdate) {
+ if (auto callbacks = mCallbacks.lock()) {
+ callbacks->onProgressUpdate(this, progress);
+ }
+ lastProgressUpdate = progress;
+ }
+ }
+ }
+
+ return AMEDIA_OK;
+}
+} // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
index 1673b5b..15f7427 100644
--- a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
@@ -19,6 +19,7 @@
#include <android-base/logging.h>
#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
namespace android {
@@ -44,9 +45,8 @@
mMediaSampleReader = mediaSampleReader;
mTrackIndex = trackIndex;
- mSourceFormat =
- std::shared_ptr<AMediaFormat>(mMediaSampleReader->getTrackFormat(mTrackIndex),
- std::bind(AMediaFormat_delete, std::placeholders::_1));
+ mSourceFormat = std::shared_ptr<AMediaFormat>(mMediaSampleReader->getTrackFormat(mTrackIndex),
+ &AMediaFormat_delete);
if (mSourceFormat == nullptr) {
LOG(ERROR) << "Unable to get format for track #" << mTrackIndex;
return AMEDIA_ERROR_MALFORMED;
@@ -69,36 +69,70 @@
LOG(ERROR) << "TrackTranscoder must be configured before started";
return false;
}
+ mState = STARTED;
- mTranscodingThread = std::thread([this] {
- media_status_t status = runTranscodeLoop();
+ std::thread([this] {
+ bool stopped = false;
+ media_status_t status = runTranscodeLoop(&stopped);
+
+ // Output an EOS sample if the transcoder was stopped.
+ if (stopped) {
+ auto sample = std::make_shared<MediaSample>();
+ sample->info.flags = SAMPLE_FLAG_END_OF_STREAM;
+ onOutputSampleAvailable(sample);
+ }
// Notify the client.
if (auto callbacks = mTranscoderCallback.lock()) {
- if (status != AMEDIA_OK) {
- callbacks->onTrackError(this, status);
- } else {
+ if (stopped) {
+ callbacks->onTrackStopped(this);
+ } else if (status == AMEDIA_OK) {
callbacks->onTrackFinished(this);
+ } else {
+ callbacks->onTrackError(this, status);
}
}
- });
+ }).detach();
- mState = STARTED;
return true;
}
-bool MediaTrackTranscoder::stop() {
+void MediaTrackTranscoder::stop(bool stopOnSyncSample) {
std::scoped_lock lock{mStateMutex};
- if (mState == STARTED) {
+ if (mState == STARTED || (mStopRequest == STOP_ON_SYNC && !stopOnSyncSample)) {
+ mStopRequest = stopOnSyncSample ? STOP_ON_SYNC : STOP_NOW;
abortTranscodeLoop();
- mTranscodingThread.join();
mState = STOPPED;
- return true;
+ } else {
+ LOG(WARNING) << "TrackTranscoder must be started before stopped";
}
-
- LOG(ERROR) << "TrackTranscoder must be started before stopped";
- return false;
}
-} // namespace android
\ No newline at end of file
+// Notifies the client that this track's output format is available. The callback is held as a
+// weak_ptr, so this is a no-op if the client has already been destroyed.
+void MediaTrackTranscoder::notifyTrackFormatAvailable() {
+ if (auto callbacks = mTranscoderCallback.lock()) {
+ callbacks->onTrackFormatAvailable(this);
+ }
+}
+
+// Delivers an output sample to the registered consumer, or queues it locally if no consumer
+// has been set yet (it will be drained in setSampleConsumer).
+void MediaTrackTranscoder::onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample) {
+ std::scoped_lock lock{mSampleMutex}; // Guards mSampleConsumer and mSampleQueue.
+ if (mSampleConsumer == nullptr) {
+ mSampleQueue.enqueue(sample);
+ } else {
+ mSampleConsumer(sample);
+ }
+}
+
+// Registers the consumer that will receive this track's output samples, then drains any
+// samples that were queued before a consumer was available.
+void MediaTrackTranscoder::setSampleConsumer(
+ const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer) {
+ std::scoped_lock lock{mSampleMutex};
+ mSampleConsumer = sampleConsumer;
+
+ std::shared_ptr<MediaSample> sample;
+ // NOTE(review): the negation implies dequeue() returns true on failure/abort -- confirm
+ // against MediaSampleQueue's contract.
+ while (!mSampleQueue.isEmpty() && !mSampleQueue.dequeue(&sample)) {
+ mSampleConsumer(sample);
+ }
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
new file mode 100644
index 0000000..3d4ff15
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -0,0 +1,451 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoder"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+#include <unistd.h>
+
+namespace android {
+
+// Returns a new format containing all entries of |base| overlaid with the supported entries
+// present in |overlay|. The caller owns the returned format and must release it with
+// AMediaFormat_delete. Returns nullptr if either input is null or copying |base| fails.
+static AMediaFormat* mergeMediaFormats(AMediaFormat* base, AMediaFormat* overlay) {
+ if (base == nullptr || overlay == nullptr) {
+ LOG(ERROR) << "Cannot merge null formats";
+ return nullptr;
+ }
+
+ AMediaFormat* format = AMediaFormat_new();
+ if (AMediaFormat_copy(format, base) != AMEDIA_OK) {
+ AMediaFormat_delete(format);
+ return nullptr;
+ }
+
+ // Note: AMediaFormat does not expose a function for appending values from another format or for
+ // iterating over all values and keys in a format. Instead we define a static list of known keys
+ // along with their value types and copy the ones that are present. A better solution would be
+ // to either implement required functions in NDK or to parse the overlay format's string
+ // representation and copy all existing keys.
+ static const AMediaFormatUtils::EntryCopier kSupportedFormatEntries[] = {
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_BIT_RATE, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_FORMAT, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_FRAME_RATE, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+ ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+ };
+ const size_t entryCount = sizeof(kSupportedFormatEntries) / sizeof(kSupportedFormatEntries[0]);
+
+ AMediaFormatUtils::CopyFormatEntries(overlay, format, kSupportedFormatEntries, entryCount);
+ return format;
+}
+
+// Bookkeeping callback invoked whenever a worker (track transcoder or sample writer) exits.
+// Records the worker's terminal state, and once all workers are done dispatches the final
+// client callback exactly once, on a detached thread.
+void MediaTranscoder::onThreadFinished(const void* thread, media_status_t threadStatus,
+ bool threadStopped) {
+ LOG(DEBUG) << "Thread " << thread << " finished with status " << threadStatus << " stopped "
+ << threadStopped;
+
+ // Stop all threads if one reports an error. Note: requestStop() acquires mThreadStateMutex
+ // itself, so it must be called before the lock below is taken.
+ if (threadStatus != AMEDIA_OK) {
+ requestStop(false /* stopOnSync */);
+ }
+
+ std::scoped_lock lock{mThreadStateMutex};
+
+ // Record the change. The first non-OK status wins and becomes the transcoder status.
+ mThreadStates[thread] = DONE;
+ if (threadStatus != AMEDIA_OK && mTranscoderStatus == AMEDIA_OK) {
+ mTranscoderStatus = threadStatus;
+ }
+
+ mTranscoderStopped |= threadStopped;
+
+ // Check if all threads are done. Note that if all transcoders have stopped but the sample
+ // writer has not yet started, it never will.
+ bool transcodersDone = true;
+ ThreadState sampleWriterState = PENDING;
+ for (const auto& it : mThreadStates) {
+ LOG(DEBUG) << " Thread " << it.first << " state" << it.second;
+ if (it.first == static_cast<const void*>(mSampleWriter.get())) {
+ sampleWriterState = it.second;
+ } else {
+ transcodersDone &= (it.second == DONE);
+ }
+ }
+ if (!transcodersDone || sampleWriterState == RUNNING) {
+ return;
+ }
+
+ // All done. Send callback asynchronously and wake up threads waiting in cancel/pause.
+ // The lambda captures self = shared_from_this() to keep this object alive until it runs.
+ mThreadsDone = true;
+ if (!mCallbackSent) {
+ std::thread asyncNotificationThread{[this, self = shared_from_this(),
+ status = mTranscoderStatus,
+ stopped = mTranscoderStopped] {
+ // If the transcoder was stopped that means a caller is waiting in stop or pause
+ // in which case we don't send a callback.
+ if (status != AMEDIA_OK) {
+ mCallbacks->onError(this, status);
+ } else if (!stopped) {
+ mCallbacks->onFinished(this);
+ }
+ mThreadsDoneSignal.notify_all();
+ }};
+ asyncNotificationThread.detach();
+ mCallbackSent = true;
+ }
+}
+
+// Called by a track transcoder once its output format is known. Adds the track to the sample
+// writer, wires the writer's consumer into the transcoder, and starts the writer when every
+// configured track has reported its format.
+void MediaTranscoder::onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) {
+ LOG(DEBUG) << "TrackTranscoder " << transcoder << " format available.";
+
+ std::scoped_lock lock{mTracksAddedMutex};
+ const void* sampleWriterPtr = static_cast<const void*>(mSampleWriter.get());
+
+ // Ignore duplicate format change.
+ if (mTracksAdded.count(transcoder) > 0) {
+ return;
+ }
+
+ // Add track to the writer.
+ auto consumer = mSampleWriter->addTrack(transcoder->getOutputFormat());
+ if (consumer == nullptr) {
+ LOG(ERROR) << "Unable to add track to sample writer.";
+ onThreadFinished(sampleWriterPtr, AMEDIA_ERROR_UNKNOWN, false /* stopped */);
+ return;
+ }
+
+ // const_cast is safe here: the callback interface passes const pointers, but the object is
+ // one of our own mTrackTranscoders.
+ MediaTrackTranscoder* mutableTranscoder = const_cast<MediaTrackTranscoder*>(transcoder);
+ mutableTranscoder->setSampleConsumer(consumer);
+
+ mTracksAdded.insert(transcoder);
+ bool errorStarting = false;
+ if (mTracksAdded.size() == mTrackTranscoders.size()) {
+ // Enable sequential access mode on the sample reader to achieve optimal read performance.
+ // This has to wait until all tracks have delivered their output formats and the sample
+ // writer is started. Otherwise the tracks will not get their output sample queues drained
+ // and the transcoder could hang due to one track running out of buffers and blocking the
+ // other tracks from reading source samples before they could output their formats.
+
+ // Note: this inner lock shadows the mTracksAddedMutex lock above; both are held here.
+ std::scoped_lock lock{mThreadStateMutex};
+ // Don't start the sample writer if a stop already has been requested.
+ if (!mSampleWriterStopped) {
+ if (!mCancelled) {
+ mSampleReader->setEnforceSequentialAccess(true);
+ }
+ LOG(DEBUG) << "Starting sample writer.";
+ errorStarting = !mSampleWriter->start();
+ if (!errorStarting) {
+ mThreadStates[sampleWriterPtr] = RUNNING;
+ }
+ }
+ }
+
+ if (errorStarting) {
+ LOG(ERROR) << "Unable to start sample writer.";
+ onThreadFinished(sampleWriterPtr, AMEDIA_ERROR_UNKNOWN, false /* stopped */);
+ }
+}
+
+// Track transcoder completed normally; record it as DONE.
+void MediaTranscoder::onTrackFinished(const MediaTrackTranscoder* transcoder) {
+ LOG(DEBUG) << "TrackTranscoder " << transcoder << " finished";
+ onThreadFinished(static_cast<const void*>(transcoder), AMEDIA_OK, false /* stopped */);
+}
+
+// Track transcoder exited due to a stop request; record it as DONE and stopped.
+void MediaTranscoder::onTrackStopped(const MediaTrackTranscoder* transcoder) {
+ LOG(DEBUG) << "TrackTranscoder " << transcoder << " stopped";
+ onThreadFinished(static_cast<const void*>(transcoder), AMEDIA_OK, true /* stopped */);
+}
+
+// Track transcoder failed; propagate its status into the bookkeeping.
+void MediaTranscoder::onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status) {
+ LOG(ERROR) << "TrackTranscoder " << transcoder << " returned error " << status;
+ onThreadFinished(static_cast<const void*>(transcoder), status, false /* stopped */);
+}
+
+// Sample writer finished (successfully or with an error).
+void MediaTranscoder::onFinished(const MediaSampleWriter* writer, media_status_t status) {
+ LOG(status == AMEDIA_OK ? DEBUG : ERROR) << "Sample writer finished with status " << status;
+ onThreadFinished(static_cast<const void*>(writer), status, false /* stopped */);
+}
+
+// Sample writer exited due to a stop request.
+void MediaTranscoder::onStopped(const MediaSampleWriter* writer) {
+ LOG(DEBUG) << "Sample writer " << writer << " stopped";
+ onThreadFinished(static_cast<const void*>(writer), AMEDIA_OK, true /* stopped */);
+}
+
+void MediaTranscoder::onProgressUpdate(const MediaSampleWriter* writer __unused, int32_t progress) {
+ // Forward progress updates from the sample writer to the client.
+ mCallbacks->onProgressUpdate(this, progress);
+}
+
+// Private constructor; instances must be created through MediaTranscoder::create so that
+// shared_from_this() is valid.
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid,
+ uid_t uid)
+ : mCallbacks(callbacks), mPid(pid), mUid(uid) {}
+
+// Factory method. |callbacks| must be non-null; |pausedState| is currently only logged
+// (restoring from a paused state is not yet implemented). Returns nullptr on invalid input.
+std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
+ const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid, uid_t uid,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+ if (pausedState != nullptr) {
+ LOG(INFO) << "Initializing from paused state.";
+ }
+ if (callbacks == nullptr) {
+ LOG(ERROR) << "Callbacks cannot be null";
+ return nullptr;
+ }
+
+ return std::shared_ptr<MediaTranscoder>(new MediaTranscoder(callbacks, pid, uid));
+}
+
+// Configures the transcoding source. |fd| must be a readable, seekable file descriptor; the
+// file size is probed with lseek and the fd is handed to the sample reader. Source track
+// formats are cached for later use by configureTrackFormat. Returns AMEDIA_OK on success.
+media_status_t MediaTranscoder::configureSource(int fd) {
+ if (fd < 0) {
+ LOG(ERROR) << "Invalid source fd: " << fd;
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ // lseek returns -1 on failure (e.g. a non-seekable fd). Previously the result was assigned
+ // directly to a size_t, silently turning an error into a huge bogus file size.
+ const off_t fileSize = lseek(fd, 0, SEEK_END);
+ if (fileSize < 0) {
+ LOG(ERROR) << "Unable to determine size of source fd: " << fd;
+ return AMEDIA_ERROR_IO;
+ }
+ lseek(fd, 0, SEEK_SET);
+
+ mSampleReader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
+
+ if (mSampleReader == nullptr) {
+ LOG(ERROR) << "Unable to parse source fd: " << fd;
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ // Cache one format per source track; a track without a format makes the source malformed.
+ const size_t trackCount = mSampleReader->getTrackCount();
+ for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+ AMediaFormat* trackFormat = mSampleReader->getTrackFormat(static_cast<int>(trackIndex));
+ if (trackFormat == nullptr) {
+ LOG(ERROR) << "Track #" << trackIndex << " has no format";
+ return AMEDIA_ERROR_MALFORMED;
+ }
+
+ mSourceTrackFormats.emplace_back(trackFormat, &AMediaFormat_delete);
+ }
+
+ return AMEDIA_OK;
+}
+
+// Returns deep copies of the cached source track formats so callers cannot mutate our state.
+std::vector<std::shared_ptr<AMediaFormat>> MediaTranscoder::getTrackFormats() const {
+ // Return a deep copy of the formats to avoid the caller modifying our internal formats.
+ std::vector<std::shared_ptr<AMediaFormat>> trackFormats;
+ for (const std::shared_ptr<AMediaFormat>& sourceFormat : mSourceTrackFormats) {
+ AMediaFormat* copy = AMediaFormat_new();
+ // NOTE(review): the AMediaFormat_copy result is ignored; a failed copy would yield an
+ // empty format in the returned vector.
+ AMediaFormat_copy(copy, sourceFormat.get());
+ trackFormats.emplace_back(copy, &AMediaFormat_delete);
+ }
+ return trackFormats;
+}
+
+// Configures one track for transcoding. A null |trackFormat| selects passthrough (remux);
+// otherwise the track must be video and is transcoded with the destination format merged on
+// top of the source format. On success the track is selected on the sample reader and a
+// configured track transcoder is appended to mTrackTranscoders.
+media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat) {
+ if (mSampleReader == nullptr) {
+ LOG(ERROR) << "Source must be configured before tracks";
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ } else if (trackIndex >= mSourceTrackFormats.size()) {
+ LOG(ERROR) << "Track index " << trackIndex
+ << " is out of bounds. Track count: " << mSourceTrackFormats.size();
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::shared_ptr<MediaTrackTranscoder> transcoder;
+ std::shared_ptr<AMediaFormat> format;
+
+ if (trackFormat == nullptr) {
+ // No destination format requested: pass samples through unmodified.
+ transcoder = std::make_shared<PassthroughTrackTranscoder>(shared_from_this());
+ } else {
+ const char* srcMime = nullptr;
+ if (!AMediaFormat_getString(mSourceTrackFormats[trackIndex].get(), AMEDIAFORMAT_KEY_MIME,
+ &srcMime)) {
+ LOG(ERROR) << "Source track #" << trackIndex << " has no mime type";
+ return AMEDIA_ERROR_MALFORMED;
+ }
+
+ // Only video tracks can be transcoded; all other track types must use passthrough.
+ if (strncmp(srcMime, "video/", 6) != 0) {
+ LOG(ERROR) << "Only video tracks are supported for transcoding. Unable to configure "
+ "track #"
+ << trackIndex << " with mime " << srcMime;
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ // If a destination mime is supplied it must also be a video type (no cross-media
+ // conversion, e.g. video -> audio).
+ const char* dstMime = nullptr;
+ if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
+ if (strncmp(dstMime, "video/", 6) != 0) {
+ LOG(ERROR) << "Unable to convert media types for track #" << trackIndex << ", from "
+ << srcMime << " to " << dstMime;
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+ }
+
+ transcoder = VideoTrackTranscoder::create(shared_from_this(), mPid, mUid);
+
+ // The encoder format is the source format with the caller's requested entries overlaid.
+ AMediaFormat* mergedFormat =
+ mergeMediaFormats(mSourceTrackFormats[trackIndex].get(), trackFormat);
+ if (mergedFormat == nullptr) {
+ LOG(ERROR) << "Unable to merge source and destination formats";
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ format = std::shared_ptr<AMediaFormat>(mergedFormat, &AMediaFormat_delete);
+ }
+
+ media_status_t status = mSampleReader->selectTrack(trackIndex);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to select track " << trackIndex;
+ return status;
+ }
+
+ status = transcoder->configure(mSampleReader, trackIndex, format);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Configure track transcoder for track #" << trackIndex << " returned error "
+ << status;
+ // Undo the track selection so the reader state matches the configured transcoders.
+ mSampleReader->unselectTrack(trackIndex);
+ return status;
+ }
+
+ std::scoped_lock lock{mThreadStateMutex};
+ mThreadStates[static_cast<const void*>(transcoder.get())] = PENDING;
+
+ mTrackTranscoders.emplace_back(std::move(transcoder));
+ return AMEDIA_OK;
+}
+
+// Configures the output destination. |fd| must be a writable file descriptor. May only be
+// called once; the sample writer is created here and registered as PENDING until started.
+media_status_t MediaTranscoder::configureDestination(int fd) {
+ if (fd < 0) {
+ LOG(ERROR) << "Invalid destination fd: " << fd;
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (mSampleWriter != nullptr) {
+ LOG(ERROR) << "Destination is already configured.";
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ }
+
+ mSampleWriter = MediaSampleWriter::Create();
+ const bool initOk = mSampleWriter->init(fd, shared_from_this());
+
+ if (!initOk) {
+ LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
+ mSampleWriter.reset();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ std::scoped_lock lock{mThreadStateMutex};
+ mThreadStates[static_cast<const void*>(mSampleWriter.get())] = PENDING;
+ return AMEDIA_OK;
+}
+
+// Starts all configured track transcoders. Requires at least one configured track and a
+// configured destination. If any track fails to start, the whole session is cancelled.
+// (The sample writer is started later, in onTrackFormatAvailable.)
+media_status_t MediaTranscoder::start() {
+ if (mTrackTranscoders.size() < 1) {
+ LOG(ERROR) << "Unable to start, no tracks are configured.";
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ } else if (mSampleWriter == nullptr) {
+ LOG(ERROR) << "Unable to start, destination is not configured";
+ return AMEDIA_ERROR_INVALID_OPERATION;
+ }
+
+ // Start transcoders
+ bool started = true;
+ {
+ std::scoped_lock lock{mThreadStateMutex};
+ for (auto& transcoder : mTrackTranscoders) {
+ if (!(started = transcoder->start())) {
+ break;
+ }
+ mThreadStates[static_cast<const void*>(transcoder.get())] = RUNNING;
+ }
+ }
+ if (!started) {
+ // cancel() takes mThreadStateMutex itself, so it is called outside the scope above.
+ LOG(ERROR) << "Unable to start track transcoder.";
+ cancel();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+ return AMEDIA_OK;
+}
+
+// Requests all workers to stop. With |stopOnSync| true the tracks stop at the next sync
+// sample (used by pause); with false everything halts immediately, including the sample
+// writer (used by cancel and on errors). Idempotent: a second call returns UNSUPPORTED.
+media_status_t MediaTranscoder::requestStop(bool stopOnSync) {
+ std::scoped_lock lock{mThreadStateMutex};
+ if (mCancelled) {
+ LOG(DEBUG) << "MediaTranscoder already cancelled";
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ if (!stopOnSync) {
+ mSampleWriterStopped = true;
+ mSampleWriter->stop();
+ }
+
+ // Release sequential-access ordering so no track blocks waiting for another to advance.
+ mSampleReader->setEnforceSequentialAccess(false);
+ for (auto& transcoder : mTrackTranscoders) {
+ transcoder->stop(stopOnSync);
+ }
+
+ mCancelled = true;
+ return AMEDIA_OK;
+}
+
+// Blocks until all worker threads have reported completion (mThreadsDone is set and
+// mThreadsDoneSignal is notified by onThreadFinished's async notification thread).
+// NO_THREAD_SAFETY_ANALYSIS: the analysis cannot follow the condition_variable's
+// unlock/relock of mThreadStateMutex inside wait().
+void MediaTranscoder::waitForThreads() NO_THREAD_SAFETY_ANALYSIS {
+ std::unique_lock lock{mThreadStateMutex};
+ while (!mThreadsDone) {
+ mThreadsDoneSignal.wait(lock);
+ }
+}
+
+// Pauses the session by stopping all tracks at the next sync sample, then blocks until all
+// worker threads have exited. On success *|pausedState| receives an (currently empty) opaque
+// state parcel intended for a later resume().
+media_status_t MediaTranscoder::pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState) {
+ // Validate the out-parameter before doing any work; it is dereferenced unconditionally
+ // below, and the other public entry points validate their parameters the same way.
+ if (pausedState == nullptr) {
+ LOG(ERROR) << "pausedState output parameter cannot be null";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ media_status_t status = requestStop(true /* stopOnSync */);
+ if (status != AMEDIA_OK) {
+ return status;
+ }
+
+ waitForThreads();
+
+ // TODO: write internal states to parcel.
+ *pausedState = std::shared_ptr<::ndk::ScopedAParcel>(new ::ndk::ScopedAParcel());
+ return AMEDIA_OK;
+}
+
+// Cancels the session: stops all workers immediately and blocks until they have exited.
+media_status_t MediaTranscoder::cancel() {
+ media_status_t status = requestStop(false /* stopOnSync */);
+ if (status != AMEDIA_OK) {
+ return status;
+ }
+
+ waitForThreads();
+
+ // TODO: Release transcoders?
+ return AMEDIA_OK;
+}
+
+// Resumes a previously paused session. Restoring state from the pause parcel is not yet
+// implemented; currently this simply restarts the configured tracks.
+media_status_t MediaTranscoder::resume() {
+ // TODO: restore internal states from parcel.
+ return start();
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
new file mode 100644
index 0000000..a7b79dc
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkCommon"
+
+#include <android-base/logging.h>
+#include <media/NdkCommon.h>
+
+#include <cstdio>
+#include <cstring>
+#include <utility>
+
+/* TODO(b/153592281)
+ * Note: constants used by the native media tests but not available in media ndk api
+ */
+const char* AMEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+const char* AMEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char* AMEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char* AMEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char* AMEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
+const char* AMEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+const char* AMEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
+
+/* TODO(b/153592281) */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES = "max-bframes";
+
+namespace AMediaFormatUtils {
+
+// Generates CopyFormatEntry<TypeName>(key, from, to): copies one typed entry from one format
+// to another. Returns true if the key existed in |from| and was copied, false otherwise.
+#define DEFINE_FORMAT_VALUE_COPY_FUNC(_type, _typeName) \
+ bool CopyFormatEntry##_typeName(const char* key, AMediaFormat* from, AMediaFormat* to) { \
+ _type value; \
+ if (AMediaFormat_get##_typeName(from, key, &value)) { \
+ AMediaFormat_set##_typeName(to, key, value); \
+ return true; \
+ } \
+ return false; \
+ }
+
+DEFINE_FORMAT_VALUE_COPY_FUNC(const char*, String);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int32_t, Int32);
+DEFINE_FORMAT_VALUE_COPY_FUNC(float, Float);
+
+// Copies the entries described by |entries| from |from| to |to|. For each entry the primary
+// typed copier is tried first; if it fails and an alternate copier (copy2) is defined, that
+// is tried as a fallback. Null formats or an empty entry list are logged and ignored.
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
+ size_t entryCount) {
+ if (from == nullptr || to == nullptr) {
+ LOG(ERROR) << "Cannot copy null formats";
+ return;
+ } else if (entries == nullptr || entryCount < 1) {
+ LOG(WARNING) << "No entries to copy";
+ return;
+ }
+
+ for (size_t i = 0; i < entryCount; ++i) {
+ if (!entries[i].copy(entries[i].key, from, to) && entries[i].copy2 != nullptr) {
+ entries[i].copy2(entries[i].key, from, to);
+ }
+ }
+}
+
+// Generates SetDefaultFormatValue<TypeName>(key, format, value): sets |value| for |key| only
+// when the key is absent. Returns true if the default was applied, false if the key already
+// had a value.
+#define DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(_type, _typeName) \
+ bool SetDefaultFormatValue##_typeName(const char* key, AMediaFormat* format, _type value) { \
+ _type tmp; \
+ if (!AMediaFormat_get##_typeName(format, key, &tmp)) { \
+ AMediaFormat_set##_typeName(format, key, value); \
+ return true; \
+ } \
+ return false; \
+ }
+
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(float, Float);
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(int32_t, Int32);
+
+} // namespace AMediaFormatUtils
\ No newline at end of file
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
index 4404bbb..c55e244 100644
--- a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
@@ -44,8 +44,17 @@
// Check if the free list contains a large enough buffer.
auto it = mFreeBufferMap.lower_bound(minimumBufferSize);
if (it != mFreeBufferMap.end()) {
+ uint8_t* buffer = it->second;
mFreeBufferMap.erase(it);
- return it->second;
+ return buffer;
+ }
+
+ // If the maximum buffer count is reached, remove an existing free buffer.
+ if (mAddressSizeMap.size() >= mMaxBufferCount) {
+ auto it = mFreeBufferMap.begin();
+ mAddressSizeMap.erase(it->second);
+ delete[] it->second;
+ mFreeBufferMap.erase(it);
}
// Allocate a new buffer.
@@ -55,14 +64,6 @@
return nullptr;
}
- // If the maximum buffer count is reached, remove an existing free buffer.
- if (mAddressSizeMap.size() >= mMaxBufferCount) {
- auto it = mFreeBufferMap.begin();
- mFreeBufferMap.erase(it);
- mAddressSizeMap.erase(it->second);
- delete[] it->second;
- }
-
// Add the buffer to the tracking set.
mAddressSizeMap.emplace(buffer, minimumBufferSize);
return buffer;
@@ -92,9 +93,13 @@
return AMEDIA_OK;
}
-media_status_t PassthroughTrackTranscoder::runTranscodeLoop() {
+media_status_t PassthroughTrackTranscoder::runTranscodeLoop(bool* stopped) {
MediaSampleInfo info;
std::shared_ptr<MediaSample> sample;
+ bool eosReached = false;
+
+ // Notify the track format as soon as we start. It's same as the source format.
+ notifyTrackFormatAvailable();
MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
[bufferPool = mBufferPool](MediaSample* sample) {
@@ -102,18 +107,18 @@
};
// Move samples until EOS is reached or transcoding is stopped.
- while (!mStopRequested && !mEosFromSource) {
+ while (mStopRequest != STOP_NOW && !eosReached) {
media_status_t status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &info);
if (status == AMEDIA_OK) {
uint8_t* buffer = mBufferPool->getBufferWithSize(info.size);
if (buffer == nullptr) {
- if (mStopRequested) {
+ if (mStopRequest == STOP_NOW) {
break;
}
LOG(ERROR) << "Unable to get buffer from pool";
- return AMEDIA_ERROR_IO; // TODO: Custom error codes?
+ return AMEDIA_ERROR_UNKNOWN;
}
sample = MediaSample::createWithReleaseCallback(
@@ -127,30 +132,33 @@
} else if (status == AMEDIA_ERROR_END_OF_STREAM) {
sample = std::make_shared<MediaSample>();
- mEosFromSource = true;
+ eosReached = true;
} else {
LOG(ERROR) << "Unable to get next sample info. Aborting transcode.";
return status;
}
sample->info = info;
- if (mOutputQueue.enqueue(sample)) {
- LOG(ERROR) << "Output queue aborted";
- return AMEDIA_ERROR_IO;
- }
+ onOutputSampleAvailable(sample);
- mMediaSampleReader->advanceTrack(mTrackIndex);
+ if (mStopRequest == STOP_ON_SYNC && info.flags & SAMPLE_FLAG_SYNC_SAMPLE) {
+ break;
+ }
}
- if (mStopRequested && !mEosFromSource) {
- return AMEDIA_ERROR_UNKNOWN; // TODO: Custom error codes?
+ if (mStopRequest != NONE && !eosReached) {
+ *stopped = true;
}
return AMEDIA_OK;
}
void PassthroughTrackTranscoder::abortTranscodeLoop() {
- mStopRequested = true;
- mBufferPool->abort();
+ if (mStopRequest == STOP_NOW) {
+ mBufferPool->abort();
+ }
}
+// A passthrough track performs no conversion, so the output format is the source format.
+std::shared_ptr<AMediaFormat> PassthroughTrackTranscoder::getOutputFormat() const {
+ return mSourceFormat;
+}
} // namespace android
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 311e9be..0695bdb 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -18,7 +18,12 @@
#define LOG_TAG "VideoTrackTranscoder"
#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <media/NdkCommon.h>
#include <media/VideoTrackTranscoder.h>
+#include <utils/AndroidThreads.h>
+
+using namespace AMediaFormatUtils;
namespace android {
@@ -30,10 +35,30 @@
static_assert(SAMPLE_FLAG_PARTIAL_FRAME == AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME,
"Sample flag mismatch: PARTIAL_FRAME");
+// Color format defined by surface. (See MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface.)
+static constexpr int32_t kColorFormatSurface = 0x7f000789;
+// Default key frame interval in seconds.
+static constexpr float kDefaultKeyFrameIntervalSeconds = 1.0f;
+// Default codec operating rate.
+static int32_t kDefaultCodecOperatingRate720P = base::GetIntProperty(
+ "debug.media.transcoding.codec_max_operating_rate_720P", /*default*/ 480);
+static int32_t kDefaultCodecOperatingRate1080P = base::GetIntProperty(
+ "debug.media.transcoding.codec_max_operating_rate_1080P", /*default*/ 240);
+// Default codec priority.
+static constexpr int32_t kDefaultCodecPriority = 1;
+// Default bitrate, in case source estimation fails.
+static constexpr int32_t kDefaultBitrateMbps = 10 * 1000 * 1000;
+// Default frame rate.
+static constexpr int32_t kDefaultFrameRate = 30;
+
template <typename T>
void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
{
- std::unique_lock<std::mutex> lock(mMutex);
+ std::scoped_lock lock(mMutex);
+ if (mAborted) {
+ return;
+ }
+
if (front) {
mQueue.push_front(value);
} else {
@@ -45,7 +70,7 @@
template <typename T>
T VideoTrackTranscoder::BlockingQueue<T>::pop() {
- std::unique_lock<std::mutex> lock(mMutex);
+ std::unique_lock lock(mMutex);
while (mQueue.empty()) {
mCondition.wait(lock);
}
@@ -54,49 +79,102 @@
return value;
}
+// Aborts the queue: marks it aborted (push becomes a no-op) and discards all pending items.
+// Note: Do not call if another thread might be waiting in pop -- pop blocks while the queue
+// is empty and would never be woken after the queue is cleared here.
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::abort() {
+ std::scoped_lock lock(mMutex);
+ mAborted = true;
+ mQueue.clear();
+}
+
+// The CodecWrapper class is used to let AMediaCodec instances outlive the transcoder object itself
+// by giving the codec a weak pointer to the transcoder. Codecs wrapped in this object are kept
+// alive by the transcoder and the codec's outstanding buffers. Once the transcoder stops and all
+// output buffers have been released by downstream components the codec will also be released.
+class VideoTrackTranscoder::CodecWrapper {
+public:
+ // Takes ownership of |codec|; it is stopped (if started) and deleted in the destructor.
+ CodecWrapper(AMediaCodec* codec, const std::weak_ptr<VideoTrackTranscoder>& transcoder)
+ : mCodec(codec), mTranscoder(transcoder), mCodecStarted(false) {}
+ ~CodecWrapper() {
+ if (mCodecStarted) {
+ AMediaCodec_stop(mCodec);
+ }
+ AMediaCodec_delete(mCodec);
+ }
+
+ AMediaCodec* getCodec() { return mCodec; }
+ // Returns the owning transcoder, or nullptr if it has already been destroyed.
+ std::shared_ptr<VideoTrackTranscoder> getTranscoder() const { return mTranscoder.lock(); };
+ // Must be called once the codec has been started so the destructor knows to stop it.
+ void setStarted() { mCodecStarted = true; }
+
+private:
+ AMediaCodec* mCodec; // Owned.
+ std::weak_ptr<VideoTrackTranscoder> mTranscoder; // Weak to let the codec outlive the transcoder.
+ bool mCodecStarted;
+};
+
// Dispatch responses to codec callbacks onto the message queue.
struct AsyncCodecCallbackDispatch {
static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
- VideoTrackTranscoder* transcoder = static_cast<VideoTrackTranscoder*>(userdata);
- if (codec == transcoder->mDecoder) {
- transcoder->mCodecMessageQueue.push(
- [transcoder, index] { transcoder->enqueueInputSample(index); });
+ VideoTrackTranscoder::CodecWrapper* wrapper =
+ static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+ if (auto transcoder = wrapper->getTranscoder()) {
+ if (codec == transcoder->mDecoder) {
+ transcoder->mCodecMessageQueue.push(
+ [transcoder, index] { transcoder->enqueueInputSample(index); });
+ }
}
}
static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
AMediaCodecBufferInfo* bufferInfoPtr) {
- VideoTrackTranscoder* transcoder = static_cast<VideoTrackTranscoder*>(userdata);
+ VideoTrackTranscoder::CodecWrapper* wrapper =
+ static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
AMediaCodecBufferInfo bufferInfo = *bufferInfoPtr;
- transcoder->mCodecMessageQueue.push([transcoder, index, codec, bufferInfo] {
- if (codec == transcoder->mDecoder) {
- transcoder->transferBuffer(index, bufferInfo);
- } else if (codec == transcoder->mEncoder.get()) {
- transcoder->dequeueOutputSample(index, bufferInfo);
- }
- });
+ if (auto transcoder = wrapper->getTranscoder()) {
+ transcoder->mCodecMessageQueue.push([transcoder, index, codec, bufferInfo] {
+ if (codec == transcoder->mDecoder) {
+ transcoder->transferBuffer(index, bufferInfo);
+ } else if (codec == transcoder->mEncoder->getCodec()) {
+ transcoder->dequeueOutputSample(index, bufferInfo);
+ }
+ });
+ }
}
static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
- VideoTrackTranscoder* transcoder = static_cast<VideoTrackTranscoder*>(userdata);
- const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
- LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+ VideoTrackTranscoder::CodecWrapper* wrapper =
+ static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+ if (auto transcoder = wrapper->getTranscoder()) {
+ const char* kCodecName = (codec == transcoder->mDecoder ? "Decoder" : "Encoder");
+ LOG(DEBUG) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+ if (codec == transcoder->mEncoder->getCodec()) {
+ transcoder->mCodecMessageQueue.push(
+ [transcoder, format] { transcoder->updateTrackFormat(format); });
+ }
+ }
}
static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t error,
int32_t actionCode, const char* detail) {
LOG(ERROR) << "Error from codec " << codec << ", userdata " << userdata << ", error "
<< error << ", action " << actionCode << ", detail " << detail;
- VideoTrackTranscoder* transcoder = static_cast<VideoTrackTranscoder*>(userdata);
- transcoder->mCodecMessageQueue.push(
- [transcoder, error] {
- transcoder->mStatus = error;
- transcoder->mStopRequested = true;
- },
- true);
+ VideoTrackTranscoder::CodecWrapper* wrapper =
+ static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+ if (auto transcoder = wrapper->getTranscoder()) {
+ transcoder->mCodecMessageQueue.push(
+ [transcoder, error] { transcoder->mStatus = error; }, true);
+ }
}
};
+// static
+std::shared_ptr<VideoTrackTranscoder> VideoTrackTranscoder::create(
+ const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback, pid_t pid,
+ uid_t uid) {
+ return std::shared_ptr<VideoTrackTranscoder>(
+ new VideoTrackTranscoder(transcoderCallback, pid, uid));
+}
+
VideoTrackTranscoder::~VideoTrackTranscoder() {
if (mDecoder != nullptr) {
AMediaCodec_delete(mDecoder);
@@ -107,17 +185,71 @@
}
}
+// Look up the default operating rate based on resolution.
+static int32_t getDefaultOperatingRate(AMediaFormat* encoderFormat) {
+ int32_t width, height;
+ if (AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_WIDTH, &width) && (width > 0) &&
+ AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_HEIGHT, &height) && (height > 0)) {
+ if ((width == 1280 && height == 720) || (width == 720 && height == 1280)) {
+ return kDefaultCodecOperatingRate720P;
+ } else if ((width == 1920 && height == 1080) || (width == 1080 && height == 1920)) {
+ return kDefaultCodecOperatingRate1080P;
+ } else {
+ LOG(WARNING) << "Could not find default operating rate: " << width << " " << height;
+ // Don't set operating rate if the correct dimensions are not found.
+ }
+ } else {
+ LOG(ERROR) << "Failed to get default operating rate due to missing resolution";
+ }
+ return -1;
+}
+
// Creates and configures the codecs.
media_status_t VideoTrackTranscoder::configureDestinationFormat(
const std::shared_ptr<AMediaFormat>& destinationFormat) {
media_status_t status = AMEDIA_OK;
if (destinationFormat == nullptr) {
- LOG(ERROR) << "Destination format is null";
+ LOG(ERROR) << "Destination format is null, use passthrough transcoder";
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- mDestinationFormat = destinationFormat;
+ AMediaFormat* encoderFormat = AMediaFormat_new();
+ if (!encoderFormat || AMediaFormat_copy(encoderFormat, destinationFormat.get()) != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to copy destination format";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ int32_t bitrate;
+ if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrate)) {
+ status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &bitrate);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
+ bitrate = kDefaultBitrateMbps;
+ }
+
+ LOG(INFO) << "Configuring bitrate " << bitrate;
+ AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+ }
+
+ SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
+ kDefaultKeyFrameIntervalSeconds);
+
+ int32_t operatingRate = getDefaultOperatingRate(encoderFormat);
+
+ if (operatingRate != -1) {
+ SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_OPERATING_RATE, encoderFormat, operatingRate);
+ }
+
+ SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_PRIORITY, encoderFormat, kDefaultCodecPriority);
+ SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, encoderFormat, kDefaultFrameRate);
+ AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
+
+ // Always encode without rotation. The rotation degree will be transferred directly to
+ // MediaSampleWriter track format, and MediaSampleWriter will call AMediaMuxer_setOrientationHint.
+ AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_ROTATION, 0);
+
+ mDestinationFormat = std::shared_ptr<AMediaFormat>(encoderFormat, &AMediaFormat_delete);
// Create and configure the encoder.
const char* destinationMime = nullptr;
@@ -128,22 +260,23 @@
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- AMediaCodec* encoder = AMediaCodec_createEncoderByType(destinationMime);
+ AMediaCodec* encoder = AMediaCodec_createEncoderByTypeForClient(destinationMime, mPid, mUid);
if (encoder == nullptr) {
LOG(ERROR) << "Unable to create encoder for type " << destinationMime;
return AMEDIA_ERROR_UNSUPPORTED;
}
- mEncoder = std::shared_ptr<AMediaCodec>(encoder,
- std::bind(AMediaCodec_delete, std::placeholders::_1));
+ mEncoder = std::make_shared<CodecWrapper>(encoder, shared_from_this());
- status = AMediaCodec_configure(mEncoder.get(), mDestinationFormat.get(), NULL /* surface */,
- NULL /* crypto */, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
+ LOG(DEBUG) << "Configuring encoder with: " << AMediaFormat_toString(mDestinationFormat.get());
+ status = AMediaCodec_configure(mEncoder->getCodec(), mDestinationFormat.get(),
+ NULL /* surface */, NULL /* crypto */,
+ AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
if (status != AMEDIA_OK) {
LOG(ERROR) << "Unable to configure video encoder: " << status;
return status;
}
- status = AMediaCodec_createInputSurface(mEncoder.get(), &mSurface);
+ status = AMediaCodec_createInputSurface(mEncoder->getCodec(), &mSurface);
if (status != AMEDIA_OK) {
LOG(ERROR) << "Unable to create an encoder input surface: %d" << status;
return status;
@@ -157,13 +290,33 @@
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- mDecoder = AMediaCodec_createDecoderByType(sourceMime);
+ mDecoder = AMediaCodec_createDecoderByTypeForClient(sourceMime, mPid, mUid);
if (mDecoder == nullptr) {
LOG(ERROR) << "Unable to create decoder for type " << sourceMime;
return AMEDIA_ERROR_UNSUPPORTED;
}
- status = AMediaCodec_configure(mDecoder, mSourceFormat.get(), mSurface, NULL /* crypto */,
+ auto decoderFormat = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+ if (!decoderFormat ||
+ AMediaFormat_copy(decoderFormat.get(), mSourceFormat.get()) != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to copy source format";
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ // Prevent decoder from overwriting frames that the encoder has not yet consumed.
+ AMediaFormat_setInt32(decoderFormat.get(), TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP, 0);
+
+ // Copy over configurations that apply to both encoder and decoder.
+ static const EntryCopier kEncoderEntriesToCopy[] = {
+ ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+ ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+ };
+ const size_t entryCount = sizeof(kEncoderEntriesToCopy) / sizeof(kEncoderEntriesToCopy[0]);
+ CopyFormatEntries(mDestinationFormat.get(), decoderFormat.get(), kEncoderEntriesToCopy,
+ entryCount);
+
+ LOG(DEBUG) << "Configuring decoder with: " << AMediaFormat_toString(decoderFormat.get());
+ status = AMediaCodec_configure(mDecoder, decoderFormat.get(), mSurface, NULL /* crypto */,
0 /* flags */);
if (status != AMEDIA_OK) {
LOG(ERROR) << "Unable to configure video decoder: " << status;
@@ -177,13 +330,17 @@
.onAsyncFormatChanged = AsyncCodecCallbackDispatch::onAsyncFormatChanged,
.onAsyncError = AsyncCodecCallbackDispatch::onAsyncError};
- status = AMediaCodec_setAsyncNotifyCallback(mDecoder, asyncCodecCallbacks, this);
+ // Note: The decoder does not need its own wrapper because its lifetime is tied to the
+ // transcoder. But the same callbacks are reused for decoder and encoder so we pass the encoder
+ // wrapper as userdata here but never read the codec from it in the callback.
+ status = AMediaCodec_setAsyncNotifyCallback(mDecoder, asyncCodecCallbacks, mEncoder.get());
if (status != AMEDIA_OK) {
LOG(ERROR) << "Unable to set decoder to async mode: " << status;
return status;
}
- status = AMediaCodec_setAsyncNotifyCallback(mEncoder.get(), asyncCodecCallbacks, this);
+ status = AMediaCodec_setAsyncNotifyCallback(mEncoder->getCodec(), asyncCodecCallbacks,
+ mEncoder.get());
if (status != AMEDIA_OK) {
LOG(ERROR) << "Unable to set encoder to async mode: " << status;
return status;
@@ -227,8 +384,6 @@
mStatus = status;
return;
}
-
- mMediaSampleReader->advanceTrack(mTrackIndex);
} else {
LOG(DEBUG) << "EOS from source.";
mEosFromSource = true;
@@ -251,7 +406,7 @@
if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
LOG(DEBUG) << "EOS from decoder.";
- media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder.get());
+ media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder->getCodec());
if (status != AMEDIA_OK) {
LOG(ERROR) << "SignalEOS on encoder returned error: " << status;
mStatus = status;
@@ -263,12 +418,14 @@
AMediaCodecBufferInfo bufferInfo) {
if (bufferIndex >= 0) {
size_t sampleSize = 0;
- uint8_t* buffer = AMediaCodec_getOutputBuffer(mEncoder.get(), bufferIndex, &sampleSize);
+ uint8_t* buffer =
+ AMediaCodec_getOutputBuffer(mEncoder->getCodec(), bufferIndex, &sampleSize);
- MediaSample::OnSampleReleasedCallback bufferReleaseCallback = [encoder = mEncoder](
- MediaSample* sample) {
- AMediaCodec_releaseOutputBuffer(encoder.get(), sample->bufferId, false /* render */);
- };
+ MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+ [encoder = mEncoder](MediaSample* sample) {
+ AMediaCodec_releaseOutputBuffer(encoder->getCodec(), sample->bufferId,
+ false /* render */);
+ };
std::shared_ptr<MediaSample> sample = MediaSample::createWithReleaseCallback(
buffer, bufferInfo.offset, bufferIndex, bufferReleaseCallback);
@@ -276,14 +433,11 @@
sample->info.flags = bufferInfo.flags;
sample->info.presentationTimeUs = bufferInfo.presentationTimeUs;
- const bool aborted = mOutputQueue.enqueue(sample);
- if (aborted) {
- LOG(ERROR) << "Output sample queue was aborted. Stopping transcode.";
- mStatus = AMEDIA_ERROR_IO; // TODO: Define custom error codes?
- return;
- }
+ onOutputSampleAvailable(sample);
+
+ mLastSampleWasSync = sample->info.flags & SAMPLE_FLAG_SYNC_SAMPLE;
} else if (bufferIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
- AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder.get());
+ AMediaFormat* newFormat = AMediaCodec_getOutputFormat(mEncoder->getCodec());
LOG(DEBUG) << "Encoder output format changed: " << AMediaFormat_toString(newFormat);
}
@@ -293,41 +447,127 @@
}
}
-media_status_t VideoTrackTranscoder::runTranscodeLoop() {
- media_status_t status = AMEDIA_OK;
-
- status = AMediaCodec_start(mDecoder);
- if (status != AMEDIA_OK) {
- LOG(ERROR) << "Unable to start video decoder: " << status;
- return status;
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat) {
+ if (mActualOutputFormat != nullptr) {
+ LOG(WARNING) << "Ignoring duplicate format change.";
+ return;
}
- status = AMediaCodec_start(mEncoder.get());
- if (status != AMEDIA_OK) {
- LOG(ERROR) << "Unable to start video encoder: " << status;
- AMediaCodec_stop(mDecoder);
- return status;
+ AMediaFormat* formatCopy = AMediaFormat_new();
+ if (!formatCopy || AMediaFormat_copy(formatCopy, outputFormat) != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to copy outputFormat";
+ AMediaFormat_delete(formatCopy);
+ mStatus = AMEDIA_ERROR_INVALID_PARAMETER;
+ return;
}
+ // Generate the actual track format for muxer based on the encoder output format,
+ // since much vital information comes in the encoder format (e.g. CSD).
+ // Transfer necessary fields from the user-configured track format (derived from
+ // source track format and user transcoding request) where needed.
+
+ // Transfer SAR settings:
+ // If mDestinationFormat has SAR set, it means the original source has SAR specified
+ // at container level. This is supposed to override any SAR settings in the bitstream,
+ // thus should always be transferred to the container of the transcoded file.
+ int32_t sarWidth, sarHeight;
+ if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_WIDTH, &sarWidth) &&
+ (sarWidth > 0) &&
+ AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_HEIGHT, &sarHeight) &&
+ (sarHeight > 0)) {
+ AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_WIDTH, sarWidth);
+ AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_HEIGHT, sarHeight);
+ }
+ // Transfer DAR settings.
+ int32_t displayWidth, displayHeight;
+ if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_WIDTH, &displayWidth) &&
+ (displayWidth > 0) &&
+ AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+ &displayHeight) &&
+ (displayHeight > 0)) {
+ AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_WIDTH, displayWidth);
+ AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, displayHeight);
+ }
+
+ // Transfer rotation settings.
+ // Note that the muxer itself doesn't take rotation from the track format. It requires
+ // AMediaMuxer_setOrientationHint to set the rotation. Here we pass the rotation to
+ // MediaSampleWriter using the track format. MediaSampleWriter will then call
+ // AMediaMuxer_setOrientationHint as needed.
+ int32_t rotation;
+ if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+ (rotation != 0)) {
+ AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_ROTATION, rotation);
+ }
+
+ // Transfer track duration.
+ // Preserve the source track duration by sending it to MediaSampleWriter.
+ int64_t durationUs;
+ if (AMediaFormat_getInt64(mSourceFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs) &&
+ durationUs > 0) {
+ AMediaFormat_setInt64(formatCopy, AMEDIAFORMAT_KEY_DURATION, durationUs);
+ }
+
+ // TODO: transfer other fields as required.
+
+ mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+
+ notifyTrackFormatAvailable();
+}
+
+media_status_t VideoTrackTranscoder::runTranscodeLoop(bool* stopped) {
+ androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_VIDEO);
+
+ // Push start decoder and encoder as two messages, so that these are subject to the
+ // stop request as well. If the session is cancelled (or paused) immediately after start,
+ // we don't need to waste time starting and then stopping the codecs.
+ mCodecMessageQueue.push([this] {
+ media_status_t status = AMediaCodec_start(mDecoder);
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to start video decoder: " << status;
+ mStatus = status;
+ }
+ });
+
+ mCodecMessageQueue.push([this] {
+ media_status_t status = AMediaCodec_start(mEncoder->getCodec());
+ if (status != AMEDIA_OK) {
+ LOG(ERROR) << "Unable to start video encoder: " << status;
+ mStatus = status;
+ }
+ mEncoder->setStarted();
+ });
+
// Process codec events until EOS is reached, transcoding is stopped or an error occurs.
- while (!mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+ while (mStopRequest != STOP_NOW && !mEosFromEncoder && mStatus == AMEDIA_OK) {
std::function<void()> message = mCodecMessageQueue.pop();
message();
+
+ if (mStopRequest == STOP_ON_SYNC && mLastSampleWasSync) {
+ break;
+ }
}
- // Return error if transcoding was stopped before it finished.
- if (mStopRequested && !mEosFromEncoder && mStatus == AMEDIA_OK) {
- mStatus = AMEDIA_ERROR_UNKNOWN; // TODO: Define custom error codes?
- }
-
+ mCodecMessageQueue.abort();
AMediaCodec_stop(mDecoder);
- AMediaCodec_stop(mEncoder.get());
+
+ // Signal if transcoding was stopped before it finished.
+ if (mStopRequest != NONE && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+ *stopped = true;
+ }
+
return mStatus;
}
void VideoTrackTranscoder::abortTranscodeLoop() {
- // Push abort message to the front of the codec event queue.
- mCodecMessageQueue.push([this] { mStopRequested = true; }, true /* front */);
+ if (mStopRequest == STOP_NOW) {
+ // Wake up transcoder thread.
+ mCodecMessageQueue.push([] {}, true /* front */);
+ }
+}
+
+std::shared_ptr<AMediaFormat> VideoTrackTranscoder::getOutputFormat() const {
+ return mActualOutputFormat;
}
} // namespace android
diff --git a/media/libmediatranscoding/transcoder/benchmark/Android.bp b/media/libmediatranscoding/transcoder/benchmark/Android.bp
new file mode 100644
index 0000000..6c87233
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/Android.bp
@@ -0,0 +1,25 @@
+cc_defaults {
+ name: "benchmarkdefaults",
+ shared_libs: ["libmediandk", "libbase", "libbinder_ndk", "libutils", "libnativewindow"],
+ static_libs: ["libmediatranscoder", "libgoogle-benchmark"],
+ test_config_template: "AndroidTestTemplate.xml",
+ test_suites: ["device-tests", "TranscoderBenchmarks"],
+}
+
+cc_test {
+ name: "MediaTranscoderBenchmark",
+ srcs: ["MediaTranscoderBenchmark.cpp"],
+ defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+ name: "MediaSampleReaderBenchmark",
+ srcs: ["MediaSampleReaderBenchmark.cpp"],
+ defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+ name: "MediaTrackTranscoderBenchmark",
+ srcs: ["MediaTrackTranscoderBenchmark.cpp"],
+ defaults: ["benchmarkdefaults"],
+}
diff --git a/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
new file mode 100644
index 0000000..64085d8
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Unit test configuration for {MODULE}">
+ <option name="test-suite-tag" value="TranscoderBenchmarks" />
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="false" />
+ <option name="push-file" key="{MODULE}" value="/data/local/tmp/{MODULE}" />
+ <option name="push-file"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libmediatranscoding/transcoder/benchmark/TranscodingBenchmark-1.1.zip?unzip=true"
+ value="/data/local/tmp/TranscodingBenchmark/" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GoogleBenchmarkTest" >
+ <option name="native-benchmark-device-path" value="/data/local/tmp" />
+ <option name="benchmark-module-name" value="{MODULE}" />
+ </test>
+</configuration>
+
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
new file mode 100644
index 0000000..f0b9304
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * MediaSampleReader benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ * ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ * $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ * $ adb shell /data/nativetest64/MediaSampleReaderBenchmark/MediaSampleReaderBenchmark
+ */
+
+#define LOG_TAG "MediaSampleReaderBenchmark"
+
+#include <android-base/logging.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <unistd.h>
+
+#include <thread>
+
+using namespace android;
+
+static void ReadMediaSamples(benchmark::State& state, const std::string& srcFileName,
+ bool readAudio, bool sequentialAccess = false) {
+ // Asset directory.
+ static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+ int srcFd = 0;
+ std::string srcPath = kAssetDirectory + srcFileName;
+
+ if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+ state.SkipWithError("Unable to open source file");
+ return;
+ }
+
+ const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+ lseek(srcFd, 0, SEEK_SET);
+
+ for (auto _ : state) {
+ auto sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0, fileSize);
+ if (sampleReader->setEnforceSequentialAccess(sequentialAccess) != AMEDIA_OK) {
+ state.SkipWithError("setEnforceSequentialAccess failed");
+ return;
+ }
+
+ // Select tracks.
+ std::vector<int> trackIndices;
+ for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+ const char* mime = nullptr;
+
+ AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+ if (strncmp(mime, "video/", 6) == 0) {
+ int32_t frameCount;
+ if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+ state.counters["VideoFrameRate"] =
+ benchmark::Counter(frameCount, benchmark::Counter::kIsRate);
+ }
+ } else if (!readAudio && strncmp(mime, "audio/", 6) == 0) {
+ continue;
+ }
+
+ trackIndices.push_back(trackIndex);
+ sampleReader->selectTrack(trackIndex);
+ }
+
+ // Start threads.
+ std::vector<std::thread> trackThreads;
+ for (auto trackIndex : trackIndices) {
+ trackThreads.emplace_back([trackIndex, sampleReader, &state] {
+ LOG(INFO) << "Track " << trackIndex << " started";
+ MediaSampleInfo info;
+
+ size_t bufferSize = 0;
+ std::unique_ptr<uint8_t[]> buffer;
+
+ while (true) {
+ media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
+ if (status == AMEDIA_ERROR_END_OF_STREAM) {
+ break;
+ }
+
+ if (info.size > bufferSize) {
+ bufferSize = info.size;
+ buffer.reset(new uint8_t[bufferSize]);
+ }
+
+ status = sampleReader->readSampleDataForTrack(trackIndex, buffer.get(),
+ bufferSize);
+ if (status != AMEDIA_OK) {
+ state.SkipWithError("Error reading sample data");
+ break;
+ }
+ }
+
+ LOG(INFO) << "Track " << trackIndex << " finished";
+ });
+ }
+
+ // Join threads.
+ for (auto& thread : trackThreads) {
+ thread.join();
+ }
+ }
+
+ close(srcFd);
+}
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+ BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+static void BM_MediaSampleReader_AudioVideo_Parallel(benchmark::State& state) {
+ ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+ true /* readAudio */);
+}
+
+static void BM_MediaSampleReader_AudioVideo_Sequential(benchmark::State& state) {
+ ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+ true /* readAudio */, true /* sequentialAccess */);
+}
+
+static void BM_MediaSampleReader_Video(benchmark::State& state) {
+ ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+ false /* readAudio */);
+}
+
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Parallel);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Sequential);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_Video);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
new file mode 100644
index 0000000..d6ed2c6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
@@ -0,0 +1,456 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media track transcoder benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ * ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ * $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ * $ adb shell /data/nativetest64/MediaTrackTranscoderBenchmark/MediaTrackTranscoderBenchmark
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderBenchmark"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReader.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+using namespace android;
+
+typedef enum {
+ kVideo,
+ kAudio,
+} MediaType;
+
+class TrackTranscoderCallbacks : public MediaTrackTranscoderCallback {
+public:
+ virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) override {}
+
+ virtual void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) override {
+ std::unique_lock lock(mMutex);
+ mFinished = true;
+ mCondition.notify_all();
+ }
+
+ virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
+ std::unique_lock lock(mMutex);
+ mFinished = true;
+ mCondition.notify_all();
+ }
+
+ virtual void onTrackError(const MediaTrackTranscoder* transcoder __unused,
+ media_status_t status) override {
+ std::unique_lock lock(mMutex);
+ mFinished = true;
+ mStatus = status;
+ mCondition.notify_all();
+ }
+
+ void waitForTranscodingFinished() {
+ std::unique_lock lock(mMutex);
+ while (!mFinished) {
+ mCondition.wait(lock);
+ }
+ }
+
+ media_status_t mStatus = AMEDIA_OK;
+
+private:
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mFinished = false;
+};
+
+/**
+ * MockSampleReader holds a ringbuffer of the first samples in the provided source track. Samples
+ * are returned to the caller from the ringbuffer in a round-robin fashion with increasing
+ * timestamps. The number of samples returned before EOS matches the number of frames in the source
+ * track.
+ */
+class MockSampleReader : public MediaSampleReader {
+public:
+ static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size) {
+ AMediaExtractor* extractor = AMediaExtractor_new();
+ media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+ if (status != AMEDIA_OK) return nullptr;
+
+ auto sampleReader = std::shared_ptr<MockSampleReader>(new MockSampleReader(extractor));
+ return sampleReader;
+ }
+
+ AMediaFormat* getFileFormat() override { return AMediaExtractor_getFileFormat(mExtractor); }
+
+ size_t getTrackCount() const override { return AMediaExtractor_getTrackCount(mExtractor); }
+
+ AMediaFormat* getTrackFormat(int trackIndex) override {
+ return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+ }
+
+ media_status_t selectTrack(int trackIndex) override {
+ if (mSelectedTrack >= 0) return AMEDIA_ERROR_UNSUPPORTED;
+ mSelectedTrack = trackIndex;
+
+ media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+ if (status != AMEDIA_OK) return status;
+
+ // Get the sample count.
+ AMediaFormat* format = getTrackFormat(trackIndex);
+ const bool haveSampleCount =
+ AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_COUNT, &mSampleCount);
+ AMediaFormat_delete(format);
+
+ if (!haveSampleCount) {
+ LOG(ERROR) << "No sample count in track format.";
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ // Buffer samples.
+ const int32_t targetBufferCount = 60;
+ std::unique_ptr<uint8_t[]> buffer;
+ MediaSampleInfo info;
+ while (true) {
+ info.presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+ info.flags = AMediaExtractor_getSampleFlags(mExtractor);
+ info.size = AMediaExtractor_getSampleSize(mExtractor);
+
+ // Finish buffering after either reading all the samples in the track or after
+ // completing the GOP satisfying the target count.
+ if (mSamples.size() == mSampleCount ||
+ (mSamples.size() >= targetBufferCount && info.flags & SAMPLE_FLAG_SYNC_SAMPLE)) {
+ break;
+ }
+
+ buffer.reset(new uint8_t[info.size]);
+
+ ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer.get(), info.size);
+ if (bytesRead != info.size) {
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
+ mSamples.emplace_back(std::move(buffer), info);
+
+ AMediaExtractor_advance(mExtractor);
+ }
+
+ mFirstPtsUs = mSamples[0].second.presentationTimeUs;
+ mPtsDiff = mSamples[1].second.presentationTimeUs - mSamples[0].second.presentationTimeUs;
+
+ return AMEDIA_OK;
+ }
+
+ media_status_t unselectTrack(int trackIndex __unused) override {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ media_status_t setEnforceSequentialAccess(bool enforce __unused) override { return AMEDIA_OK; }
+
+ media_status_t getEstimatedBitrateForTrack(int trackIndex __unused,
+ int32_t* bitrate __unused) override {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override {
+ if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+ if (mCurrentSampleIndex >= mSampleCount) {
+ info->presentationTimeUs = 0;
+ info->size = 0;
+ info->flags = SAMPLE_FLAG_END_OF_STREAM;
+ return AMEDIA_ERROR_END_OF_STREAM;
+ }
+
+ *info = mSamples[mCurrentSampleIndex % mSamples.size()].second;
+ info->presentationTimeUs = mFirstPtsUs + mCurrentSampleIndex * mPtsDiff;
+ return AMEDIA_OK;
+ }
+
+ media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+ size_t bufferSize) override {
+ if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+ if (mCurrentSampleIndex >= mSampleCount) return AMEDIA_ERROR_END_OF_STREAM;
+
+ auto& p = mSamples[mCurrentSampleIndex % mSamples.size()];
+
+ if (bufferSize < p.second.size) return AMEDIA_ERROR_INVALID_PARAMETER;
+ memcpy(buffer, p.first.get(), p.second.size);
+
+ advanceTrack(trackIndex);
+ return AMEDIA_OK;
+ }
+
+ void advanceTrack(int trackIndex) {
+ if (trackIndex != mSelectedTrack) return;
+ ++mCurrentSampleIndex;
+ }
+
+ virtual ~MockSampleReader() override { AMediaExtractor_delete(mExtractor); }
+
+private:
+ MockSampleReader(AMediaExtractor* extractor) : mExtractor(extractor) {}
+ AMediaExtractor* mExtractor = nullptr;
+ int32_t mSampleCount = 0;
+ std::vector<std::pair<std::unique_ptr<uint8_t[]>, MediaSampleInfo>> mSamples;
+ int mSelectedTrack = -1;
+ int32_t mCurrentSampleIndex = 0;
+ int64_t mFirstPtsUs = 0;
+ int64_t mPtsDiff = 0;
+};
+
+/**
+ * Creates the default destination track format for the given media type, derived from the
+ * source track's format.
+ * @return A managed format for video, or an empty pointer for audio (meaning "pass through").
+ */
+static std::shared_ptr<AMediaFormat> GetDefaultTrackFormat(MediaType mediaType,
+                                                           AMediaFormat* sourceFormat) {
+    // Default video config.
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+    static constexpr float kVideoFrameRate = 30.0f;             // 30 fps
+
+    if (mediaType != kVideo) {
+        // Nothing for audio. Return an empty shared_ptr WITHOUT a deleter: shared_ptr invokes
+        // a custom deleter even for a null pointer, which would call AMediaFormat_delete(nullptr).
+        return nullptr;
+    }
+
+    AMediaFormat* format = AMediaFormat_new();
+    AMediaFormat_copy(format, sourceFormat);
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_FRAME_RATE, kVideoFrameRate);
+    return std::shared_ptr<AMediaFormat>(format, &AMediaFormat_delete);
+}
+
+/**
+ * Gets a MediaSampleReader for the source file.
+ * @param mock True to preload samples into a MockSampleReader (no extractor on the hot path),
+ *             false to create an extractor-backed MediaSampleReaderNDK.
+ * @return The reader, or nullptr if the file could not be opened.
+ */
+static std::shared_ptr<MediaSampleReader> GetSampleReader(const std::string& srcFileName,
+                                                          bool mock) {
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    const std::string srcPath = kAssetDirectory + srcFileName;
+
+    const int srcFd = open(srcPath.c_str(), O_RDONLY);
+    if (srcFd < 0) {
+        return nullptr;
+    }
+
+    // Determine the file size by seeking to the end, then rewind for the reader.
+    const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+    lseek(srcFd, 0, SEEK_SET);
+
+    std::shared_ptr<MediaSampleReader> sampleReader;
+    if (mock) {
+        sampleReader = MockSampleReader::createFromFd(srcFd, 0 /* offset */, fileSize);
+    } else {
+        sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0 /* offset */, fileSize);
+    }
+
+    // srcFd is known valid here, so close it unconditionally. The previous `srcFd > 0` guard
+    // would leak a legitimately-returned descriptor 0. (The readers keep their own reference;
+    // the original code also closed here — presumably they dup() internally; TODO confirm.)
+    close(srcFd);
+    return sampleReader;
+}
+
+/**
+ * Installs a no-op sample consumer on the transcoder so produced samples are returned to it
+ * immediately. Counts every non-codec-config sample with a non-empty payload in sampleCount.
+ * Note: sampleCount is captured by reference and must outlive the transcoder's callbacks.
+ */
+static void ConfigureEmptySampleConsumer(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                         uint32_t& sampleCount) {
+    transcoder->setSampleConsumer([&sampleCount](const std::shared_ptr<MediaSample>& sample) {
+        const bool isCodecConfig = (sample->info.flags & SAMPLE_FLAG_CODEC_CONFIG) != 0;
+        const bool hasPayload = sample->info.size > 0;
+        if (!isCodecConfig && hasPayload) {
+            ++sampleCount;
+        }
+    });
+}
+
+/**
+ * Callback to edit track format for transcoding. Invoked with the default destination format
+ * so callers can override individual keys (e.g. operating rate, priority) before the
+ * transcoder is configured.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+/**
+ * Configures a MediaTrackTranscoder with the provided MediaSampleReader, reading from the first
+ * track that matches the specified media type.
+ * @return True if a matching track was found and the transcoder configured successfully.
+ */
+static bool ConfigureSampleReader(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                  const std::shared_ptr<MediaSampleReader>& sampleReader,
+                                  MediaType mediaType,
+                                  const TrackFormatEditCallback& formatEditor) {
+    int srcTrackIndex = -1;
+    std::shared_ptr<AMediaFormat> srcTrackFormat = nullptr;
+
+    for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+        AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+
+        // Guard against tracks without a MIME key: the previous code passed a possibly-null
+        // mime straight into strncmp, which is undefined behavior.
+        const char* mime = nullptr;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) && mime != nullptr &&
+            ((mediaType == kVideo && strncmp(mime, "video/", 6) == 0) ||
+             (mediaType == kAudio && strncmp(mime, "audio/", 6) == 0))) {
+            srcTrackIndex = trackIndex;
+            // Transfer ownership of the matched track format to the shared_ptr.
+            srcTrackFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+            break;
+        }
+        AMediaFormat_delete(trackFormat);
+    }
+
+    if (srcTrackIndex == -1) {
+        LOG(ERROR) << "No matching source track found";
+        return false;
+    }
+
+    media_status_t status = sampleReader->selectTrack(srcTrackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to select track";
+        return false;
+    }
+
+    auto destinationFormat = GetDefaultTrackFormat(mediaType, srcTrackFormat.get());
+    // The destination format is null for passthrough (audio); don't hand a null format
+    // to the editor, which would feed nullptr into the AMediaFormat setters.
+    if (formatEditor != nullptr && destinationFormat != nullptr) {
+        formatEditor(destinationFormat.get());
+    }
+    status = transcoder->configure(sampleReader, srcTrackIndex, destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "transcoder configure returned " << status;
+        return false;
+    }
+
+    return true;
+}
+
+/**
+ * Runs a single-track transcoding benchmark over the first track of the given media type.
+ * @param mockReader True to feed samples from the in-memory MockSampleReader (isolating codec
+ *                   performance from extractor overhead), false to use MediaSampleReaderNDK.
+ * @param formatEditor Optional hook to adjust the destination format before configuration.
+ */
+static void BenchmarkTranscoder(benchmark::State& state, const std::string& srcFileName,
+                                bool mockReader, MediaType mediaType,
+                                const TrackFormatEditCallback& formatEditor = nullptr) {
+    // Start the binder thread pool exactly once for the whole process.
+    static pthread_once_t once = PTHREAD_ONCE_INIT;
+    pthread_once(&once, ABinderProcess_startThreadPool);
+
+    for (auto _ : state) {
+        std::shared_ptr<TrackTranscoderCallbacks> callbacks =
+                std::make_shared<TrackTranscoderCallbacks>();
+        std::shared_ptr<MediaTrackTranscoder> transcoder;
+
+        // Video is re-encoded; any other media type is passed through untouched.
+        if (mediaType == kVideo) {
+            transcoder = VideoTrackTranscoder::create(callbacks);
+        } else {
+            transcoder = std::make_shared<PassthroughTrackTranscoder>(callbacks);
+        }
+
+        std::shared_ptr<MediaSampleReader> sampleReader = GetSampleReader(srcFileName, mockReader);
+        if (sampleReader == nullptr) {
+            state.SkipWithError("Unable to create sample reader");
+            return;
+        }
+
+        if (!ConfigureSampleReader(transcoder, sampleReader, mediaType, formatEditor)) {
+            state.SkipWithError("Unable to configure the transcoder");
+            return;
+        }
+
+        // Output samples are discarded immediately; only their count matters.
+        uint32_t sampleCount = 0;
+        ConfigureEmptySampleConsumer(transcoder, sampleCount);
+
+        if (!transcoder->start()) {
+            state.SkipWithError("Unable to start the transcoder");
+            return;
+        }
+
+        callbacks->waitForTranscodingFinished();
+        transcoder->stop();
+
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder failed with error");
+            return;
+        }
+
+        LOG(DEBUG) << "Number of samples received: " << sampleCount;
+        // Samples per wall-clock second, i.e. the effective transcoding frame rate.
+        state.counters["FrameRate"] = benchmark::Counter(sampleCount, benchmark::Counter::kIsRate);
+    }
+}
+
+// Wraps BenchmarkTranscoder, translating the benchmark Args pair
+// {operating rate, priority} into a destination-format editor. Negative
+// values leave the codec defaults untouched.
+static void BenchmarkTranscoderWithOperatingRate(benchmark::State& state,
+                                                 const std::string& srcFile, bool mockReader,
+                                                 MediaType mediaType) {
+    const int32_t operatingRate = state.range(0);
+    const int32_t priority = state.range(1);
+
+    TrackFormatEditCallback editor = nullptr;
+    if (operatingRate >= 0 && priority >= 0) {
+        editor = [operatingRate, priority](AMediaFormat* format) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, operatingRate);
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, priority);
+        };
+    }
+    BenchmarkTranscoder(state, srcFile, mockReader, mediaType, editor);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+static void BM_VideoTranscode_AVC2AVC(benchmark::State& state) {
+    // 1080p AVC -> AVC through the real extractor-backed sample reader.
+    BenchmarkTranscoderWithOperatingRate(state,
+                                         "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                                         false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_AVC2AVC_NoExtractor(benchmark::State& state) {
+    // Same transcode with preloaded samples, excluding extractor overhead.
+    BenchmarkTranscoderWithOperatingRate(state,
+                                         "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                                         true /* mockReader */, kVideo);
+}
+
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+static void BM_VideoTranscode_HEVC2AVC(benchmark::State& state) {
+    // 1080p HEVC -> AVC through the real extractor-backed sample reader.
+    BenchmarkTranscoderWithOperatingRate(state,
+                                         "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                                         false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_HEVC2AVC_NoExtractor(benchmark::State& state) {
+    // Same transcode with preloaded samples, excluding extractor overhead.
+    BenchmarkTranscoderWithOperatingRate(state,
+                                         "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                                         true /* mockReader */, kVideo);
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+// All benchmarks report wall-clock time and process CPU time in milliseconds.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+// Benchmark registration for testing different operating rate and priority combinations.
+// Args are {operating rate, priority}; negative values fall back to codec defaults.
+#define TRANSCODER_OPERATING_RATE_BENCHMARK(func) \
+    TRANSCODER_BENCHMARK(func)                    \
+            ->Args({-1, -1}) /* <-- Use default */ \
+            ->Args({240, 0})                      \
+            ->Args({INT32_MAX, 0})                \
+            ->Args({240, 1})                      \
+            ->Args({INT32_MAX, 1})
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC_NoExtractor);
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC_NoExtractor);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
new file mode 100644
index 0000000..9ee55e5
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
@@ -0,0 +1,403 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media transcoder library benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ * ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ * $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ * $ adb shell /data/nativetest64/MediaTranscoderBenchmark/MediaTranscoderBenchmark
+ */
+
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaTranscoder.h>
+#include <iostream>
+
+using namespace android;
+
+// Counter key for the per-run video frame rate; shared between TranscodeMediaFile
+// (which sets it) and CustomCsvReporter (which emits it as a CSV column).
+const std::string PARAM_VIDEO_FRAME_RATE = "VideoFrameRate";
+
+/**
+ * Synchronous adapter for MediaTranscoder's asynchronous callbacks; lets the benchmark
+ * thread block until the transcoder signals completion or failure.
+ */
+class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    // Progress and resource-loss events are irrelevant to the benchmark; ignore them.
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress __unused) override {}
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    // Blocks until onFinished/onError fires. Returns false on a 5-minute timeout.
+    bool waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            if (mCondition.wait_for(lock, std::chrono::minutes(5)) == std::cv_status::timeout) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // AMEDIA_OK unless onError was invoked; checked by the benchmark after each run.
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+/**
+ * Creates the default destination video format. Only the bitrate is set here; all other
+ * keys are inherited from the source track by the transcoder. Caller owns the result.
+ */
+static AMediaFormat* CreateDefaultVideoFormat() {
+    // Default bitrate
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+
+    AMediaFormat* videoFormat = AMediaFormat_new();
+    AMediaFormat_setInt32(videoFormat, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+    return videoFormat;
+}
+
+/**
+ * Callback to configure tracks for transcoding.
+ * @param mime The source track mime type.
+ * @param dstFormat The destination format if the track should be transcoded or nullptr if the track
+ * should be passed through. The callback allocates the format; the caller takes ownership
+ * and releases it with AMediaFormat_delete.
+ * @return True if the track should be included in the output file.
+ */
+using TrackSelectionCallback = std::function<bool(const char* mime, AMediaFormat** dstFormat)>;
+
+/**
+ * Core benchmark loop. Transcodes srcFileName into dstFileName once per benchmark iteration,
+ * choosing per-track destination formats through trackSelectionCallback.
+ */
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName,
+                               TrackSelectionCallback trackSelectionCallback) {
+    // Write-only, create file if non-existent.
+    // NOTE(review): opened without O_TRUNC, so an existing larger destination keeps stale
+    // trailing bytes, and the same dstFd is reused for every iteration — confirm the muxer
+    // rewinds/overwrites the file on each run.
+    static constexpr int kDstOpenFlags = O_WRONLY | O_CREAT;
+    // User R+W permission.
+    static constexpr int kDstFileMode = S_IRUSR | S_IWUSR;
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    // 0 serves as the "never opened" sentinel; open() returns -1 on failure, so the
+    // `fd > 0` cleanup guards below never close an unopened descriptor.
+    int srcFd = 0;
+    int dstFd = 0;
+
+    std::string srcPath = kAssetDirectory + srcFileName;
+    std::string dstPath = kAssetDirectory + dstFileName;
+
+    auto callbacks = std::make_shared<TranscoderCallbacks>();
+    media_status_t status = AMEDIA_OK;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        state.SkipWithError("Unable to open source file");
+        goto exit;
+    }
+    if ((dstFd = open(dstPath.c_str(), kDstOpenFlags, kDstFileMode)) < 0) {
+        state.SkipWithError("Unable to open destination file");
+        goto exit;
+    }
+
+    for (auto _ : state) {
+        // A fresh transcoder per iteration; the file descriptors are shared across iterations.
+        auto transcoder = MediaTranscoder::create(callbacks);
+
+        status = transcoder->configureSource(srcFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder source");
+            goto exit;
+        }
+
+        status = transcoder->configureDestination(dstFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder destination");
+            goto exit;
+        }
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        // NOTE(review): `int i` vs. size_t from trackFormats.size() — signed/unsigned compare.
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* srcFormat = trackFormats[i].get();
+            AMediaFormat* dstFormat = nullptr;
+
+            const char* mime = nullptr;
+            if (!AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &mime)) {
+                state.SkipWithError("Source track format does not have MIME type");
+                goto exit;
+            }
+
+            // Export the source frame count as a rate counter so reports show frames/sec.
+            if (strncmp(mime, "video/", 6) == 0) {
+                int32_t frameCount;
+                if (AMediaFormat_getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+                    state.counters[PARAM_VIDEO_FRAME_RATE] =
+                            benchmark::Counter(frameCount, benchmark::Counter::kIsRate);
+                }
+            }
+
+            // A null dstFormat requests passthrough; a false return excludes the track.
+            if (trackSelectionCallback(mime, &dstFormat)) {
+                status = transcoder->configureTrackFormat(i, dstFormat);
+            }
+
+            if (dstFormat != nullptr) {
+                AMediaFormat_delete(dstFormat);
+            }
+            if (status != AMEDIA_OK) {
+                state.SkipWithError("Unable to configure track");
+                goto exit;
+            }
+        }
+
+        status = transcoder->start();
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to start transcoder");
+            goto exit;
+        }
+
+        if (!callbacks->waitForTranscodingFinished()) {
+            transcoder->cancel();
+            state.SkipWithError("Transcoder timed out");
+            goto exit;
+        }
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder error when running");
+            goto exit;
+        }
+    }
+
+exit:
+    if (srcFd > 0) close(srcFd);
+    if (dstFd > 0) close(dstFd);
+}
+
+/**
+ * Callback to edit track format for transcoding.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+/**
+ * Convenience wrapper around the core TranscodeMediaFile: includes/excludes audio,
+ * optionally transcodes video, and lets videoFormatEditor tweak the default video format.
+ */
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName, bool includeAudio,
+                               bool transcodeVideo,
+                               const TrackFormatEditCallback& videoFormatEditor = nullptr) {
+    TranscodeMediaFile(state, srcFileName, dstFileName,
+                       [=](const char* mime, AMediaFormat** dstFormatOut) -> bool {
+                           *dstFormatOut = nullptr;
+                           // Video: transcode with the (possibly edited) default format.
+                           if (strncmp(mime, "video/", 6) == 0 && transcodeVideo) {
+                               *dstFormatOut = CreateDefaultVideoFormat();
+                               if (videoFormatEditor != nullptr) {
+                                   videoFormatEditor(*dstFormatOut);
+                               }
+                           } else if (strncmp(mime, "audio/", 6) == 0 && !includeAudio) {
+                               // Drop audio tracks entirely when not requested.
+                               return false;
+                           }
+                           // Everything else (and included audio) is passed through.
+                           return true;
+                       });
+}
+
+/**
+ * Requests the fastest possible codec operation: maximum operating rate and realtime priority.
+ * Uses setInt32 for the operating rate, matching the track-transcoder benchmark; INT32_MAX is
+ * not exactly representable as a float, so AMediaFormat_setFloat would silently round it up.
+ */
+static void SetMaxOperatingRate(AMediaFormat* format) {
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, INT32_MAX);
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, 1);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+// Audio + video output; video re-encoded, audio passed through.
+static void BM_TranscodeAvc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+// Video-only source and output.
+static void BM_TranscodeAvc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+// As above but with maximum operating rate / realtime priority requested.
+static void BM_TranscodeAvc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeAvc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+// 720p variants of the audio+video cases.
+static void BM_TranscodeAvc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+// Audio + video output; HEVC video re-encoded to AVC, audio passed through.
+static void BM_TranscodeHevc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+// Video-only source and output.
+static void BM_TranscodeHevc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+// As above but with maximum operating rate / realtime priority requested.
+static void BM_TranscodeHevc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeHevc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+// 720p variants of the audio+video cases.
+static void BM_TranscodeHevc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+//-------------------------------- Passthrough Benchmarks ------------------------------------------
+
+// All tracks muxed straight through without re-encoding (measures demux/mux cost only).
+static void BM_TranscodeAudioVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_passthrough_AV.mp4",
+                       true /* includeAudio */, false /* transcodeVideo */);
+}
+
+// Video-only passthrough.
+static void BM_TranscodeVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_passthrough_AV.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+// All benchmarks report wall-clock time and process CPU time in milliseconds.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeAudioVideoPassthrough);
+TRANSCODER_BENCHMARK(BM_TranscodeVideoPassthrough);
+
+/**
+ * Minimal CSV reporter emitting name, real_time, cpu_time and the video frame-rate counter.
+ * Installed instead of the stock file reporter when --benchmark_out is passed.
+ */
+class CustomCsvReporter : public benchmark::BenchmarkReporter {
+public:
+    CustomCsvReporter() : mPrintedHeader(false) {}
+    virtual bool ReportContext(const Context& context);
+    virtual void ReportRuns(const std::vector<Run>& reports);
+
+private:
+    void PrintRunData(const Run& report);
+
+    // True once the CSV header row has been written.
+    bool mPrintedHeader;
+    // Column order of the emitted CSV.
+    std::vector<std::string> mHeaders = {"name", "real_time", "cpu_time", PARAM_VIDEO_FRAME_RATE};
+};
+
+// Context (CPU info, build flags, …) is deliberately omitted from the CSV output.
+bool CustomCsvReporter::ReportContext(const Context& context __unused) {
+    return true;
+}
+
+// Writes the header row once, then one CSV row per benchmark run.
+void CustomCsvReporter::ReportRuns(const std::vector<Run>& reports) {
+    std::ostream& out = GetOutputStream();
+
+    if (!mPrintedHeader) {
+        // Emit the comma-separated header exactly once, before the first batch of results.
+        for (size_t i = 0; i < mHeaders.size(); ++i) {
+            if (i != 0) out << ",";
+            out << mHeaders[i];
+        }
+        out << "\n";
+        mPrintedHeader = true;
+    }
+
+    // print results for each run
+    for (const auto& run : reports) {
+        PrintRunData(run);
+    }
+}
+
+// Emits one CSV row for a successful run; failed runs are skipped entirely.
+void CustomCsvReporter::PrintRunData(const Run& run) {
+    if (run.error_occurred) {
+        return;
+    }
+
+    std::ostream& out = GetOutputStream();
+    out << run.benchmark_name() << ",";
+    out << run.GetAdjustedRealTime() << ",";
+    out << run.GetAdjustedCPUTime() << ",";
+
+    // The frame-rate counter is only present for runs that saw a video track.
+    const auto frameRate = run.counters.find(PARAM_VIDEO_FRAME_RATE);
+    if (frameRate != run.counters.end()) {
+        out << frameRate->second << ",";
+    } else {
+        out << "NA"
+            << ",";
+    }
+    out << '\n';
+}
+
+int main(int argc, char** argv) {
+    // Install the custom CSV reporter only when file output was requested on the command line.
+    std::unique_ptr<benchmark::BenchmarkReporter> fileReporter;
+    for (int i = 1; i < argc && fileReporter == nullptr; ++i) {
+        const std::string arg(argv[i]);
+        if (arg.find("--benchmark_out") != std::string::npos) {
+            fileReporter.reset(new CustomCsvReporter);
+        }
+    }
+    ::benchmark::Initialize(&argc, argv);
+    if (::benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
+    // Null display reporter selects the library's default console output.
+    ::benchmark::RunSpecifiedBenchmarks(nullptr, fileReporter.get());
+}
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
index dc22423..c6cf1a4 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
@@ -50,6 +50,12 @@
bool dequeue(std::shared_ptr<MediaSample>* sample /* nonnull */);
/**
+ * Checks if the queue currently holds any media samples.
+ * @return True if the queue is empty or has been aborted. False otherwise.
+ */
+ bool isEmpty();
+
+ /**
* Aborts the queue operation. This clears the queue and notifies waiting consumers. After the
* has been aborted it is not possible to enqueue more samples, and dequeue will return null.
*/
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
index acebac2..5c7eeac 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
@@ -24,12 +24,15 @@
namespace android {
/**
- * MediaSampleReader is an interface for reading media samples from a container.
- * MediaSampleReader allows for reading samples from multiple tracks independently of each other
- * while preserving the order of samples within each individual track.
- * MediaSampleReader implementations are thread safe and can be used by multiple threads
- * concurrently. But note that MediaSampleReader only maintains one state per track so concurrent
- * usage of the same track from multiple threads has no benefit.
+ * MediaSampleReader is an interface for reading media samples from a container. MediaSampleReader
+ * allows for reading samples from multiple tracks on individual threads independently of each other
+ * while preserving the order of samples. Due to poor non-sequential access performance of the
+ * underlying extractor, MediaSampleReader can optionally enforce sequential sample access by
+ * blocking requests for tracks that the underlying extractor does not currently point to. Waiting
+ * threads are serviced once the reader advances to a sample from the specified track. Due to this
+ * it is important to read samples and advance the reader from all selected tracks to avoid hanging
+ * other tracks. MediaSampleReader implementations are thread safe and sample access should be done
+ * on one thread per selected track.
*/
class MediaSampleReader {
public:
@@ -57,7 +60,44 @@
virtual AMediaFormat* getTrackFormat(int trackIndex) = 0;
/**
- * Returns the sample information for the current sample in the specified track.
+ * Select a track for sample access. Tracks must be selected in order for sample information and
+ * sample data to be available for that track. Each selected track's samples must be accessed on
+ * its own thread to avoid blocking other tracks.
+ * @param trackIndex The track to select.
+ * @return AMEDIA_OK on success.
+ */
+ virtual media_status_t selectTrack(int trackIndex) = 0;
+
+ /**
+ * Undo a track selection.
+ * @param trackIndex The track to un-select.
+ * @return AMEDIA_OK on success.
+ */
+ virtual media_status_t unselectTrack(int trackIndex) = 0;
+
+ /**
+ * Toggles sequential access enforcement on or off. When the reader enforces sequential access
+ * calls to read sample information will block unless the underlying extractor points to the
+ * specified track.
+ * @param enforce True to enforce sequential access.
+ * @return AMEDIA_OK on success.
+ */
+ virtual media_status_t setEnforceSequentialAccess(bool enforce) = 0;
+
+ /**
+ * Estimates the bitrate of a source track by sampling sample sizes. The bitrate is returned in
+ * megabits per second (Mbps). This method will fail if the track only contains a single sample
+ * and does not have an associated duration.
+ * @param trackIndex The source track index.
+ * @param bitrate Output param for the bitrate.
+ * @return AMEDIA_OK on success.
+ */
+ virtual media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate);
+
+ /**
+ * Returns the sample information for the current sample in the specified track. Note that this
+ * method will block until the reader advances to a sample belonging to the requested track if
+ * the reader is in sequential access mode.
* @param trackIndex The track index (zero-based).
* @param info Pointer to a MediaSampleInfo object where the sample information is written.
* @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
@@ -67,7 +107,10 @@
virtual media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) = 0;
/**
- * Reads the current sample's data into the supplied buffer.
+ * Returns the sample data for the current sample in the specified track into the supplied
+ * buffer. Note that this method will block until the reader advances to a sample belonging to
+ * the requested track if the reader is in sequential access mode. Upon successful return this
+ * method will also advance the specified track to the next sample.
* @param trackIndex The track index (zero-based).
* @param buffer The buffer to write the sample's data to.
* @param bufferSize The size of the supplied buffer.
@@ -80,7 +123,9 @@
size_t bufferSize) = 0;
/**
- * Advance the specified track to the next sample.
+ * Advance the specified track to the next sample. If the reader is in sequential access mode
+ * and the current sample belongs to the specified track, the reader will also advance to the
+ * next sample and wake up any threads waiting on the new track.
* @param trackIndex The track index (zero-based).
*/
virtual void advanceTrack(int trackIndex) = 0;
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
index 2dc9029..30cc37f 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
@@ -20,6 +20,7 @@
#include <media/MediaSampleReader.h>
#include <media/NdkMediaExtractor.h>
+#include <map>
#include <memory>
#include <mutex>
#include <vector>
@@ -46,6 +47,10 @@
AMediaFormat* getFileFormat() override;
size_t getTrackCount() const override;
AMediaFormat* getTrackFormat(int trackIndex) override;
+ media_status_t selectTrack(int trackIndex) override;
+ media_status_t unselectTrack(int trackIndex) override;
+ media_status_t setEnforceSequentialAccess(bool enforce) override;
+ media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) override;
media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override;
media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
size_t bufferSize) override;
@@ -55,21 +60,6 @@
private:
/**
- * Creates a new MediaSampleReaderNDK object from an AMediaExtractor. The extractor needs to be
- * initialized with a valid data source before attempting to create a MediaSampleReaderNDK.
- * @param extractor The initialized media extractor.
- */
- MediaSampleReaderNDK(AMediaExtractor* extractor);
- media_status_t init();
-
- AMediaExtractor* mExtractor = nullptr;
- std::mutex mExtractorMutex;
- const size_t mTrackCount;
-
- int mExtractorTrackIndex = -1;
- uint64_t mExtractorSampleIndex = 0;
-
- /**
* SamplePosition describes the position of a single sample in the media file using its
* timestamp and index in the file.
*/
@@ -99,13 +89,52 @@
SamplePosition next;
};
- /** Samples cursor for each track in the file. */
- std::vector<SampleCursor> mTrackCursors;
+ /**
+ * Creates a new MediaSampleReaderNDK object from an AMediaExtractor. The extractor needs to be
+ * initialized with a valid data source before attempting to create a MediaSampleReaderNDK.
+ * @param extractor The initialized media extractor.
+ */
+ MediaSampleReaderNDK(AMediaExtractor* extractor);
+ /** Advances the track to next sample. */
+ void advanceTrack_l(int trackIndex);
+
+ /** Advances the extractor to next sample. */
bool advanceExtractor_l();
- media_status_t positionExtractorForTrack_l(int trackIndex);
+
+ /** Moves the extractor backwards to the specified sample. */
media_status_t seekExtractorBackwards_l(int64_t targetTimeUs, int targetTrackIndex,
uint64_t targetSampleIndex);
+
+ /** Moves the extractor to the specified sample. */
+ media_status_t moveToSample_l(SamplePosition& pos, int trackIndex);
+
+ /** Moves the extractor to the next sample of the specified track. */
+ media_status_t moveToTrack_l(int trackIndex);
+
+ /** In sequential mode, waits for the extractor to reach the next sample for the track. */
+ media_status_t waitForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+ /**
+ * Ensures the extractor is ready for the next sample of the track regardless of access mode.
+ */
+ media_status_t primeExtractorForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+ AMediaExtractor* mExtractor = nullptr;
+ std::mutex mExtractorMutex;
+ const size_t mTrackCount;
+
+ int mExtractorTrackIndex = -1;
+ uint64_t mExtractorSampleIndex = 0;
+
+ bool mEosReached = false;
+ bool mEnforceSequentialAccess = false;
+
+ // Maps selected track indices to condition variables for sequential sample access control.
+ std::map<int, std::condition_variable> mTrackSignals;
+
+ // Samples cursor for each track in the file.
+ std::vector<SampleCursor> mTrackCursors;
};
} // namespace android
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
new file mode 100644
index 0000000..080f2b7
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_WRITER_H
+#define ANDROID_MEDIA_SAMPLE_WRITER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <condition_variable>
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Muxer interface used by MediaSampleWriter.
+ * Methods in this interface are guaranteed to be called sequentially by MediaSampleWriter.
+ */
+class MediaSampleWriterMuxerInterface {
+public:
+ /**
+ * Adds a new track to the muxer.
+ * @param trackFormat Format of the new track.
+ * @return A non-negative track index on success, or a negative number on failure.
+ */
+ virtual ssize_t addTrack(AMediaFormat* trackFormat) = 0;
+
+ /** Starts the muxer. */
+ virtual media_status_t start() = 0;
+ /**
+ * Writes sample data to a previously added track.
+ * @param trackIndex Index of the track the sample data belongs to.
+ * @param data The sample data.
+ * @param info The sample information.
+ * @return The number of bytes written.
+ */
+ virtual media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+ const AMediaCodecBufferInfo* info) = 0;
+
+ /** Stops the muxer. */
+ virtual media_status_t stop() = 0;
+ virtual ~MediaSampleWriterMuxerInterface() = default;
+};
+
+/**
+ * MediaSampleWriter is a wrapper around a muxer. The sample writer puts samples on a queue that
+ * is serviced by an internal thread to minimize blocking time for clients. MediaSampleWriter also
+ * provides progress reporting. The default muxer interface implementation is based
+ * directly on AMediaMuxer.
+ */
+class MediaSampleWriter : public std::enable_shared_from_this<MediaSampleWriter> {
+public:
+ /** Function prototype for delivering media samples to the writer. */
+ using MediaSampleConsumerFunction =
+ std::function<void(const std::shared_ptr<MediaSample>& sample)>;
+
+ /** Callback interface. */
+ class CallbackInterface {
+ public:
+ /**
+ * Sample writer finished. The finished callback is only called after the sample writer has
+ * been successfully started.
+ */
+ virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) = 0;
+
+ /** Sample writer was stopped before it was finished. */
+ virtual void onStopped(const MediaSampleWriter* writer) = 0;
+
+ /** Sample writer progress update in percent. */
+ virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) = 0;
+
+ virtual ~CallbackInterface() = default;
+ };
+
+ static std::shared_ptr<MediaSampleWriter> Create();
+
+ /**
+ * Initializes the sample writer with its default muxer implementation. MediaSampleWriter needs
+ * to be initialized before tracks are added and can only be initialized once.
+ * @param fd An open file descriptor to write to. The caller is responsible for closing this
+ * file descriptor and it is safe to do so once this method returns.
+ * @param callbacks Client callback object that gets called by the sample writer.
+ * @return True if the writer was successfully initialized.
+ */
+ bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+ /**
+ * Initializes the sample writer with a custom muxer interface implementation.
+ * @param muxer The custom muxer interface implementation.
+ * @param callbacks Client callback object that gets called by the sample writer.
+ * @return True if the writer was successfully initialized.
+ */
+ bool init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer /* nonnull */,
+ const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */);
+
+ /**
+ * Adds a new track to the sample writer. Tracks must be added after the sample writer has been
+ * initialized and before it is started.
+ * @param trackFormat The format of the track to add.
+ * @return A sample consumer to add samples to if the track was successfully added, or nullptr
+ * if the track could not be added.
+ */
+ MediaSampleConsumerFunction addTrack(
+ const std::shared_ptr<AMediaFormat>& trackFormat /* nonnull */);
+
+ /**
+ * Starts the sample writer. The sample writer will start processing samples and writing them to
+ * its muxer on an internal thread. MediaSampleWriter can only be started once.
+ * @return True if the sample writer was successfully started.
+ */
+ bool start();
+
+ /**
+ * Stops the sample writer. If the sample writer is not yet finished, its operation will be
+ * aborted and the onStopped callback will fire. If the sample writer has already finished and
+ * the onFinished callback has fired the writer has already automatically stopped and there is
+ * no need to call stop manually. Once the sample writer has been stopped it cannot be
+ * restarted. This method is asynchronous and will not wait for the sample writer to stop before
+ * returning.
+ */
+ void stop();
+
+ /** Destructor. */
+ ~MediaSampleWriter();
+
+private:
+ struct TrackRecord {
+ TrackRecord(int64_t durationUs)
+ : mDurationUs(durationUs),
+ mFirstSampleTimeUs(0),
+ mPrevSampleTimeUs(INT64_MIN),
+ mFirstSampleTimeSet(false),
+ mReachedEos(false){};
+
+ TrackRecord() : TrackRecord(0){};
+
+ int64_t mDurationUs;
+ int64_t mFirstSampleTimeUs;
+ int64_t mPrevSampleTimeUs;
+ bool mFirstSampleTimeSet;
+ bool mReachedEos;
+ };
+
+ // Track index and sample.
+ using SampleEntry = std::pair<size_t, std::shared_ptr<MediaSample>>;
+
+ struct SampleComparator {
+ // Return true if lhs should come after rhs in the sample queue.
+ bool operator()(const SampleEntry& lhs, const SampleEntry& rhs) {
+ const bool lhsEos = lhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+ const bool rhsEos = rhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+
+ if (lhsEos && !rhsEos) {
+ return true;
+ } else if (!lhsEos && rhsEos) {
+ return false;
+ } else if (lhsEos && rhsEos) {
+ return lhs.first > rhs.first;
+ }
+
+ return lhs.second->info.presentationTimeUs > rhs.second->info.presentationTimeUs;
+ }
+ };
+
+ std::weak_ptr<CallbackInterface> mCallbacks;
+ std::shared_ptr<MediaSampleWriterMuxerInterface> mMuxer;
+
+ std::mutex mMutex; // Protects sample queue and state.
+ std::condition_variable mSampleSignal;
+ std::unordered_map<size_t, TrackRecord> mTracks;
+ std::priority_queue<SampleEntry, std::vector<SampleEntry>, SampleComparator> mSampleQueue
+ GUARDED_BY(mMutex);
+
+ enum : int {
+ UNINITIALIZED,
+ INITIALIZED,
+ STARTED,
+ STOPPED,
+ } mState GUARDED_BY(mMutex);
+
+ MediaSampleWriter() : mState(UNINITIALIZED){};
+ void addSampleToTrack(size_t trackIndex, const std::shared_ptr<MediaSample>& sample);
+ media_status_t writeSamples(bool* wasStopped);
+ media_status_t runWriterLoop(bool* wasStopped);
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_SAMPLE_WRITER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
index 235766c..724b919 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
@@ -19,6 +19,7 @@
#include <media/MediaSampleQueue.h>
#include <media/MediaSampleReader.h>
+#include <media/MediaSampleWriter.h>
#include <media/NdkMediaError.h>
#include <media/NdkMediaFormat.h>
#include <utils/Mutex.h>
@@ -30,28 +31,7 @@
namespace android {
-class MediaTrackTranscoder;
-
-/** Callback interface for MediaTrackTranscoder. */
-class MediaTrackTranscoderCallback {
-public:
- /**
- * Called when the MediaTrackTranscoder instance have finished transcoding all media samples
- * successfully.
- * @param transcoder The MediaTrackTranscoder that finished the transcoding.
- */
- virtual void onTrackFinished(MediaTrackTranscoder* transcoder);
-
- /**
- * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
- * @param transcoder The MediaTrackTranscoder that encountered the error.
- * @param status The non-zero error code describing the encountered error.
- */
- virtual void onTrackError(MediaTrackTranscoder* transcoder, media_status_t status);
-
-protected:
- virtual ~MediaTrackTranscoderCallback() = default;
-};
+class MediaTrackTranscoderCallback;
/**
* Base class for all track transcoders. MediaTrackTranscoder operates asynchronously on an internal
@@ -82,29 +62,51 @@
const std::shared_ptr<AMediaFormat>& destinationFormat);
/**
- * Starts the track transcoder. Once started the track transcoder have to be stopped by calling
- * {@link #stop}, even after completing successfully. Start should only be called once.
+ * Starts the track transcoder. After the track transcoder is successfully started it will run
+ * until a callback signals that transcoding has ended. Start should only be called once.
* @return True if the track transcoder started, or false if it had already been started.
*/
bool start();
/**
* Stops the track transcoder. Once the transcoding has been stopped it cannot be restarted
- * again. It is safe to call stop multiple times.
- * @return True if the track transcoder stopped, or false if it was already stopped.
+ * again. It is safe to call stop multiple times. Stop is an asynchronous operation. Once the
+ * track transcoder has stopped the onTrackStopped callback will get called, unless the
+ * transcoding finished or encountered an error before it could be stopped in which case the
+ * callbacks corresponding to those events will be called instead.
+ * @param stopOnSyncSample Request the transcoder to stop after emitting a sync sample.
*/
- bool stop();
+ void stop(bool stopOnSyncSample = false);
/**
- * Sample output queue.
- * TODO(b/155918341) Move to protected.
+ * Set the sample consumer function. The MediaTrackTranscoder will deliver transcoded samples to
+ * this function. If the MediaTrackTranscoder is started before a consumer is set the transcoder
+ * will buffer a limited number of samples internally before stalling. Once a consumer has been
+ * set the internally buffered samples will be delivered to the consumer.
+ * @param sampleConsumer The sample consumer function.
*/
- MediaSampleQueue mOutputQueue = {};
+ void setSampleConsumer(const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer);
+
+ /**
+ * Retrieves the track transcoder's final output format. The output is available after the
+ * track transcoder has been successfully configured.
+ * @return The track output format.
+ */
+ virtual std::shared_ptr<AMediaFormat> getOutputFormat() const = 0;
+
+ virtual ~MediaTrackTranscoder() = default;
protected:
MediaTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
: mTranscoderCallback(transcoderCallback){};
- virtual ~MediaTrackTranscoder() = default;
+
+ // Called by subclasses when the actual track format becomes available.
+ void notifyTrackFormatAvailable();
+
+ // Called by subclasses when a transcoded sample is available. Samples must not hold a strong
+ // reference to the track transcoder in order to avoid retain cycles through the track
+ // transcoder's sample queue.
+ void onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample);
// configureDestinationFormat needs to be implemented by subclasses, and gets called on an
// external thread before start.
@@ -113,7 +115,7 @@
// runTranscodeLoop needs to be implemented by subclasses, and gets called on
// MediaTrackTranscoder's internal thread when the track transcoder is started.
- virtual media_status_t runTranscodeLoop() = 0;
+ virtual media_status_t runTranscodeLoop(bool* stopped) = 0;
// abortTranscodeLoop needs to be implemented by subclasses, and should request transcoding to
// be aborted as soon as possible. It should be safe to call abortTranscodeLoop multiple times.
@@ -123,10 +125,20 @@
int mTrackIndex;
std::shared_ptr<AMediaFormat> mSourceFormat;
+ enum StopRequest {
+ NONE,
+ STOP_NOW,
+ STOP_ON_SYNC,
+ };
+ std::atomic<StopRequest> mStopRequest = NONE;
+
private:
+ std::mutex mSampleMutex;
+ // SampleQueue for buffering output samples before a sample consumer has been set.
+ MediaSampleQueue mSampleQueue GUARDED_BY(mSampleMutex);
+ MediaSampleWriter::MediaSampleConsumerFunction mSampleConsumer GUARDED_BY(mSampleMutex);
const std::weak_ptr<MediaTrackTranscoderCallback> mTranscoderCallback;
std::mutex mStateMutex;
- std::thread mTranscodingThread GUARDED_BY(mStateMutex);
enum {
UNINITIALIZED,
CONFIGURED,
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
new file mode 100644
index 0000000..7b62d46
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+
+#include <media/NdkMediaError.h>
+
+namespace android {
+
+class MediaTrackTranscoder;
+
+/** Callback interface for MediaTrackTranscoder. */
+class MediaTrackTranscoderCallback {
+public:
+ /**
+ * Called when the MediaTrackTranscoder's actual track format becomes available.
+ * @param transcoder The MediaTrackTranscoder whose track format becomes available.
+ */
+ virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder);
+ /**
+ * Called when the MediaTrackTranscoder instance has finished transcoding all media samples
+ * successfully.
+ * @param transcoder The MediaTrackTranscoder that finished the transcoding.
+ */
+ virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
+
+ /**
+ * Called when the MediaTrackTranscoder instance was explicitly stopped before it was finished.
+ * @param transcoder The MediaTrackTranscoder that was stopped.
+ */
+ virtual void onTrackStopped(const MediaTrackTranscoder* transcoder);
+
+ /**
+ * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
+ * @param transcoder The MediaTrackTranscoder that encountered the error.
+ * @param status The non-zero error code describing the encountered error.
+ */
+ virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
+
+protected:
+ virtual ~MediaTrackTranscoderCallback() = default;
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
new file mode 100644
index 0000000..4e11ef5
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_H
+#define ANDROID_MEDIA_TRANSCODER_H
+
+#include <android/binder_auto_utils.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkMediaCodecPlatform.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <atomic>
+#include <memory>
+#include <mutex>
+#include <unordered_set>
+
+namespace android {
+
+class MediaSampleReader;
+
+class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
+ public MediaTrackTranscoderCallback,
+ public MediaSampleWriter::CallbackInterface {
+public:
+ /** Callbacks from transcoder to client. */
+ class CallbackInterface {
+ public:
+ /** Transcoder finished successfully. */
+ virtual void onFinished(const MediaTranscoder* transcoder) = 0;
+
+ /** Transcoder encountered an unrecoverable error. */
+ virtual void onError(const MediaTranscoder* transcoder, media_status_t error) = 0;
+
+ /** Transcoder progress update reported in percent from 0 to 100. */
+ virtual void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) = 0;
+
+ /**
+ * Transcoder lost codec resources and paused operations. The client can resume transcoding
+ * again when resources are available by either:
+ * 1) Calling resume on the same MediaTranscoder instance.
+ * 2) Creating a new MediaTranscoding instance with the paused state and then calling
+ * resume.
+ */
+ virtual void onCodecResourceLost(
+ const MediaTranscoder* transcoder,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) = 0;
+
+ virtual ~CallbackInterface() = default;
+ };
+
+ /**
+ * Creates a new MediaTranscoder instance. If the supplied paused state is valid, the transcoder
+ * will be initialized with the paused state and be ready to be resumed right away. It is not
+ * possible to change any configurations on a paused transcoder.
+ */
+ static std::shared_ptr<MediaTranscoder> create(
+ const std::shared_ptr<CallbackInterface>& callbacks,
+ pid_t pid = AMEDIACODEC_CALLING_PID, uid_t uid = AMEDIACODEC_CALLING_UID,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+ /** Configures source from a file descriptor. */
+ media_status_t configureSource(int fd);
+
+ /** Gets the media formats of all tracks in the file. */
+ std::vector<std::shared_ptr<AMediaFormat>> getTrackFormats() const;
+
+ /**
+ * Configures transcoding of a track. Tracks that are not configured will not be present in the
+ * final transcoded file, i.e. tracks will be dropped by default. Passing nullptr for
+ * trackFormat means the track will be copied unchanged ("passthrough") to the destination.
+ * Track configurations must be done after the source has been configured.
+ * Note: trackFormat is not modified but cannot be const.
+ */
+ media_status_t configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat);
+
+ /** Configures destination from fd. */
+ media_status_t configureDestination(int fd);
+
+ /** Starts transcoding. No configurations can be made once the transcoder has started. */
+ media_status_t start();
+
+ /**
+ * Pauses transcoding and finalizes the partial transcoded file to disk. Pause is a synchronous
+ * operation and will wait until all internal components are done. Once this method returns it
+ * is safe to release the transcoder instance. No callback will be called if the transcoder was
+ * paused successfully. But if the transcoding finishes or encounters an error during pause,
+ * the corresponding callback will be called.
+ */
+ media_status_t pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState);
+
+ /** Resumes a paused transcoding. */
+ media_status_t resume();
+
+ /**
+ * Cancels the transcoding. Once canceled the transcoding can not be restarted. Client
+ * will be responsible for cleaning up the abandoned file. Cancel is a synchronous operation and
+ * will wait until all internal components are done. Once this method returns it is safe to
+ * release the transcoder instance. Normally no callback will be called when the transcoder is
+ * cancelled. But if the transcoding finishes or encounters an error during cancel, the
+ * corresponding callback will be called.
+ */
+ media_status_t cancel();
+
+ virtual ~MediaTranscoder() = default;
+
+private:
+ MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks, pid_t pid, uid_t uid);
+
+ // MediaTrackTranscoderCallback
+ virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
+ virtual void onTrackFinished(const MediaTrackTranscoder* transcoder) override;
+ virtual void onTrackStopped(const MediaTrackTranscoder* transcoder) override;
+ virtual void onTrackError(const MediaTrackTranscoder* transcoder,
+ media_status_t status) override;
+ // ~MediaTrackTranscoderCallback
+
+ // MediaSampleWriter::CallbackInterface
+ virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) override;
+ virtual void onStopped(const MediaSampleWriter* writer) override;
+ virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) override;
+ // ~MediaSampleWriter::CallbackInterface
+
+ void onThreadFinished(const void* thread, media_status_t threadStatus, bool threadStopped);
+ media_status_t requestStop(bool stopOnSync);
+ void waitForThreads();
+
+ std::shared_ptr<CallbackInterface> mCallbacks;
+ std::shared_ptr<MediaSampleReader> mSampleReader;
+ std::shared_ptr<MediaSampleWriter> mSampleWriter;
+ std::vector<std::shared_ptr<AMediaFormat>> mSourceTrackFormats;
+ std::vector<std::shared_ptr<MediaTrackTranscoder>> mTrackTranscoders;
+ std::mutex mTracksAddedMutex;
+ std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
+ pid_t mPid;
+ uid_t mUid;
+
+ enum ThreadState {
+ PENDING = 0, // Not yet started.
+ RUNNING, // Currently running.
+ DONE, // Done running (can be finished, stopped or error).
+ };
+ std::mutex mThreadStateMutex;
+ std::condition_variable mThreadsDoneSignal;
+ std::unordered_map<const void*, ThreadState> mThreadStates GUARDED_BY(mThreadStateMutex);
+ media_status_t mTranscoderStatus GUARDED_BY(mThreadStateMutex) = AMEDIA_OK;
+ bool mTranscoderStopped GUARDED_BY(mThreadStateMutex) = false;
+ bool mThreadsDone GUARDED_BY(mThreadStateMutex) = false;
+ bool mCallbackSent GUARDED_BY(mThreadStateMutex) = false;
+ bool mSampleWriterStopped GUARDED_BY(mThreadStateMutex) = false;
+
+ std::atomic_bool mCancelled = false;
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
new file mode 100644
index 0000000..1a72be3
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+#define ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+
+#include <media/NdkMediaFormat.h>
+
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char* AMEDIA_MIMETYPE_VIDEO_H263;
+
+// TODO(b/146420990)
+// TODO: make MediaTranscoder use the consts from this header.
+typedef enum {
+ OUTPUT_FORMAT_START = 0,
+ OUTPUT_FORMAT_MPEG_4 = OUTPUT_FORMAT_START,
+ OUTPUT_FORMAT_WEBM = OUTPUT_FORMAT_START + 1,
+ OUTPUT_FORMAT_THREE_GPP = OUTPUT_FORMAT_START + 2,
+ OUTPUT_FORMAT_HEIF = OUTPUT_FORMAT_START + 3,
+ OUTPUT_FORMAT_OGG = OUTPUT_FORMAT_START + 4,
+ OUTPUT_FORMAT_LIST_END = OUTPUT_FORMAT_START + 4,
+} MuxerFormat;
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecConstants.h (are these going to be deprecated)
+static constexpr int COLOR_FormatYUV420SemiPlanar = 21;
+static constexpr int COLOR_FormatYUV420Flexible = 0x7F420888;
+static constexpr int COLOR_FormatSurface = 0x7f000789;
+
+// constants not defined in NDK
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES;
+static constexpr int TBD_AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 0x1;
+
+static constexpr int kBitrateModeConstant = 2;
+
+namespace AMediaFormatUtils {
+
+typedef struct {
+ const char* key;
+ bool (*copy)(const char* key, AMediaFormat* from, AMediaFormat* to);
+ bool (*copy2)(const char* key, AMediaFormat* from, AMediaFormat* to);
+} EntryCopier;
+
+#define ENTRY_COPIER(keyName, typeName) \
+ { keyName, AMediaFormatUtils::CopyFormatEntry##typeName, nullptr }
+#define ENTRY_COPIER2(keyName, typeName, typeName2) \
+ { \
+ keyName, AMediaFormatUtils::CopyFormatEntry##typeName, \
+ AMediaFormatUtils::CopyFormatEntry##typeName2 \
+ }
+
+bool CopyFormatEntryString(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt64(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt32(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryFloat(const char* key, AMediaFormat* from, AMediaFormat* to);
+
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to, const EntryCopier* entries,
+ size_t entryCount);
+
+bool SetDefaultFormatValueFloat(const char* key, AMediaFormat* format, float value);
+bool SetDefaultFormatValueInt32(const char* key, AMediaFormat* format, int32_t value);
+
+} // namespace AMediaFormatUtils
+#endif // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
diff --git a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
index 42feb85..c074831 100644
--- a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
@@ -86,15 +86,14 @@
};
// MediaTrackTranscoder
- media_status_t runTranscodeLoop() override;
+ media_status_t runTranscodeLoop(bool* stopped) override;
void abortTranscodeLoop() override;
media_status_t configureDestinationFormat(
const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+ std::shared_ptr<AMediaFormat> getOutputFormat() const override;
// ~MediaTrackTranscoder
std::shared_ptr<BufferPool> mBufferPool;
- bool mEosFromSource = false;
- std::atomic_bool mStopRequested = false;
};
} // namespace android
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
index 7d93d60..d2ffb01 100644
--- a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -19,7 +19,7 @@
#include <android/native_window.h>
#include <media/MediaTrackTranscoder.h>
-#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCodecPlatform.h>
#include <media/NdkMediaFormat.h>
#include <condition_variable>
@@ -34,10 +34,13 @@
* using a native surface (ANativeWindow). Codec callback events are placed on a message queue and
* serviced in order on the transcoding thread managed by MediaTrackTranscoder.
*/
-class VideoTrackTranscoder : public MediaTrackTranscoder {
+class VideoTrackTranscoder : public std::enable_shared_from_this<VideoTrackTranscoder>,
+ public MediaTrackTranscoder {
public:
- VideoTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
- : MediaTrackTranscoder(transcoderCallback){};
+ static std::shared_ptr<VideoTrackTranscoder> create(
+ const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback,
+ pid_t pid = AMEDIACODEC_CALLING_PID, uid_t uid = AMEDIACODEC_CALLING_UID);
+
virtual ~VideoTrackTranscoder() override;
private:
@@ -49,18 +52,26 @@
public:
void push(T const& value, bool front = false);
T pop();
+ void abort();
private:
std::mutex mMutex;
std::condition_variable mCondition;
std::deque<T> mQueue;
+ bool mAborted = false;
};
+ class CodecWrapper;
+
+ VideoTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback,
+ pid_t pid, uid_t uid)
+ : MediaTrackTranscoder(transcoderCallback), mPid(pid), mUid(uid){};
// MediaTrackTranscoder
- media_status_t runTranscodeLoop() override;
+ media_status_t runTranscodeLoop(bool* stopped) override;
void abortTranscodeLoop() override;
media_status_t configureDestinationFormat(
const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+ std::shared_ptr<AMediaFormat> getOutputFormat() const override;
// ~MediaTrackTranscoder
// Enqueues an input sample with the decoder.
@@ -72,17 +83,22 @@
// Dequeues an encoded buffer from the encoder and adds it to the output queue.
void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+ // Updates the video track's actual format based on encoder output format.
+ void updateTrackFormat(AMediaFormat* outputFormat);
+
AMediaCodec* mDecoder = nullptr;
- // Sample release callback holds a reference to the encoder, hence the shared_ptr.
- std::shared_ptr<AMediaCodec> mEncoder;
+ std::shared_ptr<CodecWrapper> mEncoder;
ANativeWindow* mSurface = nullptr;
bool mEosFromSource = false;
bool mEosFromEncoder = false;
- bool mStopRequested = false;
+ bool mLastSampleWasSync = false;
media_status_t mStatus = AMEDIA_OK;
MediaSampleInfo mSampleInfo;
BlockingQueue<std::function<void()>> mCodecMessageQueue;
std::shared_ptr<AMediaFormat> mDestinationFormat;
+ std::shared_ptr<AMediaFormat> mActualOutputFormat;
+ pid_t mPid;
+ uid_t mUid;
};
} // namespace android
diff --git a/media/libmediatranscoding/transcoder/setloglevel.sh b/media/libmediatranscoding/transcoder/setloglevel.sh
new file mode 100755
index 0000000..5eb7b67
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/setloglevel.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+if [ $# -ne 1 ]
+then
+ echo Usage: $0 loglevel
+ exit 1
+fi
+
+level=$1
+echo Setting transcoder log level to $level
+
+# List all log tags
+declare -a tags=(
+ MediaTranscoder MediaTrackTranscoder VideoTrackTranscoder PassthroughTrackTranscoder
+ MediaSampleWriter MediaSampleReader MediaSampleQueue MediaTranscoderTests
+ MediaTrackTranscoderTests VideoTrackTranscoderTests PassthroughTrackTranscoderTests
+ MediaSampleWriterTests MediaSampleReaderNDKTests MediaSampleQueueTests)
+
+# Set log level for all tags
+for tag in "${tags[@]}"
+do
+ adb shell setprop log.tag.${tag} $level
+done
+
+# Pick up new settings
+adb shell stop && adb shell start
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/libmediatranscoding/transcoder/tests/Android.bp
index 52a7a71..d0ea802 100644
--- a/media/libmediatranscoding/transcoder/tests/Android.bp
+++ b/media/libmediatranscoding/transcoder/tests/Android.bp
@@ -1,10 +1,4 @@
// Unit tests for libmediatranscoder.
-
-filegroup {
- name: "test_assets",
- srcs: ["assets/*"],
-}
-
cc_defaults {
name: "testdefaults",
@@ -13,11 +7,16 @@
"libmedia_headers",
],
+ static_libs: [
+ "libmediatranscoder",
+ ],
shared_libs: [
"libbase",
+ "libbinder_ndk",
+ "libcrypto",
"libcutils",
"libmediandk",
- "libmediatranscoder",
+ "libnativewindow",
"libutils",
],
@@ -26,6 +25,14 @@
"-Wall",
],
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
data: [":test_assets"],
test_config_template: "AndroidTestTemplate.xml",
test_suites: ["device-tests", "TranscoderTests"],
@@ -50,7 +57,6 @@
name: "MediaTrackTranscoderTests",
defaults: ["testdefaults"],
srcs: ["MediaTrackTranscoderTests.cpp"],
- shared_libs: ["libbinder_ndk"],
}
// VideoTrackTranscoder unit test
@@ -65,5 +71,18 @@
name: "PassthroughTrackTranscoderTests",
defaults: ["testdefaults"],
srcs: ["PassthroughTrackTranscoderTests.cpp"],
- shared_libs: ["libcrypto"],
+}
+
+// MediaSampleWriter unit test
+cc_test {
+ name: "MediaSampleWriterTests",
+ defaults: ["testdefaults"],
+ srcs: ["MediaSampleWriterTests.cpp"],
+}
+
+// MediaTranscoder unit test
+cc_test {
+ name: "MediaTranscoderTests",
+ defaults: ["testdefaults"],
+ srcs: ["MediaTranscoderTests.cpp"],
}
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
index 23d1bab..6d781cd 100644
--- a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -17,12 +17,12 @@
<option name="test-suite-tag" value="TranscoderTests" />
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="false" />
- <option name="push-file"
- key="assets"
- value="/data/local/tmp/TranscoderTestAssets" />
+ <option name="push-file" key="TranscodingTestAssets" value="/data/local/tmp/TranscodingTestAssets" />
+ <option name="push-file" key="{MODULE}" value="/data/local/tmp/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
<option name="module-name" value="{MODULE}" />
</test>
</configuration>
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
index 2046ca0..6357e4d 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
@@ -46,10 +46,12 @@
static constexpr int kNumSamples = 4;
MediaSampleQueue sampleQueue;
+ EXPECT_TRUE(sampleQueue.isEmpty());
// Enqueue loop.
for (int i = 0; i < kNumSamples; ++i) {
sampleQueue.enqueue(newSample(i));
+ EXPECT_FALSE(sampleQueue.isEmpty());
}
// Dequeue loop.
@@ -60,6 +62,7 @@
EXPECT_EQ(sample->bufferId, i);
EXPECT_FALSE(aborted);
}
+ EXPECT_TRUE(sampleQueue.isEmpty());
}
TEST_F(MediaSampleQueueTests, TestInterleavedDequeueOrder) {
@@ -71,12 +74,14 @@
// Enqueue and dequeue.
for (int i = 0; i < kNumSamples; ++i) {
sampleQueue.enqueue(newSample(i));
+ EXPECT_FALSE(sampleQueue.isEmpty());
std::shared_ptr<MediaSample> sample;
bool aborted = sampleQueue.dequeue(&sample);
EXPECT_NE(sample, nullptr);
EXPECT_EQ(sample->bufferId, i);
EXPECT_FALSE(aborted);
+ EXPECT_TRUE(sampleQueue.isEmpty());
}
}
@@ -98,6 +103,7 @@
EXPECT_NE(sample, nullptr);
EXPECT_EQ(sample->bufferId, 1);
EXPECT_FALSE(aborted);
+ EXPECT_TRUE(sampleQueue.isEmpty());
enqueueThread.join();
}
@@ -160,7 +166,9 @@
EXPECT_FALSE(bufferReleased[i]);
}
+ EXPECT_FALSE(sampleQueue.isEmpty());
sampleQueue.abort();
+ EXPECT_TRUE(sampleQueue.isEmpty());
for (int i = 0; i < kNumSamples; ++i) {
EXPECT_TRUE(bufferReleased[i]);
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
index 858fcb3..11af0b1 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
@@ -25,25 +25,156 @@
#include <fcntl.h>
#include <gtest/gtest.h>
#include <media/MediaSampleReaderNDK.h>
+#include <openssl/md5.h>
#include <utils/Timers.h>
-// TODO(b/153453392): Test more asset types and validate sample data from readSampleDataForTrack.
+#include <cmath>
+#include <mutex>
+#include <thread>
+
+// TODO(b/153453392): Test more asset types (frame reordering?).
namespace android {
#define SEC_TO_USEC(s) ((s)*1000 * 1000)
+/** Helper class for comparing sample data using checksums. */
+class Sample {
+public:
+ Sample(uint32_t flags, int64_t timestamp, size_t size, const uint8_t* buffer)
+ : mFlags{flags}, mTimestamp{timestamp}, mSize{size} {
+ initChecksum(buffer);
+ }
+
+ Sample(AMediaExtractor* extractor) {
+ mFlags = AMediaExtractor_getSampleFlags(extractor);
+ mTimestamp = AMediaExtractor_getSampleTime(extractor);
+ mSize = static_cast<size_t>(AMediaExtractor_getSampleSize(extractor));
+
+ auto buffer = std::make_unique<uint8_t[]>(mSize);
+ AMediaExtractor_readSampleData(extractor, buffer.get(), mSize);
+
+ initChecksum(buffer.get());
+ }
+
+ void initChecksum(const uint8_t* buffer) {
+ MD5_CTX md5Ctx;
+ MD5_Init(&md5Ctx);
+ MD5_Update(&md5Ctx, buffer, mSize);
+ MD5_Final(mChecksum, &md5Ctx);
+ }
+
+ bool operator==(const Sample& rhs) const {
+ return mSize == rhs.mSize && mFlags == rhs.mFlags && mTimestamp == rhs.mTimestamp &&
+ memcmp(mChecksum, rhs.mChecksum, MD5_DIGEST_LENGTH) == 0;
+ }
+
+ uint32_t mFlags;
+ int64_t mTimestamp;
+ size_t mSize;
+ uint8_t mChecksum[MD5_DIGEST_LENGTH];
+};
+
+/** Constant for selecting all samples. */
+static constexpr int SAMPLE_COUNT_ALL = -1;
+
+/**
+ * Utility class to test different sample access patterns combined with sequential or parallel
+ * sample access modes.
+ */
+class SampleAccessTester {
+public:
+ SampleAccessTester(int sourceFd, size_t fileSize) {
+ mSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0, fileSize);
+ EXPECT_TRUE(mSampleReader);
+
+ mTrackCount = mSampleReader->getTrackCount();
+
+ for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ EXPECT_EQ(mSampleReader->selectTrack(trackIndex), AMEDIA_OK);
+ }
+
+ mSamples.resize(mTrackCount);
+ mTrackThreads.resize(mTrackCount);
+ }
+
+ void getSampleInfo(int trackIndex) {
+ MediaSampleInfo info;
+ media_status_t status = mSampleReader->getSampleInfoForTrack(trackIndex, &info);
+ EXPECT_EQ(status, AMEDIA_OK);
+ }
+
+ void readSamplesAsync(int trackIndex, int sampleCount) {
+ mTrackThreads[trackIndex] = std::thread{[this, trackIndex, sampleCount] {
+ int samplesRead = 0;
+ MediaSampleInfo info;
+ while (samplesRead < sampleCount || sampleCount == SAMPLE_COUNT_ALL) {
+ media_status_t status = mSampleReader->getSampleInfoForTrack(trackIndex, &info);
+ if (status != AMEDIA_OK) {
+ EXPECT_EQ(status, AMEDIA_ERROR_END_OF_STREAM);
+ EXPECT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0);
+ break;
+ }
+ ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+
+ auto buffer = std::make_unique<uint8_t[]>(info.size);
+ status = mSampleReader->readSampleDataForTrack(trackIndex, buffer.get(), info.size);
+ EXPECT_EQ(status, AMEDIA_OK);
+
+ mSampleMutex.lock();
+ const uint8_t* bufferPtr = buffer.get();
+ mSamples[trackIndex].emplace_back(info.flags, info.presentationTimeUs, info.size,
+ bufferPtr);
+ mSampleMutex.unlock();
+ ++samplesRead;
+ }
+ }};
+ }
+
+ void readSamplesAsync(int sampleCount) {
+ for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ readSamplesAsync(trackIndex, sampleCount);
+ }
+ }
+
+ void waitForTrack(int trackIndex) {
+ ASSERT_TRUE(mTrackThreads[trackIndex].joinable());
+ mTrackThreads[trackIndex].join();
+ }
+
+ void waitForTracks() {
+ for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ waitForTrack(trackIndex);
+ }
+ }
+
+ void setEnforceSequentialAccess(bool enforce) {
+ media_status_t status = mSampleReader->setEnforceSequentialAccess(enforce);
+ EXPECT_EQ(status, AMEDIA_OK);
+ }
+
+ std::vector<std::vector<Sample>>& getSamples() { return mSamples; }
+
+ std::shared_ptr<MediaSampleReader> mSampleReader;
+ size_t mTrackCount;
+ std::mutex mSampleMutex;
+ std::vector<std::thread> mTrackThreads;
+ std::vector<std::vector<Sample>> mSamples;
+};
+
class MediaSampleReaderNDKTests : public ::testing::Test {
public:
MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests created"; }
void SetUp() override {
LOG(DEBUG) << "MediaSampleReaderNDKTests set up";
- const char* sourcePath =
- "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
- mExtractor = AMediaExtractor_new();
- ASSERT_NE(mExtractor, nullptr);
+ // Need to start a thread pool to prevent AMediaExtractor binder calls from starving
+ // (b/155663561).
+ ABinderProcess_startThreadPool();
+
+ const char* sourcePath =
+ "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
mSourceFd = open(sourcePath, O_RDONLY);
ASSERT_GT(mSourceFd, 0);
@@ -51,6 +182,9 @@
mFileSize = lseek(mSourceFd, 0, SEEK_END);
lseek(mSourceFd, 0, SEEK_SET);
+ mExtractor = AMediaExtractor_new();
+ ASSERT_NE(mExtractor, nullptr);
+
media_status_t status =
AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mFileSize);
ASSERT_EQ(status, AMEDIA_OK);
@@ -61,15 +195,56 @@
}
}
- void initExtractorTimestamps() {
- // Save all sample timestamps, per track, as reported by the extractor.
- mExtractorTimestamps.resize(mTrackCount);
+ void initExtractorSamples() {
+ if (mExtractorSamples.size() == mTrackCount) return;
+
+ // Save sample information, per track, as reported by the extractor.
+ mExtractorSamples.resize(mTrackCount);
do {
const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
- const int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
-
- mExtractorTimestamps[trackIndex].push_back(sampleTime);
+ mExtractorSamples[trackIndex].emplace_back(mExtractor);
} while (AMediaExtractor_advance(mExtractor));
+
+ AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+ }
+
+ std::vector<int32_t> getTrackBitrates() {
+ size_t totalSize[mTrackCount];
+ memset(totalSize, 0, sizeof(totalSize));
+
+ do {
+ const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+ totalSize[trackIndex] += AMediaExtractor_getSampleSize(mExtractor);
+ } while (AMediaExtractor_advance(mExtractor));
+
+ AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+
+ std::vector<int32_t> bitrates;
+ for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ int64_t durationUs;
+ AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+ EXPECT_NE(trackFormat, nullptr);
+ EXPECT_TRUE(AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs));
+ bitrates.push_back(roundf((float)totalSize[trackIndex] * 8 * 1000000 / durationUs));
+ }
+
+ return bitrates;
+ }
+
+ void compareSamples(std::vector<std::vector<Sample>>& readerSamples) {
+ initExtractorSamples();
+ EXPECT_EQ(readerSamples.size(), mTrackCount);
+
+ for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ LOG(DEBUG) << "Track " << trackIndex << ", comparing "
+ << readerSamples[trackIndex].size() << " samples.";
+ EXPECT_EQ(readerSamples[trackIndex].size(), mExtractorSamples[trackIndex].size());
+ for (size_t sampleIndex = 0; sampleIndex < readerSamples[trackIndex].size();
+ sampleIndex++) {
+ EXPECT_EQ(readerSamples[trackIndex][sampleIndex],
+ mExtractorSamples[trackIndex][sampleIndex]);
+ }
+ }
}
void TearDown() override {
@@ -84,60 +259,116 @@
size_t mTrackCount;
int mSourceFd;
size_t mFileSize;
- std::vector<std::vector<int64_t>> mExtractorTimestamps;
+ std::vector<std::vector<Sample>> mExtractorSamples;
};
-TEST_F(MediaSampleReaderNDKTests, TestSampleTimes) {
- LOG(DEBUG) << "TestSampleTimes Starts";
+/** Reads all samples from all tracks in parallel. */
+TEST_F(MediaSampleReaderNDKTests, TestParallelSampleAccess) {
+ LOG(DEBUG) << "TestParallelSampleAccess Starts";
- std::shared_ptr<MediaSampleReader> sampleReader =
- MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
+ SampleAccessTester tester{mSourceFd, mFileSize};
+ tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+ tester.waitForTracks();
+ compareSamples(tester.getSamples());
+}
+
+/** Reads all samples from all tracks sequentially. */
+TEST_F(MediaSampleReaderNDKTests, TestSequentialSampleAccess) {
+ LOG(DEBUG) << "TestSequentialSampleAccess Starts";
+
+ SampleAccessTester tester{mSourceFd, mFileSize};
+ tester.setEnforceSequentialAccess(true);
+ tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+ tester.waitForTracks();
+ compareSamples(tester.getSamples());
+}
+
+/** Reads all samples from one track in parallel mode before switching to sequential mode. */
+TEST_F(MediaSampleReaderNDKTests, TestMixedSampleAccessTrackEOS) {
+ LOG(DEBUG) << "TestMixedSampleAccessTrackEOS Starts";
+
+ for (int readSampleInfoFlag = 0; readSampleInfoFlag <= 1; readSampleInfoFlag++) {
+ for (int trackIndToEOS = 0; trackIndToEOS < mTrackCount; ++trackIndToEOS) {
+ LOG(DEBUG) << "Testing EOS of track " << trackIndToEOS;
+
+ SampleAccessTester tester{mSourceFd, mFileSize};
+
+ // If the flag is set, read sample info from a different track before draining the track
+ // under test to force the reader to save the extractor position.
+ if (readSampleInfoFlag) {
+ tester.getSampleInfo((trackIndToEOS + 1) % mTrackCount);
+ }
+
+ // Read all samples from one track before enabling sequential access
+ tester.readSamplesAsync(trackIndToEOS, SAMPLE_COUNT_ALL);
+ tester.waitForTrack(trackIndToEOS);
+ tester.setEnforceSequentialAccess(true);
+
+ for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+ if (trackIndex == trackIndToEOS) continue;
+
+ tester.readSamplesAsync(trackIndex, SAMPLE_COUNT_ALL);
+ tester.waitForTrack(trackIndex);
+ }
+
+ compareSamples(tester.getSamples());
+ }
+ }
+}
+
+/**
+ * Reads different combinations of sample counts from all tracks in parallel mode before switching
+ * to sequential mode and reading the rest of the samples.
+ */
+TEST_F(MediaSampleReaderNDKTests, TestMixedSampleAccess) {
+ LOG(DEBUG) << "TestMixedSampleAccess Starts";
+ initExtractorSamples();
+
+ for (int trackIndToTest = 0; trackIndToTest < mTrackCount; ++trackIndToTest) {
+ for (int sampleCount = 0; sampleCount <= (mExtractorSamples[trackIndToTest].size() + 1);
+ ++sampleCount) {
+ SampleAccessTester tester{mSourceFd, mFileSize};
+
+ for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+ if (trackIndex == trackIndToTest) {
+ tester.readSamplesAsync(trackIndex, sampleCount);
+ } else {
+ tester.readSamplesAsync(trackIndex, mExtractorSamples[trackIndex].size() / 2);
+ }
+ }
+
+ tester.waitForTracks();
+ tester.setEnforceSequentialAccess(true);
+
+ tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+ tester.waitForTracks();
+
+ compareSamples(tester.getSamples());
+ }
+ }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestEstimatedBitrateAccuracy) {
+ // Just put a somewhat reasonable upper bound on the estimated bitrate expected in our test
+ // assets. This is mostly to make sure the estimation is not way off.
+ static constexpr int32_t kMaxEstimatedBitrate = 100 * 1000 * 1000; // 100 Mbps
+
+ auto sampleReader = MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
ASSERT_TRUE(sampleReader);
- MediaSampleInfo info;
- int trackEosCount = 0;
- std::vector<bool> trackReachedEos(mTrackCount, false);
- std::vector<std::vector<int64_t>> readerTimestamps(mTrackCount);
+ std::vector<int32_t> actualTrackBitrates = getTrackBitrates();
+ for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+ EXPECT_EQ(sampleReader->selectTrack(trackIndex), AMEDIA_OK);
- // Initialize the extractor timestamps.
- initExtractorTimestamps();
+ int32_t bitrate;
+ EXPECT_EQ(sampleReader->getEstimatedBitrateForTrack(trackIndex, &bitrate), AMEDIA_OK);
+ EXPECT_GT(bitrate, 0);
+ EXPECT_LT(bitrate, kMaxEstimatedBitrate);
- // Read 5s of each track at a time.
- const int64_t chunkDurationUs = SEC_TO_USEC(5);
- int64_t chunkEndTimeUs = chunkDurationUs;
-
- // Loop until all tracks have reached End Of Stream.
- while (trackEosCount < mTrackCount) {
- for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
- if (trackReachedEos[trackIndex]) continue;
-
- // Advance current track to next chunk end time.
- do {
- media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
- if (status != AMEDIA_OK) {
- ASSERT_EQ(status, AMEDIA_ERROR_END_OF_STREAM);
- ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0);
- trackReachedEos[trackIndex] = true;
- trackEosCount++;
- break;
- }
- ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
- readerTimestamps[trackIndex].push_back(info.presentationTimeUs);
- sampleReader->advanceTrack(trackIndex);
- } while (info.presentationTimeUs < chunkEndTimeUs);
- }
- chunkEndTimeUs += chunkDurationUs;
- }
-
- for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
- LOG(DEBUG) << "Track " << trackIndex << ", comparing "
- << readerTimestamps[trackIndex].size() << " samples.";
- ASSERT_EQ(readerTimestamps[trackIndex].size(), mExtractorTimestamps[trackIndex].size());
- for (size_t sampleIndex = 0; sampleIndex < readerTimestamps[trackIndex].size();
- sampleIndex++) {
- ASSERT_EQ(readerTimestamps[trackIndex][sampleIndex],
- mExtractorTimestamps[trackIndex][sampleIndex]);
- }
+ // Note: The test asset currently used in this test is shorter than the sampling duration
+ // used to estimate the bitrate in the sample reader. So for now the estimation should be
+ // exact but if/when a longer asset is used a reasonable delta needs to be defined.
+ EXPECT_EQ(bitrate, actualTrackBitrates[trackIndex]);
}
}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
new file mode 100644
index 0000000..0a41b00
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -0,0 +1,598 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleWriter
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriterTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <condition_variable>
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/** Muxer interface to enable MediaSampleWriter testing. */
+class TestMuxer : public MediaSampleWriterMuxerInterface {
+public:
+ // MuxerInterface
+ ssize_t addTrack(AMediaFormat* trackFormat) override {
+ mEventQueue.push_back(AddTrack(trackFormat));
+ return mTrackCount++;
+ }
+ media_status_t start() override {
+ mEventQueue.push_back(Start());
+ return AMEDIA_OK;
+ }
+
+ media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+ const AMediaCodecBufferInfo* info) override {
+ mEventQueue.push_back(WriteSample(trackIndex, data, info));
+ return AMEDIA_OK;
+ }
+ media_status_t stop() override {
+ mEventQueue.push_back(Stop());
+ return AMEDIA_OK;
+ }
+ // ~MuxerInterface
+
+ struct Event {
+ enum { NoEvent, AddTrack, Start, WriteSample, Stop } type = NoEvent;
+ const AMediaFormat* format = nullptr;
+ size_t trackIndex = 0;
+ const uint8_t* data = nullptr;
+ AMediaCodecBufferInfo info{};
+ };
+
+ static constexpr Event NoEvent = {Event::NoEvent, nullptr, 0, nullptr, {}};
+
+ static Event AddTrack(const AMediaFormat* format) {
+ return {.type = Event::AddTrack, .format = format};
+ }
+
+ static Event Start() { return {.type = Event::Start}; }
+ static Event Stop() { return {.type = Event::Stop}; }
+
+ static Event WriteSample(size_t trackIndex, const uint8_t* data,
+ const AMediaCodecBufferInfo* info) {
+ return {.type = Event::WriteSample, .trackIndex = trackIndex, .data = data, .info = *info};
+ }
+
+ static Event WriteSampleWithPts(size_t trackIndex, int64_t pts) {
+ return {.type = Event::WriteSample, .trackIndex = trackIndex, .info = {0, 0, pts, 0}};
+ }
+
+ void pushEvent(const Event& e) {
+ std::unique_lock<std::mutex> lock(mMutex);
+ mEventQueue.push_back(e);
+ mCondition.notify_one();
+ }
+
+ const Event& popEvent(bool wait = false) {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (wait && mEventQueue.empty()) {
+ mCondition.wait_for(lock, std::chrono::milliseconds(200));
+ }
+
+ if (mEventQueue.empty()) {
+ mPoppedEvent = NoEvent;
+ } else {
+ mPoppedEvent = *mEventQueue.begin();
+ mEventQueue.pop_front();
+ }
+ return mPoppedEvent;
+ }
+
+private:
+ Event mPoppedEvent;
+ std::list<Event> mEventQueue;
+ ssize_t mTrackCount = 0;
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+};
+
+bool operator==(const AMediaCodecBufferInfo& lhs, const AMediaCodecBufferInfo& rhs) {
+ return lhs.offset == rhs.offset && lhs.size == rhs.size &&
+ lhs.presentationTimeUs == rhs.presentationTimeUs && lhs.flags == rhs.flags;
+}
+
+bool operator==(const TestMuxer::Event& lhs, const TestMuxer::Event& rhs) {
+ return lhs.type == rhs.type && lhs.format == rhs.format && lhs.trackIndex == rhs.trackIndex &&
+ lhs.data == rhs.data && lhs.info == rhs.info;
+}
+
+/** Represents a media source file. */
+class TestMediaSource {
+public:
+ void init() {
+ static const char* sourcePath =
+ "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+ mExtractor = AMediaExtractor_new();
+ ASSERT_NE(mExtractor, nullptr);
+
+ int sourceFd = open(sourcePath, O_RDONLY);
+ ASSERT_GT(sourceFd, 0);
+
+ off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+ lseek(sourceFd, 0, SEEK_SET);
+
+ media_status_t status = AMediaExtractor_setDataSourceFd(mExtractor, sourceFd, 0, fileSize);
+ ASSERT_EQ(status, AMEDIA_OK);
+ close(sourceFd);
+
+ mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+ ASSERT_GT(mTrackCount, 1);
+ for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+ AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+ ASSERT_NE(trackFormat, nullptr);
+
+ const char* mime = nullptr;
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+ if (strncmp(mime, "video/", 6) == 0) {
+ mVideoTrackIndex = trackIndex;
+ } else if (strncmp(mime, "audio/", 6) == 0) {
+ mAudioTrackIndex = trackIndex;
+ }
+
+ mTrackFormats.push_back(
+ std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete));
+
+ AMediaExtractor_selectTrack(mExtractor, trackIndex);
+ }
+ EXPECT_GE(mVideoTrackIndex, 0);
+ EXPECT_GE(mAudioTrackIndex, 0);
+ }
+
+ void reset() const {
+ media_status_t status = AMediaExtractor_seekTo(mExtractor, 0 /* seekPosUs */,
+ AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+ ASSERT_EQ(status, AMEDIA_OK);
+ }
+
+ AMediaExtractor* mExtractor = nullptr;
+ size_t mTrackCount = 0;
+ std::vector<std::shared_ptr<AMediaFormat>> mTrackFormats;
+ int mVideoTrackIndex = -1;
+ int mAudioTrackIndex = -1;
+};
+
+class TestCallbacks : public MediaSampleWriter::CallbackInterface {
+public:
+ bool hasFinished() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ return mFinished;
+ }
+
+ // MediaSampleWriter::CallbackInterface
+ virtual void onFinished(const MediaSampleWriter* writer __unused,
+ media_status_t status) override {
+ std::unique_lock<std::mutex> lock(mMutex);
+ EXPECT_FALSE(mFinished);
+ mFinished = true;
+ mStatus = status;
+ mCondition.notify_all();
+ }
+
+ virtual void onStopped(const MediaSampleWriter* writer __unused) {
+ std::unique_lock<std::mutex> lock(mMutex);
+ EXPECT_FALSE(mFinished);
+ mStopped = true;
+ mCondition.notify_all();
+ }
+
+ virtual void onProgressUpdate(const MediaSampleWriter* writer __unused,
+ int32_t progress) override {
+ EXPECT_GT(progress, mLastProgress);
+ EXPECT_GE(progress, 0);
+ EXPECT_LE(progress, 100);
+
+ mLastProgress = progress;
+ mProgressUpdateCount++;
+ }
+ // ~MediaSampleWriter::CallbackInterface
+
+ void waitForWritingFinished() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mFinished && !mStopped) {
+ mCondition.wait(lock);
+ }
+ }
+
+ uint32_t getProgressUpdateCount() const { return mProgressUpdateCount; }
+ bool wasStopped() const { return mStopped; }
+
+private:
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mFinished = false;
+ bool mStopped = false;
+ media_status_t mStatus = AMEDIA_OK;
+ int32_t mLastProgress = -1;
+ uint32_t mProgressUpdateCount = 0;
+};
+
+class MediaSampleWriterTests : public ::testing::Test {
+public:
+ MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests created"; }
+ ~MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests destroyed"; }
+
+ static const TestMediaSource& getMediaSource() {
+ static TestMediaSource sMediaSource;
+ static std::once_flag sOnceToken;
+
+ std::call_once(sOnceToken, [] { sMediaSource.init(); });
+
+ sMediaSource.reset();
+ return sMediaSource;
+ }
+
+ static std::shared_ptr<MediaSample> newSample(int64_t ptsUs, uint32_t flags, size_t size,
+ size_t offset, const uint8_t* buffer) {
+ auto sample = std::make_shared<MediaSample>();
+ sample->info.presentationTimeUs = ptsUs;
+ sample->info.flags = flags;
+ sample->info.size = size;
+ sample->dataOffset = offset;
+ sample->buffer = buffer;
+ return sample;
+ }
+
+ static std::shared_ptr<MediaSample> newSampleEos() {
+ return newSample(0, SAMPLE_FLAG_END_OF_STREAM, 0, 0, nullptr);
+ }
+
+ static std::shared_ptr<MediaSample> newSampleWithPts(int64_t ptsUs) {
+ static uint32_t sampleCount = 0;
+
+ // Use sampleCount to get a unique mock sample.
+ uint32_t sampleId = ++sampleCount;
+ return newSample(ptsUs, 0, sampleId, sampleId, reinterpret_cast<const uint8_t*>(sampleId));
+ }
+
+ static std::shared_ptr<MediaSample> newSampleWithPtsOnly(int64_t ptsUs) {
+ return newSample(ptsUs, 0, 0, 0, nullptr);
+ }
+
+ void SetUp() override {
+ LOG(DEBUG) << "MediaSampleWriterTests set up";
+ mTestMuxer = std::make_shared<TestMuxer>();
+ }
+
+ void TearDown() override {
+ LOG(DEBUG) << "MediaSampleWriterTests tear down";
+ mTestMuxer.reset();
+ }
+
+protected:
+ std::shared_ptr<TestMuxer> mTestMuxer;
+ std::shared_ptr<TestCallbacks> mTestCallbacks = std::make_shared<TestCallbacks>();
+};
+
+TEST_F(MediaSampleWriterTests, TestAddTrackWithoutInit) {
+ const TestMediaSource& mediaSource = getMediaSource();
+
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutInit) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutTracks) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+ EXPECT_FALSE(writer->start());
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestAddInvalidTrack) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+ EXPECT_EQ(writer->addTrack(nullptr), nullptr);
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestDoubleStartStop) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+ std::shared_ptr<TestCallbacks> callbacks = std::make_shared<TestCallbacks>();
+ EXPECT_TRUE(writer->init(mTestMuxer, callbacks));
+
+ const TestMediaSource& mediaSource = getMediaSource();
+ EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+ ASSERT_TRUE(writer->start());
+ EXPECT_FALSE(writer->start());
+
+ writer->stop();
+ writer->stop();
+ callbacks->waitForWritingFinished();
+ EXPECT_TRUE(callbacks->wasStopped());
+}
+
+TEST_F(MediaSampleWriterTests, TestStopWithoutStart) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+ const TestMediaSource& mediaSource = getMediaSource();
+ EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+ writer->stop();
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutCallback) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+ std::weak_ptr<MediaSampleWriter::CallbackInterface> unassignedWp;
+ EXPECT_FALSE(writer->init(mTestMuxer, unassignedWp));
+
+ std::shared_ptr<MediaSampleWriter::CallbackInterface> unassignedSp;
+ EXPECT_FALSE(writer->init(mTestMuxer, unassignedSp));
+
+ const TestMediaSource& mediaSource = getMediaSource();
+ EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+ ASSERT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestProgressUpdate) {
+ const TestMediaSource& mediaSource = getMediaSource();
+
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+ std::shared_ptr<AMediaFormat> videoFormat =
+ std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+ AMediaFormat_copy(videoFormat.get(),
+ mediaSource.mTrackFormats[mediaSource.mVideoTrackIndex].get());
+
+ AMediaFormat_setInt64(videoFormat.get(), AMEDIAFORMAT_KEY_DURATION, 100);
+ auto sampleConsumer = writer->addTrack(videoFormat);
+ EXPECT_NE(sampleConsumer, nullptr);
+ ASSERT_TRUE(writer->start());
+
+ for (int64_t pts = 0; pts < 100; ++pts) {
+ sampleConsumer(newSampleWithPts(pts));
+ }
+ sampleConsumer(newSampleEos());
+ mTestCallbacks->waitForWritingFinished();
+
+ EXPECT_EQ(mTestCallbacks->getProgressUpdateCount(), 100);
+}
+
+TEST_F(MediaSampleWriterTests, TestInterleaving) {
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+ // Use two tracks for this test.
+ static constexpr int kNumTracks = 2;
+ MediaSampleWriter::MediaSampleConsumerFunction sampleConsumers[kNumTracks];
+ std::vector<std::pair<std::shared_ptr<MediaSample>, size_t>> addedSamples;
+ const TestMediaSource& mediaSource = getMediaSource();
+
+ for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+ auto trackFormat = mediaSource.mTrackFormats[trackIdx % mediaSource.mTrackCount];
+ sampleConsumers[trackIdx] = writer->addTrack(trackFormat);
+ EXPECT_NE(sampleConsumers[trackIdx], nullptr);
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(trackFormat.get()));
+ }
+
+ // Create samples in the expected interleaved order for easy verification.
+ auto addSampleToTrackWithPts = [&addedSamples, &sampleConsumers](int trackIndex, int64_t pts) {
+ auto sample = newSampleWithPts(pts);
+ sampleConsumers[trackIndex](sample);
+ addedSamples.emplace_back(sample, trackIndex);
+ };
+
+ addSampleToTrackWithPts(0, 0);
+ addSampleToTrackWithPts(1, 4);
+
+ addSampleToTrackWithPts(0, 1);
+ addSampleToTrackWithPts(0, 2);
+ addSampleToTrackWithPts(0, 3);
+ addSampleToTrackWithPts(0, 10);
+
+ addSampleToTrackWithPts(1, 5);
+ addSampleToTrackWithPts(1, 6);
+ addSampleToTrackWithPts(1, 11);
+
+ addSampleToTrackWithPts(0, 12);
+ addSampleToTrackWithPts(1, 13);
+
+ for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+ sampleConsumers[trackIndex](newSampleEos());
+ }
+
+ // Start the writer.
+ ASSERT_TRUE(writer->start());
+
+ // Wait for writer to complete.
+ mTestCallbacks->waitForWritingFinished();
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Start());
+
+ std::sort(addedSamples.begin(), addedSamples.end(),
+ [](const std::pair<std::shared_ptr<MediaSample>, size_t>& left,
+ const std::pair<std::shared_ptr<MediaSample>, size_t>& right) {
+ return left.first->info.presentationTimeUs < right.first->info.presentationTimeUs;
+ });
+
+ // Verify sample order.
+ for (auto entry : addedSamples) {
+ auto sample = entry.first;
+ auto trackIndex = entry.second;
+
+ const TestMuxer::Event& event = mTestMuxer->popEvent();
+ EXPECT_EQ(event.type, TestMuxer::Event::WriteSample);
+ EXPECT_EQ(event.trackIndex, trackIndex);
+ EXPECT_EQ(event.data, sample->buffer);
+ EXPECT_EQ(event.info.offset, sample->dataOffset);
+ EXPECT_EQ(event.info.size, sample->info.size);
+ EXPECT_EQ(event.info.presentationTimeUs, sample->info.presentationTimeUs);
+ EXPECT_EQ(event.info.flags, sample->info.flags);
+ }
+
+ // Verify EOS samples.
+ for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+ auto trackFormat = mediaSource.mTrackFormats[trackIndex % mediaSource.mTrackCount];
+ int64_t duration = 0;
+ AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &duration);
+
+ // EOS timestamp = first sample timestamp + duration.
+ const int64_t endTime = duration + (trackIndex == 1 ? 4 : 0);
+ const AMediaCodecBufferInfo info = {0, 0, endTime, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM};
+
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::WriteSample(trackIndex, nullptr, &info));
+ }
+
+ EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
+ EXPECT_TRUE(mTestCallbacks->hasFinished());
+}
+
+// Convenience function for reading a sample from an AMediaExtractor represented as a MediaSample.
+static std::shared_ptr<MediaSample> readSampleAndAdvance(AMediaExtractor* extractor,
+ size_t* trackIndexOut) {
+ int trackIndex = AMediaExtractor_getSampleTrackIndex(extractor);
+ if (trackIndex < 0) {
+ return nullptr;
+ }
+
+ if (trackIndexOut != nullptr) {
+ *trackIndexOut = trackIndex;
+ }
+
+ ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+ int64_t sampleTimeUs = AMediaExtractor_getSampleTime(extractor);
+ uint32_t flags = AMediaExtractor_getSampleFlags(extractor);
+
+ size_t bufferSize = static_cast<size_t>(sampleSize);
+ uint8_t* buffer = new uint8_t[bufferSize];
+
+ ssize_t dataRead = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
+ EXPECT_EQ(dataRead, sampleSize);
+
+ auto sample = MediaSample::createWithReleaseCallback(
+ buffer, 0 /* offset */, 0 /* id */, [buffer](MediaSample*) { delete[] buffer; });
+ sample->info.size = bufferSize;
+ sample->info.presentationTimeUs = sampleTimeUs;
+ sample->info.flags = flags;
+
+ (void)AMediaExtractor_advance(extractor);
+ return sample;
+}
+
+TEST_F(MediaSampleWriterTests, TestDefaultMuxer) {
+ // Write samples straight from an extractor and validate output file.
+ static const char* destinationPath =
+ "/data/local/tmp/MediaSampleWriterTests_TestDefaultMuxer_output.MP4";
+ const int destinationFd =
+ open(destinationPath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IROTH);
+ ASSERT_GT(destinationFd, 0);
+
+ // Initialize writer.
+ std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+ EXPECT_TRUE(writer->init(destinationFd, mTestCallbacks));
+ close(destinationFd);
+
+ // Add tracks.
+ const TestMediaSource& mediaSource = getMediaSource();
+ std::vector<MediaSampleWriter::MediaSampleConsumerFunction> sampleConsumers;
+
+ for (size_t trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+ auto consumer = writer->addTrack(mediaSource.mTrackFormats[trackIndex]);
+ sampleConsumers.push_back(consumer);
+ }
+
+ // Start the writer.
+ ASSERT_TRUE(writer->start());
+
+ // Enqueue samples and finally End Of Stream.
+ std::shared_ptr<MediaSample> sample;
+ size_t trackIndex;
+ while ((sample = readSampleAndAdvance(mediaSource.mExtractor, &trackIndex)) != nullptr) {
+ sampleConsumers[trackIndex](sample);
+ }
+ for (trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+ sampleConsumers[trackIndex](newSampleEos());
+ }
+
+ // Wait for writer.
+ mTestCallbacks->waitForWritingFinished();
+
+ // Compare output file with source.
+ mediaSource.reset();
+
+ AMediaExtractor* extractor = AMediaExtractor_new();
+ ASSERT_NE(extractor, nullptr);
+
+ int sourceFd = open(destinationPath, O_RDONLY);
+ ASSERT_GT(sourceFd, 0);
+
+ off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+ lseek(sourceFd, 0, SEEK_SET);
+
+ media_status_t status = AMediaExtractor_setDataSourceFd(extractor, sourceFd, 0, fileSize);
+ ASSERT_EQ(status, AMEDIA_OK);
+ close(sourceFd);
+
+ size_t trackCount = AMediaExtractor_getTrackCount(extractor);
+ EXPECT_EQ(trackCount, mediaSource.mTrackCount);
+
+ for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+ AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(extractor, trackIndex);
+ ASSERT_NE(trackFormat, nullptr);
+
+ AMediaExtractor_selectTrack(extractor, trackIndex);
+ }
+
+ // Compare samples.
+ std::shared_ptr<MediaSample> sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+ std::shared_ptr<MediaSample> sample2 = readSampleAndAdvance(extractor, nullptr);
+
+ while (sample1 != nullptr && sample2 != nullptr) {
+ EXPECT_EQ(sample1->info.presentationTimeUs, sample2->info.presentationTimeUs);
+ EXPECT_EQ(sample1->info.size, sample2->info.size);
+ EXPECT_EQ(sample1->info.flags, sample2->info.flags);
+
+ EXPECT_EQ(memcmp(sample1->buffer, sample2->buffer, sample1->info.size), 0);
+
+ sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+ sample2 = readSampleAndAdvance(extractor, nullptr);
+ }
+ EXPECT_EQ(sample1, nullptr);
+ EXPECT_EQ(sample2, nullptr);
+
+ AMediaExtractor_delete(extractor);
+}
+
+} // namespace android
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
index c5b181d..21f0b86 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -53,7 +53,7 @@
switch (GetParam()) {
case VIDEO:
- mTranscoder = std::make_shared<VideoTrackTranscoder>(mCallback);
+ mTranscoder = VideoTrackTranscoder::create(mCallback);
break;
case PASSTHROUGH:
mTranscoder = std::make_shared<PassthroughTrackTranscoder>(mCallback);
@@ -61,13 +61,10 @@
}
ASSERT_NE(mTranscoder, nullptr);
- initSampleReader();
+ initSampleReader("/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4");
}
- void initSampleReader() {
- const char* sourcePath =
- "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
-
+ void initSampleReader(const char* sourcePath) {
const int sourceFd = open(sourcePath, O_RDONLY);
ASSERT_GT(sourceFd, 0);
@@ -90,8 +87,7 @@
if (GetParam() == VIDEO && strncmp(mime, "video/", 6) == 0) {
mTrackIndex = trackIndex;
- mSourceFormat = std::shared_ptr<AMediaFormat>(
- trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+ mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
ASSERT_NE(mSourceFormat, nullptr);
mDestinationFormat =
@@ -102,8 +98,7 @@
// TODO(lnilsson): Test metadata track passthrough after hkuang@ provides sample.
mTrackIndex = trackIndex;
- mSourceFormat = std::shared_ptr<AMediaFormat>(
- trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+ mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
ASSERT_NE(mSourceFormat, nullptr);
break;
}
@@ -112,31 +107,28 @@
}
ASSERT_NE(mSourceFormat, nullptr);
+ EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
}
// Drains the transcoder's output queue in a loop.
- void drainOutputSampleQueue() {
- mSampleQueueDrainThread = std::thread{[this] {
- std::shared_ptr<MediaSample> sample;
- bool aborted = false;
- do {
- aborted = mTranscoder->mOutputQueue.dequeue(&sample);
- } while (!aborted && !(sample->info.flags & SAMPLE_FLAG_END_OF_STREAM));
- mQueueWasAborted = aborted;
- mGotEndOfStream =
- sample != nullptr && (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0;
- }};
+ void drainOutputSamples(int numSamplesToSave = 0) {
+ mTranscoder->setSampleConsumer(
+ [this, numSamplesToSave](const std::shared_ptr<MediaSample>& sample) {
+ ASSERT_NE(sample, nullptr);
+
+ mGotEndOfStream = (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0;
+
+ if (mSavedSamples.size() < numSamplesToSave) {
+ mSavedSamples.push_back(sample);
+ }
+
+ if (mSavedSamples.size() == numSamplesToSave || mGotEndOfStream) {
+ mSamplesSavedSemaphore.signal();
+ }
+ });
}
- void joinDrainThread() {
- if (mSampleQueueDrainThread.joinable()) {
- mSampleQueueDrainThread.join();
- }
- }
- void TearDown() override {
- LOG(DEBUG) << "MediaTrackTranscoderTests tear down";
- joinDrainThread();
- }
+ void TearDown() override { LOG(DEBUG) << "MediaTrackTranscoderTests tear down"; }
~MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests destroyed"; }
@@ -150,8 +142,8 @@
std::shared_ptr<AMediaFormat> mSourceFormat;
std::shared_ptr<AMediaFormat> mDestinationFormat;
- std::thread mSampleQueueDrainThread;
- bool mQueueWasAborted = false;
+ std::vector<std::shared_ptr<MediaSample>> mSavedSamples;
+ OneShotSemaphore mSamplesSavedSemaphore;
bool mGotEndOfStream = false;
};
@@ -160,20 +152,25 @@
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
ASSERT_TRUE(mTranscoder->start());
- drainOutputSampleQueue();
+ drainOutputSamples();
EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
- EXPECT_TRUE(mTranscoder->stop());
- joinDrainThread();
- EXPECT_FALSE(mQueueWasAborted);
+ EXPECT_TRUE(mCallback->transcodingFinished());
EXPECT_TRUE(mGotEndOfStream);
}
TEST_P(MediaTrackTranscoderTests, StopNormalOperation) {
LOG(DEBUG) << "Testing StopNormalOperation";
+
+ // Use a longer test asset to make sure that transcoding can be stopped.
+ initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
EXPECT_TRUE(mTranscoder->start());
- EXPECT_TRUE(mTranscoder->stop());
+ mCallback->waitUntilTrackFormatAvailable();
+ mTranscoder->stop();
+ EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+ EXPECT_TRUE(mCallback->transcodingWasStopped());
}
TEST_P(MediaTrackTranscoderTests, StartWithoutConfigure) {
@@ -185,17 +182,23 @@
LOG(DEBUG) << "Testing StopWithoutStart";
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
- EXPECT_FALSE(mTranscoder->stop());
+ mTranscoder->stop();
}
TEST_P(MediaTrackTranscoderTests, DoubleStartStop) {
LOG(DEBUG) << "Testing DoubleStartStop";
+
+ // Use a longer test asset to make sure that transcoding can be stopped.
+ initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
EXPECT_TRUE(mTranscoder->start());
EXPECT_FALSE(mTranscoder->start());
- EXPECT_TRUE(mTranscoder->stop());
- EXPECT_FALSE(mTranscoder->stop());
+ mTranscoder->stop();
+ mTranscoder->stop();
+ EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+ EXPECT_TRUE(mCallback->transcodingWasStopped());
}
TEST_P(MediaTrackTranscoderTests, DoubleConfigure) {
@@ -219,7 +222,8 @@
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
EXPECT_TRUE(mTranscoder->start());
- EXPECT_TRUE(mTranscoder->stop());
+ mTranscoder->stop();
+ EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
EXPECT_FALSE(mTranscoder->start());
}
@@ -228,49 +232,27 @@
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
ASSERT_TRUE(mTranscoder->start());
- drainOutputSampleQueue();
+ drainOutputSamples();
EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
- EXPECT_TRUE(mTranscoder->stop());
+ mTranscoder->stop();
EXPECT_FALSE(mTranscoder->start());
- joinDrainThread();
- EXPECT_FALSE(mQueueWasAborted);
EXPECT_TRUE(mGotEndOfStream);
}
-TEST_P(MediaTrackTranscoderTests, AbortOutputQueue) {
- LOG(DEBUG) << "Testing AbortOutputQueue";
- EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
- AMEDIA_OK);
- ASSERT_TRUE(mTranscoder->start());
- mTranscoder->mOutputQueue.abort();
- drainOutputSampleQueue();
- EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_ERROR_IO);
- EXPECT_TRUE(mTranscoder->stop());
-
- joinDrainThread();
- EXPECT_TRUE(mQueueWasAborted);
- EXPECT_FALSE(mGotEndOfStream);
-}
-
TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderRelease) {
LOG(DEBUG) << "Testing HoldSampleAfterTranscoderRelease";
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
ASSERT_TRUE(mTranscoder->start());
-
- std::shared_ptr<MediaSample> sample;
- EXPECT_FALSE(mTranscoder->mOutputQueue.dequeue(&sample));
-
- drainOutputSampleQueue();
+ drainOutputSamples(1 /* numSamplesToSave */);
EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
- EXPECT_TRUE(mTranscoder->stop());
- joinDrainThread();
- EXPECT_FALSE(mQueueWasAborted);
+ mTranscoder->stop();
EXPECT_TRUE(mGotEndOfStream);
mTranscoder.reset();
+
std::this_thread::sleep_for(std::chrono::milliseconds(20));
- sample.reset();
+ mSavedSamples.clear();
}
TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderStop) {
@@ -278,13 +260,13 @@
EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
AMEDIA_OK);
ASSERT_TRUE(mTranscoder->start());
-
- std::shared_ptr<MediaSample> sample;
- EXPECT_FALSE(mTranscoder->mOutputQueue.dequeue(&sample));
- EXPECT_TRUE(mTranscoder->stop());
+ drainOutputSamples(1 /* numSamplesToSave */);
+ mSamplesSavedSemaphore.wait();
+ mTranscoder->stop();
+ EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
std::this_thread::sleep_for(std::chrono::milliseconds(20));
- sample.reset();
+ mSavedSamples.clear();
}
TEST_P(MediaTrackTranscoderTests, NullSampleReader) {
@@ -302,6 +284,44 @@
AMEDIA_OK);
}
+TEST_P(MediaTrackTranscoderTests, StopOnSync) {
+ LOG(DEBUG) << "Testing StopOnSync";
+
+ // Use a longer test asset to make sure there is a GOP to finish.
+ initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
+ EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+ AMEDIA_OK);
+
+ bool lastSampleWasEos = false;
+ bool lastRealSampleWasSync = false;
+ OneShotSemaphore samplesReceivedSemaphore;
+ uint32_t sampleCount = 0;
+
+ mTranscoder->setSampleConsumer([&](const std::shared_ptr<MediaSample>& sample) {
+ ASSERT_NE(sample, nullptr);
+
+ if ((lastSampleWasEos = sample->info.flags & SAMPLE_FLAG_END_OF_STREAM)) {
+ samplesReceivedSemaphore.signal();
+ return;
+ }
+ lastRealSampleWasSync = sample->info.flags & SAMPLE_FLAG_SYNC_SAMPLE;
+
+ if (++sampleCount >= 10) { // Wait for a few samples before stopping.
+ samplesReceivedSemaphore.signal();
+ }
+ });
+
+ ASSERT_TRUE(mTranscoder->start());
+ samplesReceivedSemaphore.wait();
+ mTranscoder->stop(true /* stopOnSync */);
+ EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+
+ EXPECT_TRUE(lastSampleWasEos);
+ EXPECT_TRUE(lastRealSampleWasSync);
+ EXPECT_TRUE(mCallback->transcodingWasStopped());
+}
+
}; // namespace android
using namespace android;
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
new file mode 100644
index 0000000..bfc1f3b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -0,0 +1,427 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+namespace android {
+
+#define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName) \
+ static bool equal##_typeName(const char* key, AMediaFormat* src, AMediaFormat* dst) { \
+ _type srcVal, dstVal; \
+ bool srcPresent = AMediaFormat_get##_typeName(src, key, &srcVal); \
+ bool dstPresent = AMediaFormat_get##_typeName(dst, key, &dstVal); \
+ return (srcPresent == dstPresent) && (!srcPresent || (srcVal == dstVal)); \
+ }
+
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int32_t, Int32);
+
+struct FormatVerifierEntry {
+ const char* key;
+ std::function<bool(const char*, AMediaFormat*, AMediaFormat*)> equal;
+};
+
+static const FormatVerifierEntry kFieldsToPreserve[] = {
+ {AMEDIAFORMAT_KEY_DURATION, equalInt64}, {AMEDIAFORMAT_KEY_WIDTH, equalInt32},
+ {AMEDIAFORMAT_KEY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_FRAME_RATE, equalInt32},
+ {AMEDIAFORMAT_KEY_FRAME_COUNT, equalInt32}, {AMEDIAFORMAT_KEY_DISPLAY_WIDTH, equalInt32},
+ {AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_SAR_WIDTH, equalInt32},
+ {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
+};
+
+class TestCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+ virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+ std::unique_lock<std::mutex> lock(mMutex);
+ EXPECT_FALSE(mFinished);
+ mFinished = true;
+ mCondition.notify_all();
+ }
+
+ virtual void onError(const MediaTranscoder* transcoder __unused,
+ media_status_t error) override {
+ std::unique_lock<std::mutex> lock(mMutex);
+ EXPECT_NE(error, AMEDIA_OK);
+ EXPECT_FALSE(mFinished);
+ mFinished = true;
+ mStatus = error;
+ mCondition.notify_all();
+ }
+
+ virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+ int32_t progress) override {
+ std::unique_lock<std::mutex> lock(mMutex);
+ if (progress > 0 && !mProgressMade) {
+ mProgressMade = true;
+ mCondition.notify_all();
+ }
+ }
+
+ virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+ __unused) override {}
+
+ void waitForTranscodingFinished() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mFinished) {
+ mCondition.wait(lock);
+ }
+ }
+
+ void waitForProgressMade() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mProgressMade && !mFinished) {
+ mCondition.wait(lock);
+ }
+ }
+ media_status_t mStatus = AMEDIA_OK;
+ bool mFinished = false;
+
+private:
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mProgressMade = false;
+};
+
+// Write-only, create file if non-existent, don't overwrite existing file.
+static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
+// User R+W permission.
+static constexpr int kFileMode = S_IRUSR | S_IWUSR;
+
+class MediaTranscoderTests : public ::testing::Test {
+public:
+ MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests created"; }
+ ~MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests destroyed"; }
+
+ void SetUp() override {
+ LOG(DEBUG) << "MediaTranscoderTests set up";
+ mCallbacks = std::make_shared<TestCallbacks>();
+ ABinderProcess_startThreadPool();
+ }
+
+ void TearDown() override {
+ LOG(DEBUG) << "MediaTranscoderTests tear down";
+ mCallbacks.reset();
+ }
+
+ void deleteFile(const char* path) { unlink(path); }
+
+ float getFileSizeDiffPercent(const char* path1, const char* path2, bool absolute = false) {
+ struct stat s1, s2;
+ EXPECT_EQ(stat(path1, &s1), 0);
+ EXPECT_EQ(stat(path2, &s2), 0);
+
+ int64_t diff = s2.st_size - s1.st_size;
+ if (absolute && diff < 0) diff = -diff;
+
+ return (float)diff * 100.0f / s1.st_size;
+ }
+
+ typedef enum {
+ kRunToCompletion,
+ kCancelAfterProgress,
+ kCancelAfterStart,
+ kPauseAfterProgress,
+ kPauseAfterStart,
+ } TranscodeExecutionControl;
+
+ using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
+ media_status_t transcodeHelper(const char* srcPath, const char* destPath,
+ FormatConfigurationCallback formatCallback,
+ TranscodeExecutionControl executionControl = kRunToCompletion) {
+ auto transcoder = MediaTranscoder::create(mCallbacks);
+ EXPECT_NE(transcoder, nullptr);
+
+ const int srcFd = open(srcPath, O_RDONLY);
+ EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+
+ std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+ EXPECT_GT(trackFormats.size(), 0);
+
+ for (int i = 0; i < trackFormats.size(); ++i) {
+ AMediaFormat* format = formatCallback(trackFormats[i].get());
+ EXPECT_EQ(transcoder->configureTrackFormat(i, format), AMEDIA_OK);
+
+ // Save original video track format for verification.
+ const char* mime = nullptr;
+ AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+ if (strncmp(mime, "video/", 6) == 0) {
+ mSourceVideoFormat = trackFormats[i];
+ }
+
+ if (format != nullptr) {
+ AMediaFormat_delete(format);
+ }
+ }
+ deleteFile(destPath);
+ const int dstFd = open(destPath, kOpenFlags, kFileMode);
+ EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+
+ media_status_t startStatus = transcoder->start();
+ EXPECT_EQ(startStatus, AMEDIA_OK);
+
+ if (startStatus == AMEDIA_OK) {
+ std::shared_ptr<ndk::ScopedAParcel> pausedState;
+
+ switch (executionControl) {
+ case kCancelAfterProgress:
+ mCallbacks->waitForProgressMade();
+ FALLTHROUGH_INTENDED;
+ case kCancelAfterStart:
+ transcoder->cancel();
+ break;
+ case kPauseAfterProgress:
+ mCallbacks->waitForProgressMade();
+ FALLTHROUGH_INTENDED;
+ case kPauseAfterStart:
+ transcoder->pause(&pausedState);
+ break;
+ case kRunToCompletion:
+ default:
+ mCallbacks->waitForTranscodingFinished();
+ break;
+ }
+ }
+ close(srcFd);
+ close(dstFd);
+
+ return mCallbacks->mStatus;
+ }
+
+ void testTranscodeVideo(const char* srcPath, const char* destPath, const char* dstMime,
+ int32_t bitrate = 0) {
+ EXPECT_EQ(transcodeHelper(srcPath, destPath,
+ [dstMime, bitrate](AMediaFormat* sourceFormat) {
+ AMediaFormat* format = nullptr;
+ const char* mime = nullptr;
+ AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME,
+ &mime);
+
+ if (strncmp(mime, "video/", 6) == 0 &&
+ (bitrate > 0 || dstMime != nullptr)) {
+ format = AMediaFormat_new();
+
+ if (bitrate > 0) {
+ AMediaFormat_setInt32(
+ format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+ }
+
+ if (dstMime != nullptr) {
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME,
+ dstMime);
+ }
+ }
+ return format;
+ }),
+ AMEDIA_OK);
+
+ if (dstMime != nullptr) {
+ std::vector<FormatVerifierEntry> extraVerifiers = {
+ {AMEDIAFORMAT_KEY_MIME,
+ [dstMime](const char* key, AMediaFormat* src __unused, AMediaFormat* dst) {
+ const char* mime = nullptr;
+ AMediaFormat_getString(dst, key, &mime);
+ return !strcmp(mime, dstMime);
+ }},
+ };
+ verifyOutputFormat(destPath, &extraVerifiers);
+ } else {
+ verifyOutputFormat(destPath);
+ }
+ }
+
+ void verifyOutputFormat(const char* destPath,
+ const std::vector<FormatVerifierEntry>* extraVerifiers = nullptr) {
+ int dstFd = open(destPath, O_RDONLY);
+ EXPECT_GT(dstFd, 0);
+ ssize_t fileSize = lseek(dstFd, 0, SEEK_END);
+ lseek(dstFd, 0, SEEK_SET);
+
+ std::shared_ptr<MediaSampleReader> sampleReader =
+ MediaSampleReaderNDK::createFromFd(dstFd, 0, fileSize);
+ ASSERT_NE(sampleReader, nullptr);
+
+ std::shared_ptr<AMediaFormat> videoFormat;
+ const size_t trackCount = sampleReader->getTrackCount();
+ for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+ AMediaFormat* trackFormat = sampleReader->getTrackFormat(static_cast<int>(trackIndex));
+ if (trackFormat != nullptr) {
+ const char* mime = nullptr;
+ AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+ if (strncmp(mime, "video/", 6) == 0) {
+ LOG(INFO) << "Track # " << trackIndex << ": "
+ << AMediaFormat_toString(trackFormat);
+ videoFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+ break;
+ }
+ }
+ }
+
+ EXPECT_NE(videoFormat, nullptr);
+ if (videoFormat != nullptr) {
+ LOG(INFO) << "source video format: " << AMediaFormat_toString(mSourceVideoFormat.get());
+ LOG(INFO) << "transcoded video format: " << AMediaFormat_toString(videoFormat.get());
+
+ for (int i = 0; i < (sizeof(kFieldsToPreserve) / sizeof(kFieldsToPreserve[0])); ++i) {
+ EXPECT_TRUE(kFieldsToPreserve[i].equal(kFieldsToPreserve[i].key,
+ mSourceVideoFormat.get(), videoFormat.get()))
+ << "Failed at key " << kFieldsToPreserve[i].key;
+ }
+
+ if (extraVerifiers != nullptr) {
+ for (int i = 0; i < extraVerifiers->size(); ++i) {
+ const FormatVerifierEntry& entry = (*extraVerifiers)[i];
+ EXPECT_TRUE(
+ entry.equal(entry.key, mSourceVideoFormat.get(), videoFormat.get()));
+ }
+ }
+ }
+
+ close(dstFd);
+ }
+
+ std::shared_ptr<TestCallbacks> mCallbacks;
+ std::shared_ptr<AMediaFormat> mSourceVideoFormat;
+};
+
+TEST_F(MediaTranscoderTests, TestPassthrough) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_Passthrough.MP4";
+ testTranscodeVideo(srcPath, destPath, nullptr);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_AvcToAvc_Basic) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_AvcToAvc_Basic.MP4";
+ testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Basic) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Basic.MP4";
+ testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Rotation) {
+ const char* srcPath =
+ "/data/local/tmp/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Rotation.MP4";
+ testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestPreserveBitrate) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_PreserveBitrate.MP4";
+ testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+
+ // Require maximum of 25% difference in file size.
+ // TODO(b/174678336): Find a better test asset to tighten the threshold.
+ EXPECT_LT(getFileSizeDiffPercent(srcPath, destPath, true /* absolute */), 25);
+}
+
+TEST_F(MediaTranscoderTests, TestCustomBitrate) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ const char* destPath1 = "/data/local/tmp/MediaTranscoder_CustomBitrate_2Mbps.MP4";
+ const char* destPath2 = "/data/local/tmp/MediaTranscoder_CustomBitrate_8Mbps.MP4";
+ testTranscodeVideo(srcPath, destPath1, AMEDIA_MIMETYPE_VIDEO_AVC, 2 * 1000 * 1000);
+ mCallbacks = std::make_shared<TestCallbacks>();
+ testTranscodeVideo(srcPath, destPath2, AMEDIA_MIMETYPE_VIDEO_AVC, 8 * 1000 * 1000);
+
+ // The source asset is very short and heavily compressed to begin with, so don't expect the
+ // requested bitrates to be matched exactly. However, the 8 Mbps output should at least be larger.
+ // TODO(b/174678336): Find a better test asset to tighten the threshold.
+ EXPECT_GT(getFileSizeDiffPercent(destPath1, destPath2), 10);
+}
+
+static AMediaFormat* getAVCVideoFormat(AMediaFormat* sourceFormat) {
+ AMediaFormat* format = nullptr;
+ const char* mime = nullptr;
+ AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+ if (strncmp(mime, "video/", 6) == 0) {
+ format = AMediaFormat_new();
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+ }
+
+ return format;
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterProgress) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+ for (int i = 0; i < 20; ++i) {
+ EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterProgress),
+ AMEDIA_OK);
+ EXPECT_FALSE(mCallbacks->mFinished);
+ mCallbacks = std::make_shared<TestCallbacks>();
+ }
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterStart) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+ for (int i = 0; i < 20; ++i) {
+ EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterStart),
+ AMEDIA_OK);
+ EXPECT_FALSE(mCallbacks->mFinished);
+ mCallbacks = std::make_shared<TestCallbacks>();
+ }
+}
+
+TEST_F(MediaTranscoderTests, TestPauseAfterProgress) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_Pause.MP4";
+
+ for (int i = 0; i < 20; ++i) {
+ EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterProgress),
+ AMEDIA_OK);
+ EXPECT_FALSE(mCallbacks->mFinished);
+ mCallbacks = std::make_shared<TestCallbacks>();
+ }
+}
+
+TEST_F(MediaTranscoderTests, TestPauseAfterStart) {
+ const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+ const char* destPath = "/data/local/tmp/MediaTranscoder_Pause.MP4";
+
+ for (int i = 0; i < 20; ++i) {
+ EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterStart),
+ AMEDIA_OK);
+ EXPECT_FALSE(mCallbacks->mFinished);
+ mCallbacks = std::make_shared<TestCallbacks>();
+ }
+}
+
+} // namespace android
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
index 7a92a37..5071efd 100644
--- a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -41,7 +41,7 @@
void initSourceAndExtractor() {
const char* sourcePath =
- "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
mExtractor = AMediaExtractor_new();
ASSERT_NE(mExtractor, nullptr);
@@ -159,27 +159,30 @@
MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mSourceFileSize);
EXPECT_NE(mediaSampleReader, nullptr);
+ EXPECT_EQ(mediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
EXPECT_EQ(transcoder.configure(mediaSampleReader, mTrackIndex, nullptr /* destinationFormat */),
AMEDIA_OK);
ASSERT_TRUE(transcoder.start());
// Pull transcoder's output samples and compare against input checksums.
+ bool eos = false;
uint64_t sampleCount = 0;
- std::shared_ptr<MediaSample> sample;
- while (!transcoder.mOutputQueue.dequeue(&sample)) {
- ASSERT_NE(sample, nullptr);
+ transcoder.setSampleConsumer(
+ [&sampleCount, &sampleChecksums, &eos](const std::shared_ptr<MediaSample>& sample) {
+ ASSERT_NE(sample, nullptr);
+ EXPECT_FALSE(eos);
- if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
- break;
- }
+ if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+ eos = true;
+ } else {
+ SampleID sampleId{sample->buffer, static_cast<ssize_t>(sample->info.size)};
+ EXPECT_TRUE(sampleId == sampleChecksums[sampleCount]);
+ ++sampleCount;
+ }
+ });
- SampleID sampleId{sample->buffer, static_cast<ssize_t>(sample->info.size)};
- EXPECT_TRUE(sampleId == sampleChecksums[sampleCount]);
- ++sampleCount;
- }
-
+ callback->waitUntilFinished();
EXPECT_EQ(sampleCount, sampleChecksums.size());
- EXPECT_TRUE(transcoder.stop());
}
/** Class for testing PassthroughTrackTranscoder's built in buffer pool. */
diff --git a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
index 6b9131c..a782f71 100644
--- a/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
+++ b/media/libmediatranscoding/transcoder/tests/TrackTranscoderTestUtils.h
@@ -15,6 +15,7 @@
*/
#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
#include <condition_variable>
#include <memory>
@@ -29,24 +30,19 @@
class TrackTranscoderTestUtils {
public:
static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
- AMediaFormat* sourceFormat) {
+ AMediaFormat* sourceFormat, bool includeBitrate = true) {
// Default video destination format setup.
static constexpr float kFrameRate = 30.0f;
- static constexpr float kIFrameInterval = 30.0f;
static constexpr int32_t kBitRate = 2 * 1000 * 1000;
- static constexpr int32_t kColorFormatSurface = 0x7f000789;
AMediaFormat* destinationFormat = AMediaFormat_new();
AMediaFormat_copy(destinationFormat, sourceFormat);
AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
- AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
- kIFrameInterval);
- AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
- AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
- kColorFormatSurface);
+ if (includeBitrate) {
+ AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+ }
- return std::shared_ptr<AMediaFormat>(destinationFormat,
- std::bind(AMediaFormat_delete, std::placeholders::_1));
+ return std::shared_ptr<AMediaFormat>(destinationFormat, &AMediaFormat_delete);
}
};
@@ -56,33 +52,82 @@
~TestCallback() = default;
// MediaTrackTranscoderCallback
- void onTrackFinished(MediaTrackTranscoder* transcoder __unused) {
+ void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {
std::unique_lock<std::mutex> lock(mMutex);
- mTranscodingFinished = true;
- mCv.notify_all();
+ mTrackFormatAvailable = true;
+ mTrackFormatAvailableCondition.notify_all();
}
- void onTrackError(MediaTrackTranscoder* transcoder __unused, media_status_t status) {
+ void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
+ std::unique_lock<std::mutex> lock(mMutex);
+ mTranscodingFinished = true;
+ mTranscodingFinishedCondition.notify_all();
+ }
+
+ virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
+ std::unique_lock<std::mutex> lock(mMutex);
+ mTranscodingFinished = true;
+ mTranscodingStopped = true;
+ mTranscodingFinishedCondition.notify_all();
+ }
+
+ void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
std::unique_lock<std::mutex> lock(mMutex);
mTranscodingFinished = true;
mStatus = status;
- mCv.notify_all();
+ mTranscodingFinishedCondition.notify_all();
}
// ~MediaTrackTranscoderCallback
media_status_t waitUntilFinished() {
std::unique_lock<std::mutex> lock(mMutex);
while (!mTranscodingFinished) {
- mCv.wait(lock);
+ mTranscodingFinishedCondition.wait(lock);
}
return mStatus;
}
+ void waitUntilTrackFormatAvailable() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mTrackFormatAvailable) {
+ mTrackFormatAvailableCondition.wait(lock);
+ }
+ }
+
+ bool transcodingWasStopped() const { return mTranscodingFinished && mTranscodingStopped; }
+ bool transcodingFinished() const {
+ return mTranscodingFinished && !mTranscodingStopped && mStatus == AMEDIA_OK;
+ }
+
private:
media_status_t mStatus = AMEDIA_OK;
std::mutex mMutex;
- std::condition_variable mCv;
+ std::condition_variable mTranscodingFinishedCondition;
+ std::condition_variable mTrackFormatAvailableCondition;
bool mTranscodingFinished = false;
+ bool mTranscodingStopped = false;
+ bool mTrackFormatAvailable = false;
+};
+
+class OneShotSemaphore {
+public:
+ void wait() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mSignaled) {
+ mCondition.wait(lock);
+ }
+ }
+
+ void signal() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ mSignaled = true;
+ mCondition.notify_all();
+ }
+
+private:
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mSignaled = false;
};
}; // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
index 3cec1a1..4ede97f 100644
--- a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -23,6 +23,7 @@
#include <fcntl.h>
#include <gtest/gtest.h>
#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkCommon.h>
#include <media/VideoTrackTranscoder.h>
#include <utils/Timers.h>
@@ -42,7 +43,7 @@
void SetUp() override {
LOG(DEBUG) << "VideoTrackTranscoderTests set up";
const char* sourcePath =
- "/data/local/tmp/TranscoderTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+ "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
const int sourceFd = open(sourcePath, O_RDONLY);
ASSERT_GT(sourceFd, 0);
@@ -66,8 +67,7 @@
if (strncmp(mime, "video/", 6) == 0) {
mTrackIndex = trackIndex;
- mSourceFormat = std::shared_ptr<AMediaFormat>(
- trackFormat, std::bind(AMediaFormat_delete, std::placeholders::_1));
+ mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
ASSERT_NE(mSourceFormat, nullptr);
mDestinationFormat =
@@ -92,53 +92,80 @@
std::shared_ptr<AMediaFormat> mDestinationFormat;
};
-TEST_F(VideoTrackTranscoderTests, SampleSanity) {
- LOG(DEBUG) << "Testing SampleSanity";
+TEST_F(VideoTrackTranscoderTests, SampleSoundness) {
+ LOG(DEBUG) << "Testing SampleSoundness";
std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
- VideoTrackTranscoder transcoder{callback};
+ auto transcoder = VideoTrackTranscoder::create(callback);
- EXPECT_EQ(transcoder.configure(mMediaSampleReader, mTrackIndex, mDestinationFormat), AMEDIA_OK);
- ASSERT_TRUE(transcoder.start());
+ EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+ EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+ AMEDIA_OK);
+ ASSERT_TRUE(transcoder->start());
- std::thread sampleConsumerThread{[&transcoder] {
- uint64_t sampleCount = 0;
- std::shared_ptr<MediaSample> sample;
- while (!transcoder.mOutputQueue.dequeue(&sample)) {
- ASSERT_NE(sample, nullptr);
- const uint32_t flags = sample->info.flags;
+ bool eos = false;
+ uint64_t sampleCount = 0;
+ transcoder->setSampleConsumer([&sampleCount, &eos](const std::shared_ptr<MediaSample>& sample) {
+ ASSERT_NE(sample, nullptr);
+ const uint32_t flags = sample->info.flags;
- if (sampleCount == 0) {
- // Expect first sample to be a codec config.
- EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) != 0);
- EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) == 0);
- EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
- EXPECT_TRUE((flags & SAMPLE_FLAG_PARTIAL_FRAME) == 0);
- } else if (sampleCount == 1) {
- // Expect second sample to be a sync sample.
- EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) == 0);
- EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) != 0);
- EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
- }
-
- if (!(flags & SAMPLE_FLAG_END_OF_STREAM)) {
- // Expect a valid buffer unless it is EOS.
- EXPECT_NE(sample->buffer, nullptr);
- EXPECT_NE(sample->bufferId, 0xBAADF00D);
- EXPECT_GT(sample->info.size, 0);
- }
-
- ++sampleCount;
- if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
- break;
- }
- sample.reset();
+ if (sampleCount == 0) {
+ // Expect first sample to be a codec config.
+ EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) != 0);
+ EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) == 0);
+ EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+ EXPECT_TRUE((flags & SAMPLE_FLAG_PARTIAL_FRAME) == 0);
+ } else if (sampleCount == 1) {
+ // Expect second sample to be a sync sample.
+ EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) == 0);
+ EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) != 0);
+ EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
}
- }};
+
+ if (!(flags & SAMPLE_FLAG_END_OF_STREAM)) {
+ // Expect a valid buffer unless it is EOS.
+ EXPECT_NE(sample->buffer, nullptr);
+ EXPECT_NE(sample->bufferId, 0xBAADF00D);
+ EXPECT_GT(sample->info.size, 0);
+ } else {
+ EXPECT_FALSE(eos);
+ eos = true;
+ }
+
+ ++sampleCount;
+ });
EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
- EXPECT_TRUE(transcoder.stop());
+}
- sampleConsumerThread.join();
+TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
+ LOG(DEBUG) << "Testing PreserveBitrate";
+ std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
+ std::shared_ptr<MediaTrackTranscoder> transcoder = VideoTrackTranscoder::create(callback);
+
+ auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
+ mSourceFormat.get(), false /* includeBitrate */);
+ EXPECT_NE(destFormat, nullptr);
+
+ EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+
+ int32_t srcBitrate;
+ EXPECT_EQ(mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &srcBitrate), AMEDIA_OK);
+
+ ASSERT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, destFormat), AMEDIA_OK);
+ ASSERT_TRUE(transcoder->start());
+
+ callback->waitUntilTrackFormatAvailable();
+
+ auto outputFormat = transcoder->getOutputFormat();
+ ASSERT_NE(outputFormat, nullptr);
+
+ transcoder->stop();
+ EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+
+ int32_t outBitrate;
+ EXPECT_TRUE(AMediaFormat_getInt32(outputFormat.get(), AMEDIAFORMAT_KEY_BIT_RATE, &outBitrate));
+
+ EXPECT_EQ(srcBitrate, outBitrate);
}
// VideoTrackTranscoder needs a valid destination format.
@@ -147,11 +174,49 @@
std::shared_ptr<TestCallback> callback = std::make_shared<TestCallback>();
std::shared_ptr<AMediaFormat> nullFormat;
- VideoTrackTranscoder transcoder{callback};
- EXPECT_EQ(transcoder.configure(mMediaSampleReader, 0 /* trackIndex */, nullFormat),
+ auto transcoder = VideoTrackTranscoder::create(callback);
+ EXPECT_EQ(transcoder->configure(mMediaSampleReader, 0 /* trackIndex */, nullFormat),
AMEDIA_ERROR_INVALID_PARAMETER);
}
+TEST_F(VideoTrackTranscoderTests, LingeringEncoder) {
+ OneShotSemaphore semaphore;
+ auto callback = std::make_shared<TestCallback>();
+ auto transcoder = VideoTrackTranscoder::create(callback);
+
+ EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+ EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+ AMEDIA_OK);
+ ASSERT_TRUE(transcoder->start());
+
+ std::vector<std::shared_ptr<MediaSample>> samples;
+ transcoder->setSampleConsumer(
+ [&samples, &semaphore](const std::shared_ptr<MediaSample>& sample) {
+ if (samples.size() >= 4) return;
+
+ ASSERT_NE(sample, nullptr);
+ samples.push_back(sample);
+
+ if (samples.size() == 4 || sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+ semaphore.signal();
+ }
+ });
+
+ // Wait for the encoder to output samples before stopping and releasing the transcoder.
+ semaphore.wait();
+
+ transcoder->stop();
+ EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+ transcoder.reset();
+
+ // Return buffers to the codec so that it can resume processing, but keep one buffer to avoid
+ // the codec being released.
+ samples.resize(1);
+
+ // Wait for async codec events.
+ std::this_thread::sleep_for(std::chrono::seconds(1));
+}
+
} // namespace android
int main(int argc, char** argv) {
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
index 61a2252..792c541 100755
--- a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
@@ -18,21 +18,29 @@
adb root && adb wait-for-device remount && adb sync
fi
-adb push assets /data/local/tmp/TranscoderTestAssets
+
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/push_assets.sh
echo "========================================"
echo "testing MediaSampleReaderNDK"
-adb shell /data/nativetest64/MediaSampleReaderNDKTests/MediaSampleReaderNDKTests
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleReaderNDKTests/MediaSampleReaderNDKTests
echo "testing MediaSampleQueue"
-adb shell /data/nativetest64/MediaSampleQueueTests/MediaSampleQueueTests
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleQueueTests/MediaSampleQueueTests
echo "testing MediaTrackTranscoder"
-adb shell /data/nativetest64/MediaTrackTranscoderTests/MediaTrackTranscoderTests
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTrackTranscoderTests/MediaTrackTranscoderTests
echo "testing VideoTrackTranscoder"
-adb shell /data/nativetest64/VideoTrackTranscoderTests/VideoTrackTranscoderTests
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/VideoTrackTranscoderTests/VideoTrackTranscoderTests
echo "testing PassthroughTrackTranscoder"
-adb shell /data/nativetest64/PassthroughTrackTranscoderTests/PassthroughTrackTranscoderTests
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/PassthroughTrackTranscoderTests/PassthroughTrackTranscoderTests
+
+echo "testing MediaSampleWriter"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleWriterTests/MediaSampleWriterTests
+
+echo "testing MediaTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTranscoderTests/MediaTranscoderTests
diff --git a/media/libnbaio/include/media/nbaio/Pipe.h b/media/libnbaio/include/media/nbaio/Pipe.h
index 0431976..54dc08f 100644
--- a/media/libnbaio/include/media/nbaio/Pipe.h
+++ b/media/libnbaio/include/media/nbaio/Pipe.h
@@ -23,7 +23,7 @@
namespace android {
// Pipe is multi-thread safe for readers (see PipeReader), but safe for only a single writer thread.
-// It cannot UNDERRUN on write, unless we allow designation of a master reader that provides the
+// It cannot UNDERRUN on write, unless we allow designation of a primary reader that provides the
// time-base. Readers can be added and removed dynamically, and it's OK to have no readers.
class Pipe : public NBAIO_Sink {
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
new file mode 100644
index 0000000..62784ed
--- /dev/null
+++ b/media/libshmem/Android.bp
@@ -0,0 +1,70 @@
+aidl_interface {
+ name: "shared-file-region-aidl",
+ unstable: true,
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/android/media/SharedFileRegion.aidl",
+ ],
+}
+
+cc_library {
+ name: "libshmemcompat",
+ export_include_dirs: ["include"],
+ srcs: ["ShmemCompat.cpp"],
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ shared_libs: [
+ "libbinder",
+ "libshmemutil",
+ "libutils",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ export_shared_lib_headers: [
+ "libbinder",
+ "libutils",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+cc_library {
+ name: "libshmemutil",
+ export_include_dirs: ["include"],
+ srcs: ["ShmemUtil.cpp"],
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ shared_libs: [
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ export_shared_lib_headers: [
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+cc_test {
+ name: "shmemTest",
+ srcs: ["ShmemTest.cpp"],
+ shared_libs: [
+ "libbinder",
+ "libcutils",
+ "libshmemcompat",
+ "libshmemutil",
+ "libutils",
+ "shared-file-region-aidl-unstable-cpp",
+ ],
+ test_suites: ["device-tests"],
+}
diff --git a/media/libshmem/OWNERS b/media/libshmem/OWNERS
new file mode 100644
index 0000000..29fa2f5
--- /dev/null
+++ b/media/libshmem/OWNERS
@@ -0,0 +1,3 @@
+ytai@google.com
+mnaganov@google.com
+elaurent@google.com
diff --git a/media/libshmem/README.md b/media/libshmem/README.md
new file mode 100644
index 0000000..c25fa7f
--- /dev/null
+++ b/media/libshmem/README.md
@@ -0,0 +1,6 @@
+# libshmem
+
+This library provides facilities for sharing memory across processes over (stable) AIDL. The main
+feature is the definition of the `android.media.SharedFileRegion` AIDL type, which represents a block of
+memory that can be shared between processes. In addition, a few utilities are provided to facilitate
+the use of shared memory and to integrate with legacy code that uses older facilities.
\ No newline at end of file
diff --git a/media/libshmem/ShmemCompat.cpp b/media/libshmem/ShmemCompat.cpp
new file mode 100644
index 0000000..246cb24
--- /dev/null
+++ b/media/libshmem/ShmemCompat.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemCompat.h"
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+ sp<IMemory>* result) {
+ assert(result != nullptr);
+
+ if (!validateSharedFileRegion(shmem)) {
+ return false;
+ }
+
+ // Heap offset and size must be page aligned.
+ const size_t pageSize = getpagesize();
+ const size_t pageMask = ~(pageSize - 1);
+
+ // OK if this wraps.
+ const uint64_t endOffset = static_cast<uint64_t>(shmem.offset) +
+ static_cast<uint64_t>(shmem.size);
+
+ // Round down to page boundary.
+ const uint64_t heapStartOffset = shmem.offset & pageMask;
+ // Round up to page boundary.
+ const uint64_t heapEndOffset = (endOffset + pageSize - 1) & pageMask;
+ const uint64_t heapSize = heapEndOffset - heapStartOffset;
+
+ if (heapStartOffset > std::numeric_limits<size_t>::max() ||
+ heapSize > std::numeric_limits<size_t>::max()) {
+ return false;
+ }
+
+ uint32_t flags = !shmem.writeable ? IMemoryHeap::READ_ONLY : 0;
+
+ const sp<MemoryHeapBase> heap =
+ new MemoryHeapBase(shmem.fd.get(), heapSize, flags, heapStartOffset);
+ *result = sp<MemoryBase>::make(heap,
+ shmem.offset - heapStartOffset,
+ shmem.size);
+ return true;
+}
+
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ SharedFileRegion* result) {
+ assert(mem != nullptr);
+ assert(result != nullptr);
+
+ *result = SharedFileRegion();
+
+ ssize_t offset;
+ size_t size;
+
+ sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+ if (size > 0) {
+ if (heap == nullptr) {
+ return false;
+ }
+ // Make sure the offset and size do not overflow from int64 boundaries.
+ if (size > std::numeric_limits<int64_t>::max() ||
+ offset > std::numeric_limits<int64_t>::max() ||
+ heap->getOffset() > std::numeric_limits<int64_t>::max() ||
+ static_cast<uint64_t>(heap->getOffset()) +
+ static_cast<uint64_t>(offset)
+ > std::numeric_limits<int64_t>::max()) {
+ return false;
+ }
+
+ const int fd = fcntl(heap->getHeapID(), F_DUPFD_CLOEXEC, 0);
+ if (fd < 0) {
+ return false;
+ }
+ result->fd.reset(base::unique_fd(fd));
+ result->size = size;
+ result->offset = heap->getOffset() + offset;
+ result->writeable = (heap->getFlags() & IMemoryHeap::READ_ONLY) == 0;
+ }
+ return true;
+}
+
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+ sp<IMemory>* result) {
+ assert(result != nullptr);
+
+ if (!shmem.has_value()) {
+ result->clear();
+ return true;
+ }
+
+ return convertSharedFileRegionToIMemory(shmem.value(), result);
+}
+
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ std::optional<SharedFileRegion>* result) {
+ assert(result != nullptr);
+
+ if (mem == nullptr) {
+ result->reset();
+ return true;
+ }
+
+ result->emplace();
+ return convertIMemoryToSharedFileRegion(mem, &result->value());
+}
+
+} // namespace media
+} // namespace android
diff --git a/media/libshmem/ShmemTest.cpp b/media/libshmem/ShmemTest.cpp
new file mode 100644
index 0000000..874f34c
--- /dev/null
+++ b/media/libshmem/ShmemTest.cpp
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <gtest/gtest.h>
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "cutils/ashmem.h"
+#include "media/ShmemCompat.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+namespace {
+
+// Creates a SharedFileRegion instance.
+SharedFileRegion makeSharedFileRegion(int64_t offset, int64_t size) {
+ SharedFileRegion shmem;
+ shmem.offset = offset;
+ shmem.size = size;
+ int fd = ashmem_create_region("", size + offset);
+ assert(fd >= 0);
+ shmem.fd = os::ParcelFileDescriptor(base::unique_fd(fd));
+ return shmem;
+}
+
+// Creates a SharedFileRegion instance with an invalid FD.
+SharedFileRegion makeInvalidSharedFileRegion(int64_t offset, int64_t size) {
+ SharedFileRegion shmem;
+ shmem.offset = offset;
+ shmem.size = size;
+ return shmem;
+}
+
+sp<IMemory> makeIMemory(const std::vector<uint8_t>& content, bool writeable = true) {
+ constexpr size_t kOffset = 19;
+
+ sp<MemoryHeapBase> heap = new MemoryHeapBase(content.size(),
+ !writeable ? IMemoryHeap::READ_ONLY : 0);
+ sp<IMemory> result = sp<MemoryBase>::make(heap, kOffset, content.size());
+ memcpy(result->unsecurePointer(), content.data(), content.size());
+ return result;
+}
+
+TEST(ShmemTest, Validate) {
+ EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(0, 0)));
+ EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(1, 2)));
+ EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(-1, 2)));
+ EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(2, -1)));
+ EXPECT_FALSE(validateSharedFileRegion(makeInvalidSharedFileRegion(1, 2)));
+}
+
+TEST(ShmemTest, Conversion) {
+ sp<IMemory> reconstructed;
+ {
+ SharedFileRegion shmem;
+ sp<IMemory> imem = makeIMemory({6, 5, 3});
+ ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+ ASSERT_EQ(3, shmem.size);
+ ASSERT_GE(shmem.fd.get(), 0);
+ ASSERT_TRUE(shmem.writeable);
+ ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+ }
+ ASSERT_EQ(3, reconstructed->size());
+ ASSERT_EQ(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
+ const uint8_t* p =
+ reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+ EXPECT_EQ(6, p[0]);
+ EXPECT_EQ(5, p[1]);
+ EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, ConversionReadOnly) {
+ sp<IMemory> reconstructed;
+ {
+ SharedFileRegion shmem;
+ sp<IMemory> imem = makeIMemory({6, 5, 3}, false);
+ ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+ ASSERT_EQ(3, shmem.size);
+ ASSERT_GE(shmem.fd.get(), 0);
+ ASSERT_FALSE(shmem.writeable);
+ ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+ }
+ ASSERT_EQ(3, reconstructed->size());
+ ASSERT_NE(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
+ const uint8_t* p =
+ reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+ EXPECT_EQ(6, p[0]);
+ EXPECT_EQ(5, p[1]);
+ EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, NullConversion) {
+ sp<IMemory> reconstructed;
+ {
+ std::optional<SharedFileRegion> shmem;
+ sp<IMemory> imem;
+ ASSERT_TRUE(convertNullableIMemoryToSharedFileRegion(imem, &shmem));
+ ASSERT_FALSE(shmem.has_value());
+ ASSERT_TRUE(convertNullableSharedFileRegionToIMemory(shmem, &reconstructed));
+ }
+ ASSERT_EQ(nullptr, reconstructed);
+}
+
+} // namespace
+} // namespace media
+} // namespace android
diff --git a/media/libshmem/ShmemUtil.cpp b/media/libshmem/ShmemUtil.cpp
new file mode 100644
index 0000000..e075346
--- /dev/null
+++ b/media/libshmem/ShmemUtil.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool validateSharedFileRegion(const SharedFileRegion& shmem) {
+ // FD must be valid.
+ if (shmem.fd.get() < 0) {
+ return false;
+ }
+
+ // Size and offset must be non-negative.
+ if (shmem.size < 0 || shmem.offset < 0) {
+ return false;
+ }
+
+ uint64_t size = shmem.size;
+ uint64_t offset = shmem.offset;
+
+ // Must not wrap.
+ if (offset > offset + size) {
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace media
+} // namespace android
diff --git a/media/libshmem/aidl/android/media/SharedFileRegion.aidl b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
new file mode 100644
index 0000000..199b647
--- /dev/null
+++ b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A shared file region.
+ *
+ * This type contains the required information to share a region of a file between processes over
+ * AIDL.
+ * An instance of this type represents a valid FD. For representing a null SharedFileRegion, use a
+ * @nullable SharedFileRegion.
+ * Primarily, this is intended for shared memory blocks.
+ *
+ * @hide
+ */
+parcelable SharedFileRegion {
+ /** File descriptor of the region. Must be valid. */
+ ParcelFileDescriptor fd;
+ /** Offset, in bytes within the file of the start of the region. Must be non-negative. */
+ long offset;
+ /** Size, in bytes of the memory region. Must be non-negative. */
+ long size;
+ /** Whether the region is writeable. */
+ boolean writeable;
+}
diff --git a/media/libshmem/include/media/ShmemCompat.h b/media/libshmem/include/media/ShmemCompat.h
new file mode 100644
index 0000000..ba59f25
--- /dev/null
+++ b/media/libshmem/include/media/ShmemCompat.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for interfacing between legacy code that is using IMemory and new
+// code that is using android.os.SharedFileRegion.
+
+#include <optional>
+
+#include "android/media/SharedFileRegion.h"
+#include "binder/IMemory.h"
+#include "utils/StrongPointer.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Converts a SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+ sp<IMemory>* result);
+
+/**
+ * Converts a nullable SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null. Pointee assigned to null,
+ * if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+ sp<IMemory>* result);
+
+/**
+ * Converts an IMemory instance to SharedFileRegion.
+ * @param mem The IMemory instance. May not be null.
+ * @param result The resulting SharedFileRegion instance.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ SharedFileRegion* result);
+
+/**
+ * Converts a nullable IMemory instance to a nullable SharedFileRegion.
+ * @param mem The IMemory instance. May be null.
+ * @param result The resulting SharedFileRegion instance. May not be null. Assigned to empty,
+ * if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ std::optional<SharedFileRegion>* result);
+
+} // namespace media
+} // namespace android
diff --git a/media/libshmem/include/media/ShmemUtil.h b/media/libshmem/include/media/ShmemUtil.h
new file mode 100644
index 0000000..3a7a5a5
--- /dev/null
+++ b/media/libshmem/include/media/ShmemUtil.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for working with android.os.SharedFileRegion.
+
+#include "android/media/SharedFileRegion.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Checks whether a SharedFileRegion instance is valid (all the fields have sane values).
+ */
+bool validateSharedFileRegion(const SharedFileRegion& shmem);
+
+} // namespace media
+} // namespace android
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6941198..8f1da0d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -279,6 +279,13 @@
void postFillThisBuffer(BufferInfo *info);
+ void maybePostExtraOutputMetadataBufferRequest() {
+ if (!mPendingExtraOutputMetadataBufferRequest) {
+ (new AMessage(kWhatSubmitExtraOutputMetadataBuffer, mCodec))->post();
+ mPendingExtraOutputMetadataBufferRequest = true;
+ }
+ }
+
private:
// Handles an OMX message. Returns true iff message was handled.
bool onOMXMessage(const sp<AMessage> &msg);
@@ -302,6 +309,8 @@
void getMoreInputDataIfPossible();
+ bool mPendingExtraOutputMetadataBufferRequest;
+
DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};
@@ -556,6 +565,7 @@
mExplicitShutdown(false),
mIsLegacyVP9Decoder(false),
mIsStreamCorruptFree(false),
+ mIsLowLatency(false),
mEncoderDelay(0),
mEncoderPadding(0),
mRotationDegrees(0),
@@ -888,7 +898,7 @@
sp<DataConverter> converter = mConverter[portIndex];
if (converter != NULL) {
- // here we assume sane conversions of max 4:1, so result fits in int32
+ // here we assume conversions of max 4:1, so result fits in int32
if (portIndex == kPortIndexInput) {
conversionBufferSize = converter->sourceSize(bufSize);
} else {
@@ -2238,6 +2248,12 @@
}
err = setupG711Codec(encoder, sampleRate, numChannels);
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_OPUS)) {
+ int32_t numChannels = 1, sampleRate = 48000;
+ if (msg->findInt32("channel-count", &numChannels) &&
+ msg->findInt32("sample-rate", &sampleRate)) {
+ err = setupOpusCodec(encoder, sampleRate, numChannels);
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
// numChannels needs to be set to properly communicate PCM values.
int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
@@ -2416,6 +2432,7 @@
if (err != OK) {
ALOGE("decoder can not set low-latency to %d (err %d)", lowLatency, err);
}
+ mIsLowLatency = (lowLatency && err == OK);
return err;
}
@@ -2609,15 +2626,15 @@
unsigned int numLayers = 0;
unsigned int numBLayers = 0;
int tags;
- char dummy;
+ char tmp;
OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
OMX_VIDEO_AndroidTemporalLayeringPatternNone;
- if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+ if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
&& numLayers > 0) {
pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
} else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
- &numLayers, &dummy, &numBLayers, &dummy))
- && (tags == 1 || (tags == 3 && dummy == '+'))
+ &numLayers, &tmp, &numBLayers, &tmp))
+ && (tags == 1 || (tags == 3 && tmp == '+'))
&& numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
numLayers += numBLayers;
pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
@@ -3117,6 +3134,26 @@
kPortIndexInput, sampleRate, numChannels);
}
+status_t ACodec::setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels) {
+ if (encoder) {
+ return INVALID_OPERATION;
+ }
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+ status_t err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+ if (err != OK) {
+ ALOGE("setupOpusCodec(): Error %d getting OMX_IndexParamAudioAndroidOpus parameter", err);
+ return err;
+ }
+ def.nSampleRate = sampleRate;
+ def.nChannels = numChannels;
+ err = mOMXNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+ return err;
+}
+
status_t ACodec::setupFlacCodec(
bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
AudioEncoding encoding) {
@@ -4787,15 +4824,15 @@
unsigned int numLayers = 0;
unsigned int numBLayers = 0;
int tags;
- char dummy;
- if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+ char tmp;
+ if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
&& numLayers > 0) {
pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
tsLayers = numLayers;
} else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
- &numLayers, &dummy, &numBLayers, &dummy))
- && (tags == 1 || (tags == 3 && dummy == '+'))
+ &numLayers, &tmp, &numBLayers, &tmp))
+ && (tags == 1 || (tags == 3 && tmp == '+'))
&& numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
// VPX does not have a concept of B-frames, so just count all layers
@@ -5313,6 +5350,34 @@
if (mChannelMaskPresent) {
notify->setInt32("channel-mask", mChannelMask);
}
+
+ if (!mIsEncoder && portIndex == kPortIndexOutput) {
+ AString mime;
+ if (mConfigFormat->findString("mime", &mime)
+ && !strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime.c_str())) {
+
+ OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE presentation;
+ InitOMXParams(&presentation);
+ err = mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+ &presentation, sizeof(presentation));
+ if (err != OK) {
+ return err;
+ }
+ notify->setInt32("aac-encoded-target-level",
+ presentation.nEncodedTargetLevel);
+ notify->setInt32("aac-drc-cut-level", presentation.nDrcCut);
+ notify->setInt32("aac-drc-boost-level", presentation.nDrcBoost);
+ notify->setInt32("aac-drc-heavy-compression",
+ presentation.nHeavyCompression);
+ notify->setInt32("aac-target-ref-level",
+ presentation.nTargetReferenceLevel);
+ notify->setInt32("aac-drc-effect-type", presentation.nDrcEffectType);
+ notify->setInt32("aac-drc-album-mode", presentation.nDrcAlbumMode);
+ notify->setInt32("aac-drc-output-loudness",
+ presentation.nDrcOutputLoudness);
+ }
+ }
break;
}
@@ -5752,7 +5817,8 @@
ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
: AState(parentState),
- mCodec(codec) {
+ mCodec(codec),
+ mPendingExtraOutputMetadataBufferRequest(false) {
}
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
@@ -5796,17 +5862,19 @@
case ACodec::kWhatSetSurface:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
- response->postReply(replyID);
+ sp<AReplyToken> replyID;
+ if (msg->senderAwaitsResponse(&replyID)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ } else if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, err);
+ }
break;
}
@@ -5853,6 +5921,21 @@
break;
}
+ case kWhatSubmitExtraOutputMetadataBuffer: {
+ mPendingExtraOutputMetadataBufferRequest = false;
+ if (getPortMode(kPortIndexOutput) == RESUBMIT_BUFFERS && mCodec->mIsLowLatency) {
+ // Decoders often need more than one output buffer to be
+ // submitted before processing a single input buffer.
+ // For low latency codecs, we don't want to wait for more input
+ // to be queued to get those output buffers submitted.
+ if (mCodec->submitOutputMetadataBuffer() == OK
+ && mCodec->mMetadataBuffersToSubmit > 0) {
+ maybePostExtraOutputMetadataBufferRequest();
+ }
+ }
+ break;
+ }
+
default:
return false;
}
@@ -6222,7 +6305,12 @@
(outputMode == FREE_BUFFERS ? "FREE" :
outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
if (outputMode == RESUBMIT_BUFFERS) {
- mCodec->submitOutputMetadataBuffer();
+ status_t err = mCodec->submitOutputMetadataBuffer();
+ if (mCodec->mIsLowLatency
+ && err == OK
+ && mCodec->mMetadataBuffersToSubmit > 0) {
+ maybePostExtraOutputMetadataBufferRequest();
+ }
}
}
info->checkReadFence("onInputBufferFilled");
@@ -6990,10 +7078,9 @@
return err;
}
- using hardware::media::omx::V1_0::utils::TWOmxNode;
err = statusFromBinderStatus(
mCodec->mGraphicBufferSource->configure(
- new TWOmxNode(mCodec->mOMXNode),
+ mCodec->mOMXNode->getHalInterface<IOmxNode>(),
static_cast<hardware::graphics::common::V1_0::Dataspace>(dataSpace)));
if (err != OK) {
ALOGE("[%s] Unable to configure for node (err %d)",
@@ -7368,6 +7455,9 @@
break;
}
}
+ if (mCodec->mIsLowLatency) {
+ maybePostExtraOutputMetadataBufferRequest();
+ }
// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
@@ -7674,8 +7764,8 @@
mInputFormat->setInt64("android._stop-time-offset-us", stopTimeOffsetUs);
}
- int32_t dummy;
- if (params->findInt32("request-sync", &dummy)) {
+ int32_t tmp;
+ if (params->findInt32("request-sync", &tmp)) {
status_t err = requestIDRFrame();
if (err != OK) {
@@ -7747,6 +7837,58 @@
// Ignore errors as failure is expected for codecs that aren't video encoders.
(void)configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
+ AString mime;
+ if (!mIsEncoder
+ && (mConfigFormat->findString("mime", &mime))
+ && !strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime.c_str())) {
+ OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE presentation;
+ InitOMXParams(&presentation);
+ mOMXNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+ &presentation, sizeof(presentation));
+ int32_t value32 = 0;
+ bool updated = false;
+ if (params->findInt32("aac-pcm-limiter-enable", &value32)) {
+ presentation.nPCMLimiterEnable = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-encoded-target-level", &value32)) {
+ presentation.nEncodedTargetLevel = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-cut-level", &value32)) {
+ presentation.nDrcCut = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-boost-level", &value32)) {
+ presentation.nDrcBoost = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-heavy-compression", &value32)) {
+ presentation.nHeavyCompression = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-target-ref-level", &value32)) {
+ presentation.nTargetReferenceLevel = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-effect-type", &value32)) {
+ presentation.nDrcEffectType = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-album-mode", &value32)) {
+ presentation.nDrcAlbumMode = value32;
+ updated = true;
+ }
+ if (params->findInt32("aac-drc-output-loudness", &value32)) {
+ presentation.nDrcOutputLoudness = value32;
+ updated = true;
+ }
+ if (updated) {
+ mOMXNode->setParameter((OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+ &presentation, sizeof(presentation));
+ }
+ }
return setVendorParameters(params);
}
@@ -8250,17 +8392,38 @@
FALLTHROUGH_INTENDED;
}
case kWhatResume:
- case kWhatSetParameters:
{
- if (msg->what() == kWhatResume) {
- ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
- }
+ ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
mCodec->deferMessage(msg);
handled = true;
break;
}
+ case kWhatSetParameters:
+ {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", ¶ms));
+
+ sp<ABuffer> hdr10PlusInfo;
+ if (params->findBuffer("hdr10-plus-info", &hdr10PlusInfo)) {
+ if (hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+ (void)mCodec->setHdr10PlusInfo(hdr10PlusInfo);
+ }
+ params->removeEntryAt(params->findEntryByName("hdr10-plus-info"));
+
+ if (params->countEntries() == 0) {
+ msg->removeEntryAt(msg->findEntryByName("params"));
+ }
+ }
+
+ if (msg->countEntries() > 0) {
+ mCodec->deferMessage(msg);
+ }
+ handled = true;
+ break;
+ }
+
case kWhatForceStateTransition:
{
int32_t generation = 0;
@@ -8271,6 +8434,23 @@
break;
}
+ case kWhatSetSurface:
+ {
+ ALOGV("[%s] Deferring setSurface", mCodec->mComponentName.c_str());
+
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ mCodec->deferMessage(msg);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", OK);
+ response->postReply(replyID);
+
+ handled = true;
+ break;
+ }
+
case kWhatCheckIfStuck:
{
int32_t generation = 0;
@@ -8371,6 +8551,15 @@
return false;
}
+ case OMX_EventConfigUpdate:
+ {
+ CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+ mCodec->onConfigUpdate((OMX_INDEXTYPE)data2);
+
+ return true;
+ }
+
default:
return BaseState::onOMXEvent(event, data1, data2);
}
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index fa13f32..88b15ae 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -22,11 +22,14 @@
#include <C2Buffer.h>
+#include <Codec2BufferUtils.h>
+
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/drm/1.0/types.h>
#include <binder/MemoryDealer.h>
#include <hidlmemory/FrameworkUtils.h>
#include <media/openmax/OMX_Core.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaCodec.h>
@@ -91,15 +94,27 @@
}
status_t ACodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
- if (mDealer != nullptr) {
- return -ENOSYS;
- }
std::shared_ptr<const std::vector<const BufferInfo>> array(
std::atomic_load(&mInputBuffers));
BufferInfoIterator it = findClientBuffer(array, buffer);
if (it == array->end()) {
return -ENOENT;
}
+ if (it->mClientBuffer != it->mCodecBuffer) {
+ // Copy metadata from client to codec buffer.
+ it->mCodecBuffer->meta()->clear();
+ int64_t timeUs;
+ CHECK(it->mClientBuffer->meta()->findInt64("timeUs", &timeUs));
+ it->mCodecBuffer->meta()->setInt64("timeUs", timeUs);
+ int32_t eos;
+ if (it->mClientBuffer->meta()->findInt32("eos", &eos)) {
+ it->mCodecBuffer->meta()->setInt32("eos", eos);
+ }
+ int32_t csd;
+ if (it->mClientBuffer->meta()->findInt32("csd", &csd)) {
+ it->mCodecBuffer->meta()->setInt32("csd", csd);
+ }
+ }
ALOGV("queueInputBuffer #%d", it->mBufferId);
sp<AMessage> msg = mInputBufferFilled->dup();
msg->setObject("buffer", it->mCodecBuffer);
@@ -267,16 +282,30 @@
}
C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
C2ReadView view{block.map().get()};
- if (view.capacity() > buffer->capacity()) {
- return -ENOSYS;
- }
- memcpy(buffer->base(), view.data(), view.capacity());
- buffer->setRange(0, view.capacity());
+ size_t copyLength = std::min(size_t(view.capacity()), buffer->capacity());
+ ALOGV_IF(view.capacity() > buffer->capacity(),
+ "view.capacity() = %zu, buffer->capacity() = %zu",
+ view.capacity(), buffer->capacity());
+ memcpy(buffer->base(), view.data(), copyLength);
+ buffer->setRange(0, copyLength);
break;
}
case C2BufferData::GRAPHIC: {
- // TODO
- return -ENOSYS;
+ sp<ABuffer> imageData;
+ if (!buffer->format()->findBuffer("image-data", &imageData)) {
+ return -ENOSYS;
+ }
+ if (c2Buffer->data().graphicBlocks().size() != 1u) {
+ return -ENOSYS;
+ }
+ C2ConstGraphicBlock block{c2Buffer->data().graphicBlocks().front()};
+ const C2GraphicView view{block.map().get()};
+ status_t err = ImageCopy(
+ buffer->base(), (const MediaImage2 *)(imageData->base()), view);
+ if (err != OK) {
+ return err;
+ }
+ break;
}
case C2BufferData::LINEAR_CHUNKS: [[fallthrough]];
case C2BufferData::GRAPHIC_CHUNKS: [[fallthrough]];
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 2b3bfbf..16977d7 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -2,6 +2,12 @@
name: "libstagefright_headers",
export_include_dirs: ["include"],
vendor_available: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ min_sdk_version: "29",
host_supported: true,
target: {
darwin: {
@@ -12,6 +18,11 @@
cc_library_static {
name: "libstagefright_esds",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ min_sdk_version: "29",
srcs: ["ESDS.cpp"],
@@ -29,10 +40,21 @@
"libstagefright_foundation",
"libutils"
],
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
cc_library_static {
name: "libstagefright_metadatautils",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ min_sdk_version: "29",
srcs: ["MetaDataUtils.cpp"],
@@ -48,9 +70,18 @@
},
header_libs: [
+ "libaudioclient_headers",
"libstagefright_foundation_headers",
+ "media_ndk_headers",
],
- shared_libs: ["libmediandk"],
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
export_include_dirs: ["include"],
}
@@ -100,6 +131,11 @@
cc_library_static {
name: "libstagefright_mpeg2extractor",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ min_sdk_version: "29",
srcs: [
"Utils.cpp",
@@ -117,8 +153,10 @@
header_libs: [
"libaudioclient_headers",
- "libmedia_headers",
+ "libbase_headers",
+ "libmedia_datasource_headers",
"media_ndk_headers",
+ "media_plugin_headers",
],
cflags: [
@@ -135,6 +173,18 @@
"signed-integer-overflow",
],
},
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ linux: {
+ cflags: [
+ "-DDISABLE_AUDIO_SYSTEM_OFFLOAD",
+ ],
+ }
+ },
}
cc_library_shared {
@@ -161,7 +211,7 @@
],
static_libs: [
- "librenderengine",
+ "librenderfright",
],
export_include_dirs: [
@@ -258,6 +308,7 @@
"libutils",
"libmedia_helper",
"libsfplugin_ccodec",
+ "libsfplugin_ccodec_utils",
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx_utils",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..bcf418a 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -46,88 +46,6 @@
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
-struct CameraSourceListener : public CameraListener {
- explicit CameraSourceListener(const sp<CameraSource> &source);
-
- virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
- virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
- camera_frame_metadata_t *metadata);
-
- virtual void postDataTimestamp(
- nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
- virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
-
- virtual void postRecordingFrameHandleTimestampBatch(
- const std::vector<nsecs_t>& timestamps,
- const std::vector<native_handle_t*>& handles);
-
-protected:
- virtual ~CameraSourceListener();
-
-private:
- wp<CameraSource> mSource;
-
- CameraSourceListener(const CameraSourceListener &);
- CameraSourceListener &operator=(const CameraSourceListener &);
-};
-
-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
- : mSource(source) {
-}
-
-CameraSourceListener::~CameraSourceListener() {
-}
-
-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
- UNUSED_UNLESS_VERBOSE(msgType);
- UNUSED_UNLESS_VERBOSE(ext1);
- UNUSED_UNLESS_VERBOSE(ext2);
- ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
-}
-
-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
- camera_frame_metadata_t * /* metadata */) {
- ALOGV("postData(%d, ptr:%p, size:%zu)",
- msgType, dataPtr->unsecurePointer(), dataPtr->size());
-
- sp<CameraSource> source = mSource.promote();
- if (source.get() != NULL) {
- source->dataCallback(msgType, dataPtr);
- }
-}
-
-void CameraSourceListener::postDataTimestamp(
- nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-
- sp<CameraSource> source = mSource.promote();
- if (source.get() != NULL) {
- source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
- }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
- native_handle_t* handle) {
- sp<CameraSource> source = mSource.promote();
- if (source.get() != nullptr) {
- source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
- }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
- const std::vector<nsecs_t>& timestamps,
- const std::vector<native_handle_t*>& handles) {
- sp<CameraSource> source = mSource.promote();
- if (source.get() != nullptr) {
- int n = timestamps.size();
- std::vector<nsecs_t> modifiedTimestamps(n);
- for (int i = 0; i < n; i++) {
- modifiedTimestamps[i] = timestamps[i] / 1000;
- }
- source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
- }
-}
-
static int32_t getColorFormat(const char* colorFormat) {
if (!colorFormat) {
ALOGE("Invalid color format");
@@ -169,16 +87,6 @@
return -1;
}
-CameraSource *CameraSource::Create(const String16 &clientName) {
- Size size;
- size.width = -1;
- size.height = -1;
-
- sp<hardware::ICamera> camera;
- return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
- Camera::USE_CALLING_PID, size, -1, NULL, false);
-}
-
// static
CameraSource *CameraSource::CreateFromCamera(
const sp<hardware::ICamera>& camera,
@@ -189,12 +97,10 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface,
- bool storeMetaDataInVideoBuffers) {
+ const sp<IGraphicBufferProducer>& surface) {
CameraSource *source = new CameraSource(camera, proxy, cameraId,
- clientName, clientUid, clientPid, videoSize, frameRate, surface,
- storeMetaDataInVideoBuffers);
+ clientName, clientUid, clientPid, videoSize, frameRate, surface);
return source;
}
@@ -207,8 +113,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface,
- bool storeMetaDataInVideoBuffers)
+ const sp<IGraphicBufferProducer>& surface)
: mCameraFlags(0),
mNumInputBuffers(0),
mVideoFrameRate(-1),
@@ -231,8 +136,7 @@
mInitCheck = init(camera, proxy, cameraId,
clientName, clientUid, clientPid,
- videoSize, frameRate,
- storeMetaDataInVideoBuffers);
+ videoSize, frameRate);
if (mInitCheck != OK) releaseCamera();
}
@@ -531,15 +435,13 @@
uid_t clientUid,
pid_t clientPid,
Size videoSize,
- int32_t frameRate,
- bool storeMetaDataInVideoBuffers) {
+ int32_t frameRate) {
ALOGV("init");
status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
- videoSize, frameRate,
- storeMetaDataInVideoBuffers);
+ videoSize, frameRate);
IPCThreadState::self()->restoreCallingIdentity(token);
return err;
}
@@ -626,8 +528,7 @@
uid_t clientUid,
pid_t clientPid,
Size videoSize,
- int32_t frameRate,
- bool storeMetaDataInVideoBuffers) {
+ int32_t frameRate) {
ALOGV("initWithCameraAccess");
status_t err = OK;
@@ -667,24 +568,12 @@
CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
}
- // By default, store real data in video buffers.
- mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
- if (storeMetaDataInVideoBuffers) {
- if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
- mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
- } else if (OK == mCamera->setVideoBufferMode(
- hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
- mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
- }
- }
-
- if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
- err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
- if (err != OK) {
- ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
- "%s (err=%d)", __FUNCTION__, strerror(-err), err);
- return err;
- }
+ // Use buffer queue to receive video buffers from camera
+ err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+ if (err != OK) {
+ ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
+ "%s (err=%d)", __FUNCTION__, strerror(-err), err);
+ return err;
}
int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
@@ -724,54 +613,26 @@
int64_t token = IPCThreadState::self()->clearCallingIdentity();
status_t err;
- if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
- // Initialize buffer queue.
- err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
- (android_dataspace_t)mEncoderDataSpace,
- mNumInputBuffers > 0 ? mNumInputBuffers : 1);
- if (err != OK) {
- ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
- strerror(-err), err);
- return err;
- }
- } else {
- if (mNumInputBuffers > 0) {
- err = mCamera->sendCommand(
- CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
-
- // This could happen for CameraHAL1 clients; thus the failure is
- // not a fatal error
- if (err != OK) {
- ALOGW("Failed to set video buffer count to %d due to %d",
- mNumInputBuffers, err);
- }
- }
-
- err = mCamera->sendCommand(
- CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
-
- // This could happen for CameraHAL1 clients; thus the failure is
- // not a fatal error
- if (err != OK) {
- ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
- mEncoderFormat, mEncoderDataSpace, err);
- }
-
- // Create memory heap to store buffers as VideoNativeMetadata.
- createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
+ // Initialize buffer queue.
+ err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
+ (android_dataspace_t)mEncoderDataSpace,
+ mNumInputBuffers > 0 ? mNumInputBuffers : 1);
+ if (err != OK) {
+ ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
+ strerror(-err), err);
+ return err;
}
+ // Start data flow
err = OK;
if (mCameraFlags & FLAGS_HOT_CAMERA) {
mCamera->unlock();
mCamera.clear();
- if ((err = mCameraRecordingProxy->startRecording(
- new ProxyListener(this))) != OK) {
+ if ((err = mCameraRecordingProxy->startRecording()) != OK) {
ALOGE("Failed to start recording, received error: %s (%d)",
strerror(-err), err);
}
} else {
- mCamera->setListener(new CameraSourceListener(this));
mCamera->startRecording();
if (!mCamera->recordingEnabled()) {
err = -EINVAL;
@@ -836,7 +697,6 @@
}
} else {
if (mCamera != 0) {
- mCamera->setListener(NULL);
mCamera->stopRecording();
}
}
@@ -935,97 +795,31 @@
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
ALOGV("releaseRecordingFrame");
- if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
- // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
- if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
- ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
- heap->getHeapID(), mMemoryHeapBase->getHeapID());
- return;
- }
-
- VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
- (uint8_t*)heap->getBase() + offset);
-
- // Find the corresponding buffer item for the native window buffer.
- ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
- if (index == NAME_NOT_FOUND) {
- ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
- return;
- }
-
- BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
- mReceivedBufferItemMap.removeItemsAt(index);
- mVideoBufferConsumer->releaseBuffer(buffer);
- mMemoryBases.push_back(frame);
- mMemoryBaseAvailableCond.signal();
- } else {
- native_handle_t* handle = nullptr;
-
- // Check if frame contains a VideoNativeHandleMetadata.
- if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(frame->unsecurePointer());
- if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
- handle = metadata->pHandle;
- }
- }
-
- if (handle != nullptr) {
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
- if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
- ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)",
- __FUNCTION__, heap->getHeapID(), mMemoryHeapBase->getHeapID());
- return;
- }
- uint32_t batchSize = 0;
- {
- Mutex::Autolock autoLock(mBatchLock);
- if (mInflightBatchSizes.size() > 0) {
- batchSize = mInflightBatchSizes[0];
- }
- }
- if (batchSize == 0) { // return buffers one by one
- // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
- releaseRecordingFrameHandle(handle);
- mMemoryBases.push_back(frame);
- mMemoryBaseAvailableCond.signal();
- } else { // Group buffers in batch then return
- Mutex::Autolock autoLock(mBatchLock);
- mInflightReturnedHandles.push_back(handle);
- mInflightReturnedMemorys.push_back(frame);
- if (mInflightReturnedHandles.size() == batchSize) {
- releaseRecordingFrameHandleBatch(mInflightReturnedHandles);
-
- mInflightBatchSizes.pop_front();
- mInflightReturnedHandles.clear();
- for (const auto& mem : mInflightReturnedMemorys) {
- mMemoryBases.push_back(mem);
- mMemoryBaseAvailableCond.signal();
- }
- mInflightReturnedMemorys.clear();
- }
- }
-
- } else if (mCameraRecordingProxy != nullptr) {
- // mCamera is created by application. Return the frame back to camera via camera
- // recording proxy.
- mCameraRecordingProxy->releaseRecordingFrame(frame);
- } else if (mCamera != nullptr) {
- // mCamera is created by CameraSource. Return the frame directly back to camera.
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
+ // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
+ if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
+ ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
+ heap->getHeapID(), mMemoryHeapBase->getHeapID());
+ return;
}
+
+ VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+ (uint8_t*)heap->getBase() + offset);
+
+ // Find the corresponding buffer item for the native window buffer.
+ ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
+ if (index == NAME_NOT_FOUND) {
+ ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
+ return;
+ }
+
+ BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
+ mReceivedBufferItemMap.removeItemsAt(index);
+ mVideoBufferConsumer->releaseBuffer(buffer);
+ mMemoryBases.push_back(frame);
+ mMemoryBaseAvailableCond.signal();
}
void CameraSource::releaseQueuedFrames() {
@@ -1181,152 +975,6 @@
return false;
}
-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
- int32_t msgType __unused, const sp<IMemory> &data) {
- ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
- Mutex::Autolock autoLock(mLock);
-
- if (shouldSkipFrameLocked(timestampUs)) {
- releaseOneRecordingFrame(data);
- return;
- }
-
- ++mNumFramesReceived;
-
- CHECK(data != NULL && data->size() > 0);
- mFramesReceived.push_back(data);
- int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
- mFrameTimes.push_back(timeUs);
- ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
- mStartTimeUs, timeUs);
- mFrameAvailableCondition.signal();
-}
-
-void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
- if (mCameraRecordingProxy != nullptr) {
- mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
- } else if (mCamera != nullptr) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrameHandle(handle);
- IPCThreadState::self()->restoreCallingIdentity(token);
- } else {
- native_handle_close(handle);
- native_handle_delete(handle);
- }
-}
-
-void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
- if (mCameraRecordingProxy != nullptr) {
- mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
- } else if (mCamera != nullptr) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrameHandleBatch(handles);
- IPCThreadState::self()->restoreCallingIdentity(token);
- } else {
- for (auto& handle : handles) {
- native_handle_close(handle);
- native_handle_delete(handle);
- }
- }
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
- native_handle_t* handle) {
- ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
- Mutex::Autolock autoLock(mLock);
- if (handle == nullptr) return;
-
- if (shouldSkipFrameLocked(timestampUs)) {
- releaseRecordingFrameHandle(handle);
- return;
- }
-
- while (mMemoryBases.empty()) {
- if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
- TIMED_OUT) {
- ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
- releaseRecordingFrameHandle(handle);
- return;
- }
- }
-
- ++mNumFramesReceived;
-
- sp<IMemory> data = *mMemoryBases.begin();
- mMemoryBases.erase(mMemoryBases.begin());
-
- // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
- VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
- metadata->eType = kMetadataBufferTypeNativeHandleSource;
- metadata->pHandle = handle;
-
- mFramesReceived.push_back(data);
- int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
- mFrameTimes.push_back(timeUs);
- ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
- mFrameAvailableCondition.signal();
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles) {
- size_t n = timestampsUs.size();
- if (n != handles.size()) {
- ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
- __FUNCTION__, timestampsUs.size(), handles.size());
- }
-
- Mutex::Autolock autoLock(mLock);
- int batchSize = 0;
- for (size_t i = 0; i < n; i++) {
- int64_t timestampUs = timestampsUs[i];
- native_handle_t* handle = handles[i];
-
- ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
- if (handle == nullptr) continue;
-
- if (shouldSkipFrameLocked(timestampUs)) {
- releaseRecordingFrameHandle(handle);
- continue;
- }
-
- while (mMemoryBases.empty()) {
- if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
- TIMED_OUT) {
- ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
- releaseRecordingFrameHandle(handle);
- continue;
- }
- }
- ++batchSize;
- ++mNumFramesReceived;
- sp<IMemory> data = *mMemoryBases.begin();
- mMemoryBases.erase(mMemoryBases.begin());
-
- // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
- metadata->eType = kMetadataBufferTypeNativeHandleSource;
- metadata->pHandle = handle;
-
- mFramesReceived.push_back(data);
- int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
- mFrameTimes.push_back(timeUs);
- ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-
- }
- if (batchSize > 0) {
- Mutex::Autolock autoLock(mBatchLock);
- mInflightBatchSizes.push_back(batchSize);
- }
- for (int i = 0; i < batchSize; i++) {
- mFrameAvailableCondition.signal();
- }
-}
-
CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
const sp<CameraSource>& cameraSource) {
mConsumer = consumer;
@@ -1417,41 +1065,7 @@
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
ALOGV("metaDataStoredInVideoBuffers");
- // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
- // buffer queue.
- switch (mVideoBufferMode) {
- case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
- return kMetadataBufferTypeNativeHandleSource;
- case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
- return kMetadataBufferTypeANWBuffer;
- default:
- return kMetadataBufferTypeInvalid;
- }
-}
-
-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
- mSource = source;
-}
-
-void CameraSource::ProxyListener::dataCallbackTimestamp(
- nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
- mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
- native_handle_t* handle) {
- mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles) {
- int n = timestampsUs.size();
- std::vector<nsecs_t> modifiedTimestamps(n);
- for (int i = 0; i < n; i++) {
- modifiedTimestamps[i] = timestampsUs[i] / 1000;
- }
- mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+ return kMetadataBufferTypeANWBuffer;
}
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index e0a6eb3..50a512f 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -45,15 +45,13 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenFrameCaptureUs,
- bool storeMetaDataInVideoBuffers) {
+ int64_t timeBetweenFrameCaptureUs) {
CameraSourceTimeLapse *source = new
CameraSourceTimeLapse(camera, proxy, cameraId,
clientName, clientUid, clientPid,
videoSize, videoFrameRate, surface,
- timeBetweenFrameCaptureUs,
- storeMetaDataInVideoBuffers);
+ timeBetweenFrameCaptureUs);
if (source != NULL) {
if (source->initCheck() != OK) {
@@ -74,11 +72,9 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenFrameCaptureUs,
- bool storeMetaDataInVideoBuffers)
+ int64_t timeBetweenFrameCaptureUs)
: CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
- videoSize, videoFrameRate, surface,
- storeMetaDataInVideoBuffers),
+ videoSize, videoFrameRate, surface),
mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
mLastTimeLapseFrameRealTimestampUs(0),
mSkipCurrentFrame(false) {
@@ -173,12 +169,6 @@
ALOGV("signalBufferReturned");
Mutex::Autolock autoLock(mQuickStopLock);
if (mQuickStop && (buffer == mLastReadBufferCopy)) {
- if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
- native_handle_t* handle = (
- (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
- native_handle_close(handle);
- native_handle_delete(handle);
- }
buffer->setObserver(NULL);
buffer->release();
mLastReadBufferCopy = NULL;
@@ -191,8 +181,7 @@
void createMediaBufferCopy(
const MediaBufferBase& sourceBuffer,
int64_t frameTime,
- MediaBufferBase **newBuffer,
- int32_t videoBufferMode) {
+ MediaBufferBase **newBuffer) {
ALOGV("createMediaBufferCopy");
size_t sourceSize = sourceBuffer.size();
@@ -203,19 +192,13 @@
(*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
- if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
- ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
- native_handle_clone(
- ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
- }
}
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
ALOGV("fillLastReadBufferCopy");
int64_t frameTime;
CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
- createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
- metaDataStoredInVideoBuffers());
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
mLastReadBufferCopy->add_ref();
mLastReadBufferCopy->setObserver(this);
}
@@ -240,19 +223,6 @@
}
}
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
- const sp<IMemory> &source_data) {
-
- ALOGV("createIMemoryCopy");
- size_t source_size = source_data->size();
- void* source_pointer = source_data->unsecurePointer();
-
- sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
- sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
- memcpy(newMemory->unsecurePointer(), source_pointer, source_size);
- return newMemory;
-}
-
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
ALOGV("skipCurrentFrame");
if (mSkipCurrentFrame) {
@@ -318,31 +288,6 @@
return false;
}
-void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
- const sp<IMemory> &data) {
- ALOGV("dataCallbackTimestamp");
- mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs);
- CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
- native_handle_t* handle) {
- ALOGV("recordingFrameHandleCallbackTimestamp");
- mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs);
- CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles) {
- ALOGV("recordingFrameHandleCallbackTimestampBatch");
- int n = timestampsUs.size();
- for (int i = 0; i < n; i++) {
- // Don't do batching for CameraSourceTimeLapse for now
- recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
- }
-}
-
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
ALOGV("processBufferQueueFrame");
int64_t timestampUs = buffer.mTimestamp / 1000;
diff --git a/media/libstagefright/FrameCaptureProcessor.cpp b/media/libstagefright/FrameCaptureProcessor.cpp
index 96c1195..8cd7f82 100644
--- a/media/libstagefright/FrameCaptureProcessor.cpp
+++ b/media/libstagefright/FrameCaptureProcessor.cpp
@@ -164,14 +164,15 @@
if (err != OK) {
ALOGE("drawLayers returned err %d", err);
- return err;
+ } else {
+ err = fence->wait(500);
+ if (err != OK) {
+ ALOGW("wait for fence returned err %d", err);
+ err = OK;
+ }
}
-
- err = fence->wait(500);
- if (err != OK) {
- ALOGW("wait for fence returned err %d", err);
- }
- return OK;
+ mRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+ return err;
}
void FrameCaptureProcessor::onMessageReceived(const sp<AMessage> &msg) {
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 965b6dd..d11408d 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -44,7 +44,7 @@
namespace android {
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
-static const size_t kRetryCount = 50; // must be >0
+static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
@@ -121,15 +121,23 @@
false /*allocRotated*/, true /*metaOnly*/);
}
+bool isAvif(const sp<MetaData> &trackMeta) {
+ const char *mime;
+ return trackMeta->findCString(kKeyMIMEType, &mime)
+ && (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+ || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF));
+}
+
bool findThumbnailInfo(
const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
uint32_t dummyType;
const void *dummyData;
size_t dummySize;
+ int codecConfigKey = isAvif(trackMeta) ? kKeyThumbnailAV1C : kKeyThumbnailHVCC;
return trackMeta->findInt32(kKeyThumbnailWidth, width)
&& trackMeta->findInt32(kKeyThumbnailHeight, height)
- && trackMeta->findData(kKeyThumbnailHVCC,
+ && trackMeta->findData(codecConfigKey,
type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
}
@@ -752,7 +760,10 @@
overrideMeta->remove(kKeyDisplayHeight);
overrideMeta->setInt32(kKeyWidth, mWidth);
overrideMeta->setInt32(kKeyHeight, mHeight);
- overrideMeta->setData(kKeyHVCC, type, data, size);
+ // The AV1 codec configuration data is passed via CSD0 to the AV1
+ // decoder.
+ const int codecConfigKey = isAvif(trackMeta()) ? kKeyOpaqueCSD0 : kKeyHVCC;
+ overrideMeta->setData(codecConfigKey, type, data, size);
options->setSeekTo(-1);
} else {
CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 9faa28c..5f9c20e 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -30,6 +30,8 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
+#define UNUSED_PARAM __attribute__((unused))
+
namespace android {
static const uint8_t kHevcNalUnitTypes[8] = {
@@ -86,6 +88,7 @@
}
if (err != OK) {
+ ALOGE("error parsing VPS or SPS or PPS");
return err;
}
@@ -377,8 +380,56 @@
return reader.overRead() ? ERROR_MALFORMED : OK;
}
+void HevcParameterSets::FindHEVCDimensions(const sp<ABuffer> &SpsBuffer, int32_t *width, int32_t *height)
+{
+ ALOGD("FindHEVCDimensions");
+ // See Rec. ITU-T H.265 v3 (04/2015) Chapter 7.3.2.2 for reference
+ ABitReader reader(SpsBuffer->data() + 1, SpsBuffer->size() - 1);
+ // Skip sps_video_parameter_set_id
+ reader.skipBits(4);
+ uint8_t maxSubLayersMinus1 = reader.getBitsWithFallback(3, 0);
+ // Skip sps_temporal_id_nesting_flag;
+ reader.skipBits(1);
+ // Skip general profile
+ reader.skipBits(96);
+ if (maxSubLayersMinus1 > 0) {
+ bool subLayerProfilePresentFlag[8];
+ bool subLayerLevelPresentFlag[8];
+ for (int i = 0; i < maxSubLayersMinus1; ++i) {
+ subLayerProfilePresentFlag[i] = reader.getBitsWithFallback(1, 0);
+ subLayerLevelPresentFlag[i] = reader.getBitsWithFallback(1, 0);
+ }
+ // Skip reserved
+ reader.skipBits(2 * (8 - maxSubLayersMinus1));
+ for (int i = 0; i < maxSubLayersMinus1; ++i) {
+ if (subLayerProfilePresentFlag[i]) {
+ // Skip profile
+ reader.skipBits(88);
+ }
+ if (subLayerLevelPresentFlag[i]) {
+ // Skip sub_layer_level_idc[i]
+ reader.skipBits(8);
+ }
+ }
+ }
+ // Skip sps_seq_parameter_set_id
+ skipUE(&reader);
+ uint8_t chromaFormatIdc = parseUEWithFallback(&reader, 0);
+ if (chromaFormatIdc == 3) {
+ // Skip separate_colour_plane_flag
+ reader.skipBits(1);
+ }
+ skipUE(&reader);
+ skipUE(&reader);
+
+ // pic_width_in_luma_samples
+ *width = parseUEWithFallback(&reader, 0);
+ // pic_height_in_luma_samples
+ *height = parseUEWithFallback(&reader, 0);
+}
+
status_t HevcParameterSets::parsePps(
- const uint8_t* data __unused, size_t size __unused) {
+ const uint8_t* data UNUSED_PARAM, size_t size UNUSED_PARAM) {
return OK;
}
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index af8096d..447d599 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -103,8 +103,40 @@
//#define SHOW_MODEL_BUILD 1
class MPEG4Writer::Track {
+ struct TrackId {
+ TrackId(uint32_t aId)
+ :mId(aId),
+ mTrackIdValid(false) {
+ }
+ bool isValid(bool akKey4BitTrackIds) {
+ // trackId cannot be zero, ISO/IEC 14496-12 8.3.2.3
+ if (mId == 0) {
+ return false;
+ }
+ /* MediaRecorder uses only 4 bits to represent track ids when notifying clients.
+ * MediaMuxer's track ids are restricted by container allowed size only.
+ * MPEG4 Container defines unsigned int (32), ISO/IEC 14496-12 8.3.2.2
+ */
+ if (akKey4BitTrackIds && mId > 15) {
+ return false;
+ }
+ mTrackIdValid = true;
+ return true;
+ }
+ uint32_t getId() const {
+ CHECK(mTrackIdValid);
+ return mId;
+ }
+ TrackId() = delete;
+ DISALLOW_EVIL_CONSTRUCTORS(TrackId);
+ private:
+ // unsigned int (32), ISO/IEC 14496-12 8.3.2.2
+ uint32_t mId;
+ bool mTrackIdValid;
+ };
+
public:
- Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId);
+ Track(MPEG4Writer *owner, const sp<MediaSource> &source, uint32_t aTrackId);
~Track();
@@ -129,7 +161,7 @@
void addChunkOffset(off64_t offset);
void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
void flushItemRefs();
- int32_t getTrackId() const { return mTrackId; }
+ TrackId& getTrackId() { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
static const char *getFourCCForMime(const char *mime);
const char *getTrackType() const;
@@ -290,7 +322,7 @@
bool mIsMPEG4;
bool mGotStartKeyFrame;
bool mIsMalformed;
- int32_t mTrackId;
+ TrackId mTrackId;
int64_t mTrackDurationUs;
int64_t mMaxChunkDurationUs;
int64_t mLastDecodingTimeUs;
@@ -413,7 +445,7 @@
void addOneElstTableEntry(uint32_t segmentDuration, int32_t mediaTime,
int16_t mediaRate, int16_t mediaRateFraction);
- bool isTrackMalFormed() const;
+ bool isTrackMalFormed();
void sendTrackSummary(bool hasMultipleTracks);
// Write the boxes
@@ -510,6 +542,7 @@
mNumGrids = 0;
mNextItemId = kItemIdBase;
mHasRefs = false;
+ mResetStatus = OK;
mPreAllocFirstTime = true;
mPrevAllTracksTotalMetaDataSizeEstimate = 0;
@@ -534,7 +567,7 @@
release();
}
- if (fallocate(mFd, 0, 0, 1) == 0) {
+ if (fallocate64(mFd, FALLOC_FL_KEEP_SIZE, 0, 1) == 0) {
ALOGD("PreAllocation enabled");
mPreAllocationEnabled = true;
} else {
@@ -744,8 +777,7 @@
// where 1MB is the common file size limit for MMS application.
// The default MAX _MOOV_BOX_SIZE value is based on about 3
// minute video recording with a bit rate about 3 Mbps, because
- // statistics also show that most of the video captured are going
- // to be less than 3 minutes.
+ // statistics show that most captured videos are less than 3 minutes.
// If the estimation is wrong, we will pay the price of wasting
// some reserved space. This should not happen so often statistically.
@@ -796,6 +828,15 @@
return size;
}
+status_t MPEG4Writer::validateAllTracksId(bool akKey4BitTrackIds) {
+ for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end(); ++it) {
+ if (!(*it)->getTrackId().isValid(akKey4BitTrackIds)) {
+ return BAD_VALUE;
+ }
+ }
+ return OK;
+}
+
status_t MPEG4Writer::start(MetaData *param) {
if (mInitCheck != OK) {
return UNKNOWN_ERROR;
@@ -810,6 +851,9 @@
mIsFileSizeLimitExplicitlyRequested = true;
}
+ /* mMaxFileSizeLimitBytes has to be set every time fd is switched, hence the following code is
+ * appropriate in start() method.
+ */
int32_t fileSizeBits = fpathconf(mFd, _PC_FILESIZEBITS);
ALOGD("fpathconf _PC_FILESIZEBITS:%" PRId32, fileSizeBits);
fileSizeBits = std::min(fileSizeBits, 52 /* cap it below 4 peta bytes */);
@@ -902,10 +946,30 @@
mInMemoryCache = NULL;
mInMemoryCacheOffset = 0;
+ status_t err = OK;
+ int32_t is4bitTrackId = false;
+ if (param && param->findInt32(kKey4BitTrackIds, &is4bitTrackId) && is4bitTrackId) {
+ err = validateAllTracksId(true);
+ } else {
+ err = validateAllTracksId(false);
+ }
+ if (err != OK) {
+ return err;
+ }
ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
mHasMoovBox, mHasFileLevelMeta);
+ err = startWriterThread();
+ if (err != OK) {
+ return err;
+ }
+
+ err = setupAndStartLooper();
+ if (err != OK) {
+ return err;
+ }
+
writeFtypBox(param);
mFreeBoxOffset = mOffset;
@@ -937,13 +1001,24 @@
seekOrPostError(mFd, mMdatOffset, SEEK_SET);
write("\x00\x00\x00\x01mdat????????", 16);
- status_t err = startWriterThread();
- if (err != OK) {
- return err;
+ /* Confirm whether the writing of the initial file atoms, ftyp and free,
+ * are written to the file properly by posting kWhatNoIOErrorSoFar to the
+ * MP4WtrCtrlHlpLooper that's handling write and seek errors also. If there
+ * was kWhatIOError, the following two scenarios should be handled.
+ * 1) If kWhatIOError was delivered and processed, MP4WtrCtrlHlpLooper
+ * would have stopped all threads gracefully already and posting
+ * kWhatNoIOErrorSoFar would fail.
+ * 2) If kWhatIOError wasn't delivered or getting processed,
+ * kWhatNoIOErrorSoFar should get posted successfully. Wait for
+ * response from MP4WtrCtrlHlpLooper.
+ */
+ sp<AMessage> msg = new AMessage(kWhatNoIOErrorSoFar, mReflector);
+ sp<AMessage> response;
+ err = msg->postAndAwaitResponse(&response);
+ if (err != OK || !response->findInt32("err", &err) || err != OK) {
+ return ERROR_IO;
}
- setupAndStartLooper();
-
err = startTracks(param);
if (err != OK) {
return err;
@@ -953,6 +1028,11 @@
return OK;
}
+status_t MPEG4Writer::stop() {
+ // If reset was in progress, wait for it to complete.
+ return reset(true, true);
+}
+
status_t MPEG4Writer::pause() {
ALOGW("MPEG4Writer: pause is not supported");
return ERROR_UNSUPPORTED;
@@ -961,6 +1041,7 @@
status_t MPEG4Writer::stopWriterThread() {
ALOGV("Stopping writer thread");
if (!mWriterThreadStarted) {
+ ALOGD("Writer thread not started");
return OK;
}
{
@@ -970,12 +1051,16 @@
}
void *dummy;
- status_t err = pthread_join(mThread, &dummy);
- WARN_UNLESS(err == 0, "stopWriterThread pthread_join err: %d", err);
-
- err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+ status_t err = OK;
+ int retVal = pthread_join(mThread, &dummy);
+ if (retVal == 0) {
+ err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+ ALOGD("WriterThread stopped. Status:%d", err);
+ } else {
+ ALOGE("stopWriterThread pthread_join status:%d", retVal);
+ err = UNKNOWN_ERROR;
+ }
mWriterThreadStarted = false;
- WARN_UNLESS(err == 0, "stopWriterThread pthread_join retVal: %d, writer thread stopped", err);
return err;
}
@@ -1031,30 +1116,74 @@
writeInt32(0x40000000); // w
}
-void MPEG4Writer::release() {
- ALOGD("release()");
- if (mPreAllocationEnabled) {
- truncatePreAllocation();
+void MPEG4Writer::printWriteDurations() {
+ if (mWriteDurationPQ.empty()) {
+ return;
}
- int retVal = fsync(mFd);
- WARN_UNLESS(retVal == 0, "fsync retVal:%d", retVal);
- retVal = close(mFd);
- WARN_UNLESS(retVal == 0, "close mFd retVal :%d", retVal);
+ std::string writeDurationsString =
+ "Top " + std::to_string(mWriteDurationPQ.size()) + " write durations(microseconds):";
+ uint8_t i = 0;
+ while (!mWriteDurationPQ.empty()) {
+ writeDurationsString +=
+ " #" + std::to_string(++i) + ":" + std::to_string(mWriteDurationPQ.top().count());
+ mWriteDurationPQ.pop();
+ }
+ ALOGD("%s", writeDurationsString.c_str());
+}
+
+status_t MPEG4Writer::release() {
+ ALOGD("release()");
+ status_t err = OK;
+ if (!truncatePreAllocation()) {
+ if (err == OK) { err = ERROR_IO; }
+ }
+
+ // TODO(b/174770856) remove this measurement (and perhaps the fsync)
+ nsecs_t sync_started = systemTime(SYSTEM_TIME_REALTIME);
+ if (fsync(mFd) != 0) {
+ ALOGW("(ignored)fsync err:%s(%d)", std::strerror(errno), errno);
+ // Don't bubble up fsync error, b/157291505.
+ // if (err == OK) { err = ERROR_IO; }
+ }
+ nsecs_t sync_finished = systemTime(SYSTEM_TIME_REALTIME);
+ nsecs_t sync_elapsed_ns = sync_finished - sync_started;
+ int64_t filesize = -1;
+ struct stat statbuf;
+ if (fstat(mFd, &statbuf) == 0) {
+ filesize = statbuf.st_size;
+ }
+ ALOGD("final fsync() takes %" PRId64 " ms, file size %" PRId64,
+ sync_elapsed_ns / 1000000, (int64_t) filesize);
+
+ if (close(mFd) != 0) {
+ ALOGE("close err:%s(%d)", std::strerror(errno), errno);
+ if (err == OK) { err = ERROR_IO; }
+ }
mFd = -1;
if (mNextFd != -1) {
- retVal = close(mNextFd);
+ if (close(mNextFd) != 0) {
+ ALOGE("close(mNextFd) error:%s(%d)", std::strerror(errno), errno);
+ }
+ if (err == OK) { err = ERROR_IO; }
mNextFd = -1;
- WARN_UNLESS(retVal == 0, "close mNextFd retVal :%d", retVal);
}
stopAndReleaseLooper();
mInitCheck = NO_INIT;
mStarted = false;
free(mInMemoryCache);
mInMemoryCache = NULL;
+
+ printWriteDurations();
+
+ return err;
}
-void MPEG4Writer::finishCurrentSession() {
- reset(false /* stopSource */);
+status_t MPEG4Writer::finishCurrentSession() {
+ ALOGV("finishCurrentSession");
+ /* Don't wait if reset is in progress already, that avoids deadlock
+ * as finishCurrentSession() is called from control looper thread.
+ */
+ return reset(false, false);
}
status_t MPEG4Writer::switchFd() {
@@ -1065,7 +1194,7 @@
}
if (mNextFd == -1) {
- ALOGW("No FileDescripter for next recording");
+ ALOGW("No FileDescriptor for next recording");
return INVALID_OPERATION;
}
@@ -1076,20 +1205,45 @@
return err;
}
-status_t MPEG4Writer::reset(bool stopSource) {
+status_t MPEG4Writer::reset(bool stopSource, bool waitForAnyPreviousCallToComplete) {
ALOGD("reset()");
- std::lock_guard<std::mutex> l(mResetMutex);
+ std::unique_lock<std::mutex> lk(mResetMutex, std::defer_lock);
+ if (waitForAnyPreviousCallToComplete) {
+ /* stop=>reset from client needs the return value of reset call, hence wait here
+ * if a reset was in process already.
+ */
+ lk.lock();
+ } else if (!lk.try_lock()) {
+ /* Internal reset from control looper thread shouldn't wait for any reset in
+ * process already.
+ */
+ return INVALID_OPERATION;
+ }
+
+ if (mResetStatus != OK) {
+ /* Don't have to proceed if reset has finished with an error before.
+ * If there was no error before, proceeding with reset would be harmless, as
+ * the call would return from the mInitCheck condition below.
+ */
+ return mResetStatus;
+ }
+
if (mInitCheck != OK) {
- return OK;
+ mResetStatus = OK;
+ return mResetStatus;
} else {
if (!mWriterThreadStarted ||
!mStarted) {
- status_t err = OK;
+ status_t writerErr = OK;
if (mWriterThreadStarted) {
- err = stopWriterThread();
+ writerErr = stopWriterThread();
}
- release();
- return err;
+ status_t retErr = release();
+ if (writerErr != OK) {
+ retErr = writerErr;
+ }
+ mResetStatus = retErr;
+ return mResetStatus;
}
}
@@ -1100,8 +1254,9 @@
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
status_t trackErr = (*it)->stop(stopSource);
+ WARN_UNLESS(trackErr == OK, "%s track stopped with an error",
+ (*it)->getTrackType());
if (err == OK && trackErr != OK) {
- ALOGW("%s track stopped with an error", (*it)->getTrackType());
err = trackErr;
}
@@ -1118,7 +1273,6 @@
}
}
-
if (nonImageTrackCount > 1) {
ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us",
minDurationUs, maxDurationUs);
@@ -1126,15 +1280,18 @@
status_t writerErr = stopWriterThread();
- // TODO: which error to propagage, writerErr or trackErr?
+ // Propagating writer error
if (err == OK && writerErr != OK) {
err = writerErr;
}
// Do not write out movie header on error except malformed track.
+ // TODO: Remove samples of malformed tracks added in mdat.
if (err != OK && err != ERROR_MALFORMED) {
+ // Ignoring release() return value as there was an "err" already.
release();
- return err;
+ mResetStatus = err;
+ return mResetStatus;
}
// Fix up the size of the 'mdat' chunk.
@@ -1174,6 +1331,7 @@
} else {
ALOGI("The mp4 file will not be streamable.");
}
+ ALOGI("MOOV atom was written to the file");
}
mWriteBoxToMemory = false;
@@ -1186,8 +1344,13 @@
CHECK(mBoxes.empty());
- release();
- return err;
+ status_t errRelease = release();
+ // Prioritize the error that occurred before release().
+ if (err == OK) {
+ err = errRelease;
+ }
+ mResetStatus = err;
+ return mResetStatus;
}
/*
@@ -1354,7 +1517,7 @@
for (List<ChunkInfo>::iterator it = mChunkInfos.begin();
it != mChunkInfos.end(); ++it) {
- int trackNum = it->mTrack->getTrackId() << 28;
+ uint32_t trackNum = (it->mTrack->getTrackId().getId() << 28);
notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
trackNum | MEDIA_RECORDER_TRACK_INTER_CHUNK_TIME_MS,
it->mMaxInterChunkDurUs);
@@ -1438,25 +1601,21 @@
void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
size_t length = buffer->range_length();
if (mUse4ByteNalLength) {
- uint8_t x = length >> 24;
- writeOrPostError(mFd, &x, 1);
- x = (length >> 16) & 0xff;
- writeOrPostError(mFd, &x, 1);
- x = (length >> 8) & 0xff;
- writeOrPostError(mFd, &x, 1);
- x = length & 0xff;
- writeOrPostError(mFd, &x, 1);
-
+ uint8_t x[4];
+ x[0] = length >> 24;
+ x[1] = (length >> 16) & 0xff;
+ x[2] = (length >> 8) & 0xff;
+ x[3] = length & 0xff;
+ writeOrPostError(mFd, &x, 4);
writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
-
mOffset += length + 4;
} else {
CHECK_LT(length, 65536u);
- uint8_t x = length >> 8;
- writeOrPostError(mFd, &x, 1);
- x = length & 0xff;
- writeOrPostError(mFd, &x, 1);
+ uint8_t x[2];
+ x[0] = length >> 8;
+ x[1] = length & 0xff;
+ writeOrPostError(mFd, &x, 2);
writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
mOffset += length + 2;
}
@@ -1500,7 +1659,17 @@
void MPEG4Writer::writeOrPostError(int fd, const void* buf, size_t count) {
if (mWriteSeekErr == true)
return;
+
+ auto beforeTP = std::chrono::high_resolution_clock::now();
ssize_t bytesWritten = ::write(fd, buf, count);
+ auto afterTP = std::chrono::high_resolution_clock::now();
+ auto writeDuration =
+ std::chrono::duration_cast<std::chrono::microseconds>(afterTP - beforeTP).count();
+ mWriteDurationPQ.emplace(writeDuration);
+ if (mWriteDurationPQ.size() > kWriteDurationsCount) {
+ mWriteDurationPQ.pop();
+ }
+
/* Write as much as possible during stop() execution when there was an error
* (mWriteSeekErr == true) in the previous call to write() or lseek64().
*/
@@ -1512,9 +1681,9 @@
std::strerror(errno), errno);
// Can't guarantee that file is usable or write would succeed anymore, hence signal to stop.
- sp<AMessage> msg = new AMessage(kWhatHandleIOError, mReflector);
- status_t err = msg->post();
- ALOGE("writeOrPostError post:%d", err);
+ sp<AMessage> msg = new AMessage(kWhatIOError, mReflector);
+ msg->setInt32("err", ERROR_IO);
+ WARN_UNLESS(msg->post() == OK, "writeOrPostError:error posting ERROR_IO");
}
void MPEG4Writer::seekOrPostError(int fd, off64_t offset, int whence) {
@@ -1531,9 +1700,9 @@
offset, std::strerror(errno), errno);
// Can't guarantee that file is usable or seek would succeed anymore, hence signal to stop.
- sp<AMessage> msg = new AMessage(kWhatHandleIOError, mReflector);
- status_t err = msg->post();
- ALOGE("seekOrPostError post:%d", err);
+ sp<AMessage> msg = new AMessage(kWhatIOError, mReflector);
+ msg->setInt32("err", ERROR_IO);
+ WARN_UNLESS(msg->post() == OK, "seekOrPostError:error posting ERROR_IO");
}
void MPEG4Writer::beginBox(uint32_t id) {
@@ -1768,10 +1937,11 @@
ALOGV("preAllocateSize :%" PRIu64 " lastFileEndOffset:%" PRIu64, preAllocateSize,
lastFileEndOffset);
- int res = fallocate(mFd, 0, lastFileEndOffset, preAllocateSize);
+ int res = fallocate64(mFd, FALLOC_FL_KEEP_SIZE, lastFileEndOffset, preAllocateSize);
if (res == -1) {
ALOGE("fallocate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
- sp<AMessage> msg = new AMessage(kWhatHandleFallocateError, mReflector);
+ sp<AMessage> msg = new AMessage(kWhatFallocateError, mReflector);
+ msg->setInt32("err", ERROR_IO);
status_t err = msg->post();
mFallocateErr = true;
ALOGD("preAllocation post:%d", err);
@@ -1783,6 +1953,9 @@
}
bool MPEG4Writer::truncatePreAllocation() {
+ if (!mPreAllocationEnabled)
+ return true;
+
bool status = true;
off64_t endOffset = std::max(mMdatEndOffset, mOffset);
/* if mPreAllocateFileEndOffset >= endOffset, then preallocation logic works good. (diff >= 0).
@@ -1791,9 +1964,13 @@
ALOGD("ftruncate mPreAllocateFileEndOffset:%" PRId64 " mOffset:%" PRIu64
" mMdatEndOffset:%" PRIu64 " diff:%" PRId64, mPreAllocateFileEndOffset, mOffset,
mMdatEndOffset, mPreAllocateFileEndOffset - endOffset);
- if(ftruncate(mFd, endOffset) == -1) {
+ if (ftruncate64(mFd, endOffset) == -1) {
ALOGE("ftruncate err:%s, %d, fd:%d", strerror(errno), errno, mFd);
status = false;
+ /* No need to post and handle(stop & notify client) error like it's done in preAllocate(),
+ * because ftruncate() is called during release() only and the error here would be
+ * reported from there as this function is returning false on any error in ftruncate().
+ */
}
return status;
}
@@ -1899,7 +2076,7 @@
////////////////////////////////////////////////////////////////////////////////
MPEG4Writer::Track::Track(
- MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId)
+ MPEG4Writer *owner, const sp<MediaSource> &source, uint32_t aTrackId)
: mOwner(owner),
mMeta(source->getFormat()),
mSource(source),
@@ -1909,7 +2086,7 @@
mStarted(false),
mGotStartKeyFrame(false),
mIsMalformed(false),
- mTrackId(trackId),
+ mTrackId(aTrackId),
mTrackDurationUs(0),
mEstimatedTrackSizeBytes(0),
mSamplesHaveSameSize(true),
@@ -2086,25 +2263,28 @@
mElstTableEntries->add(htonl((((uint32_t)mediaRate) << 16) | (uint32_t)mediaRateFraction));
}
-void MPEG4Writer::setupAndStartLooper() {
+status_t MPEG4Writer::setupAndStartLooper() {
+ status_t err = OK;
if (mLooper == nullptr) {
mLooper = new ALooper;
- mLooper->setName("MP4WriterLooper");
- mLooper->start();
+ mLooper->setName("MP4WtrCtrlHlpLooper");
+ err = mLooper->start();
mReflector = new AHandlerReflector<MPEG4Writer>(this);
mLooper->registerHandler(mReflector);
}
+ ALOGD("MP4WtrCtrlHlpLooper Started");
+ return err;
}
void MPEG4Writer::stopAndReleaseLooper() {
if (mLooper != nullptr) {
if (mReflector != nullptr) {
- ALOGD("unregisterHandler");
mLooper->unregisterHandler(mReflector->id());
mReflector.clear();
}
mLooper->stop();
mLooper.clear();
+ ALOGD("MP4WtrCtrlHlpLooper stopped");
}
}
@@ -2321,27 +2501,40 @@
int fd = mNextFd;
mNextFd = -1;
mLock.unlock();
- finishCurrentSession();
- initInternal(fd, false /*isFirstSession*/);
- start(mStartMeta.get());
- mSwitchPending = false;
- notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+ if (finishCurrentSession() == OK) {
+ initInternal(fd, false /*isFirstSession*/);
+ status_t status = start(mStartMeta.get());
+ mSwitchPending = false;
+ if (status == OK) {
+ notify(MEDIA_RECORDER_EVENT_INFO,
+ MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+ }
+ }
break;
}
- // ::write() or lseek64() wasn't a success, file could be malformed
- case kWhatHandleIOError: {
- ALOGE("kWhatHandleIOError");
- // Stop tracks' threads and main writer thread.
- notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, ERROR_MALFORMED);
- stop();
+ /* ::write() or lseek64() wasn't a success, file could be malformed.
+ * Or fallocate() failed. reset() and notify client on both the cases.
+ */
+ case kWhatFallocateError: // fallthrough
+ case kWhatIOError: {
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+ // If reset already in process, don't wait for it complete to avoid deadlock.
+ reset(true, false);
+ //TODO: new MEDIA_RECORDER_ERROR_**** instead MEDIA_RECORDER_ERROR_UNKNOWN ?
+ notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
break;
}
- // fallocate() failed, hence notify app about it and stop().
- case kWhatHandleFallocateError: {
- ALOGE("kWhatHandleFallocateError");
- //TODO: introduce new MEDIA_RECORDER_INFO_STOPPED instead MEDIA_RECORDER_INFO_UNKNOWN?
- notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_UNKNOWN, ERROR_IO);
- stop();
+ /* Response to kWhatNoIOErrorSoFar would be OK always as of now.
+ * Responding with other options could be added later if required.
+ */
+ case kWhatNoIOErrorSoFar: {
+ ALOGV("kWhatNoIOErrorSoFar");
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", OK);
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
break;
}
default:
@@ -2624,7 +2817,7 @@
// even if the file is well-formed and the primary picture is correct.
// Reserve item ids for samples + grid
- size_t numItemsToReserve = mNumTiles + (mNumTiles > 1);
+ size_t numItemsToReserve = mNumTiles + (mNumTiles > 0);
status_t err = mOwner->reserveItemId_l(numItemsToReserve, &mItemIdBase);
if (err != OK) {
return err;
@@ -2706,11 +2899,16 @@
mDone = true;
void *dummy;
- status_t err = pthread_join(mThread, &dummy);
- WARN_UNLESS(err == 0, "track::stop: pthread_join status:%d", err);
- err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
- WARN_UNLESS(err == 0, "%s track stopped. Status :%d. %s source", getTrackType(), err,
- stopSource ? "Stop" : "Not Stop");
+ status_t err = OK;
+ int retVal = pthread_join(mThread, &dummy);
+ if (retVal == 0) {
+ err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+ ALOGD("%s track stopped. Status:%d. %s source",
+ getTrackType(), err, stopSource ? "Stop" : "Not Stop");
+ } else {
+ ALOGE("track::stop: pthread_join retVal:%d", retVal);
+ err = UNKNOWN_ERROR;
+ }
mStarted = false;
return err;
}
@@ -2849,6 +3047,7 @@
}
if (nextStartCode == NULL) {
+ ALOGE("nextStartCode is null");
return ERROR_MALFORMED;
}
@@ -3126,11 +3325,11 @@
int64_t lastSampleDurationTicks = -1; // Timescale based ticks
if (mIsAudio) {
- prctl(PR_SET_NAME, (unsigned long)"AudioTrackWriterThread", 0, 0, 0);
+ prctl(PR_SET_NAME, (unsigned long)"MP4WtrAudTrkThread", 0, 0, 0);
} else if (mIsVideo) {
- prctl(PR_SET_NAME, (unsigned long)"VideoTrackWriterThread", 0, 0, 0);
+ prctl(PR_SET_NAME, (unsigned long)"MP4WtrVidTrkThread", 0, 0, 0);
} else {
- prctl(PR_SET_NAME, (unsigned long)"MetadataTrackWriterThread", 0, 0, 0);
+ prctl(PR_SET_NAME, (unsigned long)"MP4WtrMetaTrkThread", 0, 0, 0);
}
if (mOwner->isRealTimeRecording()) {
@@ -3181,6 +3380,7 @@
}
++count;
+
int32_t isCodecConfig;
if (buffer->meta_data().findInt32(kKeyIsCodecConfig, &isCodecConfig)
&& isCodecConfig) {
@@ -3204,7 +3404,7 @@
+ buffer->range_offset(),
buffer->range_length());
} else if (mIsMPEG4) {
- copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
+ err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
buffer->range_length());
}
}
@@ -3213,8 +3413,10 @@
buffer = NULL;
if (OK != err) {
mSource->stop();
+ mIsMalformed = true;
+ uint32_t trackNum = (mTrackId.getId() << 28);
mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_ERROR,
- mTrackId | MEDIA_RECORDER_TRACK_ERROR_GENERAL, err);
+ trackNum | MEDIA_RECORDER_TRACK_ERROR_GENERAL, err);
break;
}
@@ -3251,7 +3453,7 @@
* Reserve space in the file for the current sample + to be written MOOV box. If reservation
* for a new sample fails, preAllocate(...) stops muxing session completely. Stop() could
* write MOOV box successfully as space for the same was reserved in the prior call.
- * Release the current buffer/sample only here.
+ * Release the current buffer/sample here.
*/
if (!mOwner->preAllocate(buffer->range_length())) {
buffer->release();
@@ -3291,6 +3493,7 @@
updateTrackSizeEstimate();
if (mOwner->exceedsFileSizeLimit()) {
+ copy->release();
if (mOwner->switchFd() != OK) {
ALOGW("Recorded file size exceeds limit %" PRId64 "bytes",
mOwner->mMaxFileSizeLimitBytes);
@@ -3301,16 +3504,15 @@
ALOGV("%s Current recorded file size exceeds limit %" PRId64 "bytes. Switching output",
getTrackType(), mOwner->mMaxFileSizeLimitBytes);
}
- copy->release();
break;
}
if (mOwner->exceedsFileDurationLimit()) {
ALOGW("Recorded file duration exceeds limit %" PRId64 "microseconds",
mOwner->mMaxFileDurationLimitUs);
- mOwner->notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
copy->release();
mSource->stop();
+ mOwner->notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
break;
}
@@ -3592,13 +3794,13 @@
}
}
}
+
if (isTrackMalFormed()) {
- mIsMalformed = true;
dumpTimeStamps();
err = ERROR_MALFORMED;
}
- mOwner->trackProgressStatus(mTrackId, -1, err);
+ mOwner->trackProgressStatus(mTrackId.getId(), -1, err);
// Add final entries only for non-empty tracks.
if (mStszTableEntries->count() > 0) {
@@ -3665,7 +3867,7 @@
return err;
}
-bool MPEG4Writer::Track::isTrackMalFormed() const {
+bool MPEG4Writer::Track::isTrackMalFormed() {
if (mIsMalformed) {
return true;
}
@@ -3674,23 +3876,29 @@
if (mOwner->mStartMeta &&
mOwner->mStartMeta->findInt32(kKeyEmptyTrackMalFormed, &emptyTrackMalformed) &&
emptyTrackMalformed) {
+ // MediaRecorder(sets kKeyEmptyTrackMalFormed by default) report empty tracks as malformed.
if (!mIsHeic && mStszTableEntries->count() == 0) { // no samples written
ALOGE("The number of recorded samples is 0");
+ mIsMalformed = true;
return true;
}
if (mIsVideo && mStssTableEntries->count() == 0) { // no sync frames for video
ALOGE("There are no sync frames for video track");
+ mIsMalformed = true;
return true;
}
} else {
- // No sync frames for video.
+ // Through MediaMuxer, empty tracks can be added. No sync frames for video.
if (mIsVideo && mStszTableEntries->count() > 0 && mStssTableEntries->count() == 0) {
ALOGE("There are no sync frames for video track");
+ mIsMalformed = true;
return true;
}
}
-
- if (OK != checkCodecSpecificData()) { // no codec specific data
+ // Don't check for CodecSpecificData when track is empty.
+ if (mStszTableEntries->count() > 0 && OK != checkCodecSpecificData()) {
+ // No codec specific data.
+ mIsMalformed = true;
return true;
}
@@ -3704,7 +3912,7 @@
return;
}
- int trackNum = (mTrackId << 28);
+ uint32_t trackNum = (mTrackId.getId() << 28);
mOwner->notify(MEDIA_RECORDER_TRACK_EVENT_INFO,
trackNum | MEDIA_RECORDER_TRACK_INFO_TYPE,
@@ -3758,15 +3966,15 @@
if (mTrackEveryTimeDurationUs > 0 &&
timeUs - mPreviousTrackTimeUs >= mTrackEveryTimeDurationUs) {
ALOGV("Fire time tracking progress status at %" PRId64 " us", timeUs);
- mOwner->trackProgressStatus(mTrackId, timeUs - mPreviousTrackTimeUs, err);
+ mOwner->trackProgressStatus(mTrackId.getId(), timeUs - mPreviousTrackTimeUs, err);
mPreviousTrackTimeUs = timeUs;
}
}
void MPEG4Writer::trackProgressStatus(
- size_t trackId, int64_t timeUs, status_t err) {
+ uint32_t trackId, int64_t timeUs, status_t err) {
Mutex::Autolock lock(mLock);
- int32_t trackNum = (trackId << 28);
+ uint32_t trackNum = (trackId << 28);
// Error notification
// Do not consider ERROR_END_OF_STREAM an error
@@ -3936,8 +4144,8 @@
void MPEG4Writer::Track::writeStblBox() {
mOwner->beginBox("stbl");
- // Add subboxes only for non-empty tracks.
- if (mStszTableEntries->count() > 0) {
+ // Add subboxes for only non-empty and well-formed tracks.
+ if (mStszTableEntries->count() > 0 && !isTrackMalFormed()) {
mOwner->beginBox("stsd");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(1); // entry count
@@ -4242,7 +4450,7 @@
mOwner->writeInt32(0x07); // version=0, flags=7
mOwner->writeInt32(now); // creation time
mOwner->writeInt32(now); // modification time
- mOwner->writeInt32(mTrackId); // track id starts with 1
+ mOwner->writeInt32(mTrackId.getId()); // track id starts with 1
mOwner->writeInt32(0); // reserved
int64_t trakDurationUs = getDurationUs();
int32_t mvhdTimeScale = mOwner->getTimeScale();
@@ -4550,10 +4758,18 @@
// This is useful if the pixel is not square
void MPEG4Writer::Track::writePaspBox() {
- mOwner->beginBox("pasp");
- mOwner->writeInt32(1 << 16); // hspacing
- mOwner->writeInt32(1 << 16); // vspacing
- mOwner->endBox(); // pasp
+ // Do not write 'pasp' box unless the track format specifies it.
+ // According to ISO/IEC 14496-12 (ISO base media file format), 'pasp' box
+ // is optional. If present, it overrides the SAR from the video CSD. Only
+ // set it if the track format specifically requests that.
+ int32_t hSpacing, vSpacing;
+ if (mMeta->findInt32(kKeySARWidth, &hSpacing) && (hSpacing > 0)
+ && mMeta->findInt32(kKeySARHeight, &vSpacing) && (vSpacing > 0)) {
+ mOwner->beginBox("pasp");
+ mOwner->writeInt32(hSpacing); // hspacing
+ mOwner->writeInt32(vSpacing); // vspacing
+ mOwner->endBox(); // pasp
+ }
}
int64_t MPEG4Writer::Track::getStartTimeOffsetTimeUs() const {
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index f1b6e8c..5a2a910 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -114,6 +114,13 @@
return -EINVAL;
}
+ /* As mAdapterLock is unlocked while waiting for signalBufferReturned,
+ * a new buffer for the same track could be pushed from another thread
+ * in the client process, mBufferGatingMutex will help to hold that
+ * until the previous buffer is processed.
+ */
+ std::unique_lock<std::mutex> lk(mBufferGatingMutex);
+
Mutex::Autolock autoLock(mAdapterLock);
if (!mStarted) {
ALOGE("pushBuffer called before start");
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 0022ba6..da8f024 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -15,6 +15,7 @@
*/
//#define LOG_NDEBUG 0
+#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
@@ -37,6 +38,7 @@
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
+#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
@@ -55,6 +57,7 @@
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
@@ -94,9 +97,12 @@
static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
+static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
+static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
+static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
@@ -236,6 +242,9 @@
}
AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+
+ // Kill clients pending removal.
+ mService->reclaimResourcesFromClientsPendingRemoval(mPid);
}
//static
@@ -309,7 +318,7 @@
class MediaCodec::ReleaseSurface {
public:
- ReleaseSurface() {
+ explicit ReleaseSurface(uint64_t usage) {
BufferQueue::createBufferQueue(&mProducer, &mConsumer);
mSurface = new Surface(mProducer, false /* controlledByApp */);
struct ConsumerListener : public BnConsumerListener {
@@ -320,6 +329,7 @@
sp<ConsumerListener> listener{new ConsumerListener};
mConsumer->consumerConnect(listener, false);
mConsumer->setConsumerName(String8{"MediaCodec.release"});
+ mConsumer->setConsumerUsageBits(usage);
}
const sp<Surface> &getSurface() {
@@ -609,7 +619,10 @@
return new PersistentSurface(bufferProducer, bufferSource);
}
-MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid)
+MediaCodec::MediaCodec(
+ const sp<ALooper> &looper, pid_t pid, uid_t uid,
+ std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+ std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
: mState(UNINITIALIZED),
mReleasedByResourceManager(false),
mLooper(looper),
@@ -618,7 +631,6 @@
mFlags(0),
mStickyError(OK),
mSoftRenderer(NULL),
- mMetricsHandle(0),
mIsVideo(false),
mVideoWidth(0),
mVideoHeight(0),
@@ -627,6 +639,9 @@
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
mDequeueOutputReplyID(0),
+ mTunneledInputWidth(0),
+ mTunneledInputHeight(0),
+ mTunneled(false),
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
@@ -635,7 +650,9 @@
mNumLowLatencyDisables(0),
mIsLowLatencyModeOn(false),
mIndexOfFirstFrameWhenLowLatencyOn(-1),
- mInputBufferCounter(0) {
+ mInputBufferCounter(0),
+ mGetCodecBase(getCodecBase),
+ mGetCodecInfo(getCodecInfo) {
if (uid == kNoUid) {
mUid = AIBinder_getCallingUid();
} else {
@@ -643,6 +660,33 @@
}
mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
::ndk::SharedRefBase::make<ResourceManagerClient>(this));
+ if (!mGetCodecBase) {
+ mGetCodecBase = [](const AString &name, const char *owner) {
+ return GetCodecBase(name, owner);
+ };
+ }
+ if (!mGetCodecInfo) {
+ mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+ *info = nullptr;
+ const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
+ if (!mcl) {
+ return NO_INIT; // if called from Java should raise IOException
+ }
+ AString tmp = name;
+ if (tmp.endsWith(".secure")) {
+ tmp.erase(tmp.size() - 7, 7);
+ }
+ for (const AString &codecName : { name, tmp }) {
+ ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
+ if (codecIdx < 0) {
+ continue;
+ }
+ *info = mcl->getCodecInfo(codecIdx);
+ return OK;
+ }
+ return NAME_NOT_FOUND;
+ };
+ }
initMediametrics();
}
@@ -678,6 +722,8 @@
mIndexOfFirstFrameWhenLowLatencyOn = -1;
mInputBufferCounter = 0;
}
+
+ mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
}
void MediaCodec::updateMediametrics() {
@@ -686,7 +732,6 @@
return;
}
-
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -702,6 +747,11 @@
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
+ if (mLifetimeStartNs > 0) {
+ nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
+ lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
+ mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
+ }
{
Mutex::Autolock al(mLatencyLock);
@@ -739,7 +789,6 @@
}
}
-
// spit the data (if any) into the supplied analytics record
if (recentHist.getCount()!= 0 ) {
mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
@@ -999,6 +1048,12 @@
return err;
}
+void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ PostReplyWithError(replyID, err);
+}
+
void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
int32_t finalErr = err;
if (mReleasedByResourceManager) {
@@ -1075,40 +1130,30 @@
bool secureCodec = false;
const char *owner = "";
if (!name.startsWith("android.filter.")) {
- AString tmp = name;
- if (tmp.endsWith(".secure")) {
- secureCodec = true;
- tmp.erase(tmp.size() - 7, 7);
- }
- const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
- if (mcl == NULL) {
+ status_t err = mGetCodecInfo(name, &mCodecInfo);
+ if (err != OK) {
mCodec = NULL; // remove the codec.
- return NO_INIT; // if called from Java should raise IOException
- }
- for (const AString &codecName : { name, tmp }) {
- ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
- if (codecIdx < 0) {
- continue;
- }
- mCodecInfo = mcl->getCodecInfo(codecIdx);
- Vector<AString> mediaTypes;
- mCodecInfo->getSupportedMediaTypes(&mediaTypes);
- for (size_t i = 0; i < mediaTypes.size(); i++) {
- if (mediaTypes[i].startsWith("video/")) {
- mIsVideo = true;
- break;
- }
- }
- break;
+ return err;
}
if (mCodecInfo == nullptr) {
+ ALOGE("Getting codec info with name '%s' failed", name.c_str());
return NAME_NOT_FOUND;
}
+ secureCodec = name.endsWith(".secure");
+ Vector<AString> mediaTypes;
+ mCodecInfo->getSupportedMediaTypes(&mediaTypes);
+ for (size_t i = 0; i < mediaTypes.size(); ++i) {
+ if (mediaTypes[i].startsWith("video/")) {
+ mIsVideo = true;
+ break;
+ }
+ }
owner = mCodecInfo->getOwnerName();
}
- mCodec = GetCodecBase(name, owner);
+ mCodec = mGetCodecBase(name, owner);
if (mCodec == NULL) {
+ ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
return NAME_NOT_FOUND;
}
@@ -1473,9 +1518,9 @@
return PostAndAwaitResponse(msg, &response);
}
-status_t MediaCodec::releaseAsync() {
+status_t MediaCodec::releaseAsync(const sp<AMessage> ¬ify) {
sp<AMessage> msg = new AMessage(kWhatRelease, this);
- msg->setInt32("async", 1);
+ msg->setMessage("async", notify);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
@@ -1502,7 +1547,6 @@
mStickyError = OK;
// reset state not reset by setState(UNINITIALIZED)
- mReplyID = 0;
mDequeueInputReplyID = 0;
mDequeueOutputReplyID = 0;
mDequeueInputTimeoutGeneration = 0;
@@ -2034,20 +2078,25 @@
} else if (mFlags & kFlagOutputBuffersChanged) {
PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
mFlags &= ~kFlagOutputBuffersChanged;
- } else if (mFlags & kFlagOutputFormatChanged) {
- PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
- mFlags &= ~kFlagOutputFormatChanged;
} else {
sp<AMessage> response = new AMessage;
- ssize_t index = dequeuePortBuffer(kPortIndexOutput);
-
- if (index < 0) {
- CHECK_EQ(index, -EAGAIN);
+ BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
+ if (!info) {
return false;
}
- const sp<MediaCodecBuffer> &buffer =
- mPortBuffers[kPortIndexOutput][index].mData;
+ // In synchronous mode, output format change should be handled
+ // at dequeue to put the event at the correct order.
+
+ const sp<MediaCodecBuffer> &buffer = info->mData;
+ handleOutputFormatChangeIfNeeded(buffer);
+ if (mFlags & kFlagOutputFormatChanged) {
+ PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
+ mFlags &= ~kFlagOutputFormatChanged;
+ return true;
+ }
+
+ ssize_t index = dequeuePortBuffer(kPortIndexOutput);
response->setSize("index", index);
response->setSize("offset", buffer->offset());
@@ -2084,14 +2133,16 @@
CHECK(msg->findInt32("err", &err));
CHECK(msg->findInt32("actionCode", &actionCode));
- ALOGE("Codec reported err %#x, actionCode %d, while in state %d",
- err, actionCode, mState);
+ ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
+ err, actionCode, mState, stateString(mState).c_str());
if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
mFlags &= ~kFlagIsComponentAllocated;
}
bool sendErrorResponse = true;
+ std::string origin{"kWhatError:"};
+ origin += stateString(mState);
switch (mState) {
case INITIALIZING:
@@ -2143,14 +2194,14 @@
// be a shutdown complete notification after
// all.
- // note that we're directly going from
+ // note that we may be directly going from
// STOPPING->UNINITIALIZED, instead of the
// usual STOPPING->INITIALIZED state.
setState(UNINITIALIZED);
if (mState == RELEASING) {
mComponentName.clear();
}
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages(origin + ":dead");
sendErrorResponse = false;
}
break;
@@ -2176,7 +2227,7 @@
case FLUSHED:
case STARTED:
{
- sendErrorResponse = false;
+ sendErrorResponse = (mReplyID != nullptr);
setStickyError(err);
postActivityNotificationIfPossible();
@@ -2206,7 +2257,7 @@
default:
{
- sendErrorResponse = false;
+ sendErrorResponse = (mReplyID != nullptr);
setStickyError(err);
postActivityNotificationIfPossible();
@@ -2233,7 +2284,15 @@
}
if (sendErrorResponse) {
- PostReplyWithError(mReplyID, err);
+ // TRICKY: replicate PostReplyWithError logic for
+ // err code override
+ int32_t finalErr = err;
+ if (mReleasedByResourceManager) {
+ // override the err code if MediaCodec has been
+ // released by ResourceManager.
+ finalErr = DEAD_OBJECT;
+ }
+ postPendingRepliesAndDeferredMessages(origin, finalErr);
}
break;
}
@@ -2243,8 +2302,8 @@
if (mState == RELEASING || mState == UNINITIALIZED) {
// In case a kWhatError or kWhatRelease message came in and replied,
// we log a warning and ignore.
- ALOGW("allocate interrupted by error or release, current state %d",
- mState);
+ ALOGW("allocate interrupted by error or release, current state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
CHECK_EQ(mState, INITIALIZING);
@@ -2281,7 +2340,7 @@
MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
}
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
break;
}
@@ -2290,8 +2349,8 @@
if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
// In case a kWhatError or kWhatRelease message came in and replied,
// we log a warning and ignore.
- ALOGW("configure interrupted by error or release, current state %d",
- mState);
+ ALOGW("configure interrupted by error or release, current state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
CHECK_EQ(mState, CONFIGURING);
@@ -2308,6 +2367,8 @@
// meaningful and confusing for an encoder in a transcoder scenario
mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
}
+ sp<AMessage> interestingFormat =
+ (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
ALOGV("[%s] configured as input format: %s, output format: %s",
mComponentName.c_str(),
mInputFormat->debugString(4).c_str(),
@@ -2318,9 +2379,10 @@
mFlags |= kFlagUsesSoftwareRenderer;
}
setState(CONFIGURED);
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
// augment our media metrics info, now that we know more things
+ // such as what the codec extracted from any CSD passed in.
if (mMetricsHandle != 0) {
sp<AMessage> format;
if (mConfigureMsg != NULL &&
@@ -2332,12 +2394,42 @@
mime.c_str());
}
}
+ // perhaps video only?
+ int32_t profile = 0;
+ if (interestingFormat->findInt32("profile", &profile)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
+ }
+ int32_t level = 0;
+ if (interestingFormat->findInt32("level", &level)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
+ }
+ // bitrate and bitrate mode, encoder only
+ if (mFlags & kFlagIsEncoder) {
+ // encoder specific values
+ int32_t bitrate_mode = -1;
+ if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
+ mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
+ asString_BitrateMode(bitrate_mode));
+ }
+ int32_t bitrate = -1;
+ if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
+ }
+ } else {
+ // decoder specific values
+ }
}
break;
}
case kWhatInputSurfaceCreated:
{
+ if (mState != CONFIGURED) {
+ // state transitioned unexpectedly; we should have replied already.
+ ALOGD("received kWhatInputSurfaceCreated message in state %s",
+ stateString(mState).c_str());
+ break;
+ }
// response to initiateCreateInputSurface()
status_t err = NO_ERROR;
sp<AMessage> response = new AMessage;
@@ -2356,12 +2448,18 @@
} else {
response->setInt32("err", err);
}
- response->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
break;
}
case kWhatInputSurfaceAccepted:
{
+ if (mState != CONFIGURED) {
+ // state transitioned unexpectedly; we should have replied already.
+ ALOGD("received kWhatInputSurfaceAccepted message in state %s",
+ stateString(mState).c_str());
+ break;
+ }
// response to initiateSetInputSurface()
status_t err = NO_ERROR;
sp<AMessage> response = new AMessage();
@@ -2372,19 +2470,25 @@
} else {
response->setInt32("err", err);
}
- response->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
break;
}
case kWhatSignaledInputEOS:
{
+ if (!isExecuting()) {
+ // state transitioned unexpectedly; we should have replied already.
+ ALOGD("received kWhatSignaledInputEOS message in state %s",
+ stateString(mState).c_str());
+ break;
+ }
// response to signalEndOfInputStream()
sp<AMessage> response = new AMessage;
status_t err;
if (msg->findInt32("err", &err)) {
response->setInt32("err", err);
}
- response->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
break;
}
@@ -2393,7 +2497,8 @@
if (mState == RELEASING || mState == UNINITIALIZED) {
// In case a kWhatRelease message came in and replied,
// we log a warning and ignore.
- ALOGW("start interrupted by release, current state %d", mState);
+ ALOGW("start interrupted by release, current state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
@@ -2403,7 +2508,7 @@
MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
}
setState(STARTED);
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
break;
}
@@ -2460,6 +2565,18 @@
}
break;
}
+ if (!mLeftover.empty()) {
+ ssize_t index = dequeuePortBuffer(kPortIndexInput);
+ CHECK_GE(index, 0);
+
+ status_t err = handleLeftover(index);
+ if (err != OK) {
+ setStickyError(err);
+ postActivityNotificationIfPossible();
+ cancelPendingDequeueOperations();
+ }
+ break;
+ }
if (mFlags & kFlagIsAsync) {
if (!mHaveInputSurface) {
@@ -2492,107 +2609,13 @@
break;
}
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-
- if (mOutputFormat != buffer->format()) {
- if (mFlags & kFlagUseBlockModel) {
- sp<AMessage> diff1 = mOutputFormat->changesFrom(buffer->format());
- sp<AMessage> diff2 = buffer->format()->changesFrom(mOutputFormat);
- std::set<std::string> keys;
- size_t numEntries = diff1->countEntries();
- AMessage::Type type;
- for (size_t i = 0; i < numEntries; ++i) {
- keys.emplace(diff1->getEntryNameAt(i, &type));
- }
- numEntries = diff2->countEntries();
- for (size_t i = 0; i < numEntries; ++i) {
- keys.emplace(diff2->getEntryNameAt(i, &type));
- }
- sp<WrapperObject<std::set<std::string>>> changedKeys{
- new WrapperObject<std::set<std::string>>{std::move(keys)}};
- buffer->meta()->setObject("changedKeys", changedKeys);
- }
- mOutputFormat = buffer->format();
- ALOGV("[%s] output format changed to: %s",
- mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
-
- if (mSoftRenderer == NULL &&
- mSurface != NULL &&
- (mFlags & kFlagUsesSoftwareRenderer)) {
- AString mime;
- CHECK(mOutputFormat->findString("mime", &mime));
-
- // TODO: propagate color aspects to software renderer to allow better
- // color conversion to RGB. For now, just mark dataspace for YUV
- // rendering.
- int32_t dataSpace;
- if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
- ALOGD("[%s] setting dataspace on output surface to #%x",
- mComponentName.c_str(), dataSpace);
- int err = native_window_set_buffers_data_space(
- mSurface.get(), (android_dataspace)dataSpace);
- ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
- }
- if (mOutputFormat->contains("hdr-static-info")) {
- HDRStaticInfo info;
- if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
- setNativeWindowHdrMetadata(mSurface.get(), &info);
- }
- }
-
- sp<ABuffer> hdr10PlusInfo;
- if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
- && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
- native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
- hdr10PlusInfo->size(), hdr10PlusInfo->data());
- }
-
- if (mime.startsWithIgnoreCase("video/")) {
- mSurface->setDequeueTimeout(-1);
- mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
- }
- }
-
- requestCpuBoostIfNeeded();
-
- if (mFlags & kFlagIsEncoder) {
- // Before we announce the format change we should
- // collect codec specific data and amend the output
- // format as necessary.
- int32_t flags = 0;
- (void) buffer->meta()->findInt32("flags", &flags);
- if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)) {
- status_t err =
- amendOutputFormatWithCodecSpecificData(buffer);
-
- if (err != OK) {
- ALOGE("Codec spit out malformed codec "
- "specific data!");
- }
- }
- }
- if (mFlags & kFlagIsAsync) {
- onOutputFormatChanged();
- } else {
- mFlags |= kFlagOutputFormatChanged;
- postActivityNotificationIfPossible();
- }
-
- // Notify mCrypto of video resolution changes
- if (mCrypto != NULL) {
- int32_t left, top, right, bottom, width, height;
- if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
- mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
- } else if (mOutputFormat->findInt32("width", &width)
- && mOutputFormat->findInt32("height", &height)) {
- mCrypto->notifyResolution(width, height);
- }
- }
- }
-
if (mFlags & kFlagIsAsync) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("buffer", &obj));
+ sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+ // In asynchronous mode, output format change is processed immediately.
+ handleOutputFormatChangeIfNeeded(buffer);
onOutputBufferAvailable();
} else if (mFlags & kFlagDequeueOutputPending) {
CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
@@ -2617,18 +2640,26 @@
case kWhatStopCompleted:
{
if (mState != STOPPING) {
- ALOGW("Received kWhatStopCompleted in state %d", mState);
+ ALOGW("Received kWhatStopCompleted in state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
setState(INITIALIZED);
- (new AMessage)->postReply(mReplyID);
+ if (mReplyID) {
+ postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
+ } else {
+ ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
+ "but the operation completed anyway. (last reply origin=%s)",
+ mLastReplyOrigin.c_str());
+ }
break;
}
case kWhatReleaseCompleted:
{
if (mState != RELEASING) {
- ALOGW("Received kWhatReleaseCompleted in state %d", mState);
+ ALOGW("Received kWhatReleaseCompleted in state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
setState(UNINITIALIZED);
@@ -2645,7 +2676,12 @@
mReleaseSurface.reset();
if (mReplyID != nullptr) {
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
+ }
+ if (mAsyncReleaseCompleteNotification != nullptr) {
+ flushMediametrics();
+ mAsyncReleaseCompleteNotification->post();
+ mAsyncReleaseCompleteNotification.clear();
}
break;
}
@@ -2653,8 +2689,8 @@
case kWhatFlushCompleted:
{
if (mState != FLUSHING) {
- ALOGW("received FlushCompleted message in state %d",
- mState);
+ ALOGW("received FlushCompleted message in state %d/%s",
+ mState, stateString(mState).c_str());
break;
}
@@ -2665,7 +2701,7 @@
mCodec->signalResume();
}
- (new AMessage)->postReply(mReplyID);
+ postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
break;
}
@@ -2677,14 +2713,18 @@
case kWhatInit:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
if (mState != UNINITIALIZED) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
mReplyID = replyID;
setState(INITIALIZING);
@@ -2746,14 +2786,18 @@
case kWhatConfigure:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
if (mState != INITIALIZED) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
@@ -2826,6 +2870,14 @@
extractCSD(format);
+ int32_t tunneled;
+ if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+ ALOGI("Configuring TUNNELED video playback.");
+ mTunneled = true;
+ } else {
+ mTunneled = false;
+ }
+
mCodec->initiateConfigureComponent(format);
break;
}
@@ -2891,15 +2943,19 @@
case kWhatCreateInputSurface:
case kWhatSetInputSurface:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
// Must be configured, but can't have been started yet.
if (mState != CONFIGURED) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
mReplyID = replyID;
if (msg->what() == kWhatCreateInputSurface) {
mCodec->initiateCreateInputSurface();
@@ -2914,9 +2970,6 @@
}
case kWhatStart:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
if (mState == FLUSHED) {
setState(STARTED);
if (mHavePendingInputBuffers) {
@@ -2924,13 +2977,20 @@
mHavePendingInputBuffers = false;
}
mCodec->signalResume();
- PostReplyWithError(replyID, OK);
+ PostReplyWithError(msg, OK);
break;
} else if (mState != CONFIGURED) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
mReplyID = replyID;
setState(STARTING);
@@ -2938,15 +2998,42 @@
break;
}
- case kWhatStop:
+ case kWhatStop: {
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ [[fallthrough]];
+ }
case kWhatRelease:
{
State targetState =
(msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
+ if ((mState == RELEASING && targetState == UNINITIALIZED)
+ || (mState == STOPPING && targetState == INITIALIZED)) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ sp<AMessage> asyncNotify;
+ (void)msg->findMessage("async", &asyncNotify);
+ // post asyncNotify if going out of scope.
+ struct AsyncNotifyPost {
+ AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
+ ~AsyncNotifyPost() {
+ if (mAsyncNotify) {
+ mAsyncNotify->post();
+ }
+ }
+ void clear() { mAsyncNotify.clear(); }
+ private:
+ sp<AMessage> mAsyncNotify;
+ } asyncNotifyPost{asyncNotify};
+
// already stopped/released
if (mState == UNINITIALIZED && mReleasedByResourceManager) {
sp<AMessage> response = new AMessage;
@@ -2958,7 +3045,13 @@
int32_t reclaimed = 0;
msg->findInt32("reclaimed", &reclaimed);
if (reclaimed) {
- mReleasedByResourceManager = true;
+ if (!mReleasedByResourceManager) {
+ // notify the async client
+ if (mFlags & kFlagIsAsync) {
+ onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+ }
+ mReleasedByResourceManager = true;
+ }
int32_t force = 0;
msg->findInt32("force", &force);
@@ -2970,10 +3063,6 @@
response->setInt32("err", WOULD_BLOCK);
response->postReply(replyID);
- // notify the async client
- if (mFlags & kFlagIsAsync) {
- onError(DEAD_OBJECT, ACTION_CODE_FATAL);
- }
break;
}
}
@@ -3010,12 +3099,15 @@
// after this, and we'll no longer be able to reply.
if (mState == FLUSHING || mState == STOPPING
|| mState == CONFIGURING || mState == STARTING) {
- (new AMessage)->postReply(mReplyID);
+            // mReplyID is always set if in these states.
+ postPendingRepliesAndDeferredMessages(
+ std::string("kWhatRelease:") + stateString(mState));
}
if (mFlags & kFlagSawMediaServerDie) {
// It's dead, Jim. Don't expect initiateShutdown to yield
// any useful results now...
+ // Any pending reply would have been handled at kWhatError.
setState(UNINITIALIZED);
if (targetState == UNINITIALIZED) {
mComponentName.clear();
@@ -3029,30 +3121,41 @@
// reply now with an error to unblock the client, client can
// release after the failure (instead of ANR).
if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
+ // Any pending reply would have been handled at kWhatError.
PostReplyWithError(replyID, getStickyError());
break;
}
- int32_t async = 0;
- if (msg->findInt32("async", &async) && async) {
- if ((mState == CONFIGURED || mState == STARTED || mState == FLUSHED)
- && mSurface != NULL) {
+ if (asyncNotify != nullptr) {
+ if (mSurface != NULL) {
if (!mReleaseSurface) {
- mReleaseSurface.reset(new ReleaseSurface);
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
+ }
+ mReleaseSurface.reset(new ReleaseSurface(usage));
}
- status_t err = connectToSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
+ if (mSurface != mReleaseSurface->getSurface()) {
+ status_t err = connectToSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mReleaseSurface->getSurface();
+ }
}
}
}
+ if (mReplyID) {
+ // State transition replies are handled above, so this reply
+ // would not be related to state transition. As we are
+ // shutting down the component, just fail the operation.
+ postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
+ }
mReplyID = replyID;
setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
@@ -3065,10 +3168,11 @@
pushBlankBuffersToNativeWindow(mSurface.get());
}
- if (async) {
+ if (asyncNotify != nullptr) {
mResourceManagerProxy->markClientForPendingRemoval();
- (new AMessage)->postReply(mReplyID);
- mReplyID = 0;
+ postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ asyncNotifyPost.clear();
+ mAsyncReleaseCompleteNotification = asyncNotify;
}
break;
@@ -3148,7 +3252,15 @@
break;
}
- status_t err = onQueueInputBuffer(msg);
+ status_t err = UNKNOWN_ERROR;
+ if (!mLeftover.empty()) {
+ mLeftover.push_back(msg);
+ size_t index;
+ msg->findSize("index", &index);
+ err = handleLeftover(index);
+ } else {
+ err = onQueueInputBuffer(msg);
+ }
PostReplyWithError(replyID, err);
break;
@@ -3230,17 +3342,21 @@
case kWhatSignalEndOfInputStream:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
if (!isExecuting() || !mHaveInputSurface) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
- PostReplyWithError(replyID, getStickyError());
+ PostReplyWithError(msg, getStickyError());
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
mReplyID = replyID;
mCodec->signalEndOfInputStream();
break;
@@ -3282,17 +3398,21 @@
case kWhatFlush:
{
- sp<AReplyToken> replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
if (!isExecuting()) {
- PostReplyWithError(replyID, INVALID_OPERATION);
+ PostReplyWithError(msg, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
- PostReplyWithError(replyID, getStickyError());
+ PostReplyWithError(msg, getStickyError());
break;
}
+ if (mReplyID) {
+ mDeferredMessages.push_back(msg);
+ break;
+ }
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
mReplyID = replyID;
// TODO: skip flushing if already FLUSHED
setState(FLUSHING);
@@ -3406,6 +3526,106 @@
}
}
+void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
+ sp<AMessage> format = buffer->format();
+ if (mOutputFormat == format) {
+ return;
+ }
+ if (mFlags & kFlagUseBlockModel) {
+ sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
+ sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
+ std::set<std::string> keys;
+ size_t numEntries = diff1->countEntries();
+ AMessage::Type type;
+ for (size_t i = 0; i < numEntries; ++i) {
+ keys.emplace(diff1->getEntryNameAt(i, &type));
+ }
+ numEntries = diff2->countEntries();
+ for (size_t i = 0; i < numEntries; ++i) {
+ keys.emplace(diff2->getEntryNameAt(i, &type));
+ }
+ sp<WrapperObject<std::set<std::string>>> changedKeys{
+ new WrapperObject<std::set<std::string>>{std::move(keys)}};
+ buffer->meta()->setObject("changedKeys", changedKeys);
+ }
+ mOutputFormat = format;
+ ALOGV("[%s] output format changed to: %s",
+ mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
+
+ if (mSoftRenderer == NULL &&
+ mSurface != NULL &&
+ (mFlags & kFlagUsesSoftwareRenderer)) {
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ // TODO: propagate color aspects to software renderer to allow better
+ // color conversion to RGB. For now, just mark dataspace for YUV
+ // rendering.
+ int32_t dataSpace;
+ if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+ ALOGD("[%s] setting dataspace on output surface to #%x",
+ mComponentName.c_str(), dataSpace);
+ int err = native_window_set_buffers_data_space(
+ mSurface.get(), (android_dataspace)dataSpace);
+ ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
+ }
+ if (mOutputFormat->contains("hdr-static-info")) {
+ HDRStaticInfo info;
+ if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
+ setNativeWindowHdrMetadata(mSurface.get(), &info);
+ }
+ }
+
+ sp<ABuffer> hdr10PlusInfo;
+ if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+ && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+ native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
+ hdr10PlusInfo->size(), hdr10PlusInfo->data());
+ }
+
+ if (mime.startsWithIgnoreCase("video/")) {
+ mSurface->setDequeueTimeout(-1);
+ mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
+ }
+ }
+
+ requestCpuBoostIfNeeded();
+
+ if (mFlags & kFlagIsEncoder) {
+ // Before we announce the format change we should
+ // collect codec specific data and amend the output
+ // format as necessary.
+ int32_t flags = 0;
+ (void) buffer->meta()->findInt32("flags", &flags);
+ if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)) {
+ status_t err =
+ amendOutputFormatWithCodecSpecificData(buffer);
+
+ if (err != OK) {
+ ALOGE("Codec spit out malformed codec "
+ "specific data!");
+ }
+ }
+ }
+ if (mFlags & kFlagIsAsync) {
+ onOutputFormatChanged();
+ } else {
+ mFlags |= kFlagOutputFormatChanged;
+ postActivityNotificationIfPossible();
+ }
+
+ // Notify mCrypto of video resolution changes
+ if (mCrypto != NULL) {
+ int32_t left, top, right, bottom, width, height;
+ if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
+ } else if (mOutputFormat->findInt32("width", &width)
+ && mOutputFormat->findInt32("height", &height)) {
+ mCrypto->notifyResolution(width, height);
+ }
+ }
+}
+
void MediaCodec::extractCSD(const sp<AMessage> &format) {
mCSD.clear();
@@ -3432,19 +3652,42 @@
sp<ABuffer> csd = *mCSD.begin();
mCSD.erase(mCSD.begin());
std::shared_ptr<C2Buffer> c2Buffer;
+ sp<hardware::HidlMemory> memory;
+ size_t offset = 0;
- if ((mFlags & kFlagUseBlockModel) && mOwnerName.startsWith("codec2::")) {
- std::shared_ptr<C2LinearBlock> block =
- FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
- C2WriteView view{block->map().get()};
- if (view.error() != C2_OK) {
- return -EINVAL;
+ if (mFlags & kFlagUseBlockModel) {
+ if (hasCryptoOrDescrambler()) {
+ constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
+ thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
+ kInitialDealerCapacity, "CSD(1MB)");
+ sp<IMemory> mem = sDealer->allocate(csd->size());
+ if (mem == nullptr) {
+ size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
+ while (csd->size() * 2 > newDealerCapacity) {
+ newDealerCapacity *= 2;
+ }
+ sDealer = new MemoryDealer(
+ newDealerCapacity,
+ AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
+ mem = sDealer->allocate(csd->size());
+ }
+ memcpy(mem->unsecurePointer(), csd->data(), csd->size());
+ ssize_t heapOffset;
+ memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
+ offset += heapOffset;
+ } else {
+ std::shared_ptr<C2LinearBlock> block =
+ FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
+ C2WriteView view{block->map().get()};
+ if (view.error() != C2_OK) {
+ return -EINVAL;
+ }
+ if (csd->size() > view.capacity()) {
+ return -EINVAL;
+ }
+ memcpy(view.base(), csd->data(), csd->size());
+ c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
}
- if (csd->size() > view.capacity()) {
- return -EINVAL;
- }
- memcpy(view.base(), csd->data(), csd->size());
- c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
} else {
const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
const sp<MediaCodecBuffer> &codecInputData = info.mData;
@@ -3474,6 +3717,11 @@
new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
msg->setObject("c2buffer", obj);
msg->setMessage("tunings", new AMessage);
+ } else if (memory) {
+ sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
+ new WrapperObject<sp<hardware::HidlMemory>>{memory}};
+ msg->setObject("memory", obj);
+ msg->setMessage("tunings", new AMessage);
}
return onQueueInputBuffer(msg);
@@ -3533,6 +3781,9 @@
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
Mutex::Autolock al(mBufferLock);
+ if (portIndex == kPortIndexInput) {
+ mLeftover.clear();
+ }
for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
BufferInfo *info = &mPortBuffers[portIndex][i];
@@ -3591,6 +3842,7 @@
} else if (msg->findObject("memory", &obj)) {
CHECK(obj);
memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
+ CHECK(msg->findSize("offset", &offset));
} else {
CHECK(msg->findSize("offset", &offset));
}
@@ -3662,7 +3914,26 @@
err = mBufferChannel->attachEncryptedBuffer(
memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
offset, subSamples, numSubSamples, buffer);
+ } else {
+ err = UNKNOWN_ERROR;
}
+
+ if (err == OK && !buffer->asC2Buffer()
+ && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
+ C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
+ if (block.size() > buffer->size()) {
+ C2ConstLinearBlock leftover = block.subBlock(
+ block.offset() + buffer->size(), block.size() - buffer->size());
+ sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+ new WrapperObject<std::shared_ptr<C2Buffer>>{
+ C2Buffer::CreateLinearBuffer(leftover)}};
+ msg->setObject("c2buffer", obj);
+ mLeftover.push_front(msg);
+ // Not sending EOS if we have leftovers
+ flags &= ~BUFFER_FLAG_EOS;
+ }
+ }
+
offset = buffer->offset();
size = buffer->size();
if (err != OK) {
@@ -3689,10 +3960,21 @@
}
status_t err = OK;
- if (hasCryptoOrDescrambler()) {
+ if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
AString *errorDetailMsg;
CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
-
+ // Notify mCrypto of video resolution changes
+ if (mTunneled && mCrypto != NULL) {
+ int32_t width, height;
+ if (mInputFormat->findInt32("width", &width) &&
+ mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
+ if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
+ mTunneledInputWidth = width;
+ mTunneledInputHeight = height;
+ mCrypto->notifyResolution(width, height);
+ }
+ }
+ }
err = mBufferChannel->queueSecureInputBuffer(
buffer,
(mFlags & kFlagIsSecure),
@@ -3727,6 +4009,16 @@
return err;
}
+status_t MediaCodec::handleLeftover(size_t index) {
+ if (mLeftover.empty()) {
+ return OK;
+ }
+ sp<AMessage> msg = mLeftover.front();
+ mLeftover.pop_front();
+ msg->setSize("index", index);
+ return onQueueInputBuffer(msg);
+}
+
//static
size_t MediaCodec::CreateFramesRenderedMessage(
const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
@@ -3810,19 +4102,31 @@
return OK;
}
-ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
if (availBuffers->empty()) {
+ return nullptr;
+ }
+
+ return &mPortBuffers[portIndex][*availBuffers->begin()];
+}
+
+ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ BufferInfo *info = peekNextPortBuffer(portIndex);
+ if (!info) {
return -EAGAIN;
}
+ List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
size_t index = *availBuffers->begin();
+ CHECK_EQ(info, &mPortBuffers[portIndex][index]);
availBuffers->erase(availBuffers->begin());
- BufferInfo *info = &mPortBuffers[portIndex][index];
CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
@@ -4064,6 +4368,33 @@
return OK;
}
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+ std::string origin, status_t err /* = OK */) {
+ sp<AMessage> response{new AMessage};
+ if (err != OK) {
+ response->setInt32("err", err);
+ }
+ postPendingRepliesAndDeferredMessages(origin, response);
+}
+
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+ std::string origin, const sp<AMessage> &response) {
+ LOG_ALWAYS_FATAL_IF(
+ !mReplyID,
+ "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
+ origin.c_str(),
+ mLastReplyOrigin.c_str());
+ mLastReplyOrigin = origin;
+ response->postReply(mReplyID);
+ mReplyID.clear();
+ ALOGV_IF(!mDeferredMessages.empty(),
+ "posting %zu deferred messages", mDeferredMessages.size());
+ for (sp<AMessage> msg : mDeferredMessages) {
+ msg->post();
+ }
+ mDeferredMessages.clear();
+}
+
std::string MediaCodec::stateString(State state) {
const char *rval = NULL;
char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 1cb45ac..c91386d 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -48,8 +48,7 @@
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
: mFormat(format),
- mState(UNINITIALIZED),
- mError(OK) {
+ mState(UNINITIALIZED) {
if (isMp4Format(format)) {
mWriter = new MPEG4Writer(fd);
} else if (format == OUTPUT_FORMAT_WEBM) {
@@ -59,7 +58,6 @@
}
if (mWriter != NULL) {
- mWriter->setMuxerListener(this);
mFileMeta = new MetaData;
if (format == OUTPUT_FORMAT_HEIF) {
// Note that the key uses recorder file types.
@@ -94,7 +92,9 @@
}
sp<MetaData> trackMeta = new MetaData;
- convertMessageToMetaData(format, trackMeta);
+ if (convertMessageToMetaData(format, trackMeta) != OK) {
+ return BAD_VALUE;
+ }
sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
status_t result = mWriter->addSource(newTrack);
@@ -157,23 +157,16 @@
status_t MediaMuxer::stop() {
Mutex::Autolock autoLock(mMuxerLock);
- if (mState == STARTED || mState == ERROR) {
+ if (mState == STARTED) {
mState = STOPPED;
for (size_t i = 0; i < mTrackList.size(); i++) {
if (mTrackList[i]->stop() != OK) {
return INVALID_OPERATION;
}
}
- // Unlock this mutex to allow notify to be called during stop process.
- mMuxerLock.unlock();
status_t err = mWriter->stop();
- mMuxerLock.lock();
- if (err != OK || mError != OK) {
- ALOGE("stop err: %d, mError:%d", err, mError);
- }
- // Prioritize mError over err.
- if (mError != OK) {
- err = mError;
+ if (err != OK) {
+ ALOGE("stop() err: %d", err);
}
return err;
} else {
@@ -184,16 +177,23 @@
status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
int64_t timeUs, uint32_t flags) {
- Mutex::Autolock autoLock(mMuxerLock);
-
if (buffer.get() == NULL) {
ALOGE("WriteSampleData() get an NULL buffer.");
return -EINVAL;
}
-
- if (mState != STARTED) {
- ALOGE("WriteSampleData() is called in invalid state %d", mState);
- return INVALID_OPERATION;
+ {
+    /* As MediaMuxer's writeSampleData handles inputs from multiple tracks,
+     * limit the scope of mMuxerLock to this inner block so that the
+     * current track's buffer does not have to wait for the completion
+     * of processing of a previous buffer of the same or another track.
+ * It's the responsibility of individual track - MediaAdapter object
+ * to gate its buffers.
+ */
+ Mutex::Autolock autoLock(mMuxerLock);
+ if (mState != STARTED) {
+ ALOGE("WriteSampleData() is called in invalid state %d", mState);
+ return INVALID_OPERATION;
+ }
}
if (trackIndex >= mTrackList.size()) {
@@ -229,29 +229,4 @@
return currentTrack->pushBuffer(mediaBuffer);
}
-void MediaMuxer::notify(int msg, int ext1, int ext2) {
- switch (msg) {
- case MEDIA_RECORDER_EVENT_ERROR:
- case MEDIA_RECORDER_TRACK_EVENT_ERROR: {
- Mutex::Autolock autoLock(mMuxerLock);
- mState = ERROR;
- mError = ext2;
- ALOGW("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
- break;
- }
- case MEDIA_RECORDER_EVENT_INFO: {
- if (ext1 == MEDIA_RECORDER_INFO_UNKNOWN) {
- Mutex::Autolock autoLock(mMuxerLock);
- mState = ERROR;
- mError = ext2;
- ALOGW("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2);
- }
- break;
- }
- default:
- // Ignore INFO and other notifications for now.
- break;
- }
-}
-
} // namespace android
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 050d7c2..f2c7dd6 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -50,8 +50,9 @@
mSampleTimeUs(timeUs) {
}
-NuMediaExtractor::NuMediaExtractor()
- : mTotalBitrate(-1LL),
+NuMediaExtractor::NuMediaExtractor(EntryPoint entryPoint)
+ : mEntryPoint(entryPoint),
+ mTotalBitrate(-1LL),
mDurationUs(-1LL) {
}
@@ -93,6 +94,7 @@
if (mImpl == NULL) {
return ERROR_UNSUPPORTED;
}
+ setEntryPointToRemoteMediaExtractor();
status_t err = OK;
if (!mCasToken.empty()) {
@@ -134,6 +136,7 @@
if (mImpl == NULL) {
return ERROR_UNSUPPORTED;
}
+ setEntryPointToRemoteMediaExtractor();
if (!mCasToken.empty()) {
err = mImpl->setMediaCas(mCasToken);
@@ -168,6 +171,7 @@
if (mImpl == NULL) {
return ERROR_UNSUPPORTED;
}
+ setEntryPointToRemoteMediaExtractor();
if (!mCasToken.empty()) {
err = mImpl->setMediaCas(mCasToken);
@@ -312,6 +316,27 @@
(*format)->setBuffer("pssh", buf);
}
+ // Copy over the slow-motion related metadata
+ const void *slomoMarkers;
+ size_t slomoMarkersSize;
+ if (meta->findData(kKeySlowMotionMarkers, &type, &slomoMarkers, &slomoMarkersSize)
+ && slomoMarkersSize > 0) {
+ sp<ABuffer> buf = new ABuffer(slomoMarkersSize);
+ memcpy(buf->data(), slomoMarkers, slomoMarkersSize);
+ (*format)->setBuffer("slow-motion-markers", buf);
+ }
+
+ int32_t temporalLayerCount;
+ if (meta->findInt32(kKeyTemporalLayerCount, &temporalLayerCount)
+ && temporalLayerCount > 0) {
+ (*format)->setInt32("temporal-layer-count", temporalLayerCount);
+ }
+
+ float captureFps;
+ if (meta->findFloat(kKeyCaptureFramerate, &captureFps) && captureFps > 0.0f) {
+ (*format)->setFloat("capture-rate", captureFps);
+ }
+
return OK;
}
@@ -468,6 +493,16 @@
}
}
+void NuMediaExtractor::setEntryPointToRemoteMediaExtractor() {
+ if (mImpl == NULL) {
+ return;
+ }
+ status_t err = mImpl->setEntryPoint(mEntryPoint);
+ if (err != OK) {
+ ALOGW("Failed to set entry point with error %d.", err);
+ }
+}
+
ssize_t NuMediaExtractor::fetchAllTrackSamples(
int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) {
TrackInfo *minInfo = NULL;
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
new file mode 100644
index 0000000..819389d
--- /dev/null
+++ b/media/libstagefright/OWNERS
@@ -0,0 +1,7 @@
+set noparent
+chz@google.com
+essick@google.com
+lajos@google.com
+marcone@google.com
+taklee@google.com
+wonsik@google.com
\ No newline at end of file
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 25e43c2..381eb1a 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -39,6 +39,12 @@
static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
static const char *kExtractorMime = "android.media.mediaextractor.mime";
static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
+static const char *kExtractorEntryPoint = "android.media.mediaextractor.entry";
+
+static const char *kEntryPointSdk = "sdk";
+static const char *kEntryPointWithJvm = "ndk-with-jvm";
+static const char *kEntryPointNoJvm = "ndk-no-jvm";
+static const char *kEntryPointOther = "other";
RemoteMediaExtractor::RemoteMediaExtractor(
MediaExtractor *extractor,
@@ -74,6 +80,9 @@
}
// what else is interesting and not already available?
}
+ // By default, we set the entry point to be "other". Clients of this
+ // class will override this value by calling setEntryPoint.
+ mMetricsItem->setCString(kExtractorEntryPoint, kEntryPointOther);
}
}
@@ -143,6 +152,28 @@
return String8(mExtractor->name());
}
+status_t RemoteMediaExtractor::setEntryPoint(EntryPoint entryPoint) {
+ const char* entryPointString;
+ switch (entryPoint) {
+ case EntryPoint::SDK:
+ entryPointString = kEntryPointSdk;
+ break;
+ case EntryPoint::NDK_WITH_JVM:
+ entryPointString = kEntryPointWithJvm;
+ break;
+ case EntryPoint::NDK_NO_JVM:
+ entryPointString = kEntryPointNoJvm;
+ break;
+ case EntryPoint::OTHER:
+ entryPointString = kEntryPointOther;
+ break;
+ default:
+ return BAD_VALUE;
+ }
+ mMetricsItem->setCString(kExtractorEntryPoint, entryPointString);
+ return OK;
+}
+
////////////////////////////////////////////////////////////////////////////////
// static
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 85ff474..1f569ef 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -132,37 +132,47 @@
}
void setNativeWindowHdrMetadata(ANativeWindow *nativeWindow, HDRStaticInfo *info) {
- struct android_smpte2086_metadata smpte2086_meta = {
- .displayPrimaryRed = {
- info->sType1.mR.x * 0.00002f,
- info->sType1.mR.y * 0.00002f
- },
- .displayPrimaryGreen = {
- info->sType1.mG.x * 0.00002f,
- info->sType1.mG.y * 0.00002f
- },
- .displayPrimaryBlue = {
- info->sType1.mB.x * 0.00002f,
- info->sType1.mB.y * 0.00002f
- },
- .whitePoint = {
- info->sType1.mW.x * 0.00002f,
- info->sType1.mW.y * 0.00002f
- },
- .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
- .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
- };
+    // If the mastering max and min luminance fields are 0, do not use them;
+    // this indicates that the values may not be present in the stream.
+ if ((float)info->sType1.mMaxDisplayLuminance > 0.0f &&
+ (info->sType1.mMinDisplayLuminance * 0.0001f) > 0.0f) {
+ struct android_smpte2086_metadata smpte2086_meta = {
+ .displayPrimaryRed = {
+ info->sType1.mR.x * 0.00002f,
+ info->sType1.mR.y * 0.00002f
+ },
+ .displayPrimaryGreen = {
+ info->sType1.mG.x * 0.00002f,
+ info->sType1.mG.y * 0.00002f
+ },
+ .displayPrimaryBlue = {
+ info->sType1.mB.x * 0.00002f,
+ info->sType1.mB.y * 0.00002f
+ },
+ .whitePoint = {
+ info->sType1.mW.x * 0.00002f,
+ info->sType1.mW.y * 0.00002f
+ },
+ .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
+ .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
+ };
- int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
- ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+ int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
+ ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+ }
- struct android_cta861_3_metadata cta861_meta = {
- .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
- .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
- };
+    // If the content light level fields are 0, do not use them; this
+    // indicates that the values may not be present in the stream.
+ if ((float)info->sType1.mMaxContentLightLevel > 0.0f &&
+ (float)info->sType1.mMaxFrameAverageLightLevel > 0.0f) {
+ struct android_cta861_3_metadata cta861_meta = {
+ .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
+ .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
+ };
- err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
- ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+ int err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
+ ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+ }
}
status_t setNativeWindowRotation(
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 3dceef7..dff7b22 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,12 +1,17 @@
{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "HEVCUtilsUnitTest" },
- //{ "name": "ExtractorFactoryTest" },
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
// writerTest fails about 5 out of 66
- // in addition to needing the download ability
- //{ "name": "writerTest" },
+ // { "name": "writerTest" },
+ { "name": "HEVCUtilsUnitTest" },
+ { "name": "ExtractorFactoryTest" }
+
+ ],
+
+ "presubmit-large": [
{
"name": "CtsMediaTestCases",
"options": [
@@ -26,6 +31,11 @@
]
}
],
+ "presubmit": [
+ {
+ "name": "mediacodecTest"
+ }
+ ],
"postsubmit": [
{
"name": "BatteryChecker_test"
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index a1e4d43..f63740e 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -729,6 +729,8 @@
{
{ "exif-offset", kKeyExifOffset },
{ "exif-size", kKeyExifSize },
+ { "xmp-offset", kKeyXmpOffset },
+ { "xmp-size", kKeyXmpSize },
{ "target-time", kKeyTargetTime },
{ "thumbnail-time", kKeyThumbnailTime },
{ "timeUs", kKeyTime },
@@ -769,6 +771,8 @@
{ "sei", kKeySEI },
{ "text-format-data", kKeyTextFormatData },
{ "thumbnail-csd-hevc", kKeyThumbnailHVCC },
+ { "slow-motion-markers", kKeySlowMotionMarkers },
+ { "thumbnail-csd-av1c", kKeyThumbnailAV1C },
}
};
@@ -1135,7 +1139,7 @@
// assertion, let's be lenient for now...
// CHECK((ptr[4] >> 2) == 0x3f); // reserved
- size_t lengthSize __unused = 1 + (ptr[4] & 3);
+ // we can get lengthSize value from 1 + (ptr[4] & 3)
// commented out check below as H264_QVGA_500_NO_AUDIO.3gp
// violates it...
@@ -1663,13 +1667,16 @@
meta->setInt32(kKeyColorMatrix, colorAspects.mMatrixCoeffs);
}
}
-
-void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
+/* Converts key and value pairs in AMessage format to MetaData format.
+ * Also checks for the presence of required keys.
+ */
+status_t convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
AString mime;
if (msg->findString("mime", &mime)) {
meta->setCString(kKeyMIMEType, mime.c_str());
} else {
- ALOGW("did not find mime type");
+ ALOGE("did not find mime type");
+ return BAD_VALUE;
}
convertMessageToMetaDataFromMappings(msg, meta);
@@ -1718,7 +1725,8 @@
meta->setInt32(kKeyWidth, width);
meta->setInt32(kKeyHeight, height);
} else {
- ALOGV("did not find width and/or height");
+ ALOGE("did not find width and/or height");
+ return BAD_VALUE;
}
int32_t sarWidth, sarHeight;
@@ -1803,14 +1811,14 @@
}
}
} else if (mime.startsWith("audio/")) {
- int32_t numChannels;
- if (msg->findInt32("channel-count", &numChannels)) {
- meta->setInt32(kKeyChannelCount, numChannels);
+ int32_t numChannels, sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels) ||
+ !msg->findInt32("sample-rate", &sampleRate)) {
+ ALOGE("did not find channel-count and/or sample-rate");
+ return BAD_VALUE;
}
- int32_t sampleRate;
- if (msg->findInt32("sample-rate", &sampleRate)) {
- meta->setInt32(kKeySampleRate, sampleRate);
- }
+ meta->setInt32(kKeyChannelCount, numChannels);
+ meta->setInt32(kKeySampleRate, sampleRate);
int32_t bitsPerSample;
if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
meta->setInt32(kKeyBitsPerSample, bitsPerSample);
@@ -1900,7 +1908,8 @@
std::vector<uint8_t> hvcc(csd0size + 1024);
size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
- } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+ } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
+ mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
if (msg->findBuffer("csd-2", &csd2)) {
@@ -1925,7 +1934,8 @@
}
}
} else {
- ALOGW("We need csd-2!!. %s", msg->debugString().c_str());
+ ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
+ return BAD_VALUE;
}
} else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
@@ -1991,6 +2001,7 @@
ALOGI("converted %s to:", msg->debugString(0).c_str());
meta->dumpToLog();
#endif
+ return OK;
}
status_t sendMetaDataToHal(sp<MediaPlayerBase::AudioSink>& sink,
@@ -2136,8 +2147,10 @@
}
info->sample_rate = srate;
- int32_t cmask = 0;
- if (!meta->findInt32(kKeyChannelMask, &cmask) || cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+ int32_t rawChannelMask;
+ audio_channel_mask_t cmask = meta->findInt32(kKeyChannelMask, &rawChannelMask) ?
+ static_cast<audio_channel_mask_t>(rawChannelMask) : CHANNEL_MASK_USE_CHANNEL_ORDER;
+ if (cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
ALOGV("track of type '%s' does not publish channel mask", mime);
// Try a channel count instead
@@ -2178,7 +2191,11 @@
}
// Check if offload is possible for given format, stream type, sample rate,
// bit rate, duration, video and streaming
- return AudioSystem::isOffloadSupported(info);
+#ifdef DISABLE_AUDIO_SYSTEM_OFFLOAD
+ return false;
+#else
+ return AudioSystem::getOffloadSupport(info) != AUDIO_OFFLOAD_NOT_SUPPORTED;
+#endif
}
HLSTime::HLSTime(const sp<AMessage>& meta) :
diff --git a/media/libstagefright/bqhelper/Android.bp b/media/libstagefright/bqhelper/Android.bp
index 37e842a..2b0494c 100644
--- a/media/libstagefright/bqhelper/Android.bp
+++ b/media/libstagefright/bqhelper/Android.bp
@@ -63,6 +63,8 @@
vndk: {
enabled: true,
},
+ min_sdk_version: "29",
+
shared_libs: [ "libgui" ],
target: {
vendor: {
@@ -99,6 +101,7 @@
"//apex_available:platform",
],
vendor_available: false,
+ min_sdk_version: "29",
static_libs: [
"libgui_bufferqueue_static",
],
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
index 168d140..157cab6 100644
--- a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -217,7 +217,7 @@
}
else { // handle other used encoder target levels
- // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ // Validation check: DRC presentation mode is only specified for max. 5.1 channels
if (mStreamNrAACChan > 6) {
drcPresMode = 0;
}
@@ -308,7 +308,7 @@
} // switch()
} // if (mEncoderTarget == GPM_ENCODER_TARGET_LEVEL)
- // sanity again
+ // validation check again
if (newHeavy == 1) {
newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
newAttFactor = 127;
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 2aeddd7..28a7a1e 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -38,6 +38,7 @@
#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */
#define DRC_DEFAULT_MOBILE_DRC_ALBUM 0 /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
+#define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS -1 /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
@@ -230,6 +231,15 @@
// For seven and eight channel input streams, enable 6.1 and 7.1 channel output
aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+ mDrcCompressMode = DRC_DEFAULT_MOBILE_DRC_HEAVY;
+ mDrcTargetRefLevel = DRC_DEFAULT_MOBILE_REF_LEVEL;
+ mDrcEncTargetLevel = DRC_DEFAULT_MOBILE_ENC_LEVEL;
+ mDrcBoostFactor = DRC_DEFAULT_MOBILE_DRC_BOOST;
+ mDrcAttenuationFactor = DRC_DEFAULT_MOBILE_DRC_CUT;
+ mDrcEffectType = DRC_DEFAULT_MOBILE_DRC_EFFECT;
+ mDrcAlbumMode = DRC_DEFAULT_MOBILE_DRC_ALBUM;
+ mDrcOutputLoudness = DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS;
+
return status;
}
@@ -358,6 +368,27 @@
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioAndroidAacDrcPresentation:
+ {
+ OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
+ (OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
+
+ ALOGD("get OMX_IndexParamAudioAndroidAacDrcPresentation");
+
+ if (!isValidOMXParam(aacPresParams)) {
+ return OMX_ErrorBadParameter;
+ }
+ aacPresParams->nDrcEffectType = mDrcEffectType;
+ aacPresParams->nDrcAlbumMode = mDrcAlbumMode;
+ aacPresParams->nDrcBoost = mDrcBoostFactor;
+ aacPresParams->nDrcCut = mDrcAttenuationFactor;
+ aacPresParams->nHeavyCompression = mDrcCompressMode;
+ aacPresParams->nTargetReferenceLevel = mDrcTargetRefLevel;
+ aacPresParams->nEncodedTargetLevel = mDrcEncTargetLevel;
+ aacPresParams->nDrcOutputLoudness = mDrcOutputLoudness;
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalGetParameter(index, params);
}
@@ -464,11 +495,13 @@
if (aacPresParams->nDrcEffectType >= -1) {
ALOGV("set nDrcEffectType=%d", aacPresParams->nDrcEffectType);
aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, aacPresParams->nDrcEffectType);
+ mDrcEffectType = aacPresParams->nDrcEffectType;
}
if (aacPresParams->nDrcAlbumMode >= -1) {
ALOGV("set nDrcAlbumMode=%d", aacPresParams->nDrcAlbumMode);
aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE,
aacPresParams->nDrcAlbumMode);
+ mDrcAlbumMode = aacPresParams->nDrcAlbumMode;
}
bool updateDrcWrapper = false;
if (aacPresParams->nDrcBoost >= 0) {
@@ -476,34 +509,42 @@
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
aacPresParams->nDrcBoost);
updateDrcWrapper = true;
+ mDrcBoostFactor = aacPresParams->nDrcBoost;
}
if (aacPresParams->nDrcCut >= 0) {
ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
updateDrcWrapper = true;
+ mDrcAttenuationFactor = aacPresParams->nDrcCut;
}
if (aacPresParams->nHeavyCompression >= 0) {
ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
aacPresParams->nHeavyCompression);
updateDrcWrapper = true;
+ mDrcCompressMode = aacPresParams->nHeavyCompression;
}
if (aacPresParams->nTargetReferenceLevel >= -1) {
ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
aacPresParams->nTargetReferenceLevel);
updateDrcWrapper = true;
+ mDrcTargetRefLevel = aacPresParams->nTargetReferenceLevel;
}
if (aacPresParams->nEncodedTargetLevel >= 0) {
ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
aacPresParams->nEncodedTargetLevel);
updateDrcWrapper = true;
+ mDrcEncTargetLevel = aacPresParams->nEncodedTargetLevel;
}
if (aacPresParams->nPCMLimiterEnable >= 0) {
aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE,
(aacPresParams->nPCMLimiterEnable != 0));
}
+ if (aacPresParams->nDrcOutputLoudness != DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS) {
+ mDrcOutputLoudness = aacPresParams->nDrcOutputLoudness;
+ }
if (updateDrcWrapper) {
mDrcWrap.update();
}
@@ -854,6 +895,11 @@
// fall through
}
+ if (mDrcOutputLoudness != mStreamInfo->outputLoudness) {
+ ALOGD("update Loudness, before = %d, now = %d", mDrcOutputLoudness, mStreamInfo->outputLoudness);
+ mDrcOutputLoudness = mStreamInfo->outputLoudness;
+ }
+
/*
* AAC+/eAAC+ streams can be signalled in two ways: either explicitly
* or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 5bee710..9f98aa1 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -85,6 +85,17 @@
int32_t mOutputDelayRingBufferWritePos;
int32_t mOutputDelayRingBufferReadPos;
int32_t mOutputDelayRingBufferFilled;
+
+ //drc
+ int32_t mDrcCompressMode;
+ int32_t mDrcTargetRefLevel;
+ int32_t mDrcEncTargetLevel;
+ int32_t mDrcBoostFactor;
+ int32_t mDrcAttenuationFactor;
+ int32_t mDrcEffectType;
+ int32_t mDrcAlbumMode;
+ int32_t mDrcOutputLoudness;
+
bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
int32_t outputDelayRingBufferSamplesAvailable();
diff --git a/media/libstagefright/codecs/amrnb/TEST_MAPPING b/media/libstagefright/codecs/amrnb/TEST_MAPPING
deleted file mode 100644
index 2909099..0000000
--- a/media/libstagefright/codecs/amrnb/TEST_MAPPING
+++ /dev/null
@@ -1,11 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrnb
-{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "AmrnbDecoderTest"},
-
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "AmrnbEncoderTest"}
-
- ]
-}
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.bp b/media/libstagefright/codecs/amrnb/dec/Android.bp
index b8e00b3..9d0da17 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/Android.bp
@@ -1,76 +1,3 @@
-cc_library_static {
- name: "libstagefright_amrnbdec",
- vendor_available: true,
- host_supported: true,
- min_sdk_version: "29",
-
- srcs: [
- "src/a_refl.cpp",
- "src/agc.cpp",
- "src/amrdecode.cpp",
- "src/b_cn_cod.cpp",
- "src/bgnscd.cpp",
- "src/c_g_aver.cpp",
- "src/d1035pf.cpp",
- "src/d2_11pf.cpp",
- "src/d2_9pf.cpp",
- "src/d3_14pf.cpp",
- "src/d4_17pf.cpp",
- "src/d8_31pf.cpp",
- "src/d_gain_c.cpp",
- "src/d_gain_p.cpp",
- "src/d_plsf.cpp",
- "src/d_plsf_3.cpp",
- "src/d_plsf_5.cpp",
- "src/dec_amr.cpp",
- "src/dec_gain.cpp",
- "src/dec_input_format_tab.cpp",
- "src/dec_lag3.cpp",
- "src/dec_lag6.cpp",
- "src/dtx_dec.cpp",
- "src/ec_gains.cpp",
- "src/ex_ctrl.cpp",
- "src/if2_to_ets.cpp",
- "src/int_lsf.cpp",
- "src/lsp_avg.cpp",
- "src/ph_disp.cpp",
- "src/post_pro.cpp",
- "src/preemph.cpp",
- "src/pstfilt.cpp",
- "src/qgain475_tab.cpp",
- "src/sp_dec.cpp",
- "src/wmf_to_ets.cpp",
- ],
-
- export_include_dirs: ["src"],
-
- cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
- "-DOSCL_IMPORT_REF=",
-
- "-Werror",
- ],
-
- version_script: "exports.lds",
-
- //sanitize: {
- // misc_undefined: [
- // "signed-integer-overflow",
- // ],
- //},
-
- shared_libs: [
- "libstagefright_amrnb_common",
- "liblog",
- ],
-
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
-
//###############################################################################
cc_library_shared {
@@ -79,8 +6,6 @@
srcs: ["SoftAMR.cpp"],
- local_include_dirs: ["src"],
-
cflags: [
"-DOSCL_IMPORT_REF=",
],
@@ -104,38 +29,3 @@
],
}
-//###############################################################################
-cc_test {
- name: "libstagefright_amrnbdec_test",
- gtest: false,
- host_supported: true,
-
- srcs: ["test/amrnbdec_test.cpp"],
-
- cflags: ["-Wall", "-Werror"],
-
- local_include_dirs: ["src"],
-
- static_libs: [
- "libstagefright_amrnbdec",
- "libsndfile",
- ],
-
- shared_libs: [
- "libstagefright_amrnb_common",
- "libaudioutils",
- "liblog",
- ],
-
- target: {
- darwin: {
- enabled: false,
- },
- },
-
- //sanitize: {
- // misc_undefined: [
- // "signed-integer-overflow",
- // ],
- //},
-}
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
index 73a1d4b..bdd1cdf 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/Android.bp
@@ -1,87 +1,3 @@
-cc_library_static {
- name: "libstagefright_amrnbenc",
- vendor_available: true,
- min_sdk_version: "29",
-
- srcs: [
- "src/amrencode.cpp",
- "src/autocorr.cpp",
- "src/c1035pf.cpp",
- "src/c2_11pf.cpp",
- "src/c2_9pf.cpp",
- "src/c3_14pf.cpp",
- "src/c4_17pf.cpp",
- "src/c8_31pf.cpp",
- "src/calc_cor.cpp",
- "src/calc_en.cpp",
- "src/cbsearch.cpp",
- "src/cl_ltp.cpp",
- "src/cod_amr.cpp",
- "src/convolve.cpp",
- "src/cor_h.cpp",
- "src/cor_h_x.cpp",
- "src/cor_h_x2.cpp",
- "src/corrwght_tab.cpp",
- "src/dtx_enc.cpp",
- "src/enc_lag3.cpp",
- "src/enc_lag6.cpp",
- "src/enc_output_format_tab.cpp",
- "src/ets_to_if2.cpp",
- "src/ets_to_wmf.cpp",
- "src/g_adapt.cpp",
- "src/g_code.cpp",
- "src/g_pitch.cpp",
- "src/gain_q.cpp",
- "src/hp_max.cpp",
- "src/inter_36.cpp",
- "src/inter_36_tab.cpp",
- "src/l_comp.cpp",
- "src/l_extract.cpp",
- "src/l_negate.cpp",
- "src/lag_wind.cpp",
- "src/lag_wind_tab.cpp",
- "src/levinson.cpp",
- "src/lpc.cpp",
- "src/ol_ltp.cpp",
- "src/p_ol_wgh.cpp",
- "src/pitch_fr.cpp",
- "src/pitch_ol.cpp",
- "src/pre_big.cpp",
- "src/pre_proc.cpp",
- "src/prm2bits.cpp",
- "src/q_gain_c.cpp",
- "src/q_gain_p.cpp",
- "src/qgain475.cpp",
- "src/qgain795.cpp",
- "src/qua_gain.cpp",
- "src/s10_8pf.cpp",
- "src/set_sign.cpp",
- "src/sid_sync.cpp",
- "src/sp_enc.cpp",
- "src/spreproc.cpp",
- "src/spstproc.cpp",
- "src/ton_stab.cpp",
- ],
-
- header_libs: ["libstagefright_headers"],
- export_include_dirs: ["src"],
-
- cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
- "-Werror",
- ],
-
- version_script: "exports.lds",
-
- //addressing b/25409744
- //sanitize: {
- // misc_undefined: [
- // "signed-integer-overflow",
- // ],
- //},
-
- shared_libs: ["libstagefright_amrnb_common"],
-}
//###############################################################################
@@ -91,8 +7,6 @@
srcs: ["SoftAMRNBEncoder.cpp"],
- local_include_dirs: ["src"],
-
//addressing b/25409744
//sanitize: {
// misc_undefined: [
@@ -107,26 +21,3 @@
],
}
-//###############################################################################
-
-cc_test {
- name: "libstagefright_amrnbenc_test",
- gtest: false,
-
- srcs: ["test/amrnb_enc_test.cpp"],
-
- cflags: ["-Wall", "-Werror"],
-
- local_include_dirs: ["src"],
-
- static_libs: ["libstagefright_amrnbenc"],
-
- shared_libs: ["libstagefright_amrnb_common"],
-
- //addressing b/25409744
- //sanitize: {
- // misc_undefined: [
- // "signed-integer-overflow",
- // ],
- //},
-}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp b/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
deleted file mode 100644
index f8da589..0000000
--- a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
+++ /dev/null
@@ -1,1445 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
- 3GPP TS 26.073
- ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
- Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Pathname: ./audio/gsm-amr/c/src/qgain475.c
- Funtions: MR475_quant_store_results
- MR475_update_unq_pred
- MR475_gain_quant
-
-------------------------------------------------------------------------------
- MODULE DESCRIPTION
-
- These modules handle the quantization of pitch and codebook gains for MR475.
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "qgain475.h"
-#include "typedef.h"
-#include "basic_op.h"
-#include "mode.h"
-#include "cnst.h"
-#include "pow2.h"
-#include "log2.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-#define MR475_VQ_SIZE 256
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL VARIABLE DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/* The table contains the following data:
- *
- * g_pitch(0) (Q14) // for sub-
- * g_fac(0) (Q12) // frame 0 and 2
- * g_pitch(1) (Q14) // for sub-
- * g_fac(2) (Q12) // frame 1 and 3
- *
- */
-static const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
-{
- /*g_pit(0), g_fac(0), g_pit(1), g_fac(1) */
- 812, 128, 542, 140,
- 2873, 1135, 2266, 3402,
- 2067, 563, 12677, 647,
- 4132, 1798, 5601, 5285,
- 7689, 374, 3735, 441,
- 10912, 2638, 11807, 2494,
- 20490, 797, 5218, 675,
- 6724, 8354, 5282, 1696,
- 1488, 428, 5882, 452,
- 5332, 4072, 3583, 1268,
- 2469, 901, 15894, 1005,
- 14982, 3271, 10331, 4858,
- 3635, 2021, 2596, 835,
- 12360, 4892, 12206, 1704,
- 13432, 1604, 9118, 2341,
- 3968, 1538, 5479, 9936,
- 3795, 417, 1359, 414,
- 3640, 1569, 7995, 3541,
- 11405, 645, 8552, 635,
- 4056, 1377, 16608, 6124,
- 11420, 700, 2007, 607,
- 12415, 1578, 11119, 4654,
- 13680, 1708, 11990, 1229,
- 7996, 7297, 13231, 5715,
- 2428, 1159, 2073, 1941,
- 6218, 6121, 3546, 1804,
- 8925, 1802, 8679, 1580,
- 13935, 3576, 13313, 6237,
- 6142, 1130, 5994, 1734,
- 14141, 4662, 11271, 3321,
- 12226, 1551, 13931, 3015,
- 5081, 10464, 9444, 6706,
- 1689, 683, 1436, 1306,
- 7212, 3933, 4082, 2713,
- 7793, 704, 15070, 802,
- 6299, 5212, 4337, 5357,
- 6676, 541, 6062, 626,
- 13651, 3700, 11498, 2408,
- 16156, 716, 12177, 751,
- 8065, 11489, 6314, 2256,
- 4466, 496, 7293, 523,
- 10213, 3833, 8394, 3037,
- 8403, 966, 14228, 1880,
- 8703, 5409, 16395, 4863,
- 7420, 1979, 6089, 1230,
- 9371, 4398, 14558, 3363,
- 13559, 2873, 13163, 1465,
- 5534, 1678, 13138, 14771,
- 7338, 600, 1318, 548,
- 4252, 3539, 10044, 2364,
- 10587, 622, 13088, 669,
- 14126, 3526, 5039, 9784,
- 15338, 619, 3115, 590,
- 16442, 3013, 15542, 4168,
- 15537, 1611, 15405, 1228,
- 16023, 9299, 7534, 4976,
- 1990, 1213, 11447, 1157,
- 12512, 5519, 9475, 2644,
- 7716, 2034, 13280, 2239,
- 16011, 5093, 8066, 6761,
- 10083, 1413, 5002, 2347,
- 12523, 5975, 15126, 2899,
- 18264, 2289, 15827, 2527,
- 16265, 10254, 14651, 11319,
- 1797, 337, 3115, 397,
- 3510, 2928, 4592, 2670,
- 7519, 628, 11415, 656,
- 5946, 2435, 6544, 7367,
- 8238, 829, 4000, 863,
- 10032, 2492, 16057, 3551,
- 18204, 1054, 6103, 1454,
- 5884, 7900, 18752, 3468,
- 1864, 544, 9198, 683,
- 11623, 4160, 4594, 1644,
- 3158, 1157, 15953, 2560,
- 12349, 3733, 17420, 5260,
- 6106, 2004, 2917, 1742,
- 16467, 5257, 16787, 1680,
- 17205, 1759, 4773, 3231,
- 7386, 6035, 14342, 10012,
- 4035, 442, 4194, 458,
- 9214, 2242, 7427, 4217,
- 12860, 801, 11186, 825,
- 12648, 2084, 12956, 6554,
- 9505, 996, 6629, 985,
- 10537, 2502, 15289, 5006,
- 12602, 2055, 15484, 1653,
- 16194, 6921, 14231, 5790,
- 2626, 828, 5615, 1686,
- 13663, 5778, 3668, 1554,
- 11313, 2633, 9770, 1459,
- 14003, 4733, 15897, 6291,
- 6278, 1870, 7910, 2285,
- 16978, 4571, 16576, 3849,
- 15248, 2311, 16023, 3244,
- 14459, 17808, 11847, 2763,
- 1981, 1407, 1400, 876,
- 4335, 3547, 4391, 4210,
- 5405, 680, 17461, 781,
- 6501, 5118, 8091, 7677,
- 7355, 794, 8333, 1182,
- 15041, 3160, 14928, 3039,
- 20421, 880, 14545, 852,
- 12337, 14708, 6904, 1920,
- 4225, 933, 8218, 1087,
- 10659, 4084, 10082, 4533,
- 2735, 840, 20657, 1081,
- 16711, 5966, 15873, 4578,
- 10871, 2574, 3773, 1166,
- 14519, 4044, 20699, 2627,
- 15219, 2734, 15274, 2186,
- 6257, 3226, 13125, 19480,
- 7196, 930, 2462, 1618,
- 4515, 3092, 13852, 4277,
- 10460, 833, 17339, 810,
- 16891, 2289, 15546, 8217,
- 13603, 1684, 3197, 1834,
- 15948, 2820, 15812, 5327,
- 17006, 2438, 16788, 1326,
- 15671, 8156, 11726, 8556,
- 3762, 2053, 9563, 1317,
- 13561, 6790, 12227, 1936,
- 8180, 3550, 13287, 1778,
- 16299, 6599, 16291, 7758,
- 8521, 2551, 7225, 2645,
- 18269, 7489, 16885, 2248,
- 17882, 2884, 17265, 3328,
- 9417, 20162, 11042, 8320,
- 1286, 620, 1431, 583,
- 5993, 2289, 3978, 3626,
- 5144, 752, 13409, 830,
- 5553, 2860, 11764, 5908,
- 10737, 560, 5446, 564,
- 13321, 3008, 11946, 3683,
- 19887, 798, 9825, 728,
- 13663, 8748, 7391, 3053,
- 2515, 778, 6050, 833,
- 6469, 5074, 8305, 2463,
- 6141, 1865, 15308, 1262,
- 14408, 4547, 13663, 4515,
- 3137, 2983, 2479, 1259,
- 15088, 4647, 15382, 2607,
- 14492, 2392, 12462, 2537,
- 7539, 2949, 12909, 12060,
- 5468, 684, 3141, 722,
- 5081, 1274, 12732, 4200,
- 15302, 681, 7819, 592,
- 6534, 2021, 16478, 8737,
- 13364, 882, 5397, 899,
- 14656, 2178, 14741, 4227,
- 14270, 1298, 13929, 2029,
- 15477, 7482, 15815, 4572,
- 2521, 2013, 5062, 1804,
- 5159, 6582, 7130, 3597,
- 10920, 1611, 11729, 1708,
- 16903, 3455, 16268, 6640,
- 9306, 1007, 9369, 2106,
- 19182, 5037, 12441, 4269,
- 15919, 1332, 15357, 3512,
- 11898, 14141, 16101, 6854,
- 2010, 737, 3779, 861,
- 11454, 2880, 3564, 3540,
- 9057, 1241, 12391, 896,
- 8546, 4629, 11561, 5776,
- 8129, 589, 8218, 588,
- 18728, 3755, 12973, 3149,
- 15729, 758, 16634, 754,
- 15222, 11138, 15871, 2208,
- 4673, 610, 10218, 678,
- 15257, 4146, 5729, 3327,
- 8377, 1670, 19862, 2321,
- 15450, 5511, 14054, 5481,
- 5728, 2888, 7580, 1346,
- 14384, 5325, 16236, 3950,
- 15118, 3744, 15306, 1435,
- 14597, 4070, 12301, 15696,
- 7617, 1699, 2170, 884,
- 4459, 4567, 18094, 3306,
- 12742, 815, 14926, 907,
- 15016, 4281, 15518, 8368,
- 17994, 1087, 2358, 865,
- 16281, 3787, 15679, 4596,
- 16356, 1534, 16584, 2210,
- 16833, 9697, 15929, 4513,
- 3277, 1085, 9643, 2187,
- 11973, 6068, 9199, 4462,
- 8955, 1629, 10289, 3062,
- 16481, 5155, 15466, 7066,
- 13678, 2543, 5273, 2277,
- 16746, 6213, 16655, 3408,
- 20304, 3363, 18688, 1985,
- 14172, 12867, 15154, 15703,
- 4473, 1020, 1681, 886,
- 4311, 4301, 8952, 3657,
- 5893, 1147, 11647, 1452,
- 15886, 2227, 4582, 6644,
- 6929, 1205, 6220, 799,
- 12415, 3409, 15968, 3877,
- 19859, 2109, 9689, 2141,
- 14742, 8830, 14480, 2599,
- 1817, 1238, 7771, 813,
- 19079, 4410, 5554, 2064,
- 3687, 2844, 17435, 2256,
- 16697, 4486, 16199, 5388,
- 8028, 2763, 3405, 2119,
- 17426, 5477, 13698, 2786,
- 19879, 2720, 9098, 3880,
- 18172, 4833, 17336, 12207,
- 5116, 996, 4935, 988,
- 9888, 3081, 6014, 5371,
- 15881, 1667, 8405, 1183,
- 15087, 2366, 19777, 7002,
- 11963, 1562, 7279, 1128,
- 16859, 1532, 15762, 5381,
- 14708, 2065, 20105, 2155,
- 17158, 8245, 17911, 6318,
- 5467, 1504, 4100, 2574,
- 17421, 6810, 5673, 2888,
- 16636, 3382, 8975, 1831,
- 20159, 4737, 19550, 7294,
- 6658, 2781, 11472, 3321,
- 19397, 5054, 18878, 4722,
- 16439, 2373, 20430, 4386,
- 11353, 26526, 11593, 3068,
- 2866, 1566, 5108, 1070,
- 9614, 4915, 4939, 3536,
- 7541, 878, 20717, 851,
- 6938, 4395, 16799, 7733,
- 10137, 1019, 9845, 964,
- 15494, 3955, 15459, 3430,
- 18863, 982, 20120, 963,
- 16876, 12887, 14334, 4200,
- 6599, 1220, 9222, 814,
- 16942, 5134, 5661, 4898,
- 5488, 1798, 20258, 3962,
- 17005, 6178, 17929, 5929,
- 9365, 3420, 7474, 1971,
- 19537, 5177, 19003, 3006,
- 16454, 3788, 16070, 2367,
- 8664, 2743, 9445, 26358,
- 10856, 1287, 3555, 1009,
- 5606, 3622, 19453, 5512,
- 12453, 797, 20634, 911,
- 15427, 3066, 17037, 10275,
- 18883, 2633, 3913, 1268,
- 19519, 3371, 18052, 5230,
- 19291, 1678, 19508, 3172,
- 18072, 10754, 16625, 6845,
- 3134, 2298, 10869, 2437,
- 15580, 6913, 12597, 3381,
- 11116, 3297, 16762, 2424,
- 18853, 6715, 17171, 9887,
- 12743, 2605, 8937, 3140,
- 19033, 7764, 18347, 3880,
- 20475, 3682, 19602, 3380,
- 13044, 19373, 10526, 23124
-};
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_quant_store_results
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
- pred_st = pointer to structure of type gc_predState
- p = pointer to selected quantizer table entry (const Word16)
- gcode0 = predicted CB gain (Word16)
- exp_gcode0 = exponent of predicted CB gain (Word16)
- gain_pit = pointer to Pitch gain (Word16)
- gain_cod = pointer to Code gain (Word16)
-
- Outputs:
- pred_st points to the updated structure of type gc_predState
- gain_pit points to Pitch gain
- gain_cod points to Code gain
- pOverflow points to overflow indicator (Flag)
-
- Returns:
- None.
-
- Global Variables Used:
- None.
-
- Local Variables Needed:
- None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function calculates the final fixed codebook gain and the predictor
- update values, and updates the gain predictor.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-static void MR475_quant_store_results(
-
- gc_predState *pred_st, // i/o: gain predictor state struct
- const Word16 *p, // i : pointer to selected quantizer table entry
- Word16 gcode0, // i : predicted CB gain, Q(14 - exp_gcode0)
- Word16 exp_gcode0, // i : exponent of predicted CB gain, Q0
- Word16 *gain_pit, // o : Pitch gain, Q14
- Word16 *gain_cod // o : Code gain, Q1
-)
-{
-
- Word16 g_code, exp, frac, tmp;
- Word32 L_tmp;
-
- Word16 qua_ener_MR122; // o : quantized energy error, MR122 version Q10
- Word16 qua_ener; // o : quantized energy error, Q10
-
- // Read the quantized gains
- *gain_pit = *p++;
- g_code = *p++;
-
- //------------------------------------------------------------------*
- * calculate final fixed codebook gain: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * gc = gc0 * g *
- *------------------------------------------------------------------
-
- L_tmp = L_mult(g_code, gcode0);
- L_tmp = L_shr(L_tmp, sub(10, exp_gcode0));
- *gain_cod = extract_h(L_tmp);
-
- //------------------------------------------------------------------*
- * calculate predictor update values and update gain predictor: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * qua_ener = log2(g) *
- * qua_ener_MR122 = 20*log10(g) *
- *------------------------------------------------------------------
-
- Log2 (L_deposit_l (g_code), &exp, &frac); // Log2(x Q12) = log2(x) + 12
- exp = sub(exp, 12);
-
- tmp = shr_r (frac, 5);
- qua_ener_MR122 = add (tmp, shl (exp, 10));
-
- L_tmp = Mpy_32_16(exp, frac, 24660); // 24660 Q12 ~= 6.0206 = 20*log10(2)
- qua_ener = pv_round (L_shl (L_tmp, 13)); // Q12 * Q0 = Q13 -> Q10
-
- gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
- used to represent cycle count for each subroutine
- called)
- where: (cycle count variable) = cycle count for [subroutine
- name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-static void MR475_quant_store_results(
- gc_predState *pred_st, /* i/o: gain predictor state struct */
- const Word16 *p, /* i : pointer to selected quantizer table entry */
- Word16 gcode0, /* i : predicted CB gain, Q(14 - exp_gcode0) */
- Word16 exp_gcode0, /* i : exponent of predicted CB gain, Q0 */
- Word16 *gain_pit, /* o : Pitch gain, Q14 */
- Word16 *gain_cod, /* o : Code gain, Q1 */
- Flag *pOverflow /* o : overflow indicator */
-)
-{
- Word16 g_code;
- Word16 exp;
- Word16 frac;
- Word16 tmp;
- Word32 L_tmp;
-
- Word16 qua_ener_MR122; /* o : quantized energy error, MR122 version Q10 */
- Word16 qua_ener; /* o : quantized energy error, Q10 */
-
-
- /* Read the quantized gains */
- *gain_pit = *p++;
- g_code = *p++;
-
- /*------------------------------------------------------------------*
- * calculate final fixed codebook gain: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * gc = gc0 * g *
- *------------------------------------------------------------------*/
-
- L_tmp = ((Word32) g_code * gcode0) << 1;
- tmp = 10 - exp_gcode0;
- L_tmp = L_shr(L_tmp, tmp, pOverflow);
- *gain_cod = (Word16)(L_tmp >> 16);
-
- /*------------------------------------------------------------------*
- * calculate predictor update values and update gain predictor: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * qua_ener = log2(g) *
- * qua_ener_MR122 = 20*log10(g) *
- *------------------------------------------------------------------*/
-
- /* Log2(x Q12) = log2(x) + 12 */
- Log2((Word32) g_code, &exp, &frac, pOverflow);
- exp -= 12;
-
- tmp = shr_r(frac, 5, pOverflow);
- qua_ener_MR122 = exp << 10;
- qua_ener_MR122 = tmp + qua_ener_MR122;
-
- /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
- L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
- L_tmp = L_tmp << 13;
-
- /* Q12 * Q0 = Q13 -> Q10 */
- qua_ener = (Word16)((L_tmp + (Word32) 0x00008000L) >> 16);
-
- gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-
- return;
-}
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_update_unq_pred
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
- pred_st = pointer to structure of type gc_predState
- exp_gcode0 = predicted CB gain (exponent MSW) (Word16)
- frac_gcode0 = predicted CB gain (exponent LSW) (Word16)
- cod_gain_exp = optimum codebook gain (exponent)(Word16)
- cod_gain_frac = optimum codebook gain (fraction) (Word16)
-
- Outputs:
- pred_st points to the updated structure of type gc_predState
- pOverflow points to overflow indicator (Flag)
-
- Returns:
- None.
-
- Global Variables Used:
- None.
-
- Local Variables Needed:
- None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This module uses the optimum codebook gain and updates the "unquantized"
- gain predictor with the (bounded) prediction error.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-void
-MR475_update_unq_pred(
- gc_predState *pred_st, // i/o: gain predictor state struct
- Word16 exp_gcode0, // i : predicted CB gain (exponent MSW), Q0
- Word16 frac_gcode0, // i : predicted CB gain (exponent LSW), Q15
- Word16 cod_gain_exp, // i : optimum codebook gain (exponent), Q0
- Word16 cod_gain_frac // i : optimum codebook gain (fraction), Q15
-)
-{
- Word16 tmp, exp, frac;
- Word16 qua_ener, qua_ener_MR122;
- Word32 L_tmp;
-
- // calculate prediction error factor (given optimum CB gain gcu):
- // predErrFact = gcu / gcode0
- // (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
- // -> limit qua_ener*)
- //
- // calculate prediction error (log):
- //
- // qua_ener_MR122 = log2(predErrFact)
- // qua_ener = 20*log10(predErrFact)
-
- if (cod_gain_frac <= 0)
- {
- // if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT
- // -> set qua_ener(_MR122) directly
- qua_ener = MIN_QUA_ENER;
- qua_ener_MR122 = MIN_QUA_ENER_MR122;
- }
- else
- {
- // convert gcode0 from DPF to standard fraction/exponent format
- // with normalized frac, i.e. 16384 <= frac <= 32767
- // Note: exponent correction (exp=exp-14) is done after div_s
- frac_gcode0 = extract_l (Pow2 (14, frac_gcode0));
-
- // make sure cod_gain_frac < frac_gcode0 for div_s
- if (sub(cod_gain_frac, frac_gcode0) >= 0)
- {
- cod_gain_frac = shr (cod_gain_frac, 1);
- cod_gain_exp = add (cod_gain_exp, 1);
- }
-
- // predErrFact
- // = gcu / gcode0
- // = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
- // = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
- // = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
-
- frac = div_s (cod_gain_frac, frac_gcode0);
- tmp = sub (sub (cod_gain_exp, exp_gcode0), 1);
-
- Log2 (L_deposit_l (frac), &exp, &frac);
- exp = add (exp, tmp);
-
- // calculate prediction error (log2, Q10)
- qua_ener_MR122 = shr_r (frac, 5);
- qua_ener_MR122 = add (qua_ener_MR122, shl (exp, 10));
-
- if (sub(qua_ener_MR122, MIN_QUA_ENER_MR122) < 0)
- {
- qua_ener = MIN_QUA_ENER;
- qua_ener_MR122 = MIN_QUA_ENER_MR122;
- }
- else if (sub(qua_ener_MR122, MAX_QUA_ENER_MR122) > 0)
- {
- qua_ener = MAX_QUA_ENER;
- qua_ener_MR122 = MAX_QUA_ENER_MR122;
- }
- else
- {
- // calculate prediction error (20*log10, Q10)
- L_tmp = Mpy_32_16(exp, frac, 24660);
- // 24660 Q12 ~= 6.0206 = 20*log10(2)
- qua_ener = pv_round (L_shl (L_tmp, 13));
- // Q12 * Q0 = Q13 -> Q26 -> Q10
- }
- }
-
- // update MA predictor memory
- gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
- used to represent cycle count for each subroutine
- called)
- where: (cycle count variable) = cycle count for [subroutine
- name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-void MR475_update_unq_pred(
- gc_predState *pred_st, /* i/o: gain predictor state struct */
- Word16 exp_gcode0, /* i : predicted CB gain (exponent MSW), Q0 */
- Word16 frac_gcode0, /* i : predicted CB gain (exponent LSW), Q15 */
- Word16 cod_gain_exp, /* i : optimum codebook gain (exponent), Q0 */
- Word16 cod_gain_frac, /* i : optimum codebook gain (fraction), Q15 */
- Flag *pOverflow /* o : overflow indicator */
-)
-{
- Word16 tmp;
- Word16 exp;
- Word16 frac;
- Word16 qua_ener;
- Word16 qua_ener_MR122;
- Word32 L_tmp;
-
- /* calculate prediction error factor (given optimum CB gain gcu):
- *
- * predErrFact = gcu / gcode0
- * (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
- * -> limit qua_ener*)
- *
- * calculate prediction error (log):
- *
- * qua_ener_MR122 = log2(predErrFact)
- * qua_ener = 20*log10(predErrFact)
- *
- */
-
- if (cod_gain_frac <= 0)
- {
- /* if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT */
- /* -> set qua_ener(_MR122) directly */
- qua_ener = MIN_QUA_ENER;
- qua_ener_MR122 = MIN_QUA_ENER_MR122;
- }
- else
- {
- /* convert gcode0 from DPF to standard fraction/exponent format */
- /* with normalized frac, i.e. 16384 <= frac <= 32767 */
- /* Note: exponent correction (exp=exp-14) is done after div_s */
- frac_gcode0 = (Word16)(Pow2(14, frac_gcode0, pOverflow));
-
- /* make sure cod_gain_frac < frac_gcode0 for div_s */
- if (cod_gain_frac >= frac_gcode0)
- {
- cod_gain_frac >>= 1;
- cod_gain_exp += 1;
- }
-
- /*
- predErrFact
- = gcu / gcode0
- = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
- = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
- = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
- */
- frac = div_s(cod_gain_frac, frac_gcode0);
- tmp = cod_gain_exp - exp_gcode0;
- tmp -= 1;
-
- Log2((Word32) frac, &exp, &frac, pOverflow);
- exp += tmp;
-
- /* calculate prediction error (log2, Q10) */
- qua_ener_MR122 = shr_r(frac, 5, pOverflow);
- tmp = exp << 10;
- qua_ener_MR122 += tmp;
-
- if (qua_ener_MR122 > MAX_QUA_ENER_MR122)
- {
- qua_ener = MAX_QUA_ENER;
- qua_ener_MR122 = MAX_QUA_ENER_MR122;
- }
- else
- {
- /* calculate prediction error (20*log10, Q10) */
- L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
- /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
- L_tmp = L_shl(L_tmp, 13, pOverflow);
- qua_ener = pv_round(L_tmp, pOverflow);
-
- /* Q12 * Q0 = Q13 -> Q26 -> Q10 */
- }
- }
-
- /* update MA predictor memory */
- gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-
-
- return;
-}
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_gain_quant
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
- pred_st = pointer to structure of type gc_predState
- sf0_exp_gcode0 = predicted CB gain (exponent) (Word16)
- f0_frac_gcode0 = predicted CB gain (fraction) (Word16)
- sf0_exp_coeff = energy coeff. (exponent part) (Word16)
- sf0_frac_coeff = energy coeff. ((fraction part) (Word16)
- sf0_exp_target_en = exponent of target energy (Word16)
- sf0_frac_target_en = fraction of target energy (Word16)
- sf1_code_nosharp = innovative codebook vector (Word16)
- sf1_exp_gcode0 = predicted CB gain (exponent) (Word16)
- sf1_frac_gcode0 = predicted CB gain (fraction) (Word16)
- sf1_exp_coeff = energy coeff. (exponent part) (Word16)
- sf1_frac_coeff = energy coeff. (fraction part) (Word16)
- sf1_exp_target_en = exponent of target energy (Word16)
- sf1_frac_target_en = fraction of target energy (Word16)
- gp_limit = pitch gain limit (Word16)
- sf0_gain_pit = pointer to Pitch gain (Word16)
- sf0_gain_cod = pointer to Code gain (Word16)
- sf1_gain_pit = pointer to Pitch gain (Word16)
- sf1_gain_cod = pointer to Code gain (Word16)
-
- Outputs:
- pred_st points to the updated structure of type gc_predState
- sf0_gain_pit points to Pitch gain
- sf0_gain_cod points to Code gain
- sf1_gain_pit points to Pitch gain
- sf1_gain_cod points to Code gain
-
- Returns:
- index = index of quantization
-
- Global Variables Used:
- None.
-
- Local Variables Needed:
- None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This module provides quantization of pitch and codebook gains for two
- subframes using the predicted codebook gain.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-Word16
-MR475_gain_quant( // o : index of quantization.
- gc_predState *pred_st, // i/o: gain predictor state struct
-
- // data from subframe 0 (or 2)
- Word16 sf0_exp_gcode0, // i : predicted CB gain (exponent), Q0
- Word16 sf0_frac_gcode0, // i : predicted CB gain (fraction), Q15
- Word16 sf0_exp_coeff[], // i : energy coeff. (5), exponent part, Q0
- Word16 sf0_frac_coeff[], // i : energy coeff. (5), fraction part, Q15
- // (frac_coeff and exp_coeff computed in
- // calc_filt_energies())
- Word16 sf0_exp_target_en, // i : exponent of target energy, Q0
- Word16 sf0_frac_target_en, // i : fraction of target energy, Q15
-
- // data from subframe 1 (or 3)
- Word16 sf1_code_nosharp[], // i : innovative codebook vector (L_SUBFR)
- // (whithout pitch sharpening)
- Word16 sf1_exp_gcode0, // i : predicted CB gain (exponent), Q0
- Word16 sf1_frac_gcode0, // i : predicted CB gain (fraction), Q15
- Word16 sf1_exp_coeff[], // i : energy coeff. (5), exponent part, Q0
- Word16 sf1_frac_coeff[], // i : energy coeff. (5), fraction part, Q15
- // (frac_coeff and exp_coeff computed in
- // calc_filt_energies())
- Word16 sf1_exp_target_en, // i : exponent of target energy, Q0
- Word16 sf1_frac_target_en, // i : fraction of target energy, Q15
-
- Word16 gp_limit, // i : pitch gain limit
-
- Word16 *sf0_gain_pit, // o : Pitch gain, Q14
- Word16 *sf0_gain_cod, // o : Code gain, Q1
-
- Word16 *sf1_gain_pit, // o : Pitch gain, Q14
- Word16 *sf1_gain_cod // o : Code gain, Q1
-)
-{
- const Word16 *p;
- Word16 i, index = 0;
- Word16 tmp;
- Word16 exp;
- Word16 sf0_gcode0, sf1_gcode0;
- Word16 g_pitch, g2_pitch, g_code, g2_code, g_pit_cod;
- Word16 coeff[10], coeff_lo[10], exp_max[10]; // 0..4: sf0; 5..9: sf1
- Word32 L_tmp, dist_min;
-
- *-------------------------------------------------------------------*
- * predicted codebook gain *
- * ~~~~~~~~~~~~~~~~~~~~~~~ *
- * gc0 = 2^exp_gcode0 + 2^frac_gcode0 *
- * *
- * gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0) *
- *-------------------------------------------------------------------*
-
- sf0_gcode0 = extract_l(Pow2(14, sf0_frac_gcode0));
- sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
-
- * For each subframe, the error energy (sum) to be minimized consists
- * of five terms, t[0..4].
- *
- * t[0] = gp^2 * <y1 y1>
- * t[1] = -2*gp * <xn y1>
- * t[2] = gc^2 * <y2 y2>
- * t[3] = -2*gc * <xn y2>
- * t[4] = 2*gp*gc * <y1 y2>
- *
-
- // sf 0
- // determine the scaling exponent for g_code: ec = ec0 - 11
- exp = sub(sf0_exp_gcode0, 11);
-
- // calculate exp_max[i] = s[i]-1
- exp_max[0] = sub(sf0_exp_coeff[0], 13);
- exp_max[1] = sub(sf0_exp_coeff[1], 14);
- exp_max[2] = add(sf0_exp_coeff[2], add(15, shl(exp, 1)));
- exp_max[3] = add(sf0_exp_coeff[3], exp);
- exp_max[4] = add(sf0_exp_coeff[4], add(1, exp));
-
- // sf 1
- // determine the scaling exponent for g_code: ec = ec0 - 11
- exp = sub(sf1_exp_gcode0, 11);
-
- // calculate exp_max[i] = s[i]-1
- exp_max[5] = sub(sf1_exp_coeff[0], 13);
- exp_max[6] = sub(sf1_exp_coeff[1], 14);
- exp_max[7] = add(sf1_exp_coeff[2], add(15, shl(exp, 1)));
- exp_max[8] = add(sf1_exp_coeff[3], exp);
- exp_max[9] = add(sf1_exp_coeff[4], add(1, exp));
-
- *-------------------------------------------------------------------*
- * Gain search equalisation: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * The MSE for the two subframes is weighted differently if there *
- * is a big difference in the corresponding target energies *
- *-------------------------------------------------------------------*
-
- // make the target energy exponents the same by de-normalizing the
- // fraction of the smaller one. This is necessary to be able to compare
- // them
-
- exp = sf0_exp_target_en - sf1_exp_target_en;
- if (exp > 0)
- {
- sf1_frac_target_en = shr (sf1_frac_target_en, exp);
- }
- else
- {
- sf0_frac_target_en = shl (sf0_frac_target_en, exp);
- }
-
- // assume no change of exponents
- exp = 0;
-
- // test for target energy difference; set exp to +1 or -1 to scale
- // up/down coefficients for sf 1
-
- tmp = shr_r (sf1_frac_target_en, 1); // tmp = ceil(0.5*en(sf1))
- if (sub (tmp, sf0_frac_target_en) > 0) // tmp > en(sf0)?
- {
- // target_energy(sf1) > 2*target_energy(sf0)
- // -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
- exp = 1;
- }
- else
- {
- tmp = shr (add (sf0_frac_target_en, 3), 2); // tmp=ceil(0.25*en(sf0))
- if (sub (tmp, sf1_frac_target_en) > 0) // tmp > en(sf1)?
- {
- // target_energy(sf1) < 0.25*target_energy(sf0)
- // -> scale down MSE(sf0) by 0.5 by subtracting 1 from
- // coefficients 0..4
- exp = -1;
- }
- }
-
- for (i = 0; i < 5; i++)
- {
- exp_max[i] = add (exp_max[i], exp);
- }
-
- *-------------------------------------------------------------------*
- * Find maximum exponent: *
- * ~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * For the sum operation, all terms must have the same scaling; *
- * that scaling should be low enough to prevent overflow. There- *
- * fore, the maximum scale is determined and all coefficients are *
- * re-scaled: *
- * *
- * exp = max(exp_max[i]) + 1; *
- * e = exp_max[i]-exp; e <= 0! *
- * c[i] = c[i]*2^e *
- *-------------------------------------------------------------------*
-
- exp = exp_max[0];
- for (i = 1; i < 10; i++)
- {
- if (sub(exp_max[i], exp) > 0)
- {
- exp = exp_max[i];
- }
- }
- exp = add(exp, 1); // To avoid overflow
-
- p = &sf0_frac_coeff[0];
- for (i = 0; i < 5; i++) {
- tmp = sub(exp, exp_max[i]);
- L_tmp = L_deposit_h(*p++);
- L_tmp = L_shr(L_tmp, tmp);
- L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
- }
- p = &sf1_frac_coeff[0];
- for (; i < 10; i++) {
- tmp = sub(exp, exp_max[i]);
- L_tmp = L_deposit_h(*p++);
- L_tmp = L_shr(L_tmp, tmp);
- L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
- }
-
- //-------------------------------------------------------------------*
- * Codebook search: *
- * ~~~~~~~~~~~~~~~~ *
- * *
- * For each pair (g_pitch, g_fac) in the table calculate the *
- * terms t[0..4] and sum them up; the result is the mean squared *
- * error for the quantized gains from the table. The index for the *
- * minimum MSE is stored and finally used to retrieve the quantized *
- * gains *
- *-------------------------------------------------------------------
-
- // start with "infinite" MSE
- dist_min = MAX_32;
-
- p = &table_gain_MR475[0];
-
- for (i = 0; i < MR475_VQ_SIZE; i++)
- {
- // subframe 0 (and 2) calculations
- g_pitch = *p++;
- g_code = *p++;
-
- g_code = mult(g_code, sf0_gcode0);
- g2_pitch = mult(g_pitch, g_pitch);
- g2_code = mult(g_code, g_code);
- g_pit_cod = mult(g_code, g_pitch);
-
- L_tmp = Mpy_32_16( coeff[0], coeff_lo[0], g2_pitch);
- L_tmp = Mac_32_16(L_tmp, coeff[1], coeff_lo[1], g_pitch);
- L_tmp = Mac_32_16(L_tmp, coeff[2], coeff_lo[2], g2_code);
- L_tmp = Mac_32_16(L_tmp, coeff[3], coeff_lo[3], g_code);
- L_tmp = Mac_32_16(L_tmp, coeff[4], coeff_lo[4], g_pit_cod);
-
- tmp = sub (g_pitch, gp_limit);
-
- // subframe 1 (and 3) calculations
- g_pitch = *p++;
- g_code = *p++;
-
- if (tmp <= 0 && sub(g_pitch, gp_limit) <= 0)
- {
- g_code = mult(g_code, sf1_gcode0);
- g2_pitch = mult(g_pitch, g_pitch);
- g2_code = mult(g_code, g_code);
- g_pit_cod = mult(g_code, g_pitch);
-
- L_tmp = Mac_32_16(L_tmp, coeff[5], coeff_lo[5], g2_pitch);
- L_tmp = Mac_32_16(L_tmp, coeff[6], coeff_lo[6], g_pitch);
- L_tmp = Mac_32_16(L_tmp, coeff[7], coeff_lo[7], g2_code);
- L_tmp = Mac_32_16(L_tmp, coeff[8], coeff_lo[8], g_code);
- L_tmp = Mac_32_16(L_tmp, coeff[9], coeff_lo[9], g_pit_cod);
-
- // store table index if MSE for this index is lower
- than the minimum MSE seen so far
- if (L_sub(L_tmp, dist_min) < (Word32) 0)
- {
- dist_min = L_tmp;
- index = i;
- }
- }
- }
-
- *------------------------------------------------------------------*
- * read quantized gains and update MA predictor memories *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- *------------------------------------------------------------------*
-
- // for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
- // as those calculated from the "real" predictor using quantized gains
- tmp = shl(index, 2);
- MR475_quant_store_results(pred_st,
- &table_gain_MR475[tmp],
- sf0_gcode0,
- sf0_exp_gcode0,
- sf0_gain_pit,
- sf0_gain_cod);
-
- // calculate new predicted gain for subframe 1 (this time using
- // the real, quantized gains)
- gc_pred(pred_st, MR475, sf1_code_nosharp,
- &sf1_exp_gcode0, &sf1_frac_gcode0,
- &sf0_exp_gcode0, &sf0_gcode0); // last two args are dummy
- sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
-
- tmp = add (tmp, 2);
- MR475_quant_store_results(pred_st,
- &table_gain_MR475[tmp],
- sf1_gcode0,
- sf1_exp_gcode0,
- sf1_gain_pit,
- sf1_gain_cod);
-
- return index;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
- used to represent cycle count for each subroutine
- called)
- where: (cycle count variable) = cycle count for [subroutine
- name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-Word16 MR475_gain_quant( /* o : index of quantization. */
- gc_predState *pred_st, /* i/o: gain predictor state struct */
-
- /* data from subframe 0 (or 2) */
- Word16 sf0_exp_gcode0, /* i : predicted CB gain (exponent), Q0 */
- Word16 sf0_frac_gcode0, /* i : predicted CB gain (fraction), Q15 */
- Word16 sf0_exp_coeff[], /* i : energy coeff. (5), exponent part, Q0 */
- Word16 sf0_frac_coeff[], /* i : energy coeff. (5), fraction part, Q15 */
- /* (frac_coeff and exp_coeff computed in */
- /* calc_filt_energies()) */
- Word16 sf0_exp_target_en, /* i : exponent of target energy, Q0 */
- Word16 sf0_frac_target_en, /* i : fraction of target energy, Q15 */
-
- /* data from subframe 1 (or 3) */
- Word16 sf1_code_nosharp[], /* i : innovative codebook vector (L_SUBFR) */
- /* (whithout pitch sharpening) */
- Word16 sf1_exp_gcode0, /* i : predicted CB gain (exponent), Q0 */
- Word16 sf1_frac_gcode0, /* i : predicted CB gain (fraction), Q15 */
- Word16 sf1_exp_coeff[], /* i : energy coeff. (5), exponent part, Q0 */
- Word16 sf1_frac_coeff[], /* i : energy coeff. (5), fraction part, Q15 */
- /* (frac_coeff and exp_coeff computed in */
- /* calc_filt_energies()) */
- Word16 sf1_exp_target_en, /* i : exponent of target energy, Q0 */
- Word16 sf1_frac_target_en, /* i : fraction of target energy, Q15 */
-
- Word16 gp_limit, /* i : pitch gain limit */
-
- Word16 *sf0_gain_pit, /* o : Pitch gain, Q14 */
- Word16 *sf0_gain_cod, /* o : Code gain, Q1 */
-
- Word16 *sf1_gain_pit, /* o : Pitch gain, Q14 */
- Word16 *sf1_gain_cod, /* o : Code gain, Q1 */
- Flag *pOverflow /* o : overflow indicator */
-)
-{
- const Word16 *p;
- Word16 i;
- Word16 index = 0;
- Word16 tmp;
- Word16 exp;
- Word16 sf0_gcode0;
- Word16 sf1_gcode0;
- Word16 g_pitch;
- Word16 g2_pitch;
- Word16 g_code;
- Word16 g2_code;
- Word16 g_pit_cod;
- Word16 coeff[10];
- Word16 coeff_lo[10];
- Word16 exp_max[10]; /* 0..4: sf0; 5..9: sf1 */
- Word32 L_tmp;
- Word32 dist_min;
-
- /*-------------------------------------------------------------------*
- * predicted codebook gain *
- * ~~~~~~~~~~~~~~~~~~~~~~~ *
- * gc0 = 2^exp_gcode0 + 2^frac_gcode0 *
- * *
- * gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0) *
- *-------------------------------------------------------------------*/
-
- sf0_gcode0 = (Word16)(Pow2(14, sf0_frac_gcode0, pOverflow));
- sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
-
- /*
- * For each subframe, the error energy (sum) to be minimized consists
- * of five terms, t[0..4].
- *
- * t[0] = gp^2 * <y1 y1>
- * t[1] = -2*gp * <xn y1>
- * t[2] = gc^2 * <y2 y2>
- * t[3] = -2*gc * <xn y2>
- * t[4] = 2*gp*gc * <y1 y2>
- *
- */
-
- /* sf 0 */
- /* determine the scaling exponent for g_code: ec = ec0 - 11 */
- exp = sf0_exp_gcode0 - 11;
-
- /* calculate exp_max[i] = s[i]-1 */
- exp_max[0] = (sf0_exp_coeff[0] - 13);
- exp_max[1] = (sf0_exp_coeff[1] - 14);
- exp_max[2] = (sf0_exp_coeff[2] + (15 + (exp << 1)));
- exp_max[3] = (sf0_exp_coeff[3] + exp);
- exp_max[4] = (sf0_exp_coeff[4] + (1 + exp));
-
- /* sf 1 */
- /* determine the scaling exponent for g_code: ec = ec0 - 11 */
- exp = sf1_exp_gcode0 - 11;
-
- /* calculate exp_max[i] = s[i]-1 */
- exp_max[5] = (sf1_exp_coeff[0] - 13);
- exp_max[6] = (sf1_exp_coeff[1] - 14);
- exp_max[7] = (sf1_exp_coeff[2] + (15 + (exp << 1)));
- exp_max[8] = (sf1_exp_coeff[3] + exp);
- exp_max[9] = (sf1_exp_coeff[4] + (1 + exp));
-
- /*-------------------------------------------------------------------*
- * Gain search equalisation: *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~ *
- * The MSE for the two subframes is weighted differently if there *
- * is a big difference in the corresponding target energies *
- *-------------------------------------------------------------------*/
-
- /* make the target energy exponents the same by de-normalizing the
- fraction of the smaller one. This is necessary to be able to compare
- them
- */
- exp = sf0_exp_target_en - sf1_exp_target_en;
- if (exp > 0)
- {
- sf1_frac_target_en >>= exp;
- }
- else
- {
- sf0_frac_target_en >>= (-exp);
- }
-
- /* assume no change of exponents */
- exp = 0;
-
- /* test for target energy difference; set exp to +1 or -1 to scale
- * up/down coefficients for sf 1
- */
- tmp = shr_r(sf1_frac_target_en, 1, pOverflow); /* tmp = ceil(0.5*en(sf1)) */
-
- if (tmp > sf0_frac_target_en) /* tmp > en(sf0)? */
- {
- /*
- * target_energy(sf1) > 2*target_energy(sf0)
- * -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
- */
- exp = 1;
- }
- else
- {
- tmp = ((sf0_frac_target_en + 3) >> 2); /* tmp=ceil(0.25*en(sf0)) */
-
- if (tmp > sf1_frac_target_en) /* tmp > en(sf1)? */
- {
- /*
- * target_energy(sf1) < 0.25*target_energy(sf0)
- * -> scale down MSE(sf0) by 0.5 by subtracting 1 from
- * coefficients 0..4
- */
- exp = -1;
- }
- }
-
- for (i = 0; i < 5; i++)
- {
- exp_max[i] += exp;
- }
-
- /*-------------------------------------------------------------------*
- * Find maximum exponent: *
- * ~~~~~~~~~~~~~~~~~~~~~~ *
- * *
- * For the sum operation, all terms must have the same scaling; *
- * that scaling should be low enough to prevent overflow. There- *
- * fore, the maximum scale is determined and all coefficients are *
- * re-scaled: *
- * *
- * exp = max(exp_max[i]) + 1; *
- * e = exp_max[i]-exp; e <= 0! *
- * c[i] = c[i]*2^e *
- *-------------------------------------------------------------------*/
-
- exp = exp_max[0];
- for (i = 9; i > 0; i--)
- {
- if (exp_max[i] > exp)
- {
- exp = exp_max[i];
- }
- }
- exp++; /* To avoid overflow */
-
- p = &sf0_frac_coeff[0];
- for (i = 0; i < 5; i++)
- {
- tmp = (exp - exp_max[i]);
- L_tmp = ((Word32)(*p++) << 16);
- L_tmp = L_shr(L_tmp, tmp, pOverflow);
- coeff[i] = (Word16)(L_tmp >> 16);
- coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
- }
- p = &sf1_frac_coeff[0];
- for (; i < 10; i++)
- {
- tmp = exp - exp_max[i];
- L_tmp = ((Word32)(*p++) << 16);
- L_tmp = L_shr(L_tmp, tmp, pOverflow);
- coeff[i] = (Word16)(L_tmp >> 16);
- coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
- }
-
-
- /*-------------------------------------------------------------------*
- * Codebook search: *
- * ~~~~~~~~~~~~~~~~ *
- * *
- * For each pair (g_pitch, g_fac) in the table calculate the *
- * terms t[0..4] and sum them up; the result is the mean squared *
- * error for the quantized gains from the table. The index for the *
- * minimum MSE is stored and finally used to retrieve the quantized *
- * gains *
- *-------------------------------------------------------------------*/
-
- /* start with "infinite" MSE */
- dist_min = MAX_32;
-
- p = &table_gain_MR475[0];
-
- for (i = 0; i < MR475_VQ_SIZE; i++)
- {
- /* subframe 0 (and 2) calculations */
- g_pitch = *p++;
- g_code = *p++;
-
- /* Need to be there OKA */
- g_code = (Word16)(((Word32) g_code * sf0_gcode0) >> 15);
- g2_pitch = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
- g2_code = (Word16)(((Word32) g_code * g_code) >> 15);
- g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
-
-
- L_tmp = Mpy_32_16(coeff[0], coeff_lo[0], g2_pitch, pOverflow) +
- Mpy_32_16(coeff[1], coeff_lo[1], g_pitch, pOverflow) +
- Mpy_32_16(coeff[2], coeff_lo[2], g2_code, pOverflow) +
- Mpy_32_16(coeff[3], coeff_lo[3], g_code, pOverflow) +
- Mpy_32_16(coeff[4], coeff_lo[4], g_pit_cod, pOverflow);
-
- tmp = (g_pitch - gp_limit);
-
- /* subframe 1 (and 3) calculations */
- g_pitch = *p++;
- g_code = *p++;
-
- if ((tmp <= 0) && (g_pitch <= gp_limit))
- {
- g_code = (Word16)(((Word32) g_code * sf1_gcode0) >> 15);
- g2_pitch = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
- g2_code = (Word16)(((Word32) g_code * g_code) >> 15);
- g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
-
- L_tmp += (Mpy_32_16(coeff[5], coeff_lo[5], g2_pitch, pOverflow) +
- Mpy_32_16(coeff[6], coeff_lo[6], g_pitch, pOverflow) +
- Mpy_32_16(coeff[7], coeff_lo[7], g2_code, pOverflow) +
- Mpy_32_16(coeff[8], coeff_lo[8], g_code, pOverflow) +
- Mpy_32_16(coeff[9], coeff_lo[9], g_pit_cod, pOverflow));
-
- /* store table index if MSE for this index is lower
- than the minimum MSE seen so far */
- if (L_tmp < dist_min)
- {
- dist_min = L_tmp;
- index = i;
- }
- }
- }
-
- /*------------------------------------------------------------------*
- * read quantized gains and update MA predictor memories *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *
- *------------------------------------------------------------------*/
-
- /* for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
- as those calculated from the "real" predictor using quantized gains */
- tmp = index << 2;
- MR475_quant_store_results(pred_st,
- &table_gain_MR475[tmp],
- sf0_gcode0,
- sf0_exp_gcode0,
- sf0_gain_pit,
- sf0_gain_cod,
- pOverflow);
-
- /* calculate new predicted gain for subframe 1 (this time using
- the real, quantized gains) */
- gc_pred(pred_st, MR475, sf1_code_nosharp,
- &sf1_exp_gcode0, &sf1_frac_gcode0,
- &sf0_exp_gcode0, &sf0_gcode0, /* dummy args */
- pOverflow);
-
- sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
-
- tmp += 2;
- MR475_quant_store_results(
- pred_st,
- &table_gain_MR475[tmp],
- sf1_gcode0,
- sf1_exp_gcode0,
- sf1_gain_pit,
- sf1_gain_cod,
- pOverflow);
-
- return(index);
-}
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp b/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
deleted file mode 100644
index 54de1cc..0000000
--- a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
+++ /dev/null
@@ -1,37 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
- name: "amrnb_dec_fuzzer",
- host_supported: true,
- srcs: [
- "amrnb_dec_fuzzer.cpp",
- ],
- static_libs: [
- "libstagefright_amrnbdec",
- "libstagefright_amrnb_common",
- "liblog",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
diff --git a/media/libstagefright/codecs/amrwb/NOTICE b/media/libstagefright/codecs/amrwb/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/amrwb/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
- Copyright (c) 2005-2008, The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/amrwb/TEST_MAPPING b/media/libstagefright/codecs/amrwb/TEST_MAPPING
deleted file mode 100644
index 3d58ba2..0000000
--- a/media/libstagefright/codecs/amrwb/TEST_MAPPING
+++ /dev/null
@@ -1,8 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwb
-{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "AmrwbDecoderTest"}
-
- ]
-}
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp b/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
deleted file mode 100644
index 46f77e3..0000000
--- a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
+++ /dev/null
@@ -1,35 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
- name: "amrwb_dec_fuzzer",
- host_supported: true,
- srcs: [
- "amrwb_dec_fuzzer.cpp",
- ],
- static_libs: [
- "libstagefright_amrwbdec",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
diff --git a/media/libstagefright/codecs/amrwb/patent_disclaimer.txt b/media/libstagefright/codecs/amrwb/patent_disclaimer.txt
deleted file mode 100644
index b4bf11d..0000000
--- a/media/libstagefright/codecs/amrwb/patent_disclaimer.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-
-THIS IS NOT A GRANT OF PATENT RIGHTS.
-
-Google makes no representation or warranty that the codecs for which
-source code is made available hereunder are unencumbered by
-third-party patents. Those intending to use this source code in
-hardware or software products are advised that implementations of
-these codecs, including in open source software or shareware, may
-require patent licenses from the relevant patent holders.
diff --git a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp b/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
deleted file mode 100644
index e1af6d4..0000000
--- a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
+++ /dev/null
@@ -1,307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
- 3GPP TS 26.173
- ANSI-C code for the Adaptive Multi-Rate - Wideband (AMR-WB) speech codec
- Available from http://www.3gpp.org
-
-(C) 2007, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Filename: wb_syn_filt.cpp
-
- Date: 05/08/2004
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-wb_syn_filt
-
- int16 a[], (i) Q12 : a[m+1] prediction coefficients
- int16 m, (i) : order of LP filter
- int16 x[], (i) : input signal
- int16 y[], (o) : output signal
- int16 lg, (i) : size of filtering
- int16 mem[], (i/o) : memory associated with this filtering.
- int16 update, (i) : 0=no update, 1=update of memory.
- int16 y_buf[]
-
-Syn_filt_32
-
- int16 a[], (i) Q12 : a[m+1] prediction coefficients
- int16 m, (i) : order of LP filter
- int16 exc[], (i) Qnew: excitation (exc[i] >> Qnew)
- int16 Qnew, (i) : exc scaling = 0(min) to 8(max)
- int16 sig_hi[], (o) /16 : synthesis high
- int16 sig_lo[], (o) /16 : synthesis low
- int16 lg (i) : size of filtering
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- Do the synthesis filtering 1/A(z) 16 and 32-bits version
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-
-#include "pv_amr_wb_type_defs.h"
-#include "pvamrwbdecoder_mem_funcs.h"
-#include "pvamrwbdecoder_basic_op.h"
-#include "pvamrwb_math_op.h"
-#include "pvamrwbdecoder_cnst.h"
-#include "pvamrwbdecoder_acelp.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-void wb_syn_filt(
- int16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
- int16 m, /* (i) : order of LP filter */
- int16 x[], /* (i) : input signal */
- int16 y[], /* (o) : output signal */
- int16 lg, /* (i) : size of filtering */
- int16 mem[], /* (i/o) : memory associated with this filtering. */
- int16 update, /* (i) : 0=no update, 1=update of memory. */
- int16 y_buf[]
-)
-{
-
- int16 i, j;
- int32 L_tmp1;
- int32 L_tmp2;
- int32 L_tmp3;
- int32 L_tmp4;
- int16 *yy;
-
- /* copy initial filter states into synthesis buffer */
- pv_memcpy(y_buf, mem, m*sizeof(*yy));
-
- yy = &y_buf[m];
-
- /* Do the filtering. */
-
- for (i = 0; i < lg >> 2; i++)
- {
- L_tmp1 = -((int32)x[(i<<2)] << 11);
- L_tmp2 = -((int32)x[(i<<2)+1] << 11);
- L_tmp3 = -((int32)x[(i<<2)+2] << 11);
- L_tmp4 = -((int32)x[(i<<2)+3] << 11);
-
- /* a[] uses Q12 and abs(a) =< 1 */
-
- L_tmp1 = fxp_mac_16by16(yy[(i<<2) -3], a[3], L_tmp1);
- L_tmp2 = fxp_mac_16by16(yy[(i<<2) -2], a[3], L_tmp2);
- L_tmp1 = fxp_mac_16by16(yy[(i<<2) -2], a[2], L_tmp1);
- L_tmp2 = fxp_mac_16by16(yy[(i<<2) -1], a[2], L_tmp2);
- L_tmp1 = fxp_mac_16by16(yy[(i<<2) -1], a[1], L_tmp1);
-
- for (j = 4; j < m; j += 2)
- {
- L_tmp1 = fxp_mac_16by16(yy[(i<<2)-1 - j], a[j+1], L_tmp1);
- L_tmp2 = fxp_mac_16by16(yy[(i<<2) - j], a[j+1], L_tmp2);
- L_tmp1 = fxp_mac_16by16(yy[(i<<2) - j], a[j ], L_tmp1);
- L_tmp2 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j ], L_tmp2);
- L_tmp3 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j+1], L_tmp3);
- L_tmp4 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j+1], L_tmp4);
- L_tmp3 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j ], L_tmp3);
- L_tmp4 = fxp_mac_16by16(yy[(i<<2)+3 - j], a[j ], L_tmp4);
- }
-
- L_tmp1 = fxp_mac_16by16(yy[(i<<2) - j], a[j], L_tmp1);
- L_tmp2 = fxp_mac_16by16(yy[(i<<2)+1 - j], a[j], L_tmp2);
- L_tmp3 = fxp_mac_16by16(yy[(i<<2)+2 - j], a[j], L_tmp3);
- L_tmp4 = fxp_mac_16by16(yy[(i<<2)+3 - j], a[j], L_tmp4);
-
- L_tmp1 = shl_int32(L_tmp1, 4);
-
- y[(i<<2)] = yy[(i<<2)] = amr_wb_round(-L_tmp1);
-
- L_tmp2 = fxp_mac_16by16(yy[(i<<2)], a[1], L_tmp2);
-
- L_tmp2 = shl_int32(L_tmp2, 4);
-
- y[(i<<2)+1] = yy[(i<<2)+1] = amr_wb_round(-L_tmp2);
-
- L_tmp3 = fxp_mac_16by16(yy[(i<<2) - 1], a[3], L_tmp3);
- L_tmp4 = fxp_mac_16by16(yy[(i<<2)], a[3], L_tmp4);
- L_tmp3 = fxp_mac_16by16(yy[(i<<2)], a[2], L_tmp3);
- L_tmp4 = fxp_mac_16by16(yy[(i<<2) + 1], a[2], L_tmp4);
- L_tmp3 = fxp_mac_16by16(yy[(i<<2) + 1], a[1], L_tmp3);
-
- L_tmp3 = shl_int32(L_tmp3, 4);
-
- y[(i<<2)+2] = yy[(i<<2)+2] = amr_wb_round(-L_tmp3);
-
- L_tmp4 = fxp_mac_16by16(yy[(i<<2)+2], a[1], L_tmp4);
-
- L_tmp4 = shl_int32(L_tmp4, 4);
-
- y[(i<<2)+3] = yy[(i<<2)+3] = amr_wb_round(-L_tmp4);
- }
-
-
- /* Update memory if required */
-
- if (update)
- {
- pv_memcpy(mem, &y[lg - m], m*sizeof(*y));
- }
-
- return;
-}
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-void Syn_filt_32(
- int16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
- int16 m, /* (i) : order of LP filter */
- int16 exc[], /* (i) Qnew: excitation (exc[i] >> Qnew) */
- int16 Qnew, /* (i) : exc scaling = 0(min) to 8(max) */
- int16 sig_hi[], /* (o) /16 : synthesis high */
- int16 sig_lo[], /* (o) /16 : synthesis low */
- int16 lg /* (i) : size of filtering */
-)
-{
- int16 i, k, a0;
- int32 L_tmp1;
- int32 L_tmp2;
- int32 L_tmp3;
- int32 L_tmp4;
-
- a0 = 9 - Qnew; /* input / 16 and >>Qnew */
-
- /* Do the filtering. */
-
- for (i = 0; i < lg >> 1; i++)
- {
-
- L_tmp3 = 0;
- L_tmp4 = 0;
-
- L_tmp1 = fxp_mul_16by16(sig_lo[(i<<1) - 1], a[1]);
- L_tmp2 = fxp_mul_16by16(sig_hi[(i<<1) - 1], a[1]);
-
- for (k = 2; k < m; k += 2)
- {
-
- L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)-1 - k], a[k+1], L_tmp1);
- L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)-1 - k], a[k+1], L_tmp2);
- L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1) - k], a[k ], L_tmp1);
- L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k ], L_tmp2);
- L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1) - k], a[k+1], L_tmp3);
- L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k+1], L_tmp4);
- L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k ], L_tmp3);
- L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k ], L_tmp4);
- }
-
- L_tmp1 = -fxp_mac_16by16(sig_lo[(i<<1) - k], a[k], L_tmp1);
- L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k], L_tmp3);
- L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1) - k], a[k], L_tmp2);
- L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k], L_tmp4);
-
-
-
- L_tmp1 >>= 11; /* -4 : sig_lo[i] << 4 */
-
- L_tmp1 += (int32)exc[(i<<1)] << a0;
-
- L_tmp1 -= (L_tmp2 << 1);
- /* sig_hi = bit16 to bit31 of synthesis */
- L_tmp1 = shl_int32(L_tmp1, 3); /* ai in Q12 */
-
- sig_hi[(i<<1)] = (int16)(L_tmp1 >> 16);
-
- L_tmp4 = fxp_mac_16by16((int16)(L_tmp1 >> 16), a[1], L_tmp4);
-
- /* sig_lo = bit4 to bit15 of synthesis */
- /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
- sig_lo[(i<<1)] = (int16)((L_tmp1 >> 4) - ((L_tmp1 >> 16) << 12));
-
- L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)], a[1], L_tmp3);
- L_tmp3 = -L_tmp3 >> 11;
-
- L_tmp3 += (int32)exc[(i<<1)+1] << a0;
-
- L_tmp3 -= (L_tmp4 << 1);
- /* sig_hi = bit16 to bit31 of synthesis */
- L_tmp3 = shl_int32(L_tmp3, 3); /* ai in Q12 */
- sig_hi[(i<<1)+1] = (int16)(L_tmp3 >> 16);
-
- /* sig_lo = bit4 to bit15 of synthesis */
- /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
- sig_lo[(i<<1)+1] = (int16)((L_tmp3 >> 4) - (sig_hi[(i<<1)+1] << 12));
- }
-
-}
-
-
diff --git a/media/libstagefright/codecs/amrwb/test/Android.bp b/media/libstagefright/codecs/amrwb/test/Android.bp
deleted file mode 100644
index 968215a..0000000
--- a/media/libstagefright/codecs/amrwb/test/Android.bp
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
- name: "AmrwbDecoderTest",
- gtest: true,
-
- srcs: [
- "AmrwbDecoderTest.cpp",
- ],
-
- static_libs: [
- "libstagefright_amrwbdec",
- "libsndfile",
- "libaudioutils",
- ],
-
- shared_libs: [
- "liblog",
- ],
-
- cflags: [
- "-Werror",
- "-Wall",
- ],
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-}
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 64f302c..67a0f45 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -1,146 +1,3 @@
-cc_library_static {
- name: "libstagefright_amrwbenc",
- vendor_available: true,
- min_sdk_version: "29",
-
- srcs: [
- "src/autocorr.c",
- "src/az_isp.c",
- "src/bits.c",
- "src/c2t64fx.c",
- "src/c4t64fx.c",
- "src/convolve.c",
- "src/cor_h_x.c",
- "src/decim54.c",
- "src/deemph.c",
- "src/dtx.c",
- "src/g_pitch.c",
- "src/gpclip.c",
- "src/homing.c",
- "src/hp400.c",
- "src/hp50.c",
- "src/hp6k.c",
- "src/hp_wsp.c",
- "src/int_lpc.c",
- "src/isp_az.c",
- "src/isp_isf.c",
- "src/lag_wind.c",
- "src/levinson.c",
- "src/log2.c",
- "src/lp_dec2.c",
- "src/math_op.c",
- "src/oper_32b.c",
- "src/p_med_ol.c",
- "src/pit_shrp.c",
- "src/pitch_f4.c",
- "src/pred_lt4.c",
- "src/preemph.c",
- "src/q_gain2.c",
- "src/q_pulse.c",
- "src/qisf_ns.c",
- "src/qpisf_2s.c",
- "src/random.c",
- "src/residu.c",
- "src/scale.c",
- "src/stream.c",
- "src/syn_filt.c",
- "src/updt_tar.c",
- "src/util.c",
- "src/voAMRWBEnc.c",
- "src/voicefac.c",
- "src/wb_vad.c",
- "src/weight_a.c",
- "src/mem_align.c",
- ],
-
- arch: {
- arm: {
- srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
- ],
-
- cflags: [
- "-DARM",
- "-DASM_OPT",
- ],
- local_include_dirs: ["src/asm/ARMV5E"],
-
- instruction_set: "arm",
-
- neon: {
- exclude_srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
- ],
-
- srcs: [
- "src/asm/ARMV7/convolve_neon.s",
- "src/asm/ARMV7/cor_h_vec_neon.s",
- "src/asm/ARMV7/Deemph_32_neon.s",
- "src/asm/ARMV7/Dot_p_neon.s",
- "src/asm/ARMV7/Filt_6k_7k_neon.s",
- "src/asm/ARMV7/Norm_Corr_neon.s",
- "src/asm/ARMV7/pred_lt4_1_neon.s",
- "src/asm/ARMV7/residu_asm_neon.s",
- "src/asm/ARMV7/scale_sig_neon.s",
- "src/asm/ARMV7/Syn_filt_32_neon.s",
- "src/asm/ARMV7/syn_filt_neon.s",
- ],
-
- // don't actually generate neon instructions, see bug 26932980
- cflags: [
- "-DARMV7",
- "-mfpu=vfpv3",
- ],
- local_include_dirs: [
- "src/asm/ARMV5E",
- "src/asm/ARMV7",
- ],
- },
-
- },
- },
-
- include_dirs: [
- "frameworks/av/include",
- "frameworks/av/media/libstagefright/include",
- ],
-
- local_include_dirs: ["src"],
- export_include_dirs: ["inc"],
-
- shared_libs: [
- "libstagefright_enc_common",
- "liblog",
- ],
-
- cflags: ["-Werror"],
- sanitize: {
- cfi: true,
- },
-
-}
-
-//###############################################################################
cc_library_shared {
name: "libstagefright_soft_amrwbenc",
diff --git a/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/amrwbenc/NOTICE b/media/libstagefright/codecs/amrwbenc/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/amrwbenc/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
- Copyright (c) 2005-2008, The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/amrwbenc/TEST_MAPPING b/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
deleted file mode 100644
index d53d665..0000000
--- a/media/libstagefright/codecs/amrwbenc/TEST_MAPPING
+++ /dev/null
@@ -1,8 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
-{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "AmrwbEncoderTest"}
-
- ]
-}
diff --git a/media/libstagefright/codecs/amrwbenc/src/preemph.c b/media/libstagefright/codecs/amrwbenc/src/preemph.c
deleted file mode 100644
index 70c8650..0000000
--- a/media/libstagefright/codecs/amrwbenc/src/preemph.c
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- ** Copyright 2003-2010, VisualOn, Inc.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- ** http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-/***********************************************************************
-* File: preemph.c *
-* *
-* Description: Preemphasis: filtering through 1 - g z^-1 *
-* Preemph2 --> signal is multiplied by 2 *
-* *
-************************************************************************/
-
-#include "typedef.h"
-#include "basic_op.h"
-
-void Preemph(
- Word16 x[], /* (i/o) : input signal overwritten by the output */
- Word16 mu, /* (i) Q15 : preemphasis coefficient */
- Word16 lg, /* (i) : lenght of filtering */
- Word16 * mem /* (i/o) : memory (x[-1]) */
- )
-{
- Word16 temp;
- Word32 i, L_tmp;
-
- temp = x[lg - 1];
-
- for (i = lg - 1; i > 0; i--)
- {
- L_tmp = L_deposit_h(x[i]);
- L_tmp -= (x[i - 1] * mu)<<1;
- x[i] = (L_tmp + 0x8000)>>16;
- }
-
- L_tmp = L_deposit_h(x[0]);
- L_tmp -= ((*mem) * mu)<<1;
- x[0] = (L_tmp + 0x8000)>>16;
-
- *mem = temp;
-
- return;
-}
-
-
-void Preemph2(
- Word16 x[], /* (i/o) : input signal overwritten by the output */
- Word16 mu, /* (i) Q15 : preemphasis coefficient */
- Word16 lg, /* (i) : lenght of filtering */
- Word16 * mem /* (i/o) : memory (x[-1]) */
- )
-{
- Word16 temp;
- Word32 i, L_tmp;
-
- temp = x[lg - 1];
-
- for (i = (Word16) (lg - 1); i > 0; i--)
- {
- L_tmp = L_deposit_h(x[i]);
- L_tmp -= (x[i - 1] * mu)<<1; // only called with mu == 22282, so this won't overflow
- if (L_tmp > INT32_MAX / 2) {
- L_tmp = INT32_MAX / 2;
- }
- L_tmp = (L_tmp << 1);
- if (L_tmp > INT32_MAX - 0x8000) {
- L_tmp = INT32_MAX - 0x8000;
- }
- x[i] = (L_tmp + 0x8000)>>16;
- }
-
- L_tmp = L_deposit_h(x[0]);
- L_tmp -= ((*mem) * mu)<<1;
- if (L_tmp > INT32_MAX / 2) {
- L_tmp = INT32_MAX / 2;
- }
- L_tmp = (L_tmp << 1);
- if (L_tmp > INT32_MAX - 0x8000) {
- L_tmp = INT32_MAX - 0x8000;
- }
- x[0] = (L_tmp + 0x8000)>>16;
-
- *mem = temp;
-
- return;
-}
-
-
-
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
deleted file mode 100644
index fe0bdda..0000000
--- a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- ** Copyright 2003-2010, VisualOn, Inc.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- ** http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-/***********************************************************************
-* File: q_pulse.c *
-* *
-* Description: Coding and decoding of algebraic codebook *
-* *
-************************************************************************/
-
-#include <stdio.h>
-#include "typedef.h"
-#include "basic_op.h"
-#include "q_pulse.h"
-
-#define NB_POS 16 /* pos in track, mask for sign bit */
-
-Word32 quant_1p_N1( /* (o) return N+1 bits */
- Word16 pos, /* (i) position of the pulse */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 mask;
- Word32 index;
-
- mask = (1 << N) - 1; /* mask = ((1<<N)-1); */
- /*-------------------------------------------------------*
- * Quantization of 1 pulse with N+1 bits: *
- *-------------------------------------------------------*/
- index = L_deposit_l((Word16) (pos & mask));
- if ((pos & NB_POS) != 0)
- {
- index = vo_L_add(index, L_deposit_l(1 << N)); /* index += 1 << N; */
- }
- return (index);
-}
-
-
-Word32 quant_2p_2N1( /* (o) return (2*N)+1 bits */
- Word16 pos1, /* (i) position of the pulse 1 */
- Word16 pos2, /* (i) position of the pulse 2 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 mask, tmp;
- Word32 index;
- mask = (1 << N) - 1; /* mask = ((1<<N)-1); */
- /*-------------------------------------------------------*
- * Quantization of 2 pulses with 2*N+1 bits: *
- *-------------------------------------------------------*/
- if (((pos2 ^ pos1) & NB_POS) == 0)
- {
- /* sign of 1st pulse == sign of 2th pulse */
- if(pos1 <= pos2) /* ((pos1 - pos2) <= 0) */
- {
- /* index = ((pos1 & mask) << N) + (pos2 & mask); */
- index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
- } else
- {
- /* ((pos2 & mask) << N) + (pos1 & mask); */
- index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
- }
- if ((pos1 & NB_POS) != 0)
- {
- tmp = (N << 1);
- index = vo_L_add(index, (1L << tmp)); /* index += 1 << (2*N); */
- }
- } else
- {
- /* sign of 1st pulse != sign of 2th pulse */
- if (vo_sub((Word16) (pos1 & mask), (Word16) (pos2 & mask)) <= 0)
- {
- /* index = ((pos2 & mask) << N) + (pos1 & mask); */
- index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
- if ((pos2 & NB_POS) != 0)
- {
- tmp = (N << 1); /* index += 1 << (2*N); */
- index = vo_L_add(index, (1L << tmp));
- }
- } else
- {
- /* index = ((pos1 & mask) << N) + (pos2 & mask); */
- index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
- if ((pos1 & NB_POS) != 0)
- {
- tmp = (N << 1);
- index = vo_L_add(index, (1 << tmp)); /* index += 1 << (2*N); */
- }
- }
- }
- return (index);
-}
-
-
-Word32 quant_3p_3N1( /* (o) return (3*N)+1 bits */
- Word16 pos1, /* (i) position of the pulse 1 */
- Word16 pos2, /* (i) position of the pulse 2 */
- Word16 pos3, /* (i) position of the pulse 3 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 nb_pos;
- Word32 index;
-
- nb_pos =(1 <<(N - 1)); /* nb_pos = (1<<(N-1)); */
- /*-------------------------------------------------------*
- * Quantization of 3 pulses with 3*N+1 bits: *
- *-------------------------------------------------------*/
- if (((pos1 ^ pos2) & nb_pos) == 0)
- {
- index = quant_2p_2N1(pos1, pos2, sub(N, 1)); /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
- /* index += (pos1 & nb_pos) << N; */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
- /* index += quant_1p_N1(pos3, N) << (2*N); */
- index = vo_L_add(index, (quant_1p_N1(pos3, N)<<(N << 1)));
-
- } else if (((pos1 ^ pos3) & nb_pos) == 0)
- {
- index = quant_2p_2N1(pos1, pos3, sub(N, 1)); /* index = quant_2p_2N1(pos1, pos3, (N-1)); */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
- /* index += (pos1 & nb_pos) << N; */
- index = vo_L_add(index, (quant_1p_N1(pos2, N) << (N << 1)));
- /* index += quant_1p_N1(pos2, N) <<
- * (2*N); */
- } else
- {
- index = quant_2p_2N1(pos2, pos3, (N - 1)); /* index = quant_2p_2N1(pos2, pos3, (N-1)); */
- /* index += (pos2 & nb_pos) << N; */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
- /* index += quant_1p_N1(pos1, N) << (2*N); */
- index = vo_L_add(index, (quant_1p_N1(pos1, N) << (N << 1)));
- }
- return (index);
-}
-
-
-Word32 quant_4p_4N1( /* (o) return (4*N)+1 bits */
- Word16 pos1, /* (i) position of the pulse 1 */
- Word16 pos2, /* (i) position of the pulse 2 */
- Word16 pos3, /* (i) position of the pulse 3 */
- Word16 pos4, /* (i) position of the pulse 4 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 nb_pos;
- Word32 index;
-
- nb_pos = 1 << (N - 1); /* nb_pos = (1<<(N-1)); */
- /*-------------------------------------------------------*
- * Quantization of 4 pulses with 4*N+1 bits: *
- *-------------------------------------------------------*/
- if (((pos1 ^ pos2) & nb_pos) == 0)
- {
- index = quant_2p_2N1(pos1, pos2, sub(N, 1)); /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
- /* index += (pos1 & nb_pos) << N; */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
- /* index += quant_2p_2N1(pos3, pos4, N) << (2*N); */
- index = vo_L_add(index, (quant_2p_2N1(pos3, pos4, N) << (N << 1)));
- } else if (((pos1 ^ pos3) & nb_pos) == 0)
- {
- index = quant_2p_2N1(pos1, pos3, (N - 1));
- /* index += (pos1 & nb_pos) << N; */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
- /* index += quant_2p_2N1(pos2, pos4, N) << (2*N); */
- index = vo_L_add(index, (quant_2p_2N1(pos2, pos4, N) << (N << 1)));
- } else
- {
- index = quant_2p_2N1(pos2, pos3, (N - 1));
- /* index += (pos2 & nb_pos) << N; */
- index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
- /* index += quant_2p_2N1(pos1, pos4, N) << (2*N); */
- index = vo_L_add(index, (quant_2p_2N1(pos1, pos4, N) << (N << 1)));
- }
- return (index);
-}
-
-
-Word32 quant_4p_4N( /* (o) return 4*N bits */
- Word16 pos[], /* (i) position of the pulse 1..4 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 nb_pos, mask __unused, n_1, tmp;
- Word16 posA[4], posB[4];
- Word32 i, j, k, index;
-
- n_1 = (Word16) (N - 1);
- nb_pos = (1 << n_1); /* nb_pos = (1<<n_1); */
- mask = vo_sub((1 << N), 1); /* mask = ((1<<N)-1); */
-
- i = 0;
- j = 0;
- for (k = 0; k < 4; k++)
- {
- if ((pos[k] & nb_pos) == 0)
- {
- posA[i++] = pos[k];
- } else
- {
- posB[j++] = pos[k];
- }
- }
-
- switch (i)
- {
- case 0:
- tmp = vo_sub((N << 2), 3); /* index = 1 << ((4*N)-3); */
- index = (1L << tmp);
- /* index += quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1); */
- index = vo_L_add(index, quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1));
- break;
- case 1:
- /* index = quant_1p_N1(posA[0], n_1) << ((3*n_1)+1); */
- tmp = add1((Word16)((vo_L_mult(3, n_1) >> 1)), 1);
- index = L_shl(quant_1p_N1(posA[0], n_1), tmp);
- /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
- index = vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
- break;
- case 2:
- tmp = ((n_1 << 1) + 1); /* index = quant_2p_2N1(posA[0], posA[1], n_1) << ((2*n_1)+1); */
- index = L_shl(quant_2p_2N1(posA[0], posA[1], n_1), tmp);
- /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
- index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));
- break;
- case 3:
- /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << N; */
- index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), N);
- index = vo_L_add(index, quant_1p_N1(posB[0], n_1)); /* index += quant_1p_N1(posB[0], n_1); */
- break;
- case 4:
- index = quant_4p_4N1(posA[0], posA[1], posA[2], posA[3], n_1);
- break;
- default:
- index = 0;
- fprintf(stderr, "Error in function quant_4p_4N\n");
- }
- tmp = ((N << 2) - 2); /* index += (i & 3) << ((4*N)-2); */
- index = vo_L_add(index, L_shl((L_deposit_l(i) & (3L)), tmp));
-
- return (index);
-}
-
-
-
-Word32 quant_5p_5N( /* (o) return 5*N bits */
- Word16 pos[], /* (i) position of the pulse 1..5 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 nb_pos, n_1, tmp;
- Word16 posA[5], posB[5];
- Word32 i, j, k, index, tmp2;
-
- n_1 = (Word16) (N - 1);
- nb_pos = (1 << n_1); /* nb_pos = (1<<n_1); */
-
- i = 0;
- j = 0;
- for (k = 0; k < 5; k++)
- {
- if ((pos[k] & nb_pos) == 0)
- {
- posA[i++] = pos[k];
- } else
- {
- posB[j++] = pos[k];
- }
- }
-
- switch (i)
- {
- case 0:
- tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1); /* ((5*N)-1)) */
- index = L_shl(1L, tmp); /* index = 1 << ((5*N)-1); */
- tmp = add1((N << 1), 1); /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);*/
- tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
- index = vo_L_add(index, tmp2);
- index = vo_L_add(index, quant_2p_2N1(posB[3], posB[4], N)); /* index += quant_2p_2N1(posB[3], posB[4], N); */
- break;
- case 1:
- tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1); /* index = 1 << ((5*N)-1); */
- index = L_shl(1L, tmp);
- tmp = add1((N << 1), 1); /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) <<((2*N)+1); */
- tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
- index = vo_L_add(index, tmp2);
- index = vo_L_add(index, quant_2p_2N1(posB[3], posA[0], N)); /* index += quant_2p_2N1(posB[3], posA[0], N); */
- break;
- case 2:
- tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1); /* ((5*N)-1)) */
- index = L_shl(1L, tmp); /* index = 1 << ((5*N)-1); */
- tmp = add1((N << 1), 1); /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1); */
- tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
- index = vo_L_add(index, tmp2);
- index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], N)); /* index += quant_2p_2N1(posA[0], posA[1], N); */
- break;
- case 3:
- tmp = add1((N << 1), 1); /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
- index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
- index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], N)); /* index += quant_2p_2N1(posB[0], posB[1], N); */
- break;
- case 4:
- tmp = add1((N << 1), 1); /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
- index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
- index = vo_L_add(index, quant_2p_2N1(posA[3], posB[0], N)); /* index += quant_2p_2N1(posA[3], posB[0], N); */
- break;
- case 5:
- tmp = add1((N << 1), 1); /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1); */
- index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
- index = vo_L_add(index, quant_2p_2N1(posA[3], posA[4], N)); /* index += quant_2p_2N1(posA[3], posA[4], N); */
- break;
- default:
- index = 0;
- fprintf(stderr, "Error in function quant_5p_5N\n");
- }
-
- return (index);
-}
-
-
-Word32 quant_6p_6N_2( /* (o) return (6*N)-2 bits */
- Word16 pos[], /* (i) position of the pulse 1..6 */
- Word16 N) /* (i) number of bits for position */
-{
- Word16 nb_pos, n_1;
- Word16 posA[6], posB[6];
- Word32 i, j, k, index;
-
- /* !! N and n_1 are constants -> it doesn't need to be operated by Basic Operators */
- n_1 = (Word16) (N - 1);
- nb_pos = (1 << n_1); /* nb_pos = (1<<n_1); */
-
- i = 0;
- j = 0;
- for (k = 0; k < 6; k++)
- {
- if ((pos[k] & nb_pos) == 0)
- {
- posA[i++] = pos[k];
- } else
- {
- posB[j++] = pos[k];
- }
- }
-
- switch (i)
- {
- case 0:
- index = (1 << (Word16) (6 * N - 5)); /* index = 1 << ((6*N)-5); */
- index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
- index = vo_L_add(index, quant_1p_N1(posB[5], n_1)); /* index += quant_1p_N1(posB[5], n_1); */
- break;
- case 1:
- index = (1L << (Word16) (6 * N - 5)); /* index = 1 << ((6*N)-5); */
- index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
- index = vo_L_add(index, quant_1p_N1(posA[0], n_1)); /* index += quant_1p_N1(posA[0], n_1); */
- break;
- case 2:
- index = (1L << (Word16) (6 * N - 5)); /* index = 1 << ((6*N)-5); */
- /* index += quant_4p_4N(posB, n_1) << ((2*n_1)+1); */
- index = vo_L_add(index, (quant_4p_4N(posB, n_1) << (Word16) (2 * n_1 + 1)));
- index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], n_1)); /* index += quant_2p_2N1(posA[0], posA[1], n_1); */
- break;
- case 3:
- index = (quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << (Word16) (3 * n_1 + 1));
- /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((3*n_1)+1); */
- index =vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
- /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
- break;
- case 4:
- i = 2;
- index = (quant_4p_4N(posA, n_1) << (Word16) (2 * n_1 + 1)); /* index = quant_4p_4N(posA, n_1) << ((2*n_1)+1); */
- index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1)); /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
- break;
- case 5:
- i = 1;
- index = (quant_5p_5N(posA, n_1) << N); /* index = quant_5p_5N(posA, n_1) << N; */
- index = vo_L_add(index, quant_1p_N1(posB[0], n_1)); /* index += quant_1p_N1(posB[0], n_1); */
- break;
- case 6:
- i = 0;
- index = (quant_5p_5N(posA, n_1) << N); /* index = quant_5p_5N(posA, n_1) << N; */
- index = vo_L_add(index, quant_1p_N1(posA[5], n_1)); /* index += quant_1p_N1(posA[5], n_1); */
- break;
- default:
- index = 0;
- fprintf(stderr, "Error in function quant_6p_6N_2\n");
- }
- index = vo_L_add(index, ((L_deposit_l(i) & 3L) << (Word16) (6 * N - 4))); /* index += (i & 3) << ((6*N)-4); */
-
- return (index);
-}
-
-
diff --git a/media/libstagefright/codecs/amrwbenc/test/Android.bp b/media/libstagefright/codecs/amrwbenc/test/Android.bp
deleted file mode 100644
index 7042bc5..0000000
--- a/media/libstagefright/codecs/amrwbenc/test/Android.bp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
- name: "AmrwbEncoderTest",
- gtest: true,
-
- srcs: [
- "AmrwbEncoderTest.cpp",
- ],
-
- static_libs: [
- "libstagefright_enc_common",
- "libstagefright_amrwbenc",
- "libaudioutils",
- "libsndfile",
- ],
-
- shared_libs: [
- "liblog",
- ],
-
- cflags: [
- "-Werror",
- "-Wall",
- ],
-
- sanitize: {
- cfi: true,
- misc_undefined: [
- "unsigned-integer-overflow",
- "signed-integer-overflow",
- ],
- },
-}
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 260a60a..2290722 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -14,4 +14,11 @@
export_include_dirs: ["include"],
cflags: ["-Werror"],
+
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 3add006..078c8e3 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -399,29 +399,31 @@
mEncoderWriteData = true;
mEncoderReturnedEncodedData = false;
mEncoderReturnedNbBytes = 0;
- mCurrentInputTimeStamp = inHeader->nTimeStamp;
+ if (inHeader->nFilledLen) {
+ mCurrentInputTimeStamp = inHeader->nTimeStamp;
- const unsigned nbInputFrames = inHeader->nFilledLen / frameSize;
- const unsigned nbInputSamples = inHeader->nFilledLen / sampleSize;
+ const unsigned nbInputFrames = inHeader->nFilledLen / frameSize;
+ const unsigned nbInputSamples = inHeader->nFilledLen / sampleSize;
- if (inputFloat) {
- CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels);
- const float * const pcmFloat = reinterpret_cast<float *>(inHeader->pBuffer);
- memcpy_to_q8_23_from_float_with_clamp(
- mInputBufferPcm32, pcmFloat, nbInputSamples);
- } else {
- // note nbInputSamples may be 2x as large for pcm16 data.
- CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels * 2);
- const int16_t * const pcm16 = reinterpret_cast<int16_t *>(inHeader->pBuffer);
- for (unsigned i = 0; i < nbInputSamples; ++i) {
- mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ if (inputFloat) {
+ CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels);
+ const float * const pcmFloat = reinterpret_cast<float *>(inHeader->pBuffer);
+ memcpy_to_q8_23_from_float_with_clamp(
+ mInputBufferPcm32, pcmFloat, nbInputSamples);
+ } else {
+ // note nbInputSamples may be 2x as large for pcm16 data.
+ CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels * 2);
+ const int16_t * const pcm16 = reinterpret_cast<int16_t *>(inHeader->pBuffer);
+ for (unsigned i = 0; i < nbInputSamples; ++i) {
+ mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ }
}
+ ALOGV(" about to encode %u samples per channel", nbInputFrames);
+ ok = FLAC__stream_encoder_process_interleaved(
+ mFlacStreamEncoder,
+ mInputBufferPcm32,
+ nbInputFrames /*samples per channel*/ );
}
- ALOGV(" about to encode %u samples per channel", nbInputFrames);
- ok = FLAC__stream_encoder_process_interleaved(
- mFlacStreamEncoder,
- mInputBufferPcm32,
- nbInputFrames /*samples per channel*/ );
inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
@@ -434,7 +436,15 @@
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
if (ok) {
- if (mEncoderReturnedEncodedData && (mEncoderReturnedNbBytes != 0)) {
+ ALOGV("encoded %d, bytes %lld, eos %d", mEncoderReturnedEncodedData,
+ (long long )mEncoderReturnedNbBytes, mSawInputEOS);
+ if (mSawInputEOS && !mEncoderReturnedEncodedData) {
+ ALOGV("finishing encoder");
+ mSentOutputEOS = true;
+ FLAC__stream_encoder_finish(mFlacStreamEncoder);
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ }
+ if (mSawInputEOS || mEncoderReturnedEncodedData) {
ALOGV(" dequeueing buffer on output port after writing data");
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
@@ -442,23 +452,6 @@
notifyFillBufferDone(outHeader);
outHeader = NULL;
mEncoderReturnedEncodedData = false;
- } else {
- ALOGV(" encoder process_interleaved returned without data to write");
- if (mSawInputEOS) {
- ALOGV("finishing encoder");
- mSentOutputEOS = true;
- FLAC__stream_encoder_finish(mFlacStreamEncoder);
- if (mEncoderReturnedEncodedData && (mEncoderReturnedNbBytes != 0)) {
- ALOGV(" dequeueing residual buffer on output port after writing data");
- outInfo->mOwnedByUs = false;
- outQueue.erase(outQueue.begin());
- outInfo = NULL;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
- mEncoderReturnedEncodedData = false;
- }
- }
}
} else {
ALOGE(" error encountered during encoding");
diff --git a/media/libstagefright/codecs/m4v_h263/TEST_MAPPING b/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
deleted file mode 100644
index 6b42847..0000000
--- a/media/libstagefright/codecs/m4v_h263/TEST_MAPPING
+++ /dev/null
@@ -1,16 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
-{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
-
- // the decoder reports something bad about an unexpected newline in the *config file
- // and the config file looks like the AndroidTest.xml file that we put in there.
- // I don't get this from the Encoder -- and I don't see any substantive difference
- // between decode and encode AndroidTest.xml files -- except that encode does NOT
- // finish with a newline.
- // strange.
- // { "name": "Mpeg4H263DecoderTest"},
- // { "name": "Mpeg4H263EncoderTest"}
-
- ]
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index f278f92..e5cccd8 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -1,59 +1,3 @@
-cc_library_static {
- name: "libstagefright_m4vh263dec",
- vendor_available: true,
- host_supported: true,
- shared_libs: ["liblog"],
-
- srcs: [
- "src/bitstream.cpp",
- "src/block_idct.cpp",
- "src/cal_dc_scaler.cpp",
- "src/combined_decode.cpp",
- "src/conceal.cpp",
- "src/datapart_decode.cpp",
- "src/dcac_prediction.cpp",
- "src/dec_pred_intra_dc.cpp",
- "src/get_pred_adv_b_add.cpp",
- "src/get_pred_outside.cpp",
- "src/idct.cpp",
- "src/idct_vca.cpp",
- "src/mb_motion_comp.cpp",
- "src/mb_utils.cpp",
- "src/packet_util.cpp",
- "src/post_filter.cpp",
- "src/pvdec_api.cpp",
- "src/scaling_tab.cpp",
- "src/vlc_decode.cpp",
- "src/vlc_dequant.cpp",
- "src/vlc_tab.cpp",
- "src/vop.cpp",
- "src/zigzag_tab.cpp",
- ],
-
- local_include_dirs: ["src"],
- export_include_dirs: ["include"],
-
- cflags: [
- "-Werror",
- ],
-
- version_script: "exports.lds",
-
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
-
-//###############################################################################
cc_library_shared {
name: "libstagefright_soft_mpeg4dec",
@@ -61,8 +5,6 @@
srcs: ["SoftMPEG4.cpp"],
- local_include_dirs: ["src"],
-
cflags: [
],
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.cpp
deleted file mode 100644
index 37250f3..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.cpp
+++ /dev/null
@@ -1,1001 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "bitstream.h"
-#include "mp4dec_lib.h"
-
-
-#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
-/* to mask the n least significant bits of an integer */
-static const uint32 msk[33] =
-{
- 0x00000000, 0x00000001, 0x00000003, 0x00000007,
- 0x0000000f, 0x0000001f, 0x0000003f, 0x0000007f,
- 0x000000ff, 0x000001ff, 0x000003ff, 0x000007ff,
- 0x00000fff, 0x00001fff, 0x00003fff, 0x00007fff,
- 0x0000ffff, 0x0001ffff, 0x0003ffff, 0x0007ffff,
- 0x000fffff, 0x001fffff, 0x003fffff, 0x007fffff,
- 0x00ffffff, 0x01ffffff, 0x03ffffff, 0x07ffffff,
- 0x0fffffff, 0x1fffffff, 0x3fffffff, 0x7fffffff,
- 0xffffffff
-};
-
-
-/* ======================================================================== */
-/* Function : BitstreamFillCache() */
-/* Date : 08/29/2000 */
-/* Purpose : Read more bitstream data into buffer & the 24-byte cache. */
-/* This function is different from BitstreamFillBuffer in */
-/* that the buffer is the frame-based buffer provided by */
-/* the application. */
-/* In/out : */
-/* Return : PV_SUCCESS if successed, PV_FAIL if failed. */
-/* Modified : 4/16/01 : removed return of PV_END_OF_BUFFER */
-/* ======================================================================== */
-PV_STATUS BitstreamFillCache(BitstreamDecVideo *stream)
-{
- uint8 *bitstreamBuffer = stream->bitstreamBuffer;
- uint8 *v;
- int num_bits, i;
-
- stream->curr_word |= (stream->next_word >> stream->incnt); // stream->incnt cannot be 32
- stream->next_word <<= (31 - stream->incnt);
- stream->next_word <<= 1;
- num_bits = stream->incnt_next + stream->incnt;
- if (num_bits >= 32)
- {
- stream->incnt_next -= (32 - stream->incnt);
- stream->incnt = 32;
- return PV_SUCCESS;
- }
- /* this check can be removed if there is additional extra 4 bytes at the end of the bitstream */
- v = bitstreamBuffer + stream->read_point;
-
- if (stream->read_point > stream->data_end_pos - 4)
- {
- if (stream->data_end_pos <= stream->read_point)
- {
- stream->incnt = num_bits;
- stream->incnt_next = 0;
- return PV_SUCCESS;
- }
-
- stream->next_word = 0;
-
- for (i = 0; i < stream->data_end_pos - stream->read_point; i++)
- {
- stream->next_word |= (v[i] << ((3 - i) << 3));
- }
-
- stream->read_point = stream->data_end_pos;
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
-
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- num_bits = i << 3;
- stream->incnt += stream->incnt_next;
- stream->incnt_next = num_bits - (32 - stream->incnt);
- if (stream->incnt_next < 0)
- {
- stream->incnt += num_bits;
- stream->incnt_next = 0;
- }
- else
- {
- stream->incnt = 32;
- }
- return PV_SUCCESS;
- }
-
- stream->next_word = ((uint32)v[0] << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
- stream->read_point += 4;
-
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- stream->incnt_next += stream->incnt;
- stream->incnt = 32;
- return PV_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/* Function : BitstreamReset() */
-/* Date : 08/29/2000 */
-/* Purpose : Initialize the bitstream buffer for frame-based decoding. */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-void BitstreamReset(BitstreamDecVideo *stream, uint8 *buffer, int32 buffer_size)
-{
- /* set up frame-based bitstream buffer */
- oscl_memset(stream, 0, sizeof(BitstreamDecVideo));
- stream->data_end_pos = buffer_size;
- stream->bitstreamBuffer = buffer;
-}
-
-
-/* ======================================================================== */
-/* Function : BitstreamOpen() */
-/* Purpose : Initialize the bitstream data structure. */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-int BitstreamOpen(BitstreamDecVideo *stream, int)
-{
- int buffer_size = 0;
- /* set up linear bitstream buffer */
-// stream->currentBytePos = 0;
- stream->data_end_pos = 0;
-
- stream->incnt = 0;
- stream->incnt_next = 0;
- stream->bitcnt = 0;
- stream->curr_word = stream->next_word = 0;
- stream->read_point = stream->data_end_pos;
- return buffer_size;
-}
-
-
-/* ======================================================================== */
-/* Function : BitstreamClose() */
-/* Purpose : Cleanup the bitstream data structure. */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-void BitstreamClose(BitstreamDecVideo *)
-{
- return;
-}
-
-
-/***********************************************************CommentBegin******
-*
-* -- BitstreamShowBits32HC
-* Shows 32 bits
-***********************************************************CommentEnd********/
-
-PV_STATUS BitstreamShowBits32HC(BitstreamDecVideo *stream, uint32 *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < 32)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word;
- return status;
-}
-
-/***********************************************************CommentBegin******
-*
-* -- BitstreamShowBits32
-* Shows upto and including 31 bits
-***********************************************************CommentEnd********/
-PV_STATUS BitstreamShowBits32(BitstreamDecVideo *stream, int nbits, uint32 *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> (32 - nbits);
- return status;
-}
-
-
-#ifndef PV_BS_INLINE
-/*========================================================================= */
-/* Function: BitstreamShowBits16() */
-/* Date: 12/18/2000 */
-/* Purpose: To see the next "nbits"(nbits<=16) bitstream bits */
-/* without advancing the read pointer */
-/* */
-/* =========================================================================*/
-PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
-
- *code = stream->curr_word >> (32 - nbits);
- return status;
-}
-
-
-/*========================================================================= */
-/* Function: BitstreamShow15Bits() */
-/* Date: 01/23/2001 */
-/* Purpose: To see the next 15 bitstream bits */
-/* without advancing the read pointer */
-/* */
-/* =========================================================================*/
-PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < 15)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 17;
- return status;
-}
-/*========================================================================= */
-/* Function: BitstreamShow13Bits */
-/* Date: 050923 */
-/* Purpose: Faciliate and speed up showing 13 bit from bitstream */
-/* used in VlcTCOEFF decoding */
-/* Modified: */
-/* =========================================================================*/
-PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < 13)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 19;
- return status;
-}
-
-uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits)
-{
- uint code;
- PV_STATUS status;
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- code = stream->curr_word >> (32 - nbits);
- PV_BitstreamFlushBits(stream, nbits);
- return code;
-}
-
-
-uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
-{
- PV_STATUS status = PV_SUCCESS;
- uint code;
-
-
- if (stream->incnt < 1)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- code = stream->curr_word >> 31;
- PV_BitstreamFlushBits(stream, 1);
-
- return code;
-}
-
-#endif
-
-/* ======================================================================== */
-/* Function : BitstreamReadBits16() */
-/* Purpose : Read bits (nbits <=16) from bitstream buffer. */
-/* In/out : */
-/* Return : */
-/* ======================================================================== */
-uint BitstreamReadBits16(BitstreamDecVideo *stream, int nbits)
-{
- uint code;
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- BitstreamFillCache(stream);
- }
- code = stream->curr_word >> (32 - nbits);
- PV_BitstreamFlushBits(stream, nbits);
- return code;
-}
-
-/* ======================================================================== */
-/* Function : BitstreamRead1Bits() */
-/* Date : 10/23/2000 */
-/* Purpose : Faciliate and speed up reading 1 bit from bitstream. */
-/* In/out : */
-/* Return : */
-/* ======================================================================== */
-
-uint BitstreamRead1Bits(BitstreamDecVideo *stream)
-{
- uint code;
-
- if (stream->incnt < 1)
- {
- /* frame-based decoding */
- BitstreamFillCache(stream);
- }
- code = stream->curr_word >> 31;
- PV_BitstreamFlushBits(stream, 1);
-
- return code;
-}
-
-/* ======================================================================== */
-/* Function : PV_BitstreamFlushBitsCheck() */
-/* Purpose : Flush nbits bits from bitstream buffer. Check for cache */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-PV_STATUS PV_BitstreamFlushBitsCheck(BitstreamDecVideo *stream, int nbits)
-{
- PV_STATUS status = PV_SUCCESS;
-
- stream->bitcnt += nbits;
- stream->incnt -= nbits;
- if (stream->incnt < 0)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
-
- if (stream->incnt < 0)
- {
- stream->bitcnt += stream->incnt;
- stream->incnt = 0;
- }
- }
- stream->curr_word <<= nbits;
- return status;
-}
-
-/* ======================================================================== */
-/* Function : BitstreamReadBits32() */
-/* Purpose : Read bits from bitstream buffer. */
-/* In/out : */
-/* Return : */
-/* ======================================================================== */
-uint32 BitstreamReadBits32(BitstreamDecVideo *stream, int nbits)
-{
- uint32 code;
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- BitstreamFillCache(stream);
- }
- code = stream->curr_word >> (32 - nbits);
- PV_BitstreamFlushBits(stream, nbits);
- return code;
-}
-
-uint32 BitstreamReadBits32HC(BitstreamDecVideo *stream)
-{
- uint32 code;
-
- BitstreamShowBits32HC(stream, &code);
- stream->bitcnt += 32;
- stream->incnt = 0;
- stream->curr_word = 0;
- return code;
-}
-
-/* ======================================================================== */
-/* Function : BitstreamCheckEndBuffer() */
-/* Date : 03/30/2001 */
-/* Purpose : Check to see if we are at the end of buffer */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-PV_STATUS BitstreamCheckEndBuffer(BitstreamDecVideo *stream)
-{
- if (stream->read_point >= stream->data_end_pos && stream->incnt <= 0) return PV_END_OF_VOP;
- return PV_SUCCESS;
-}
-
-
-PV_STATUS PV_BitstreamShowBitsByteAlign(BitstreamDecVideo *stream, int nbits, uint32 *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- int n_stuffed;
-
- n_stuffed = 8 - (stream->bitcnt & 0x7); /* 07/05/01 */
-
- if (stream->incnt < (nbits + n_stuffed))
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
-
- *code = (stream->curr_word << n_stuffed) >> (32 - nbits);
- return status;
-}
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-PV_STATUS PV_BitstreamShowBitsByteAlignNoForceStuffing(BitstreamDecVideo *stream, int nbits, uint32 *code)
-{
- PV_STATUS status = PV_SUCCESS;
-
- int n_stuffed;
-
- n_stuffed = (8 - (stream->bitcnt & 0x7)) & 7;
-
- if (stream->incnt < (nbits + n_stuffed))
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
-
- *code = (stream->curr_word << n_stuffed) >> (32 - nbits);
- return status;
-}
-#endif
-
-PV_STATUS PV_BitstreamByteAlign(BitstreamDecVideo *stream)
-{
- PV_STATUS status = PV_SUCCESS;
- int n_stuffed;
-
- n_stuffed = 8 - (stream->bitcnt & 0x7); /* 07/05/01 */
-
- /* We have to make sure we have enough bits in the cache. 08/15/2000 */
- if (stream->incnt < n_stuffed)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
-
-
- stream->bitcnt += n_stuffed;
- stream->incnt -= n_stuffed;
- stream->curr_word <<= n_stuffed;
- if (stream->incnt < 0)
- {
- stream->bitcnt += stream->incnt;
- stream->incnt = 0;
- }
- return status;
-}
-
-
-PV_STATUS BitstreamByteAlignNoForceStuffing(BitstreamDecVideo *stream)
-{
- uint n_stuffed;
-
- n_stuffed = (8 - (stream->bitcnt & 0x7)) & 0x7; /* 07/05/01 */
-
- stream->bitcnt += n_stuffed;
- stream->incnt -= n_stuffed;
-
- if (stream->incnt < 0)
- {
- stream->bitcnt += stream->incnt;
- stream->incnt = 0;
- }
- stream->curr_word <<= n_stuffed;
- return PV_SUCCESS;
-}
-
-
-/* ==================================================================== */
-/* Function : getPointer() */
-/* Date : 10/98 */
-/* Purpose : get current position of file pointer */
-/* In/out : */
-/* Return : */
-/* ==================================================================== */
-int32 getPointer(BitstreamDecVideo *stream)
-{
- return stream->bitcnt;
-}
-
-
-
-
-/* ====================================================================== /
-Function : movePointerTo()
-Date : 05/14/2004
-Purpose : move bitstream pointer to a desired position
-In/out :
-Return :
-Modified :
-/ ====================================================================== */
-PV_STATUS movePointerTo(BitstreamDecVideo *stream, int32 pos)
-{
- int32 byte_pos;
- if (pos < 0)
- {
- pos = 0;
- }
-
- byte_pos = pos >> 3;
-
- if (byte_pos > stream->data_end_pos)
- {
- byte_pos = stream->data_end_pos;
- }
-
- stream->read_point = byte_pos & -4;
- stream->bitcnt = stream->read_point << 3;;
- stream->curr_word = 0;
- stream->next_word = 0;
- stream->incnt = 0;
- stream->incnt_next = 0;
- BitstreamFillCache(stream);
- PV_BitstreamFlushBits(stream, ((pos & 0x7) + ((byte_pos & 0x3) << 3)));
- return PV_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/* Function : validStuffing() */
-/* Date : 04/11/2000 */
-/* Purpose : Check whether we have valid stuffing at current position. */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : 12/18/2000 : changed the pattern type to uint */
-/* 04/01/2001 : removed PV_END_OF_BUFFER */
-/* ======================================================================== */
-Bool validStuffing(BitstreamDecVideo *stream)
-{
- uint n_stuffed;
- uint pattern;
-
-
- n_stuffed = 8 - (stream->bitcnt & 0x7);
- BitstreamShowBits16(stream, n_stuffed, &pattern);
- if (pattern == msk[n_stuffed-1]) return PV_TRUE;
- return PV_FALSE;
-}
-#ifdef PV_ANNEX_IJKT_SUPPORT
-Bool validStuffing_h263(BitstreamDecVideo *stream)
-{
- uint n_stuffed;
- uint pattern;
-
-
- n_stuffed = (8 - (stream->bitcnt & 0x7)) & 7; // stream->incnt % 8
- if (n_stuffed == 0)
- {
- return PV_TRUE;
- }
- BitstreamShowBits16(stream, n_stuffed, &pattern);
- if (pattern == 0) return PV_TRUE;
- return PV_FALSE;
-}
-#endif
-
-
-/* ======================================================================== */
-/* Function : PVSearchNextH263Frame() */
-/* Date : 04/08/2005 */
-/* Purpose : search for 0x00 0x00 0x80 */
-/* In/out : */
-/* Return : PV_SUCCESS if succeeded or PV_END_OF_VOP if failed */
-/* Modified : */
-/* ======================================================================== */
-PV_STATUS PVSearchNextH263Frame(BitstreamDecVideo *stream)
-{
- PV_STATUS status = PV_SUCCESS;
- uint8 *ptr;
- int32 i;
- int32 initial_byte_aligned_position = (stream->bitcnt + 7) >> 3;
-
- ptr = stream->bitstreamBuffer + initial_byte_aligned_position;
-
- i = PVLocateH263FrameHeader(ptr, stream->data_end_pos - initial_byte_aligned_position);
- if (stream->data_end_pos <= initial_byte_aligned_position + i)
- {
- status = PV_END_OF_VOP;
- }
- (void)movePointerTo(stream, ((i + initial_byte_aligned_position) << 3)); /* ptr + i */
- return status;
-}
-
-
-/* ======================================================================== */
-/* Function : PVSearchNextM4VFrame() */
-/* Date : 04/08/2005 */
-/* Purpose : search for 0x00 0x00 0x01 and move the pointer to the */
-/* beginning of the start code */
-/* In/out : */
-/* Return : PV_SUCCESS if succeeded or PV_END_OF_VOP if failed */
-/* Modified : */
-/* ======================================================================== */
-
-PV_STATUS PVSearchNextM4VFrame(BitstreamDecVideo *stream)
-{
- PV_STATUS status = PV_SUCCESS;
- uint8 *ptr;
- int32 i;
- int32 initial_byte_aligned_position = (stream->bitcnt + 7) >> 3;
-
- ptr = stream->bitstreamBuffer + initial_byte_aligned_position;
-
- i = PVLocateFrameHeader(ptr, stream->data_end_pos - initial_byte_aligned_position);
- if (stream->data_end_pos <= initial_byte_aligned_position + i)
- {
- status = PV_END_OF_VOP;
- }
- (void)movePointerTo(stream, ((i + initial_byte_aligned_position) << 3)); /* ptr + i */
- return status;
-}
-
-
-
-void PVLocateM4VFrameBoundary(BitstreamDecVideo *stream)
-{
- uint8 *ptr;
- int32 byte_pos = (stream->bitcnt >> 3);
-
- stream->searched_frame_boundary = 1;
- ptr = stream->bitstreamBuffer + byte_pos;
-
- stream->data_end_pos = PVLocateFrameHeader(ptr, (int32)stream->data_end_pos - byte_pos) + byte_pos;
-}
-
-void PVLocateH263FrameBoundary(BitstreamDecVideo *stream)
-{
- uint8 *ptr;
- int32 byte_pos = (stream->bitcnt >> 3);
-
- stream->searched_frame_boundary = 1;
- ptr = stream->bitstreamBuffer + byte_pos;
-
- stream->data_end_pos = PVLocateH263FrameHeader(ptr, (int32)stream->data_end_pos - byte_pos) + byte_pos;
-}
-
-/* ======================================================================== */
-/* Function : quickSearchVideoPacketHeader() */
-/* Date : 05/08/2000 */
-/* Purpose : Quick search for the next video packet header */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* ======================================================================== */
-PV_STATUS quickSearchVideoPacketHeader(BitstreamDecVideo *stream, int marker_length)
-{
- PV_STATUS status = PV_SUCCESS;
- uint32 tmpvar;
-
-
- if (stream->searched_frame_boundary == 0)
- {
- PVLocateM4VFrameBoundary(stream);
- }
-
- do
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP) break;
- PV_BitstreamShowBitsByteAlign(stream, marker_length, &tmpvar);
- if (tmpvar == RESYNC_MARKER) break;
- PV_BitstreamFlushBits(stream, 8);
- }
- while (status == PV_SUCCESS);
-
- return status;
-}
-#ifdef PV_ANNEX_IJKT_SUPPORT
-PV_STATUS quickSearchH263SliceHeader(BitstreamDecVideo *stream)
-{
- PV_STATUS status = PV_SUCCESS;
- uint32 tmpvar;
-
-
- if (stream->searched_frame_boundary == 0)
- {
- PVLocateH263FrameBoundary(stream);
- }
-
- do
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP) break;
- PV_BitstreamShowBitsByteAlignNoForceStuffing(stream, 17, &tmpvar);
- if (tmpvar == RESYNC_MARKER) break;
- PV_BitstreamFlushBits(stream, 8);
- }
- while (status == PV_SUCCESS);
-
- return status;
-}
-#endif
-/* ======================================================================== */
-/* The following functions are for Error Concealment. */
-/* ======================================================================== */
-
-/****************************************************/
-// 01/22/99 Quick search of Resync Marker
-// (actually the first part of it, i.e. 16 0's and a 1.
-
-/* We are not using the fastest algorithm possible. What this function does is
-to locate 11 consecutive 0's and then check if the 5 bits before them and
-the 1 bit after them are all 1's.
-*/
-
-// Table used for quick search of markers. Gives the last `1' in
-// 4 bits. The MSB is bit #1, the LSB is bit #4.
-const int lastOne[] =
-{
- 0, 4, 3, 4, 2, 4, 3, 4,
- 1, 4, 3, 4, 2, 4, 3, 4
-};
-
-// Table used for quick search of markers. Gives the last `0' in
-// 4 bits. The MSB is bit #1, the LSB is bit #4.
-/*const int lastZero[]=
-{
- 4, 3, 4, 2, 4, 3, 4, 1,
- 4, 3, 4, 2, 4, 3, 4, 0
-};
-*/
-// Table used for quick search of markers. Gives the first `0' in
-// 4 bits. The MSB is bit #1, the LSB is bit #4.
-const int firstZero[] =
-{
- 1, 1, 1, 1, 1, 1, 1, 1,
- 2, 2, 2, 2, 3, 3, 4, 0
-};
-
-// Table used for quick search of markers. Gives the first `1' in
-// 4 bits. The MSB is bit #1, the LSB is bit #4.
-const int firstOne[] =
-{
- 0, 4, 3, 3, 2, 2, 2, 2,
- 1, 1, 1, 1, 1, 1, 1, 1
-};
-
-
-/* ======================================================================== */
-/* Function : quickSearchMarkers() */
-/* Date : 01/25/99 */
-/* Purpose : Quick search for Motion marker */
-/* In/out : */
-/* Return : Boolean true of false */
-/* Modified : 12/18/2000 : 32-bit version */
-/* ======================================================================== */
-PV_STATUS quickSearchMotionMarker(BitstreamDecVideo *stream)
-// MM: (11111000000000001)
-{
- PV_STATUS status;
- uint32 tmpvar, tmpvar2;
-
- if (stream->searched_frame_boundary == 0)
- {
- PVLocateM4VFrameBoundary(stream);
- }
-
- while (TRUE)
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
-
- BitstreamShowBits32(stream, 17, &tmpvar);
- if (!tmpvar) return PV_FAIL;
-
- if (tmpvar & 1) // Check if the 17th bit from the curr bit pos is a '1'
- {
- if (tmpvar == MOTION_MARKER_COMB)
- {
- return PV_SUCCESS; // Found
- }
- else
- {
- tmpvar >>= 1;
- tmpvar &= 0xF;
- PV_BitstreamFlushBits(stream, (int)(12 + firstZero[tmpvar]));
- }
- }
- else
- {
- // 01/25/99 Get the first 16 bits
- tmpvar >>= 1;
- tmpvar2 = tmpvar & 0xF;
-
- // 01/26/99 Check bits #13 ~ #16
- if (tmpvar2)
- {
- PV_BitstreamFlushBits(stream, (int)(7 + lastOne[tmpvar2]));
- }
- else
- {
- tmpvar >>= 4;
- tmpvar2 = tmpvar & 0xF;
-
- // 01/26/99 Check bits #9 ~ #12
- if (tmpvar2)
- {
- PV_BitstreamFlushBits(stream, (int)(3 + lastOne[tmpvar2]));
- }
- else
- {
- tmpvar >>= 4;
- tmpvar2 = tmpvar & 0xF;
-
- // 01/26/99 Check bits #5 ~ #8
- // We don't need to check further
- // for the first 5 bits should be all 1's
- if (lastOne[tmpvar2] < 2)
- {
- /* we already have too many consecutive 0's. */
- /* Go directly pass the last of the 17 bits. */
- PV_BitstreamFlushBits(stream, 17);
- }
- else
- {
- PV_BitstreamFlushBits(stream, (int)(lastOne[tmpvar2] - 1));
- }
- }
- }
- }
-
- }
-}
-
-/* ======================================================================== */
-/* Function : quickSearchDCM() */
-/* Date : 01/22/99 */
-/* Purpose : Quick search for DC Marker */
-/* We are not using the fastest algorithm possible. What this */
-/* function does is to locate 11 consecutive 0's and then */
-/* check if the 7 bits before them and the 1 bit after them */
-/* are correct. (actually the first part of it, i.e. 16 0's */
-/* and a 1. */
-/* In/out : */
-/* Return : Boolean true of false */
-/* Modified : 12/18/2000 : 32-bit version */
-/* ======================================================================== */
-PV_STATUS quickSearchDCM(BitstreamDecVideo *stream)
-// DCM: (110 1011 0000 0000 0001)
-{
- PV_STATUS status;
- uint32 tmpvar, tmpvar2;
-
- if (stream->searched_frame_boundary == 0)
- {
- PVLocateM4VFrameBoundary(stream);
- }
-
- while (TRUE)
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
- BitstreamShowBits32(stream, 19, &tmpvar);
-
- if (tmpvar & 1) // Check if the 17th bit from the curr bit pos is a '1'
- {
- if (tmpvar == DC_MARKER)
- {
- return PV_SUCCESS; // Found
- }
- else
- {
- // 01/25/99 We treat the last of the 19 bits as its 7th bit (which is
- // also a `1'
- PV_BitstreamFlushBits(stream, 12);
- }
- }
- else
- {
- tmpvar >>= 1;
- tmpvar2 = tmpvar & 0xF;
-
- if (tmpvar2)
- {
- PV_BitstreamFlushBits(stream, (int)(7 + lastOne[tmpvar2]));
- }
- else
- {
- tmpvar >>= 4;
- tmpvar2 = tmpvar & 0xF;
- if (tmpvar2)
- {
- PV_BitstreamFlushBits(stream, (int)(3 + lastOne[tmpvar2]));
- }
- else
- {
- tmpvar >>= 4;
- tmpvar2 = tmpvar & 0xF;
- if (lastOne[tmpvar2] < 2)
- {
- /* we already have too many consecutive 0's. */
- /* Go directly pass the last of the 17 bits. */
- PV_BitstreamFlushBits(stream, 19);
- }
- else
- {
- PV_BitstreamFlushBits(stream, (int)(lastOne[tmpvar2] - 1));
- }
- }
- }
- }
- }
-}
-
-/* ======================================================================== */
-/* Function : quickSearchGOBHeader() 0000 0000 0000 0000 1 */
-/* Date : 07/06/01 */
-/* Purpose : Quick search of GOBHeader (not byte aligned) */
-/* In/out : */
-/* Return : Integer value indicates type of marker found */
-/* Modified : */
-/* ======================================================================== */
-PV_STATUS quickSearchGOBHeader(BitstreamDecVideo *stream)
-{
- PV_STATUS status;
- int byte0, byte1, byte2, shift, tmpvar;
-
- BitstreamByteAlignNoForceStuffing(stream);
-
- if (stream->searched_frame_boundary == 0)
- {
- PVLocateH263FrameBoundary(stream);
- }
-
- while (TRUE)
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP) return PV_END_OF_VOP;
-
- if (stream->incnt < 24)
- {
- status = BitstreamFillCache(stream);
- }
-
-
- byte1 = (stream->curr_word << 8) >> 24;
- if (byte1 == 0)
- {
- byte2 = (stream->curr_word << 16) >> 24;
- if (byte2)
- {
- tmpvar = byte2 >> 4;
-
- if (tmpvar)
- {
- shift = 9 - firstOne[tmpvar];
- }
- else
- {
- shift = 5 - firstOne[byte2];
- }
- byte0 = stream->curr_word >> 24;
- if ((byte0 & msk[shift]) == 0)
- {
- PV_BitstreamFlushBits(stream, 8 - shift);
- return PV_SUCCESS;
- }
- PV_BitstreamFlushBits(stream, 8); /* third_byte is not zero */
- }
- }
-
- PV_BitstreamFlushBits(stream, 8);
- }
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.h b/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.h
deleted file mode 100644
index d52fa87..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.h
+++ /dev/null
@@ -1,174 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#ifndef _BITSTREAM_D_H_
-#define _BITSTREAM_D_H_
-
-#include "mp4dec_lib.h" /* video decoder function prototypes */
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif /* __cplusplus */
-
-#define PV_BS_INLINE /* support inline bitstream functions */
-
-#define PV_BitstreamFlushBits(A,B) {(A)->bitcnt += (B); (A)->incnt -= (B); (A)->curr_word <<= (B);}
-
- PV_STATUS BitstreamFillBuffer(BitstreamDecVideo *stream);
- PV_STATUS BitstreamFillCache(BitstreamDecVideo *stream);
- void BitstreamReset(BitstreamDecVideo *stream, uint8 *buffer, int32 buffer_size);
- int BitstreamOpen(BitstreamDecVideo *stream, int layer);
- void BitstreamClose(BitstreamDecVideo *stream);
-
- PV_STATUS BitstreamShowBits32(BitstreamDecVideo *stream, int nbits, uint32 *code);
- uint32 BitstreamReadBits32(BitstreamDecVideo *stream, int nbits);
-
- uint BitstreamReadBits16(BitstreamDecVideo *stream, int nbits);
- uint BitstreamRead1Bits(BitstreamDecVideo *stream);
-#ifndef PV_BS_INLINE
- PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code);
- PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code);
- PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code);
- uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits);
- uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream);
-#else
- __inline PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint *code)
- {
- PV_STATUS status = PV_SUCCESS;
-
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
-
- *code = stream->curr_word >> (32 - nbits);
- return status;
- }
-
-
-
- /* =========================================================================*/
- __inline PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code)
- {
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < 15)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 17;
- return status;
- }
-
-
- __inline PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code)
- {
- PV_STATUS status = PV_SUCCESS;
-
- if (stream->incnt < 13)
- {
- /* frame-based decoding */
- status = BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 19;
- return status;
- }
- __inline uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits)
- {
- uint code;
-
- if (stream->incnt < nbits)
- {
- /* frame-based decoding */
- BitstreamFillCache(stream);
- }
- code = stream->curr_word >> (32 - nbits);
- PV_BitstreamFlushBits(stream, nbits);
- return code;
- }
-
-
- __inline uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
- {
- uint code;
-
- if (stream->incnt < 1)
- {
- /* frame-based decoding */
- BitstreamFillCache(stream);
- }
- code = stream->curr_word >> 31;
- PV_BitstreamFlushBits(stream, 1);
-
- return code;
- }
-
-#endif
-
-
-
-
-
-
-
- PV_STATUS PV_BitstreamFlushBitsCheck(BitstreamDecVideo *stream, int nbits);
-
- uint32 BitstreamReadBits32HC(BitstreamDecVideo *stream);
- PV_STATUS BitstreamShowBits32HC(BitstreamDecVideo *stream, uint32 *code);
-
-
-
- PV_STATUS BitstreamCheckEndBuffer(BitstreamDecVideo *stream);
-
- PV_STATUS PV_BitstreamShowBitsByteAlign(BitstreamDecVideo *stream, int nbits, uint32 *code);
-#ifdef PV_ANNEX_IJKT_SUPPORT
- PV_STATUS PV_BitstreamShowBitsByteAlignNoForceStuffing(BitstreamDecVideo *stream, int nbits, uint32 *code);
- Bool validStuffing_h263(BitstreamDecVideo *stream);
- PV_STATUS quickSearchH263SliceHeader(BitstreamDecVideo *stream);
-#endif
- PV_STATUS PV_BitstreamByteAlign(BitstreamDecVideo *stream);
- PV_STATUS BitstreamByteAlignNoForceStuffing(BitstreamDecVideo *stream);
- Bool validStuffing(BitstreamDecVideo *stream);
-
- PV_STATUS movePointerTo(BitstreamDecVideo *stream, int32 pos);
- PV_STATUS PVSearchNextM4VFrame(BitstreamDecVideo *stream);
- PV_STATUS PVSearchNextH263Frame(BitstreamDecVideo *stream);
- PV_STATUS quickSearchVideoPacketHeader(BitstreamDecVideo *stream, int marker_length);
-
-
- /* for error concealment & soft-decoding */
- void PVLocateM4VFrameBoundary(BitstreamDecVideo *stream);
- void PVSearchH263FrameBoundary(BitstreamDecVideo *stream);
-
- PV_STATUS quickSearchMotionMarker(BitstreamDecVideo *stream);
- PV_STATUS quickSearchDCM(BitstreamDecVideo *stream);
- PV_STATUS quickSearchGOBHeader(BitstreamDecVideo *stream);
- void BitstreamShowBuffer(BitstreamDecVideo *stream, int32 startbit, int32 endbit, uint8 *bitBfr);
-
- /* 10/8/98 New prototyps. */
- int32 getPointer(BitstreamDecVideo *stream);
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-#endif /* _BITSTREAM_D_H_ */
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
deleted file mode 100644
index 3d10086..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
+++ /dev/null
@@ -1,912 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
- [input_variable_name] = [description of the input to module, its type
- definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
- [local_store_name] = [description of the local store, its type
- definition, and length (when applicable)]
- [local_buffer_name] = [description of the local buffer, its type
- definition, and length (when applicable)]
- [local_ptr_name] = [description of the local pointer, its type
- definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
- [global_store_name] = [description of the global store, its type
- definition, and length (when applicable)]
- [global_buffer_name] = [description of the global buffer, its type
- definition, and length (when applicable)]
- [global_ptr_name] = [description of the global pointer, its type
- definition, and length (when applicable)]
-
- Outputs:
- [return_variable_name] = [description of data/pointer returned
- by module, its type definition, and length
- (when applicable)]
-
- Pointers and Buffers Modified:
- [variable_bfr_ptr] points to the [describe where the
- variable_bfr_ptr points to, its type definition, and length
- (when applicable)]
- [variable_bfr] contents are [describe the new contents of
- variable_bfr]
-
- Local Stores Modified:
- [local_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
- Global Stores Modified:
- [global_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
- RESOURCES USED
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
- stack usage for each subroutine called]
-
- where: [stack usage variable] = stack usage for [subroutine
- name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
- used to represent cycle count for each subroutine
- called]
-
- where: [cycle count variable] = cycle count for [subroutine
- name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "mp4dec_lib.h"
-#include "idct.h"
-#include "motion_comp.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-/* private prototypes */
-static void idctrow(int16 *blk, uint8 *pred, uint8 *dst, int width);
-static void idctrow_intra(int16 *blk, PIXEL *, int width);
-static void idctcol(int16 *blk);
-
-#ifdef FAST_IDCT
-// mapping from nz_coefs to functions to be used
-
-
-// ARM4 does not allow global data when they are not constant hence
-// an array of function pointers cannot be considered as array of constants
-// (actual addresses are only known when the dll is loaded).
-// So instead of arrays of function pointers, we'll store here
-// arrays of rows or columns and then call the idct function
-// corresponding to such the row/column number:
-
-
-static void (*const idctcolVCA[10][4])(int16*) =
-{
- {&idctcol1, &idctcol0, &idctcol0, &idctcol0},
- {&idctcol1, &idctcol1, &idctcol0, &idctcol0},
- {&idctcol2, &idctcol1, &idctcol0, &idctcol0},
- {&idctcol3, &idctcol1, &idctcol0, &idctcol0},
- {&idctcol3, &idctcol2, &idctcol0, &idctcol0},
- {&idctcol3, &idctcol2, &idctcol1, &idctcol0},
- {&idctcol3, &idctcol2, &idctcol1, &idctcol1},
- {&idctcol3, &idctcol2, &idctcol2, &idctcol1},
- {&idctcol3, &idctcol3, &idctcol2, &idctcol1},
- {&idctcol4, &idctcol3, &idctcol2, &idctcol1}
-};
-
-
-static void (*const idctrowVCA[10])(int16*, uint8*, uint8*, int) =
-{
- &idctrow1,
- &idctrow2,
- &idctrow2,
- &idctrow2,
- &idctrow2,
- &idctrow3,
- &idctrow4,
- &idctrow4,
- &idctrow4,
- &idctrow4
-};
-
-
-static void (*const idctcolVCA2[16])(int16*) =
-{
- &idctcol0, &idctcol4, &idctcol3, &idctcol4,
- &idctcol2, &idctcol4, &idctcol3, &idctcol4,
- &idctcol1, &idctcol4, &idctcol3, &idctcol4,
- &idctcol2, &idctcol4, &idctcol3, &idctcol4
-};
-
-static void (*const idctrowVCA2[8])(int16*, uint8*, uint8*, int) =
-{
- &idctrow1, &idctrow4, &idctrow3, &idctrow4,
- &idctrow2, &idctrow4, &idctrow3, &idctrow4
-};
-
-static void (*const idctrowVCA_intra[10])(int16*, PIXEL *, int) =
-{
- &idctrow1_intra,
- &idctrow2_intra,
- &idctrow2_intra,
- &idctrow2_intra,
- &idctrow2_intra,
- &idctrow3_intra,
- &idctrow4_intra,
- &idctrow4_intra,
- &idctrow4_intra,
- &idctrow4_intra
-};
-
-static void (*const idctrowVCA2_intra[8])(int16*, PIXEL *, int) =
-{
- &idctrow1_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra,
- &idctrow2_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra
-};
-#endif
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void MBlockIDCT(VideoDecData *video)
-{
- Vop *currVop = video->currVop;
- MacroBlock *mblock = video->mblock;
- PIXEL *c_comp;
- PIXEL *cu_comp;
- PIXEL *cv_comp;
- int x_pos = video->mbnum_col;
- int y_pos = video->mbnum_row;
- int width, width_uv;
- int32 offset;
- width = video->width;
- width_uv = width >> 1;
- offset = (int32)(y_pos << 4) * width + (x_pos << 4);
-
- c_comp = currVop->yChan + offset;
- cu_comp = currVop->uChan + (offset >> 2) + (x_pos << 2);
- cv_comp = currVop->vChan + (offset >> 2) + (x_pos << 2);
-
- BlockIDCT_intra(mblock, c_comp, 0, width);
- BlockIDCT_intra(mblock, c_comp + 8, 1, width);
- BlockIDCT_intra(mblock, c_comp + (width << 3), 2, width);
- BlockIDCT_intra(mblock, c_comp + (width << 3) + 8, 3, width);
- BlockIDCT_intra(mblock, cu_comp, 4, width_uv);
- BlockIDCT_intra(mblock, cv_comp, 5, width_uv);
-}
-
-
-void BlockIDCT_intra(
- MacroBlock *mblock, PIXEL *c_comp, int comp, int width)
-{
- /*----------------------------------------------------------------------------
- ; Define all local variables
- ----------------------------------------------------------------------------*/
- int16 *coeff_in = mblock->block[comp];
-#ifdef INTEGER_IDCT
-#ifdef FAST_IDCT /* VCA IDCT using nzcoefs and bitmaps*/
- int i, bmapr;
- int nz_coefs = mblock->no_coeff[comp];
- uint8 *bitmapcol = mblock->bitmapcol[comp];
- uint8 bitmaprow = mblock->bitmaprow[comp];
-
- /*----------------------------------------------------------------------------
- ; Function body here
- ----------------------------------------------------------------------------*/
- if (nz_coefs <= 10)
- {
- bmapr = (nz_coefs - 1);
-
- (*(idctcolVCA[bmapr]))(coeff_in);
- (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
- (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
- (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
-
- (*idctrowVCA_intra[nz_coefs-1])(coeff_in, c_comp, width);
- }
- else
- {
- i = 8;
- while (i--)
- {
- bmapr = (int)bitmapcol[i];
- if (bmapr)
- {
- if ((bmapr&0xf) == 0) /* 07/18/01 */
- {
- (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
- }
- else
- {
- idctcol(coeff_in + i);
- }
- }
- }
- if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
- {
- bitmaprow >>= 4;
- (*(idctrowVCA2_intra[(int)bitmaprow]))(coeff_in, c_comp, width);
- }
- else
- {
- idctrow_intra(coeff_in, c_comp, width);
- }
- }
-#else
- void idct_intra(int *block, uint8 *comp, int width);
- idct_intra(coeff_in, c_comp, width);
-#endif
-#else
- void idctref_intra(int *block, uint8 *comp, int width);
- idctref_intra(coeff_in, c_comp, width);
-#endif
-
-
- /*----------------------------------------------------------------------------
- ; Return nothing or data or data pointer
- ----------------------------------------------------------------------------*/
- return;
-}
-
-/* 08/04/05, no residue, just copy from pred to output */
-void Copy_Blk_to_Vop(uint8 *dst, uint8 *pred, int width)
-{
- /* copy 4 bytes at a time */
- width -= 4;
- *((uint32*)dst) = *((uint32*)pred);
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
- *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
- *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-
- return ;
-}
-
-/* 08/04/05 compute IDCT and add prediction at the end */
-void BlockIDCT(
- uint8 *dst, /* destination */
- uint8 *pred, /* prediction block, pitch 16 */
- int16 *coeff_in, /* DCT data, size 64 */
- int width, /* width of dst */
- int nz_coefs,
- uint8 *bitmapcol,
- uint8 bitmaprow
-)
-{
-#ifdef INTEGER_IDCT
-#ifdef FAST_IDCT /* VCA IDCT using nzcoefs and bitmaps*/
- int i, bmapr;
- /*----------------------------------------------------------------------------
- ; Function body here
- ----------------------------------------------------------------------------*/
- if (nz_coefs <= 10)
- {
- bmapr = (nz_coefs - 1);
- (*(idctcolVCA[bmapr]))(coeff_in);
- (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
- (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
- (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
-
- (*idctrowVCA[nz_coefs-1])(coeff_in, pred, dst, width);
- return ;
- }
- else
- {
- i = 8;
-
- while (i--)
- {
- bmapr = (int)bitmapcol[i];
- if (bmapr)
- {
- if ((bmapr&0xf) == 0) /* 07/18/01 */
- {
- (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
- }
- else
- {
- idctcol(coeff_in + i);
- }
- }
- }
- if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
- {
- (*(idctrowVCA2[bitmaprow>>4]))(coeff_in, pred, dst, width);
- }
- else
- {
- idctrow(coeff_in, pred, dst, width);
- }
- return ;
- }
-#else // FAST_IDCT
- void idct(int *block, uint8 *pred, uint8 *dst, int width);
- idct(coeff_in, pred, dst, width);
- return;
-#endif // FAST_IDCT
-#else // INTEGER_IDCT
- void idctref(int *block, uint8 *pred, uint8 *dst, int width);
- idctref(coeff_in, pred, dst, width);
- return;
-#endif // INTEGER_IDCT
-
-}
-/*----------------------------------------------------------------------------
-; End Function: block_idct
-----------------------------------------------------------------------------*/
-
-
-/****************************************************************************/
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: idctrow
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS FOR idctrow
-
- Inputs:
- [input_variable_name] = [description of the input to module, its type
- definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
- [local_store_name] = [description of the local store, its type
- definition, and length (when applicable)]
- [local_buffer_name] = [description of the local buffer, its type
- definition, and length (when applicable)]
- [local_ptr_name] = [description of the local pointer, its type
- definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
- [global_store_name] = [description of the global store, its type
- definition, and length (when applicable)]
- [global_buffer_name] = [description of the global buffer, its type
- definition, and length (when applicable)]
- [global_ptr_name] = [description of the global pointer, its type
- definition, and length (when applicable)]
-
- Outputs:
- [return_variable_name] = [description of data/pointer returned
- by module, its type definition, and length
- (when applicable)]
-
- Pointers and Buffers Modified:
- [variable_bfr_ptr] points to the [describe where the
- variable_bfr_ptr points to, its type definition, and length
- (when applicable)]
- [variable_bfr] contents are [describe the new contents of
- variable_bfr]
-
- Local Stores Modified:
- [local_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
- Global Stores Modified:
- [global_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION FOR idctrow
-
-------------------------------------------------------------------------------
- REQUIREMENTS FOR idctrow
-
-------------------------------------------------------------------------------
- REFERENCES FOR idctrow
-
-------------------------------------------------------------------------------
- PSEUDO-CODE FOR idctrow
-
-------------------------------------------------------------------------------
- RESOURCES USED FOR idctrow
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
- stack usage for each subroutine called]
-
- where: [stack usage variable] = stack usage for [subroutine
- name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
- used to represent cycle count for each subroutine
- called]
-
- where: [cycle count variable] = cycle count for [subroutine
- name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; Function Code FOR idctrow
-----------------------------------------------------------------------------*/
-void idctrow(
- int16 *blk, uint8 *pred, uint8 *dst, int width
-)
-{
- /*----------------------------------------------------------------------------
- ; Define all local variables
- ----------------------------------------------------------------------------*/
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
- int i = 8;
- uint32 pred_word, dst_word;
- int res, res2;
-
- /*----------------------------------------------------------------------------
- ; Function body here
- ----------------------------------------------------------------------------*/
- /* row (horizontal) IDCT
- *
- * 7 pi 1 dst[k] = sum c[l] * src[l] * cos( -- *
- * ( k + - ) * l ) l=0 8 2
- *
- * where: c[0] = 128 c[1..7] = 128*sqrt(2) */
-
- /* preset the offset, such that we can take advantage pre-offset addressing mode */
- width -= 4;
- dst -= width;
- pred -= 12;
- blk -= 8;
-
- while (i--)
- {
- x1 = (int32)blk[12] << 8;
- blk[12] = 0;
- x2 = blk[14];
- blk[14] = 0;
- x3 = blk[10];
- blk[10] = 0;
- x4 = blk[9];
- blk[9] = 0;
- x5 = blk[15];
- blk[15] = 0;
- x6 = blk[13];
- blk[13] = 0;
- x7 = blk[11];
- blk[11] = 0;
- x0 = ((*(blk += 8)) << 8) + 8192;
- blk[0] = 0; /* for proper rounding in the fourth stage */
-
- /* first stage */
- x8 = W7 * (x4 + x5) + 4;
- x4 = (x8 + (W1 - W7) * x4) >> 3;
- x5 = (x8 - (W1 + W7) * x5) >> 3;
- x8 = W3 * (x6 + x7) + 4;
- x6 = (x8 - (W3 - W5) * x6) >> 3;
- x7 = (x8 - (W3 + W5) * x7) >> 3;
-
- /* second stage */
- x8 = x0 + x1;
- x0 -= x1;
- x1 = W6 * (x3 + x2) + 4;
- x2 = (x1 - (W2 + W6) * x2) >> 3;
- x3 = (x1 + (W2 - W6) * x3) >> 3;
- x1 = x4 + x6;
- x4 -= x6;
- x6 = x5 + x7;
- x5 -= x7;
-
- /* third stage */
- x7 = x8 + x3;
- x8 -= x3;
- x3 = x0 + x2;
- x0 -= x2;
- x2 = (181 * (x4 + x5) + 128) >> 8;
- x4 = (181 * (x4 - x5) + 128) >> 8;
-
- /* fourth stage */
- pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-
- res = (x7 + x1) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x3 + x2) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x0 + x4) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x8 + x6) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
- pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-
- res = (x8 - x6) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x0 - x4) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x3 - x2) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x7 - x1) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
- }
- /*----------------------------------------------------------------------------
- ; Return nothing or data or data pointer
- ----------------------------------------------------------------------------*/
- return;
-}
-
-__attribute__((no_sanitize("signed-integer-overflow")))
-void idctrow_intra(
- int16 *blk, PIXEL *comp, int width
-)
-{
- /*----------------------------------------------------------------------------
- ; Define all local variables
- ----------------------------------------------------------------------------*/
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
- int i = 8;
- int offset = width;
- int32 word;
-
- /*----------------------------------------------------------------------------
- ; Function body here
- ----------------------------------------------------------------------------*/
- /* row (horizontal) IDCT
- *
- * 7 pi 1 dst[k] = sum c[l] * src[l] * cos( -- *
- * ( k + - ) * l ) l=0 8 2
- *
- * where: c[0] = 128 c[1..7] = 128*sqrt(2) */
- while (i--)
- {
- x1 = (int32)blk[4] << 8;
- blk[4] = 0;
- x2 = blk[6];
- blk[6] = 0;
- x3 = blk[2];
- blk[2] = 0;
- x4 = blk[1];
- blk[1] = 0;
- x5 = blk[7];
- blk[7] = 0;
- x6 = blk[5];
- blk[5] = 0;
- x7 = blk[3];
- blk[3] = 0;
-#ifndef FAST_IDCT
- /* shortcut */ /* covered by idctrow1 01/9/2001 */
- if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
- {
- blk[0] = blk[1] = blk[2] = blk[3] = blk[4] = blk[5] = blk[6] = blk[7] = (blk[0] + 32) >> 6;
- return;
- }
-#endif
- x0 = ((int32)blk[0] << 8) + 8192;
- blk[0] = 0; /* for proper rounding in the fourth stage */
-
- /* first stage */
- x8 = W7 * (x4 + x5) + 4;
- x4 = (x8 + (W1 - W7) * x4) >> 3;
- x5 = (x8 - (W1 + W7) * x5) >> 3;
- x8 = W3 * (x6 + x7) + 4;
- x6 = (x8 - (W3 - W5) * x6) >> 3;
- x7 = (x8 - (W3 + W5) * x7) >> 3;
-
- /* second stage */
- x8 = x0 + x1;
- x0 -= x1;
- x1 = W6 * (x3 + x2) + 4;
- x2 = (x1 - (W2 + W6) * x2) >> 3;
- x3 = (x1 + (W2 - W6) * x3) >> 3;
- x1 = x4 + x6;
- x4 -= x6;
- x6 = x5 + x7;
- x5 -= x7;
-
- /* third stage */
- x7 = x8 + x3;
- x8 -= x3;
- x3 = x0 + x2;
- x0 -= x2;
- x2 = (181 * (x4 + x5) + 128) >> 8;
- x4 = (181 * (x4 - x5) + 128) >> 8;
-
- /* fourth stage */
- word = ((x7 + x1) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x3 + x2) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
- temp = ((x0 + x4) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x8 + x6) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp)) = word;
-
- word = ((x8 - x6) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x0 - x4) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
- temp = ((x3 - x2) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x7 - x1) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp + 4)) = word;
- comp += offset;
-
- blk += B_SIZE;
- }
- /*----------------------------------------------------------------------------
- ; Return nothing or data or data pointer
- ----------------------------------------------------------------------------*/
- return;
-}
-
-/*----------------------------------------------------------------------------
-; End Function: idctrow
-----------------------------------------------------------------------------*/
-
-
-/****************************************************************************/
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: idctcol
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS FOR idctcol
-
- Inputs:
- [input_variable_name] = [description of the input to module, its type
- definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
- [local_store_name] = [description of the local store, its type
- definition, and length (when applicable)]
- [local_buffer_name] = [description of the local buffer, its type
- definition, and length (when applicable)]
- [local_ptr_name] = [description of the local pointer, its type
- definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
- [global_store_name] = [description of the global store, its type
- definition, and length (when applicable)]
- [global_buffer_name] = [description of the global buffer, its type
- definition, and length (when applicable)]
- [global_ptr_name] = [description of the global pointer, its type
- definition, and length (when applicable)]
-
- Outputs:
- [return_variable_name] = [description of data/pointer returned
- by module, its type definition, and length
- (when applicable)]
-
- Pointers and Buffers Modified:
- [variable_bfr_ptr] points to the [describe where the
- variable_bfr_ptr points to, its type definition, and length
- (when applicable)]
- [variable_bfr] contents are [describe the new contents of
- variable_bfr]
-
- Local Stores Modified:
- [local_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
- Global Stores Modified:
- [global_store_name] = [describe new contents, its type
- definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION FOR idctcol
-
-------------------------------------------------------------------------------
- REQUIREMENTS FOR idctcol
-
-------------------------------------------------------------------------------
- REFERENCES FOR idctcol
-
-------------------------------------------------------------------------------
- PSEUDO-CODE FOR idctcol
-
-------------------------------------------------------------------------------
- RESOURCES USED FOR idctcol
- When the code is written for a specific target processor the
- the resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
- stack usage for each subroutine called]
-
- where: [stack usage variable] = stack usage for [subroutine
- name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
- used to represent cycle count for each subroutine
- called]
-
- where: [cycle count variable] = cycle count for [subroutine
- name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; Function Code FOR idctcol
-----------------------------------------------------------------------------*/
-void idctcol(
- int16 *blk
-)
-{
- /*----------------------------------------------------------------------------
- ; Define all local variables
- ----------------------------------------------------------------------------*/
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-
- /*----------------------------------------------------------------------------
- ; Function body here
- ----------------------------------------------------------------------------*/
- /* column (vertical) IDCT
- *
- * 7 pi 1 dst[8*k] = sum c[l] * src[8*l] *
- * cos( -- * ( k + - ) * l ) l=0 8 2
- *
- * where: c[0] = 1/1024 c[1..7] = (1/1024)*sqrt(2) */
- x1 = (int32)blk[32] << 11;
- x2 = blk[48];
- x3 = blk[16];
- x4 = blk[8];
- x5 = blk[56];
- x6 = blk[40];
- x7 = blk[24];
-#ifndef FAST_IDCT
- /* shortcut */ /* covered by idctcolumn1 01/9/2001 */
- if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
- {
- blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56]
- = blk[0] << 3;
- return;
- }
-#endif
-
- x0 = ((int32)blk[0] << 11) + 128;
-
- /* first stage */
- x8 = W7 * (x4 + x5);
- x4 = x8 + (W1 - W7) * x4;
- x5 = x8 - (W1 + W7) * x5;
- x8 = W3 * (x6 + x7);
- x6 = x8 - (W3 - W5) * x6;
- x7 = x8 - (W3 + W5) * x7;
-
- /* second stage */
- x8 = x0 + x1;
- x0 -= x1;
- x1 = W6 * (x3 + x2);
- x2 = x1 - (W2 + W6) * x2;
- x3 = x1 + (W2 - W6) * x3;
- x1 = x4 + x6;
- x4 -= x6;
- x6 = x5 + x7;
- x5 -= x7;
-
- /* third stage */
- x7 = x8 + x3;
- x8 -= x3;
- x3 = x0 + x2;
- x0 -= x2;
- x2 = (181 * (x4 + x5) + 128) >> 8;
- x4 = (181 * (x4 - x5) + 128) >> 8;
-
- /* fourth stage */
- blk[0] = (x7 + x1) >> 8;
- blk[8] = (x3 + x2) >> 8;
- blk[16] = (x0 + x4) >> 8;
- blk[24] = (x8 + x6) >> 8;
- blk[32] = (x8 - x6) >> 8;
- blk[40] = (x0 - x4) >> 8;
- blk[48] = (x3 - x2) >> 8;
- blk[56] = (x7 - x1) >> 8;
- /*----------------------------------------------------------------------------
- ; Return nothing or data or data pointer
- ----------------------------------------------------------------------------*/
- return;
-}
-/*----------------------------------------------------------------------------
-; End Function: idctcol
-----------------------------------------------------------------------------*/
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
deleted file mode 100644
index f35ce4f..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
+++ /dev/null
@@ -1,660 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "mp4def.h"
-#include "idct.h"
-#include "motion_comp.h"
-
-#ifdef FAST_IDCT
-
-/****************************************************************
-* vca_idct.c : created 6/1/99 for several options
-* of hard-coded reduced idct function (using nz_coefs)
-******************************************************************/
-
-/*****************************************************/
-//pretested version
-void idctrow0(int16 *, uint8 *, uint8 *, int)
-{
- return ;
-}
-void idctcol0(int16 *)
-{
- return ;
-}
-
-void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
- /* shortcut */
- int tmp;
- int i = 8;
- uint32 pred_word, dst_word;
- int res, res2;
-
- /* preset the offset, such that we can take advantage pre-offset addressing mode */
- width -= 4;
- dst -= width;
- pred -= 12;
- blk -= 8;
-
- while (i--)
- {
- tmp = (*(blk += 8) + 32) >> 6;
- *blk = 0;
-
- pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
- res = tmp + (pred_word & 0xFF);
- CLIP_RESULT(res);
- res2 = tmp + ((pred_word >> 8) & 0xFF);
- CLIP_RESULT(res2);
- dst_word = (res2 << 8) | res;
- res = tmp + ((pred_word >> 16) & 0xFF);
- CLIP_RESULT(res);
- dst_word |= (res << 16);
- res = tmp + ((pred_word >> 24) & 0xFF);
- CLIP_RESULT(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
- pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
- res = tmp + (pred_word & 0xFF);
- CLIP_RESULT(res);
- res2 = tmp + ((pred_word >> 8) & 0xFF);
- CLIP_RESULT(res2);
- dst_word = (res2 << 8) | res;
- res = tmp + ((pred_word >> 16) & 0xFF);
- CLIP_RESULT(res);
- dst_word |= (res << 16);
- res = tmp + ((pred_word >> 24) & 0xFF);
- CLIP_RESULT(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
- }
- return;
-}
-
-void idctcol1(int16 *blk)
-{ /* shortcut */
- blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56] =
- blk[0] << 3;
- return;
-}
-
-void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
- int32 x0, x1, x2, x4, x5;
- int i = 8;
- uint32 pred_word, dst_word;
- int res, res2;
-
- /* preset the offset, such that we can take advantage pre-offset addressing mode */
- width -= 4;
- dst -= width;
- pred -= 12;
- blk -= 8;
-
- while (i--)
- {
- /* shortcut */
- x4 = blk[9];
- blk[9] = 0;
- x0 = ((*(blk += 8)) << 8) + 8192;
- *blk = 0; /* for proper rounding in the fourth stage */
-
- /* first stage */
- x5 = (W7 * x4 + 4) >> 3;
- x4 = (W1 * x4 + 4) >> 3;
-
- /* third stage */
- x2 = (181 * (x4 + x5) + 128) >> 8;
- x1 = (181 * (x4 - x5) + 128) >> 8;
-
- /* fourth stage */
- pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
- res = (x0 + x4) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x0 + x2) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x0 + x1) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x0 + x5) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
- pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
- res = (x0 - x5) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x0 - x1) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x0 - x2) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x0 - x4) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
- }
- return ;
-}
-
-void idctcol2(int16 *blk)
-{
- int32 x0, x1, x3, x5, x7;//, x8;
-
- x1 = blk[8];
- x0 = ((int32)blk[0] << 11) + 128;
- /* both upper and lower*/
-
- x7 = W7 * x1;
- x1 = W1 * x1;
-
- x3 = x7;
- x5 = (181 * (x1 - x7) + 128) >> 8;
- x7 = (181 * (x1 + x7) + 128) >> 8;
-
- blk[0] = (x0 + x1) >> 8;
- blk[8] = (x0 + x7) >> 8;
- blk[16] = (x0 + x5) >> 8;
- blk[24] = (x0 + x3) >> 8;
- blk[56] = (x0 - x1) >> 8;
- blk[48] = (x0 - x7) >> 8;
- blk[40] = (x0 - x5) >> 8;
- blk[32] = (x0 - x3) >> 8;
-
- return ;
-}
-
-void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
- int i = 8;
- uint32 pred_word, dst_word;
- int res, res2;
-
- /* preset the offset, such that we can take advantage pre-offset addressing mode */
- width -= 4;
- dst -= width;
- pred -= 12;
- blk -= 8;
-
- while (i--)
- {
- x2 = blk[10];
- blk[10] = 0;
- x1 = blk[9];
- blk[9] = 0;
- x0 = ((*(blk += 8)) << 8) + 8192;
- *blk = 0; /* for proper rounding in the fourth stage */
- /* both upper and lower*/
- /* both x2orx6 and x0orx4 */
-
- x4 = x0;
- x6 = (W6 * x2 + 4) >> 3;
- x2 = (W2 * x2 + 4) >> 3;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = (W7 * x1 + 4) >> 3;
- x1 = (W1 * x1 + 4) >> 3;
- x3 = x7;
- x5 = (181 * (x1 - x7) + 128) >> 8;
- x7 = (181 * (x1 + x7) + 128) >> 8;
-
- pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
- res = (x0 + x1) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x4 + x7) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x6 + x5) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x2 + x3) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
- pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
- res = (x2 - x3) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x6 - x5) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x4 - x7) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x0 - x1) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
- }
-
- return ;
-}
-
-void idctcol3(int16 *blk)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-
- x2 = blk[16];
- x1 = blk[8];
- x0 = ((int32)blk[0] << 11) + 128;
-
- x4 = x0;
- x6 = W6 * x2;
- x2 = W2 * x2;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = W7 * x1;
- x1 = W1 * x1;
- x3 = x7;
- x5 = (181 * (x1 - x7) + 128) >> 8;
- x7 = (181 * (x1 + x7) + 128) >> 8;
-
- blk[0] = (x0 + x1) >> 8;
- blk[8] = (x4 + x7) >> 8;
- blk[16] = (x6 + x5) >> 8;
- blk[24] = (x2 + x3) >> 8;
- blk[56] = (x0 - x1) >> 8;
- blk[48] = (x4 - x7) >> 8;
- blk[40] = (x6 - x5) >> 8;
- blk[32] = (x2 - x3) >> 8;
-
- return;
-}
-
-
-void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
- int i = 8;
- uint32 pred_word, dst_word;
- int res, res2;
-
- /* preset the offset, such that we can take advantage pre-offset addressing mode */
- width -= 4;
- dst -= width;
- pred -= 12;
- blk -= 8;
-
- while (i--)
- {
- x2 = blk[10];
- blk[10] = 0;
- x1 = blk[9];
- blk[9] = 0;
- x3 = blk[11];
- blk[11] = 0;
- x0 = ((*(blk += 8)) << 8) + 8192;
- *blk = 0; /* for proper rounding in the fourth stage */
-
- x4 = x0;
- x6 = (W6 * x2 + 4) >> 3;
- x2 = (W2 * x2 + 4) >> 3;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = (W7 * x1 + 4) >> 3;
- x1 = (W1 * x1 + 4) >> 3;
- x5 = (W3 * x3 + 4) >> 3;
- x3 = (- W5 * x3 + 4) >> 3;
- x8 = x1 - x5;
- x1 += x5;
- x5 = x8;
- x8 = x7 - x3;
- x3 += x7;
- x7 = (181 * (x5 + x8) + 128) >> 8;
- x5 = (181 * (x5 - x8) + 128) >> 8;
-
- pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
- res = (x0 + x1) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x4 + x7) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x6 + x5) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x2 + x3) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
- pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
- res = (x2 - x3) >> 14;
- ADD_AND_CLIP1(res);
- res2 = (x6 - x5) >> 14;
- ADD_AND_CLIP2(res2);
- dst_word = (res2 << 8) | res;
- res = (x4 - x7) >> 14;
- ADD_AND_CLIP3(res);
- dst_word |= (res << 16);
- res = (x0 - x1) >> 14;
- ADD_AND_CLIP4(res);
- dst_word |= (res << 24);
- *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
- }
- return ;
-}
-
-void idctcol4(int16 *blk)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
- x2 = blk[16];
- x1 = blk[8];
- x3 = blk[24];
- x0 = ((int32)blk[0] << 11) + 128;
-
- x4 = x0;
- x6 = W6 * x2;
- x2 = W2 * x2;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = W7 * x1;
- x1 = W1 * x1;
- x5 = W3 * x3;
- x3 = -W5 * x3;
- x8 = x1 - x5;
- x1 += x5;
- x5 = x8;
- x8 = x7 - x3;
- x3 += x7;
- x7 = (181 * (x5 + x8) + 128) >> 8;
- x5 = (181 * (x5 - x8) + 128) >> 8;
-
-
- blk[0] = (x0 + x1) >> 8;
- blk[8] = (x4 + x7) >> 8;
- blk[16] = (x6 + x5) >> 8;
- blk[24] = (x2 + x3) >> 8;
- blk[56] = (x0 - x1) >> 8;
- blk[48] = (x4 - x7) >> 8;
- blk[40] = (x6 - x5) >> 8;
- blk[32] = (x2 - x3) >> 8;
-
- return ;
-}
-
-void idctrow0_intra(int16 *, PIXEL *, int)
-{
- return ;
-}
-
-void idctrow1_intra(int16 *blk, PIXEL *comp, int width)
-{
- /* shortcut */
- int32 tmp;
- int i = 8;
- int offset = width;
- uint32 word;
-
- comp -= offset;
- while (i--)
- {
- tmp = ((blk[0] + 32) >> 6);
- blk[0] = 0;
- CLIP_RESULT(tmp)
-
- word = (tmp << 8) | tmp;
- word = (word << 16) | word;
-
- *((uint32*)(comp += offset)) = word;
- *((uint32*)(comp + 4)) = word;
-
-
-
-
- blk += B_SIZE;
- }
- return;
-}
-
-void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
-{
- int32 x0, x1, x2, x4, x5, temp;
- int i = 8;
- int offset = width;
- int32 word;
-
- comp -= offset;
- while (i--)
- {
- /* shortcut */
- x4 = blk[1];
- blk[1] = 0;
- x0 = ((int32)blk[0] << 8) + 8192;
- blk[0] = 0; /* for proper rounding in the fourth stage */
-
- /* first stage */
- x5 = (W7 * x4 + 4) >> 3;
- x4 = (W1 * x4 + 4) >> 3;
-
- /* third stage */
- x2 = (181 * (x4 + x5) + 128) >> 8;
- x1 = (181 * (x4 - x5) + 128) >> 8;
-
- /* fourth stage */
- word = ((x0 + x4) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x0 + x2) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
- temp = ((x0 + x1) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
- temp = ((x0 + x5) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp += offset)) = word;
-
- word = ((x0 - x5) >> 14);
- CLIP_RESULT(word)
- temp = ((x0 - x1) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
- temp = ((x0 - x2) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
- temp = ((x0 - x4) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp + 4)) = word;
-
- blk += B_SIZE;
- }
- return ;
-}
-
-void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
- int i = 8;
- int offset = width;
- int32 word;
-
- comp -= offset;
-
- while (i--)
- {
- x2 = blk[2];
- blk[2] = 0;
- x1 = blk[1];
- blk[1] = 0;
- x0 = ((int32)blk[0] << 8) + 8192;
- blk[0] = 0;/* for proper rounding in the fourth stage */
- /* both upper and lower*/
- /* both x2orx6 and x0orx4 */
-
- x4 = x0;
- x6 = (W6 * x2 + 4) >> 3;
- x2 = (W2 * x2 + 4) >> 3;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = (W7 * x1 + 4) >> 3;
- x1 = (W1 * x1 + 4) >> 3;
- x3 = x7;
- x5 = (181 * (x1 - x7) + 128) >> 8;
- x7 = (181 * (x1 + x7) + 128) >> 8;
-
- word = ((x0 + x1) >> 14);
- CLIP_RESULT(word)
- temp = ((x4 + x7) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
-
- temp = ((x6 + x5) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x2 + x3) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp += offset)) = word;
-
- word = ((x2 - x3) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x6 - x5) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
- temp = ((x4 - x7) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x0 - x1) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp + 4)) = word;
-
- blk += B_SIZE;
- }
- return ;
-}
-
-void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
-{
- int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
- int i = 8;
- int offset = width;
- int32 word;
-
- comp -= offset;
-
- while (i--)
- {
- x2 = blk[2];
- blk[2] = 0;
- x1 = blk[1];
- blk[1] = 0;
- x3 = blk[3];
- blk[3] = 0;
- x0 = ((int32)blk[0] << 8) + 8192;
- blk[0] = 0;/* for proper rounding in the fourth stage */
-
- x4 = x0;
- x6 = (W6 * x2 + 4) >> 3;
- x2 = (W2 * x2 + 4) >> 3;
- x8 = x0 - x2;
- x0 += x2;
- x2 = x8;
- x8 = x4 - x6;
- x4 += x6;
- x6 = x8;
-
- x7 = (W7 * x1 + 4) >> 3;
- x1 = (W1 * x1 + 4) >> 3;
- x5 = (W3 * x3 + 4) >> 3;
- x3 = (- W5 * x3 + 4) >> 3;
- x8 = x1 - x5;
- x1 += x5;
- x5 = x8;
- x8 = x7 - x3;
- x3 += x7;
- x7 = (181 * (x5 + x8) + 128) >> 8;
- x5 = (181 * (x5 - x8) + 128) >> 8;
-
- word = ((x0 + x1) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x4 + x7) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
-
- temp = ((x6 + x5) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x2 + x3) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp += offset)) = word;
-
- word = ((x2 - x3) >> 14);
- CLIP_RESULT(word)
-
- temp = ((x6 - x5) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 8);
-
- temp = ((x4 - x7) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 16);
-
- temp = ((x0 - x1) >> 14);
- CLIP_RESULT(temp)
- word = word | (temp << 24);
- *((int32*)(comp + 4)) = word;
-
- blk += B_SIZE;
- }
-
- return ;
-}
-
-#endif
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
deleted file mode 100644
index a11f55e..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
+++ /dev/null
@@ -1,1667 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "log/log.h"
-
-#include "mp4dec_lib.h"
-#include "bitstream.h"
-#include "vlc_decode.h"
-#include "zigzag.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-
-#ifdef PV_SUPPORT_MAIN_PROFILE
-/* INTRA */
-const static int mpeg_iqmat_def[NCOEFF_BLOCK] =
-{
- 8, 17, 18, 19, 21, 23, 25, 27,
- 17, 18, 19, 21, 23, 25, 27, 28,
- 20, 21, 22, 23, 24, 26, 28, 30,
- 21, 22, 23, 24, 26, 28, 30, 32,
- 22, 23, 24, 26, 28, 30, 32, 35,
- 23, 24, 26, 28, 30, 32, 35, 38,
- 25, 26, 28, 30, 32, 35, 38, 41,
- 27, 28, 30, 32, 35, 38, 41, 45
-};
-
-/* INTER */
-const static int mpeg_nqmat_def[64] =
-{
- 16, 17, 18, 19, 20, 21, 22, 23,
- 17, 18, 19, 20, 21, 22, 23, 24,
- 18, 19, 20, 21, 22, 23, 24, 25,
- 19, 20, 21, 22, 23, 24, 26, 27,
- 20, 21, 22, 23, 25, 26, 27, 28,
- 21, 22, 23, 24, 26, 27, 28, 30,
- 22, 23, 24, 26, 27, 28, 30, 31,
- 23, 24, 25, 27, 28, 30, 31, 33
-};
-#endif
-
-/* ======================================================================== */
-/* Function : CalcNumBits() */
-/* Purpose : */
-/* In/out : */
-/* Return : Calculate the minimum number of bits required to */
-/* represent x. */
-/* Note : This is an equivalent implementation of */
-/* (long)ceil(log((double)x)/log(2.0)) */
-/* Modified : */
-/* ======================================================================== */
-int CalcNumBits(uint x)
-{
- int i = 1;
- while (x >>= 1) i++;
- return i;
-}
-
-
-
-/***********************************************************CommentBegin******
-*
-* -- DecodeVolHeader -- Decode the header of a VOL
-*
-* 04/10/2000 : initial modification to the new PV-Decoder Lib format.
-* 10/12/2001 : reject non compliant bitstreams
-*
-***********************************************************CommentEnd********/
-PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer)
-{
- PV_STATUS status;
- Vol *currVol;
- BitstreamDecVideo *stream;
- uint32 tmpvar, vol_shape;
- uint32 startCode;
-#ifdef PV_SUPPORT_MAIN_PROFILE
- int *qmat, i, j;
-#endif
- int version_id = 1;
-#ifdef PV_TOLERATE_VOL_ERRORS
- uint32 profile = 0x01;
-#endif
- /* There's a "currLayer" variable inside videoDecData. */
- /* However, we don't maintain it until we decode frame data. 04/05/2000 */
- currVol = video->vol[layer];
- stream = currVol->bitstream;
- currVol->moduloTimeBase = 0;
-
- /* Determine which start code for the decoder to begin with */
- status = BitstreamShowBits32HC(stream, &startCode);
-
- if (startCode == VISUAL_OBJECT_SEQUENCE_START_CODE)
- { /* Bitstream Exhchange Fix 9/99 */
- /* Bitstream Exchange requires we allow start with Video Object Sequence */
- /* visual_object_sequence_start_code */
- (void) BitstreamReadBits32HC(stream);
- tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* profile */
-#ifndef PV_TOLERATE_VOL_ERRORS
- if (layer) /* */
- {
- /* support SSPL0-2 */
- if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3/* Core SP@L1-L3 */)
- return PV_FAIL;
- }
- else
- {
- /* support SPL0-3 & SSPL0-2 */
- if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
- /* While not technically supported, try to decode SPL4&SPL5 files as well. */
- /* We'll fail later if the size is too large. This is to allow playback of */
- /* some <=CIF files generated by other encoders. */
- tmpvar != 0x04 && tmpvar != 0x05 &&
- tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0x21 && tmpvar != 0x22 && /* Core Profile Levels */
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
- tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
- tmpvar != 0xF2 && tmpvar != 0xF3 &&
- tmpvar != 0xF4 && tmpvar != 0xF5)
- return PV_FAIL;
- }
-#else
- profile = tmpvar;
-#endif
-
- // save the profile and level for the query
- currVol->profile_level_id = (uint)tmpvar; // 6/10/04
-
-
-
- status = BitstreamShowBits32HC(stream, &tmpvar);
- if (tmpvar == USER_DATA_START_CODE)
- {
- /* Something has to be done with user data 11/11/99 */
- status = DecodeUserData(stream);
- if (status != PV_SUCCESS) return PV_FAIL;
- }
- /* visual_object_start_code */
- BitstreamShowBits32HC(stream, &tmpvar);
- if (tmpvar != VISUAL_OBJECT_START_CODE)
- {
- do
- {
- /* Search for VOL_HEADER */
- status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
- if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
- BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
- PV_BitstreamFlushBits(stream, 8);
- }
- while (tmpvar != VOL_START_CODE);
- goto decode_vol;
- }
- else
- {
- BitstreamReadBits32HC(stream);
- }
-
- /* is_visual_object_identifier */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar)
- {
- /* visual_object_verid */
- tmpvar = (uint32) BitstreamReadBits16(stream, 4);
- /* visual_object_priority */
- tmpvar = (uint32) BitstreamReadBits16(stream, 3);
- }
- /* visual_object_type */
- BitstreamShowBits32(stream, 4, &tmpvar);
- if (tmpvar == 1)
- { /* video_signal_type */
- PV_BitstreamFlushBits(stream, 4);
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar == 1)
- {
- /* video_format */
- tmpvar = (uint32) BitstreamReadBits16(stream, 3);
- /* video_range */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- /* color_description */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar == 1)
- {
- /* color_primaries */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- /* transfer_characteristics */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- /* matrix_coefficients */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- }
- }
- }
- else
- {
- do
- {
- /* Search for VOL_HEADER */
- status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
- if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
- BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
- PV_BitstreamFlushBits(stream, 8);
- }
- while (tmpvar != VOL_START_CODE);
- goto decode_vol;
- }
-
- /* next_start_code() */
- status = PV_BitstreamByteAlign(stream); /* 10/12/01 */
- status = BitstreamShowBits32HC(stream, &tmpvar);
-
- if (tmpvar == USER_DATA_START_CODE)
- {
- /* Something has to be done to deal with user data (parse it) 11/11/99 */
- status = DecodeUserData(stream);
- if (status != PV_SUCCESS) return PV_FAIL;
- }
- status = BitstreamShowBits32(stream, 27, &tmpvar); /* 10/12/01 */
- }
- else
- {
- /* tmpvar = 0; */ /* 10/12/01 */
- status = BitstreamShowBits32(stream, 27, &tmpvar); /* uncomment this line if you want
- to start decoding with a
- video_object_start_code */
- }
-
- if (tmpvar == VO_START_CODE)
- {
- /*****
- *
- * Read the VOL header entries from the bitstream
- *
- *****/
- /* video_object_start_code */
- tmpvar = BitstreamReadBits32(stream, 27);
- tmpvar = (uint32) BitstreamReadBits16(stream, 5);
-
-
- /* video_object_layer_start_code */
- BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
- if (tmpvar != VOL_START_CODE)
- {
- status = BitstreamCheckEndBuffer(stream);
- if (status == PV_END_OF_VOP)
- {
- video->shortVideoHeader = TRUE;
- return PV_SUCCESS;
- }
- else
- {
- do
- {
- /* Search for VOL_HEADER */
- status = PVSearchNextM4VFrame(stream);/* search 0x00 0x00 0x01 */
- if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
- BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
- PV_BitstreamFlushBits(stream, 8); /* advance the byte ptr */
- }
- while (tmpvar != VOL_START_CODE);
- }
- }
- else
- {
- PV_BitstreamFlushBits(stream, 8);
- }
-
-decode_vol:
- PV_BitstreamFlushBits(stream, VOL_START_CODE_LENGTH - 8);
- video->shortVideoHeader = 0;
-
- /* vol_id (4 bits) */
- currVol->volID = (int) BitstreamReadBits16(stream, 4);
-
- /* RandomAccessible flag */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
-
- /* object type */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* */
-
-#ifdef PV_TOLERATE_VOL_ERRORS
- if (tmpvar == 0)
- {
- if (layer) /* */
- {
- /* support SSPL0-2 */
- if (profile != 0x10 && profile != 0x11 && profile != 0x12)
- return PV_FAIL;
- tmpvar = 0x02;
- }
- else
- {
- /* support SPL0-3 & SSPL0-2 */
- if (profile != 0x01 && profile != 0x02 && profile != 0x03 && profile != 0x08 &&
- profile != 0x10 && profile != 0x11 && profile != 0x12)
- return PV_FAIL;
- tmpvar = 0x01;
- }
- profile |= 0x0100;
- }
-#endif
-
- if (layer)
- {
- if (tmpvar != 0x02) return PV_FAIL;
- }
- else
- {
- if (tmpvar != 0x01) return PV_FAIL;
- }
-
- /* version id specified? */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar == 1)
- {
- /* version ID */
- version_id = (uint32) BitstreamReadBits16(stream, 4);
- /* priority */
- tmpvar = (uint32) BitstreamReadBits16(stream, 3);
-
- }
-
- /* aspect ratio info */
- tmpvar = (uint32) BitstreamReadBits16(stream, 4);
- if (tmpvar == 0) return PV_FAIL;
- if (tmpvar == 0xf /* extended_par */)
- {
- /* width */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- /* height */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- }
-
-
- /* control parameters present? */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
-
- /* Get the parameters (skipped) */
- /* 03/10/99 */
- if (tmpvar)
- {
- /* chroma_format */
- tmpvar = BitstreamReadBits16(stream, 2);
- if (tmpvar != 1) return PV_FAIL;
- /* low_delay */
- tmpvar = BitstreamRead1Bits(stream);
-
- /* vbv_parameters present? */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar)
- {
- /* first_half_bit_rate */
- BitstreamReadBits16(stream, 15);
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- /* latter_half_bit_rate */
- BitstreamReadBits16(stream, 15);
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- /* first_half_vbv_buffer_size */
- BitstreamReadBits16(stream, 15);
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- /* latter_half_vbv_buffer_size */
- BitstreamReadBits16(stream, 3);
- /* first_half_vbv_occupancy */
- BitstreamReadBits16(stream, 11);
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- /* latter_half_vbv_occupancy */
- BitstreamReadBits16(stream, 15);
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- }
- }
-
- /* video_object_layer_shape (2 bits), only 00 (rect) is supported for now */
- vol_shape = (uint32) BitstreamReadBits16(stream, 2);
- if (vol_shape) return PV_FAIL;
-
- /* marker bit, 03/10/99 */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- /* vop_time_increment_resolution */
- currVol->timeIncrementResolution = BitstreamReadBits16(stream, 16);
- if (currVol->timeIncrementResolution == 0) return PV_FAIL;
-
- /* . since nbitsTimeIncRes will be used over and over again, */
- /* we should put it in Vol structure. 04/12/2000. */
- currVol->nbitsTimeIncRes = CalcNumBits((uint)currVol->timeIncrementResolution - 1);
-
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- /* fixed_vop_rate */
- currVol->fixedVopRate = (int) BitstreamRead1Bits(stream);
- if (currVol->fixedVopRate)
- {
- /* fixed_vop_time_increment */
- tmpvar = BitstreamReadBits16(stream, currVol->nbitsTimeIncRes);
- }
-
- /* marker bit */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- /* video_object_layer_width (13 bits) */
- tmpvar = BitstreamReadBits16(stream, 13);
- if (!tmpvar) return PV_FAIL;
- video->displayWidth = video->width = tmpvar;
-
- /* round up to a multiple of MB_SIZE. 08/09/2000 */
- video->width = (video->width + 15) & -16;
-// video->displayWidth += (video->displayWidth & 0x1); /* displayed image should be even size */
-
- /* marker bit */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- /* video_object_layer_height (13 bits) */
- tmpvar = BitstreamReadBits16(stream, 13);
- if (!tmpvar) return PV_FAIL;
- video->displayHeight = video->height = tmpvar;
-
- /* round up to a multiple of MB_SIZE. 08/09/2000 */
- video->height = (video->height + 15) & -16;
-// video->displayHeight += (video->displayHeight & 0x1); /* displayed image should be even size */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- /* 03/10/99 */
- /* interlaced */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar != 0)
- {
- mp4dec_log("DecodeVOLHeader(): Interlaced video is not supported.\n");
- return PV_FAIL;
- }
-
- /* obmc_disable */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar == 0) return PV_FAIL;
-
- if (version_id == 1)
- {
- /* sprite_enable (1 bits) */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- if (tmpvar)
- {
- mp4dec_log("DecodeVOLHeader(): Sprite is not supported.\n");
- return PV_FAIL;
- }
- }
- else
- {
- /* For version 2, vol_sprite_usage has two bits. */
- /* sprite_enable */
- tmpvar = (uint32) BitstreamReadBits16(stream, 2);
- if (tmpvar)
- {
- mp4dec_log("DecodeVOLHeader(): Sprite is not supported.\n");
- return PV_FAIL;
- }
- }
-
- /* not_8_bit */
- if (BitstreamRead1Bits(stream))
- {
- /* quant_precision */
- currVol->quantPrecision = BitstreamReadBits16(stream, 4);
- /* bits_per_pixel */
- currVol->bitsPerPixel = BitstreamReadBits16(stream, 4);
- mp4dec_log("DecodeVOLHeader(): not an 8-bit stream.\n"); // For the time being we do not support != 8 bits
-
- return PV_FAIL;
- }
- else
- {
- currVol->quantPrecision = 5;
- currVol->bitsPerPixel = 8;
- }
-
- /* quant_type (1 bit) */
- currVol->quantType = BitstreamRead1Bits(stream);
- if (currVol->quantType)
- {
-#ifdef PV_SUPPORT_MAIN_PROFILE
- /* load quantization matrices. 5/22/2000 */
- /* load_intra_quant_mat (1 bit) */
- qmat = currVol->iqmat;
- currVol->loadIntraQuantMat = BitstreamRead1Bits(stream);
- if (currVol->loadIntraQuantMat)
- {
- /* intra_quant_mat (8*64 bits) */
- i = 0;
- do
- {
- qmat[*(zigzag_inv+i)] = (int) BitstreamReadBits16(stream, 8);
- }
- while ((qmat[*(zigzag_inv+i)] != 0) && (++i < 64));
-
- for (j = i; j < 64; j++)
- qmat[*(zigzag_inv+j)] = qmat[*(zigzag_inv+i-1)];
- }
- else
- {
- oscl_memcpy(qmat, mpeg_iqmat_def, 64*sizeof(int));
- }
-
- qmat[0] = 0; /* necessary for switched && MPEG quant 07/09/01 */
-
- /* load_nonintra_quant_mat (1 bit) */
- qmat = currVol->niqmat;
- currVol->loadNonIntraQuantMat = BitstreamRead1Bits(stream);
- if (currVol->loadNonIntraQuantMat)
- {
- /* nonintra_quant_mat (8*64 bits) */
- i = 0;
- do
- {
- qmat[*(zigzag_inv+i)] = (int) BitstreamReadBits16(stream, 8);
- }
- while ((qmat[*(zigzag_inv+i)] != 0) && (++i < 64));
-
- for (j = i; j < 64; j++)
- qmat[*(zigzag_inv+j)] = qmat[*(zigzag_inv+i-1)];
- }
- else
- {
- oscl_memcpy(qmat, mpeg_nqmat_def, 64*sizeof(int));
- }
-#else
- return PV_FAIL;
-#endif
- }
-
- if (version_id != 1)
- {
- /* quarter_sample enabled */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar) return PV_FAIL;
- }
-
- /* complexity_estimation_disable */
- currVol->complexity_estDisable = BitstreamRead1Bits(stream);
- if (currVol->complexity_estDisable == 0)
- {
- currVol->complexity_estMethod = BitstreamReadBits16(stream, 2);
-
- if (currVol->complexity_estMethod < 2)
- {
- /* shape_complexity_estimation_disable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar == 0)
- {
- mp4dec_log("DecodeVOLHeader(): Shape Complexity estimation is not supported.\n");
- return PV_FAIL;
- }
- /* texture_complexity_estimation_set_1_disable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar == 0)
- {
- currVol->complexity.text_1 = BitstreamReadBits16(stream, 4);
- }
- /* marker bit */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
- /* texture_complexity_estimation_set_2_disable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar == 0)
- {
- currVol->complexity.text_2 = BitstreamReadBits16(stream, 4);
- }
- /* motion_compensation_complexity_disable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar == 0)
- {
- currVol->complexity.mc = BitstreamReadBits16(stream, 6);
- }
- /* marker bit */
- if (!BitstreamRead1Bits(stream)) return PV_FAIL;
-
- if (currVol->complexity_estMethod == 1)
- { /* version2_complexity_estimation_disable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar == 0)
- {
- mp4dec_log("DecodeVOLHeader(): sadct, quarter pel not supported.\n");
- return PV_FAIL;
- }
- }
- }
- }
-
- /* 03/10/99 */
- /* resync_marker_disable */
- currVol->errorResDisable = (int) BitstreamRead1Bits(stream);
- /* data_partititioned */
- currVol->dataPartitioning = (int) BitstreamRead1Bits(stream);
-
- video->vlcDecCoeffIntra = &VlcDecTCOEFIntra;
- video->vlcDecCoeffInter = &VlcDecTCOEFInter;
-
- if (currVol->dataPartitioning)
- {
- if (layer) return PV_FAIL; /* */
- /* reversible_vlc */
- currVol->useReverseVLC = (int)BitstreamRead1Bits(stream);
- if (currVol->useReverseVLC)
- {
- video->vlcDecCoeffIntra = &RvlcDecTCOEFIntra;
- video->vlcDecCoeffInter = &RvlcDecTCOEFInter;
- }
- currVol->errorResDisable = 0;
- }
- else
- {
- currVol->useReverseVLC = 0;
- }
-
- if (version_id != 1)
- {
- /* newpred_enable */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar) return PV_FAIL;
-
- /* reduced_resolution_vop */
- tmpvar = BitstreamRead1Bits(stream);
- if (tmpvar) return PV_FAIL;
-
- }
-
- /* Intra AC/DC prediction is always true */
- video->intra_acdcPredDisable = 0;
- /* scalability */
- currVol->scalability = (int) BitstreamRead1Bits(stream);
-
- if (currVol->scalability)
- {
- if (layer == 0) return PV_FAIL; /* */
- /* hierarchy_type: 1 : temporal, 0 : spatial */
- /* 03/10/99 */
- currVol->scalType = (int) BitstreamRead1Bits(stream); /* */
- if (!currVol->scalType) return PV_FAIL;
-
- /* ref_layer_id (4 bits) */
- currVol->refVolID = (int) BitstreamReadBits16(stream, 4);
- if (layer) /* */
- {
- if (currVol->refVolID != video->vol[0]->volID) return PV_FAIL;
- }
- /* ref_layer_sampling_direc (1 bits) */
- /* 1 : ref. layer has higher resolution */
- /* 0 : ref. layer has equal or lower resolution */
- currVol->refSampDir = (int) BitstreamRead1Bits(stream);
- if (currVol->refSampDir) return PV_FAIL;
-
- /* hor_sampling_factor_n (5 bits) */
- currVol->horSamp_n = (int) BitstreamReadBits16(stream, 5);
-
- /* hor_sampling_factor_m (5 bits) */
- currVol->horSamp_m = (int) BitstreamReadBits16(stream, 5);
-
- if (currVol->horSamp_m == 0) return PV_FAIL;
- if (currVol->horSamp_n != currVol->horSamp_m) return PV_FAIL;
-
- /* ver_sampling_factor_n (5 bits) */
- currVol->verSamp_n = (int) BitstreamReadBits16(stream, 5);
-
- /* ver_sampling_factor_m (5 bits) */
- currVol->verSamp_m = (int) BitstreamReadBits16(stream, 5);
-
- if (currVol->verSamp_m == 0) return PV_FAIL;
- if (currVol->verSamp_n != currVol->verSamp_m) return PV_FAIL;
-
-
- /* enhancement_type: 1 : partial region, 0 : full region */
- /* 04/10/2000: we only support full region enhancement layer. */
- if (BitstreamRead1Bits(stream)) return PV_FAIL;
- }
-
- PV_BitstreamByteAlign(stream);
-
- status = BitstreamShowBits32HC(stream, &tmpvar);
-
- /* if we hit the end of buffer, tmpvar == 0. 08/30/2000 */
- if (tmpvar == USER_DATA_START_CODE)
- {
- status = DecodeUserData(stream);
- /* you should not check for status here 03/19/2002 */
- status = PV_SUCCESS;
- }
-
- /* Compute some convenience variables: 04/13/2000 */
- video->nMBPerRow = video->width / MB_SIZE;
- video->nMBPerCol = video->height / MB_SIZE;
- video->nTotalMB = video->nMBPerRow * video->nMBPerCol;
- video->nBitsForMBID = CalcNumBits((uint)video->nTotalMB - 1);
-#ifdef PV_ANNEX_IJKT_SUPPORT
- video->modified_quant = 0;
- video->advanced_INTRA = 0;
- video->deblocking = 0;
- video->slice_structure = 0;
-#endif
- }
- else
- {
- /* SHORT_HEADER */
- status = BitstreamShowBits32(stream, SHORT_VIDEO_START_MARKER_LENGTH, &tmpvar);
-
- if (tmpvar == SHORT_VIDEO_START_MARKER)
- {
- video->shortVideoHeader = TRUE;
- }
- else
- {
- do
- {
- /* Search for VOL_HEADER */
- status = PVSearchNextM4VFrame(stream); /* search 0x00 0x00 0x01 */
- if (status != PV_SUCCESS) return PV_FAIL; /* breaks the loop */
- BitstreamShowBits32(stream, VOL_START_CODE_LENGTH, &tmpvar);
- PV_BitstreamFlushBits(stream, 8);
- }
- while (tmpvar != VOL_START_CODE);
- goto decode_vol;
- }
- }
-#ifdef PV_TOLERATE_VOL_ERRORS
- if (profile > 0xFF || profile == 0)
- {
- return PV_BAD_VOLHEADER;
- }
-#endif
-
- return status;
-}
-
-
-/***********************************************************CommentBegin******
-*
-* -- DecodeGOV -- Decodes the Group of VOPs from bitstream
-*
-* 04/20/2000 initial modification to the new PV-Decoder Lib format.
-*
-***********************************************************CommentEnd********/
-PV_STATUS DecodeGOVHeader(BitstreamDecVideo *stream, uint32 *time_base)
-{
- uint32 tmpvar, time_s;
- int closed_gov, broken_link;
-
- /* group_start_code (32 bits) */
-// tmpvar = BitstreamReadBits32(stream, 32);
-
- /* hours */
- tmpvar = (uint32) BitstreamReadBits16(stream, 5);
- time_s = tmpvar * 3600;
-
- /* minutes */
- tmpvar = (uint32) BitstreamReadBits16(stream, 6);
- time_s += tmpvar * 60;
-
- /* marker bit */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
-
- /* seconds */
- tmpvar = (uint32) BitstreamReadBits16(stream, 6);
- time_s += tmpvar;
-
- /* We have to check the timestamp here. If the sync timestamp is */
- /* earlier than the previous timestamp or longer than 60 sec. */
- /* after the previous timestamp, assume the GOV header is */
- /* corrupted. 05/12/2000 */
- *time_base = time_s; /* 02/27/2002 */
-// *time_base = *time_base/1000;
-// tmpvar = time_s - *time_base;
-// if (tmpvar <= 60) *time_base = time_s;
-// else return PV_FAIL;
-
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- closed_gov = tmpvar;
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- broken_link = tmpvar;
-
- if ((closed_gov == 0) && (broken_link == 1))
- {
- return PV_SUCCESS; /* 03/15/2002 you can also return PV_FAIL */
- }
-
- PV_BitstreamByteAlign(stream);
-
- BitstreamShowBits32HC(stream, &tmpvar);
-
- while (tmpvar == USER_DATA_START_CODE) /* 03/15/2002 */
- {
- DecodeUserData(stream);
- BitstreamShowBits32HC(stream, &tmpvar);
- }
-
- return PV_SUCCESS;
-}
-
-/***********************************************************CommentBegin******
-*
-* -- DecodeVopHeader -- Decodes the VOPheader information from the bitstream
-*
-* 04/12/2000 Initial port to the new PV decoder library format.
-* 05/10/2000 Error resilient decoding of vop header.
-*
-***********************************************************CommentEnd********/
-PV_STATUS DecodeVOPHeader(VideoDecData *video, Vop *currVop, Bool use_ext_timestamp)
-{
- PV_STATUS status = PV_SUCCESS;
- Vol *currVol = video->vol[video->currLayer];
- BitstreamDecVideo *stream = currVol->bitstream;
- uint32 tmpvar;
- int time_base;
-
- /*****
- * Read the VOP header from the bitstream (No shortVideoHeader Mode here!)
- *****/
- BitstreamShowBits32HC(stream, &tmpvar);
-
- /* check if we have a GOV header here. 08/30/2000 */
- if (tmpvar == GROUP_START_CODE)
- {
- tmpvar = BitstreamReadBits32HC(stream);
-// rewindBitstream(stream, START_CODE_LENGTH); /* for backward compatibility */
- status = DecodeGOVHeader(stream, &tmpvar);
- if (status != PV_SUCCESS)
- {
- return status;
- }
-// use_ext_timestamp = TRUE; /* 02/08/2002 */
- /* We should have a VOP header following the GOV header. 03/15/2001 */
- BitstreamShowBits32HC(stream, &tmpvar);
- }
-#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
- currVop->timeStamp = -1;
-#endif
- if (tmpvar == VOP_START_CODE)
- {
- tmpvar = BitstreamReadBits32HC(stream);
- }
- else
- {
- PV_BitstreamFlushBits(stream, 8); // advance by a byte
- status = PV_FAIL;
- goto return_point;
- }
-
-
-
- /* vop_prediction_type (2 bits) */
- currVop->predictionType = (int) BitstreamReadBits16(stream, 2);
-
- /* modulo_time_base (? bits) */
- time_base = -1;
- do
- {
- time_base++;
- tmpvar = (uint32) BitstreamRead1Bits(stream);
- }
- while (tmpvar == 1);
-
-
-
- if (!use_ext_timestamp)
- {
- currVol->moduloTimeBase += 1000 * time_base; /* milliseconds based MTB 11/12/01 */
- }
-
- /* marker_bit (1 bit) */
- if (!BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- /* vop_time_increment (1-15 bits) in Nov_Compliant (1-16 bits) */
- /* we always assumes fixed vop rate here */
- currVop->timeInc = BitstreamReadBits16(stream, currVol->nbitsTimeIncRes);
-
-
- /* marker_bit (1 bit) */
- if (!BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- /* vop_coded */
- currVop->vopCoded = (int) BitstreamRead1Bits(stream);
-
-
- if (currVop->vopCoded == 0)
- {
- status = PV_SUCCESS;
- goto return_point;
- }
-
-
- /* read vop_rounding_type */
- if (currVop->predictionType == P_VOP)
- {
- currVop->roundingType = (int) BitstreamRead1Bits(stream);
- }
- else
- {
- currVop->roundingType = 0;
- }
-
- if (currVol->complexity_estDisable == 0)
- {
- if (currVol->complexity_estMethod < 2) /* OCT 2002 */
- {
- if ((currVol->complexity.text_1 >> 3) & 0x1) /* intra */
- BitstreamReadBits16(stream, 8);
- if (currVol->complexity.text_1 & 0x1) /* not_coded */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.text_2 >> 3) & 0x1) /* dct_coefs */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.text_2 >> 2) & 0x1) /* dct_lines */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.text_2 >> 1) & 0x1) /* vlc_symbols */
- BitstreamReadBits16(stream, 8);
- if (currVol->complexity.text_2 & 0x1) /* vlc_bits */
- BitstreamReadBits16(stream, 4);
-
- if (currVop->predictionType != I_VOP)
- {
- if ((currVol->complexity.text_1 >> 2) & 0x1) /* inter */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.text_1 >> 1) & 0x1) /* inter_4v */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.mc >> 5) & 0x1) /* apm */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.mc >> 4) & 0x1) /* npm */
- BitstreamReadBits16(stream, 8);
- /* interpolate_mc_q */
- if ((currVol->complexity.mc >> 2) & 0x1) /* forw_back_mc_q */
- BitstreamReadBits16(stream, 8);
- if ((currVol->complexity.mc >> 1) & 0x1) /* halfpel2 */
- BitstreamReadBits16(stream, 8);
- if (currVol->complexity.mc & 0x1) /* halfpel4 */
- BitstreamReadBits16(stream, 8);
- }
- if (currVop->predictionType == B_VOP)
- {
- if ((currVol->complexity.mc >> 3) & 0x1) /* interpolate_mc_q */
- BitstreamReadBits16(stream, 8);
- }
- }
- }
-
- /* read intra_dc_vlc_thr */
- currVop->intraDCVlcThr = (int) BitstreamReadBits16(stream, 3);
-
- /* read vop_quant (currVol->quantPrecision bits) */
- currVop->quantizer = (int16) BitstreamReadBits16(stream, currVol->quantPrecision);
- if (currVop->quantizer == 0)
- {
- currVop->quantizer = video->prevVop->quantizer;
- status = PV_FAIL;
- goto return_point;
- }
-
-
- /* read vop_fcode_forward */
- if (currVop->predictionType != I_VOP)
- {
- tmpvar = (uint32) BitstreamReadBits16(stream, 3);
- if (tmpvar < 1)
- {
- currVop->fcodeForward = 1;
- status = PV_FAIL;
- goto return_point;
- }
- currVop->fcodeForward = tmpvar;
- }
- else
- {
- currVop->fcodeForward = 0;
- }
-
- /* read vop_fcode_backward */
- if (currVop->predictionType == B_VOP)
- {
- tmpvar = (uint32) BitstreamReadBits16(stream, 3);
- if (tmpvar < 1)
- {
- currVop->fcodeBackward = 1;
- status = PV_FAIL;
- goto return_point;
- }
- currVop->fcodeBackward = tmpvar;
- }
- else
- {
- currVop->fcodeBackward = 0;
- }
-
- if (currVol->scalability)
- {
- currVop->refSelectCode = (int) BitstreamReadBits16(stream, 2);
- }
-
-return_point:
- return status;
-}
-
-
-/***********************************************************CommentBegin******
-*
-* -- VideoPlaneWithShortHeader -- Decodes the short_video_header information from the bitstream
-* Modified :
- 04/23/2001. Remove the codes related to the
- "first pass" decoding. We use a different function
- to set up the decoder now.
-***********************************************************CommentEnd********/
-PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop)
-{
- PV_STATUS status = PV_SUCCESS;
- Vol *currVol = video->vol[0];
- BitstreamDecVideo *stream = currVol->bitstream;
- uint32 tmpvar;
- int32 size;
-
- int extended_PTYPE = FALSE;
- int UFEP = 0, custom_PFMT = 0, custom_PCF = 0;
-
- status = BitstreamShowBits32(stream, SHORT_VIDEO_START_MARKER_LENGTH, &tmpvar);
-
- if (tmpvar != SHORT_VIDEO_START_MARKER)
- {
- status = PV_FAIL;
- goto return_point;
- }
-
-
- PV_BitstreamFlushBits(stream, SHORT_VIDEO_START_MARKER_LENGTH);
-
- /* Temporal reference. Using vop_time_increment_resolution = 30000 */
- tmpvar = (uint32) BitstreamReadBits16(stream, 8);
- currVop->temporalRef = (int) tmpvar;
-
-
- currVop->timeInc = 0xff & (256 + currVop->temporalRef - video->prevVop->temporalRef);
- currVol->moduloTimeBase += currVop->timeInc; /* mseconds 11/12/01 */
- /* Marker Bit */
- if (!BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Marker bit wrong.\n");
- status = PV_FAIL;
- goto return_point;
- }
-
- /* Zero Bit */
- if (BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Zero bit wrong.\n");
- status = PV_FAIL;
- goto return_point;
- }
-
- /*split_screen_indicator*/
- if (BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Split Screen not supported.\n");
- VideoDecoderErrorDetected(video);
- }
-
- /*document_freeze_camera*/
- if (BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Freeze Camera not supported.\n");
- VideoDecoderErrorDetected(video);
- }
-
- /*freeze_picture_release*/
- if (BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Freeze Release not supported.\n");
- VideoDecoderErrorDetected(video);
- }
- /* source format */
- switch (BitstreamReadBits16(stream, 3))
- {
- case 1:
- if (video->size < 128*96)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 128;
- video->displayHeight = video->height = 96;
- break;
-
- case 2:
- if (video->size < 176*144)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 176;
- video->displayHeight = video->height = 144;
- break;
-
- case 3:
- if (video->size < 352*288)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 352;
- video->displayHeight = video->height = 288;
- break;
-
- case 4:
- if (video->size < 704*576)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 704;
- video->displayHeight = video->height = 576;
- break;
-
- case 5:
- if (video->size < 1408*1152)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 1408;
- video->displayHeight = video->height = 1152;
- break;
-
- case 7:
- extended_PTYPE = TRUE;
- break;
-
- default:
- /* Msg("H.263 source format not legal\n"); */
- status = PV_FAIL;
- goto return_point;
- }
-
-
- currVop->roundingType = 0;
-
- if (extended_PTYPE == FALSE)
- {
- currVop->predictionType = (int) BitstreamRead1Bits(stream);
-
- /* four_reserved_zero_bits */
- if (BitstreamReadBits16(stream, 4))
- {
- mp4dec_log("DecodeShortHeader(): Reserved bits wrong.\n");
- status = PV_FAIL;
- goto return_point;
- }
- }
- else
- {
- UFEP = BitstreamReadBits16(stream, 3);
- if (UFEP == 1)
- {
- /* source format */
- switch (BitstreamReadBits16(stream, 3))
- {
- case 1:
- if (video->size < 128*96)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 128;
- video->displayHeight = video->height = 96;
- break;
-
- case 2:
- if (video->size < 176*144)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 176;
- video->displayHeight = video->height = 144;
- break;
-
- case 3:
- if (video->size < 352*288)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 352;
- video->displayHeight = video->height = 288;
- break;
-
- case 4:
- if (video->size < 704*576)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 704;
- video->displayHeight = video->height = 576;
- break;
-
- case 5:
- if (video->size < 1408*1152)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = video->width = 1408;
- video->displayHeight = video->height = 1152;
- break;
-
- case 6:
- custom_PFMT = TRUE;
- break;
-
- default:
- /* Msg("H.263 source format not legal\n"); */
- status = PV_FAIL;
- goto return_point;
- }
-
- custom_PCF = BitstreamRead1Bits(stream);
- /* unrestricted MV */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
- /* SAC */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- /* AP */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- video->advanced_INTRA = BitstreamRead1Bits(stream);
-
- video->deblocking = BitstreamRead1Bits(stream);
-
- video->slice_structure = BitstreamRead1Bits(stream);
-
- /* RPS, ISD, AIV */
- if (BitstreamReadBits16(stream, 3))
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->modified_quant = BitstreamRead1Bits(stream);
-
- /* Marker Bit and reserved*/
- if (BitstreamReadBits16(stream, 4) != 8)
- {
- status = PV_FAIL;
- goto return_point;
- }
- }
-#ifndef PV_ANNEX_IJKT_SUPPORT
- if (video->advanced_INTRA | video->deblocking | video->modified_quant | video->modified_quant)
- {
- status = PV_FAIL;
- goto return_point;
- }
-#endif
-
- if (UFEP == 0 || UFEP == 1)
- {
- tmpvar = BitstreamReadBits16(stream, 3);
- if (tmpvar > 1)
- {
- status = PV_FAIL;
- goto return_point;
- }
- currVop->predictionType = tmpvar;
- /* RPR */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- /* RRU */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
- currVop->roundingType = (int) BitstreamRead1Bits(stream);
- if (BitstreamReadBits16(stream, 3) != 1)
- {
- status = PV_FAIL;
- goto return_point;
- }
- }
- else
- {
- status = PV_FAIL;
- goto return_point;
- }
- /* CPM */
- if (BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
- /* CPFMT */
- if (custom_PFMT == 1 && UFEP == 1)
- {
- /* aspect ratio */
- tmpvar = BitstreamReadBits16(stream, 4);
- if (tmpvar == 0)
- {
- status = PV_FAIL;
- goto return_point;
- }
- /* Extended PAR */
- if (tmpvar == 0xF)
- {
- /* Read par_width and par_height but do nothing */
- /* par_width */
- tmpvar = BitstreamReadBits16(stream, 8);
-
- /* par_height */
- tmpvar = BitstreamReadBits16(stream, 8);
- }
- tmpvar = BitstreamReadBits16(stream, 9);
-
- int tmpDisplayWidth = (tmpvar + 1) << 2;
- /* marker bit */
- if (!BitstreamRead1Bits(stream))
- {
- status = PV_FAIL;
- goto return_point;
- }
- tmpvar = BitstreamReadBits16(stream, 9);
- if (tmpvar == 0)
- {
- status = PV_FAIL;
- goto return_point;
- }
- int tmpDisplayHeight = tmpvar << 2;
- int tmpHeight = (tmpDisplayHeight + 15) & -16;
- int tmpWidth = (tmpDisplayWidth + 15) & -16;
-
- if (tmpWidth > video->width)
- {
- // while allowed by the spec, this decoder does not actually
- // support an increase in size.
- ALOGE("width increase not supported");
- status = PV_FAIL;
- goto return_point;
- }
- if (tmpHeight * tmpWidth > video->size)
- {
- // This is just possibly "b/37079296".
- ALOGE("b/37079296");
- status = PV_FAIL;
- goto return_point;
- }
- video->displayWidth = tmpDisplayWidth;
- video->width = tmpWidth;
- video->displayHeight = tmpDisplayHeight;
- video->height = tmpHeight;
-
- video->nTotalMB = video->width / MB_SIZE * video->height / MB_SIZE;
-
- if (video->nTotalMB <= 48)
- {
- video->nBitsForMBID = 6;
- }
- else if (video->nTotalMB <= 99)
- {
- video->nBitsForMBID = 7;
- }
- else if (video->nTotalMB <= 396)
- {
- video->nBitsForMBID = 9;
- }
- else if (video->nTotalMB <= 1584)
- {
- video->nBitsForMBID = 11;
- }
- else if (video->nTotalMB <= 6336)
- {
- video->nBitsForMBID = 13 ;
- }
- else if (video->nTotalMB <= 9216)
- {
- video->nBitsForMBID = 14 ;
- }
- else
- {
- status = PV_FAIL;
- goto return_point;
- }
- }
- if (UFEP == 1 && custom_PCF == 1)
- {
- BitstreamRead1Bits(stream);
-
- tmpvar = BitstreamReadBits16(stream, 7);
- if (tmpvar == 0)
- {
- status = PV_FAIL;
- goto return_point;
- }
- }
-
- if (custom_PCF == 1)
- {
- currVop->ETR = BitstreamReadBits16(stream, 2);
- }
-
- if (UFEP == 1 && video->slice_structure == 1)
- {
- /* SSS */
- tmpvar = BitstreamReadBits16(stream, 2);
- if (tmpvar != 0)
- {
- status = PV_FAIL;
- goto return_point;
- }
- }
- }
-
- /* Recalculate number of macroblocks per row & col since */
- /* the frame size can change. 04/23/2001. */
- video->nMBinGOB = video->nMBPerRow = video->width / MB_SIZE;
- video->nGOBinVop = video->nMBPerCol = video->height / MB_SIZE;
- video->nTotalMB = video->nMBPerRow * video->nMBPerCol;
- if (custom_PFMT == 0 || UFEP == 0)
- {
- video->nBitsForMBID = CalcNumBits((uint)video->nTotalMB - 1); /* otherwise calculate above */
- }
- size = (int32)video->width * video->height;
- if (currVop->predictionType == P_VOP && size > video->videoDecControls->size)
- {
- status = PV_FAIL;
- goto return_point;
- }
- video->videoDecControls->size = size;
- video->currVop->uChan = video->currVop->yChan + size;
- video->currVop->vChan = video->currVop->uChan + (size >> 2);
- video->prevVop->uChan = video->prevVop->yChan + size;
- video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
-
-
- currVop->quantizer = (int16) BitstreamReadBits16(stream, 5);
-
- if (currVop->quantizer == 0) /* 04/03/01 */
- {
- currVop->quantizer = video->prevVop->quantizer;
- status = PV_FAIL;
- goto return_point;
- }
-
-
- /* Zero bit */
- if (extended_PTYPE == FALSE)
- {
- if (BitstreamRead1Bits(stream))
- {
- mp4dec_log("DecodeShortHeader(): Zero bit wrong.\n");
- status = PV_FAIL;
- goto return_point;
- }
- }
- /* pei */
- tmpvar = (uint32) BitstreamRead1Bits(stream);
-
- while (tmpvar)
- {
- tmpvar = (uint32) BitstreamReadBits16(stream, 8); /* "PSPARE" */
- tmpvar = (uint32) BitstreamRead1Bits(stream); /* "PEI" */
- }
-
- if (video->slice_structure) /* ANNEX_K */
- {
- if (!BitstreamRead1Bits(stream)) /* SEPB1 */
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- // if (currVol->nBitsForMBID //
- if (BitstreamReadBits16(stream, video->nBitsForMBID))
- {
- status = PV_FAIL; /* no ASO, RS support for Annex K */
- goto return_point;
- }
-
- if (!BitstreamRead1Bits(stream)) /*SEPB3 */
- {
- status = PV_FAIL;
- goto return_point;
- }
-
- }
- /* Setting of other VOP-header parameters */
- currVop->gobNumber = 0;
- currVop->vopCoded = 1;
-
- currVop->intraDCVlcThr = 0;
- currVop->gobFrameID = 0; /* initial value, 05/22/00 */
- currVol->errorResDisable = 0;
- /*PutVopInterlaced(0,curr_vop); no implemented yet */
- if (currVop->predictionType != I_VOP)
- currVop->fcodeForward = 1;
- else
- currVop->fcodeForward = 0;
-
-return_point:
-
- return status;
-}
-/***********************************************************CommentBegin******
-*
-* -- PV_DecodeVop -- Decodes the VOP information from the bitstream
-*
-* 04/12/2000
-* Initial port to the new PV decoder library format.
-* This function is different from the one in MoMuSys MPEG-4
-* visual decoder. We handle combined mode with or withput
-* error resilience and H.263 mode through the sam path now.
-*
-* 05/04/2000
-* Added temporal scalability to the decoder.
-*
-***********************************************************CommentEnd********/
-PV_STATUS PV_DecodeVop(VideoDecData *video)
-{
- Vol *currVol = video->vol[video->currLayer];
- PV_STATUS status;
- uint32 tmpvar;
-
- /*****
- * Do scalable or non-scalable decoding of the current VOP
- *****/
-
- if (!currVol->scalability)
- {
- if (currVol->dataPartitioning)
- {
- /* Data partitioning mode comes here */
- status = DecodeFrameDataPartMode(video);
- }
- else
- {
- /* Combined mode with or without error resilience */
- /* and short video header comes here. */
- status = DecodeFrameCombinedMode(video);
- }
- }
- else
- {
-#ifdef DO_NOT_FOLLOW_STANDARD
- /* according to the standard, only combined mode is allowed */
- /* in the enhancement layer. 06/01/2000. */
- if (currVol->dataPartitioning)
- {
- /* Data partitioning mode comes here */
- status = DecodeFrameDataPartMode(video);
- }
- else
- {
- /* Combined mode with or without error resilience */
- /* and short video header comes here. */
- status = DecodeFrameCombinedMode(video);
- }
-#else
- status = DecodeFrameCombinedMode(video);
-#endif
- }
-
- /* This part is for consuming Visual_object_sequence_end_code and EOS Code */ /* 10/15/01 */
- if (!video->shortVideoHeader)
- {
- /* at this point bitstream is expected to be byte aligned */
- BitstreamByteAlignNoForceStuffing(currVol->bitstream);
-
- status = BitstreamShowBits32HC(currVol->bitstream, &tmpvar); /* 07/07/01 */
- if (tmpvar == VISUAL_OBJECT_SEQUENCE_END_CODE)/* VOS_END_CODE */
- {
- PV_BitstreamFlushBits(currVol->bitstream, 16);
- PV_BitstreamFlushBits(currVol->bitstream, 16);
- }
-
- }
- else
- {
-#ifdef PV_ANNEX_IJKT_SUPPORT
- if (video->deblocking)
- {
- H263_Deblock(video->currVop->yChan, video->width, video->height, video->QPMB, video->headerInfo.Mode, 0, 0);
- H263_Deblock(video->currVop->uChan, video->width >> 1, video->height >> 1, video->QPMB, video->headerInfo.Mode, 1, video->modified_quant);
- H263_Deblock(video->currVop->vChan, video->width >> 1, video->height >> 1, video->QPMB, video->headerInfo.Mode, 1, video->modified_quant);
- }
-#endif
- /* Read EOS code for shortheader bitstreams */
- status = BitstreamShowBits32(currVol->bitstream, 22, &tmpvar);
- if (tmpvar == SHORT_VIDEO_END_MARKER)
- {
- PV_BitstreamFlushBits(currVol->bitstream, 22);
- }
- else
- {
- status = PV_BitstreamShowBitsByteAlign(currVol->bitstream, 22, &tmpvar);
- if (tmpvar == SHORT_VIDEO_END_MARKER)
- {
- PV_BitstreamByteAlign(currVol->bitstream);
- PV_BitstreamFlushBits(currVol->bitstream, 22);
- }
- }
- }
- return status;
-}
-
-
-/***********************************************************CommentBegin******
-*
-* -- CalcVopDisplayTime -- calculate absolute time when VOP is to be displayed
-*
-* 04/12/2000 Initial port to the new PV decoder library format.
-*
-***********************************************************CommentEnd********/
-uint32 CalcVopDisplayTime(Vol *currVol, Vop *currVop, int shortVideoHeader)
-{
- uint32 display_time;
-
-
- /*****
- * Calculate the time when the VOP is to be displayed next
- *****/
-
- if (!shortVideoHeader)
- {
- display_time = (uint32)(currVol->moduloTimeBase + (((int32)currVop->timeInc - (int32)currVol->timeInc_offset) * 1000) / ((int32)currVol->timeIncrementResolution)); /* 11/12/2001 */
- if (currVop->timeStamp >= display_time)
- {
- display_time += 1000; /* this case is valid if GOVHeader timestamp is ignored */
- }
- }
- else
- {
- display_time = (uint32)(currVol->moduloTimeBase * 33 + (currVol->moduloTimeBase * 11) / 30); /* 11/12/2001 */
- }
-
- return(display_time);
-}
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
deleted file mode 100755
index 47e10ca..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2020 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<configuration description="Test module config for Mpeg4H263 Decoder unit tests">
- <option name="test-suite-tag" value="Mpeg4H263DecoderTest" />
- <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
- <option name="cleanup" value="true" />
- <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
- <option name="push-file"
- key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder.zip?unzip=true"
- value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
- </target_preparer>
-
- <test class="com.android.tradefed.testtype.GTest" >
- <option name="native-test-device-path" value="/data/local/tmp" />
- <option name="module-name" value="Mpeg4H263DecoderTest" />
- <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263DecoderTestRes/" />
- </test>
-</configuration>
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp b/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
deleted file mode 100644
index 967c1ea..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Mpeg4H263DecoderTest"
-#include <utils/Log.h>
-
-#include <stdio.h>
-#include <string.h>
-#include <utils/String8.h>
-#include <fstream>
-
-#include <media/stagefright/foundation/AUtils.h>
-#include "mp4dec_api.h"
-
-#include "Mpeg4H263DecoderTestEnvironment.h"
-
-using namespace android;
-
-#define OUTPUT_FILE_NAME "/data/local/tmp/Output.yuv"
-#define CODEC_CONFIG_FLAG 32
-#define SYNC_FRAME 1
-#define MPEG4_MAX_WIDTH 1920
-#define MPEG4_MAX_HEIGHT 1080
-#define H263_MAX_WIDTH 352
-#define H263_MAX_HEIGHT 288
-
-constexpr uint32_t kNumOutputBuffers = 2;
-
-struct FrameInfo {
- int32_t bytesCount;
- uint32_t flags;
- int64_t timestamp;
-};
-
-struct tagvideoDecControls;
-
-static Mpeg4H263DecoderTestEnvironment *gEnv = nullptr;
-
-class Mpeg4H263DecoderTest : public ::testing::TestWithParam<tuple<string, string, bool>> {
- public:
- Mpeg4H263DecoderTest()
- : mDecHandle(nullptr),
- mInputBuffer(nullptr),
- mInitialized(false),
- mFramesConfigured(false),
- mNumSamplesOutput(0),
- mWidth(352),
- mHeight(288) {
- memset(mOutputBuffer, 0x0, sizeof(mOutputBuffer));
- }
-
- ~Mpeg4H263DecoderTest() {
- if (mEleStream.is_open()) mEleStream.close();
- if (mDecHandle) {
- delete mDecHandle;
- mDecHandle = nullptr;
- }
- if (mInputBuffer) {
- free(mInputBuffer);
- mInputBuffer = nullptr;
- }
- freeOutputBuffer();
- }
-
- status_t initDecoder();
- void allocOutputBuffer(size_t outputBufferSize);
- void dumpOutput(ofstream &ostrm);
- void freeOutputBuffer();
- void processMpeg4H263Decoder(vector<FrameInfo> Info, int32_t offset, int32_t range,
- ifstream &mEleStream, ofstream &ostrm, MP4DecodingMode inputMode);
- void deInitDecoder();
-
- ifstream mEleStream;
- tagvideoDecControls *mDecHandle;
- char *mInputBuffer;
- uint8_t *mOutputBuffer[kNumOutputBuffers];
- bool mInitialized;
- bool mFramesConfigured;
- uint32_t mNumSamplesOutput;
- uint32_t mWidth;
- uint32_t mHeight;
-};
-
-status_t Mpeg4H263DecoderTest::initDecoder() {
- if (!mDecHandle) {
- mDecHandle = new tagvideoDecControls;
- }
- if (!mDecHandle) {
- return NO_MEMORY;
- }
- memset(mDecHandle, 0, sizeof(tagvideoDecControls));
-
- return OK;
-}
-
-void Mpeg4H263DecoderTest::allocOutputBuffer(size_t outputBufferSize) {
- for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
- if (!mOutputBuffer[i]) {
- mOutputBuffer[i] = (uint8_t *)malloc(outputBufferSize);
- ASSERT_NE(mOutputBuffer[i], nullptr) << "Output buffer allocation failed";
- }
- }
-}
-
-void Mpeg4H263DecoderTest::dumpOutput(ofstream &ostrm) {
- uint8_t *src = mOutputBuffer[mNumSamplesOutput & 1];
- size_t vStride = align(mHeight, 16);
- size_t srcYStride = align(mWidth, 16);
- size_t srcUVStride = srcYStride / 2;
- uint8_t *srcStart = src;
-
- /* Y buffer */
- for (size_t i = 0; i < mHeight; ++i) {
- ostrm.write(reinterpret_cast<char *>(src), mWidth);
- src += srcYStride;
- }
- /* U buffer */
- src = srcStart + vStride * srcYStride;
- for (size_t i = 0; i < mHeight / 2; ++i) {
- ostrm.write(reinterpret_cast<char *>(src), mWidth / 2);
- src += srcUVStride;
- }
- /* V buffer */
- src = srcStart + vStride * srcYStride * 5 / 4;
- for (size_t i = 0; i < mHeight / 2; ++i) {
- ostrm.write(reinterpret_cast<char *>(src), mWidth / 2);
- src += srcUVStride;
- }
-}
-
-void Mpeg4H263DecoderTest::freeOutputBuffer() {
- for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
- if (mOutputBuffer[i]) {
- free(mOutputBuffer[i]);
- mOutputBuffer[i] = nullptr;
- }
- }
-}
-
-void Mpeg4H263DecoderTest::processMpeg4H263Decoder(vector<FrameInfo> Info, int32_t offset,
- int32_t range, ifstream &mEleStream,
- ofstream &ostrm, MP4DecodingMode inputMode) {
- size_t maxWidth = (inputMode == MPEG4_MODE) ? MPEG4_MAX_WIDTH : H263_MAX_WIDTH;
- size_t maxHeight = (inputMode == MPEG4_MODE) ? MPEG4_MAX_HEIGHT : H263_MAX_HEIGHT;
- size_t outputBufferSize = align(maxWidth, 16) * align(maxHeight, 16) * 3 / 2;
- uint32_t frameIndex = offset;
- bool status = true;
- ASSERT_GE(range, 0) << "Invalid range";
- ASSERT_TRUE(offset >= 0 && offset <= Info.size() - 1) << "Invalid offset";
- ASSERT_LE(range + offset, Info.size()) << "range+offset can't be greater than the no of frames";
-
- while (1) {
- if (frameIndex == Info.size() || frameIndex == (offset + range)) break;
-
- int32_t bytesCount = Info[frameIndex].bytesCount;
- ASSERT_GT(bytesCount, 0) << "Size for the memory allocation is negative";
- mInputBuffer = (char *)malloc(bytesCount);
- ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory to read frame";
- mEleStream.read(mInputBuffer, bytesCount);
- ASSERT_EQ(mEleStream.gcount(), bytesCount) << "mEleStream.gcount() != bytesCount";
- static const uint8_t volInfo[] = {0x00, 0x00, 0x01, 0xB0};
- bool volHeader = memcmp(mInputBuffer, volInfo, 4) == 0;
- if (volHeader) {
- PVCleanUpVideoDecoder(mDecHandle);
- mInitialized = false;
- }
-
- if (!mInitialized) {
- uint8_t *volData[1]{};
- int32_t volSize = 0;
-
- uint32_t flags = Info[frameIndex].flags;
- bool codecConfig = flags == CODEC_CONFIG_FLAG;
- if (codecConfig || volHeader) {
- volData[0] = reinterpret_cast<uint8_t *>(mInputBuffer);
- volSize = bytesCount;
- }
-
- status = PVInitVideoDecoder(mDecHandle, volData, &volSize, 1, maxWidth, maxHeight,
- inputMode);
- ASSERT_TRUE(status) << "PVInitVideoDecoder failed. Unsupported content";
-
- mInitialized = true;
- MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
- ASSERT_EQ(inputMode, actualMode)
- << "Decoded mode not same as actual mode of the decoder";
-
- PVSetPostProcType(mDecHandle, 0);
-
- int32_t dispWidth, dispHeight;
- PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
-
- int32_t bufWidth, bufHeight;
- PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
-
- ASSERT_LE(dispWidth, bufWidth) << "Display width is greater than buffer width";
- ASSERT_LE(dispHeight, bufHeight) << "Display height is greater than buffer height";
-
- if (dispWidth != mWidth || dispHeight != mHeight) {
- mWidth = dispWidth;
- mHeight = dispHeight;
- freeOutputBuffer();
- if (inputMode == H263_MODE) {
- PVCleanUpVideoDecoder(mDecHandle);
-
- uint8_t *volData[1]{};
- int32_t volSize = 0;
-
- status = PVInitVideoDecoder(mDecHandle, volData, &volSize, 1, maxWidth,
- maxHeight, H263_MODE);
- ASSERT_TRUE(status) << "PVInitVideoDecoder failed for H263";
- }
- mFramesConfigured = false;
- }
-
- if (codecConfig) {
- frameIndex++;
- continue;
- }
- }
-
- uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
- ASSERT_GE(outputBufferSize, yFrameSize * 3 / 2)
- << "Too small output buffer: " << outputBufferSize << " bytes";
- ASSERT_NO_FATAL_FAILURE(allocOutputBuffer(outputBufferSize));
-
- if (!mFramesConfigured) {
- PVSetReferenceYUV(mDecHandle, mOutputBuffer[1]);
- mFramesConfigured = true;
- }
-
- // Need to check if header contains new info, e.g., width/height, etc.
- VopHeaderInfo headerInfo;
- uint32_t useExtTimestamp = 1;
- int32_t inputSize = (Info)[frameIndex].bytesCount;
- uint32_t timestamp = frameIndex;
-
- uint8_t *bitstreamTmp = reinterpret_cast<uint8_t *>(mInputBuffer);
-
- status = PVDecodeVopHeader(mDecHandle, &bitstreamTmp, ×tamp, &inputSize, &headerInfo,
- &useExtTimestamp, mOutputBuffer[mNumSamplesOutput & 1]);
- ASSERT_EQ(status, PV_TRUE) << "failed to decode vop header";
-
- // H263 doesn't have VOL header, the frame size information is in short header, i.e. the
- // decoder may detect size change after PVDecodeVopHeader.
- int32_t dispWidth, dispHeight;
- PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
-
- int32_t bufWidth, bufHeight;
- PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
-
- ASSERT_LE(dispWidth, bufWidth) << "Display width is greater than buffer width";
- ASSERT_LE(dispHeight, bufHeight) << "Display height is greater than buffer height";
- if (dispWidth != mWidth || dispHeight != mHeight) {
- mWidth = dispWidth;
- mHeight = dispHeight;
- }
-
- status = PVDecodeVopBody(mDecHandle, &inputSize);
- ASSERT_EQ(status, PV_TRUE) << "failed to decode video frame No = %d" << frameIndex;
-
- dumpOutput(ostrm);
-
- ++mNumSamplesOutput;
- ++frameIndex;
- }
- freeOutputBuffer();
-}
-
-void Mpeg4H263DecoderTest::deInitDecoder() {
- if (mInitialized) {
- if (mDecHandle) {
- PVCleanUpVideoDecoder(mDecHandle);
- delete mDecHandle;
- mDecHandle = nullptr;
- }
- mInitialized = false;
- }
- freeOutputBuffer();
-}
-
-void getInfo(string infoFileName, vector<FrameInfo> &Info) {
- ifstream eleInfo;
- eleInfo.open(infoFileName);
- ASSERT_EQ(eleInfo.is_open(), true) << "Failed to open " << infoFileName;
- int32_t bytesCount = 0;
- uint32_t flags = 0;
- uint32_t timestamp = 0;
- while (1) {
- if (!(eleInfo >> bytesCount)) {
- break;
- }
- eleInfo >> flags;
- eleInfo >> timestamp;
- Info.push_back({bytesCount, flags, timestamp});
- }
- if (eleInfo.is_open()) eleInfo.close();
-}
-
-TEST_P(Mpeg4H263DecoderTest, DecodeTest) {
- tuple<string /* InputFileName */, string /* InfoFileName */, bool /* mode */> params =
- GetParam();
-
- string inputFileName = gEnv->getRes() + get<0>(params);
- mEleStream.open(inputFileName, ifstream::binary);
- ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << get<0>(params);
-
- string infoFileName = gEnv->getRes() + get<1>(params);
- vector<FrameInfo> Info;
- ASSERT_NO_FATAL_FAILURE(getInfo(infoFileName, Info));
- ASSERT_NE(Info.empty(), true) << "Invalid Info file";
-
- ofstream ostrm;
- ostrm.open(OUTPUT_FILE_NAME, std::ofstream::binary);
- ASSERT_EQ(ostrm.is_open(), true) << "Failed to open output stream for " << get<0>(params);
-
- status_t err = initDecoder();
- ASSERT_EQ(err, OK) << "initDecoder: failed to create decoder " << err;
-
- bool isMpeg4 = get<2>(params);
- MP4DecodingMode inputMode = isMpeg4 ? MPEG4_MODE : H263_MODE;
- ASSERT_NO_FATAL_FAILURE(
- processMpeg4H263Decoder(Info, 0, Info.size(), mEleStream, ostrm, inputMode));
- deInitDecoder();
- ostrm.close();
- Info.clear();
-}
-
-TEST_P(Mpeg4H263DecoderTest, FlushTest) {
- tuple<string /* InputFileName */, string /* InfoFileName */, bool /* mode */> params =
- GetParam();
-
- string inputFileName = gEnv->getRes() + get<0>(params);
- mEleStream.open(inputFileName, ifstream::binary);
- ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << get<0>(params);
-
- string infoFileName = gEnv->getRes() + get<1>(params);
- vector<FrameInfo> Info;
- ASSERT_NO_FATAL_FAILURE(getInfo(infoFileName, Info));
- ASSERT_NE(Info.empty(), true) << "Invalid Info file";
-
- ofstream ostrm;
- ostrm.open(OUTPUT_FILE_NAME, std::ofstream::binary);
- ASSERT_EQ(ostrm.is_open(), true) << "Failed to open output stream for " << get<0>(params);
-
- status_t err = initDecoder();
- ASSERT_EQ(err, OK) << "initDecoder: failed to create decoder " << err;
-
- bool isMpeg4 = get<2>(params);
- MP4DecodingMode inputMode = isMpeg4 ? MPEG4_MODE : H263_MODE;
- // Number of frames to be decoded before flush
- int32_t numFrames = Info.size() / 3;
- ASSERT_NO_FATAL_FAILURE(
- processMpeg4H263Decoder(Info, 0, numFrames, mEleStream, ostrm, inputMode));
-
- if (mInitialized) {
- int32_t status = PVResetVideoDecoder(mDecHandle);
- ASSERT_EQ(status, PV_TRUE);
- }
-
- // Seek to next key frame and start decoding till the end
- int32_t index = numFrames;
- bool keyFrame = false;
- uint32_t flags = 0;
- while (index < (int32_t)Info.size()) {
- if (Info[index].flags) flags = 1u << (Info[index].flags - 1);
- if ((flags & SYNC_FRAME) == SYNC_FRAME) {
- keyFrame = true;
- break;
- }
- flags = 0;
- mEleStream.ignore(Info[index].bytesCount);
- index++;
- }
- ALOGV("Index= %d", index);
- if (keyFrame) {
- mNumSamplesOutput = 0;
- ASSERT_NO_FATAL_FAILURE(processMpeg4H263Decoder(Info, index, (int32_t)Info.size() - index,
- mEleStream, ostrm, inputMode));
- }
- deInitDecoder();
- ostrm.close();
- Info.clear();
-}
-
-INSTANTIATE_TEST_SUITE_P(
- Mpeg4H263DecoderTestAll, Mpeg4H263DecoderTest,
- ::testing::Values(make_tuple("swirl_128x96_h263.h263", "swirl_128x96_h263.info", false),
- make_tuple("swirl_176x144_h263.h263", "swirl_176x144_h263.info", false),
- make_tuple("swirl_352x288_h263.h263", "swirl_352x288_h263.info", false),
- make_tuple("bbb_352x288_h263.h263", "bbb_352x288_h263.info", false),
- make_tuple("bbb_352x288_mpeg4.m4v", "bbb_352x288_mpeg4.info", true),
- make_tuple("swirl_128x128_mpeg4.m4v", "swirl_128x128_mpeg4.info", true),
- make_tuple("swirl_130x132_mpeg4.m4v", "swirl_130x132_mpeg4.info", true),
- make_tuple("swirl_132x130_mpeg4.m4v", "swirl_132x130_mpeg4.info", true),
- make_tuple("swirl_136x144_mpeg4.m4v", "swirl_136x144_mpeg4.info", true),
- make_tuple("swirl_144x136_mpeg4.m4v", "swirl_144x136_mpeg4.info", true)));
-
-int main(int argc, char **argv) {
- gEnv = new Mpeg4H263DecoderTestEnvironment();
- ::testing::AddGlobalTestEnvironment(gEnv);
- ::testing::InitGoogleTest(&argc, argv);
- int status = gEnv->initFromOptions(argc, argv);
- if (status == 0) {
- status = RUN_ALL_TESTS();
- ALOGD("Decoder Test Result = %d\n", status);
- }
- return status;
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/README.md b/media/libstagefright/codecs/m4v_h263/dec/test/README.md
deleted file mode 100644
index 7e4aea1..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/test/README.md
+++ /dev/null
@@ -1,39 +0,0 @@
-## Media Testing ##
----
-#### Mpeg4H263Decoder :
-The Mpeg4H263Decoder Test Suite validates the Mpeg4 and H263 decoder available in libstagefright.
-
-Run the following steps to build the test suite:
-```
-m Mpeg4H263DecoderTest
-```
-
-The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
-
-The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
-
-To test 64-bit binary push binaries from nativetest64.
-```
-adb push ${OUT}/data/nativetest64/Mpeg4H263DecoderTest/Mpeg4H263DecoderTest /data/local/tmp/
-```
-
-To test 32-bit binary push binaries from nativetest.
-```
-adb push ${OUT}/data/nativetest/Mpeg4H263DecoderTest/Mpeg4H263DecoderTest /data/local/tmp/
-```
-
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder.zip). Download, unzip and push these files into device for testing.
-
-```
-adb push Mpeg4H263Decoder /data/local/tmp/
-```
-
-usage: Mpeg4H263DecoderTest -P \<path_to_folder\>
-```
-adb shell /data/local/tmp/Mpeg4H263DecoderTest -P /data/local/tmp/Mpeg4H263Decoder/
-```
-Alternatively, the test can also be run using atest command.
-
-```
-atest Mpeg4H263DecoderTest -- --enable-module-dynamic-download=true
-```
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index 846f614..9e120d3 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -1,44 +1,3 @@
-cc_library_static {
- name: "libstagefright_m4vh263enc",
- vendor_available: true,
-
- srcs: [
- "src/bitstream_io.cpp",
- "src/combined_encode.cpp", "src/datapart_encode.cpp",
- "src/dct.cpp",
- "src/findhalfpel.cpp",
- "src/fastcodemb.cpp",
- "src/fastidct.cpp",
- "src/fastquant.cpp",
- "src/me_utils.cpp",
- "src/mp4enc_api.cpp",
- "src/rate_control.cpp",
- "src/motion_est.cpp",
- "src/motion_comp.cpp",
- "src/sad.cpp",
- "src/sad_halfpel.cpp",
- "src/vlc_encode.cpp",
- "src/vop.cpp",
- ],
-
- cflags: [
- "-DBX_RC",
- "-Werror",
- ],
-
- version_script: "exports.lds",
-
- local_include_dirs: ["src"],
- export_include_dirs: ["include"],
-
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-}
-
//###############################################################################
cc_library_shared {
@@ -47,8 +6,6 @@
srcs: ["SoftMPEG4Encoder.cpp"],
- local_include_dirs: ["src"],
-
cflags: [
"-DBX_RC",
],
@@ -63,28 +20,3 @@
},
}
-//###############################################################################
-
-cc_test {
- name: "libstagefright_m4vh263enc_test",
- gtest: false,
-
- srcs: ["test/m4v_h263_enc_test.cpp"],
-
- local_include_dirs: ["src"],
-
- cflags: [
- "-DBX_RC",
- "-Wall",
- "-Werror",
- ],
-
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-
- static_libs: ["libstagefright_m4vh263enc"],
-}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
deleted file mode 100644
index 7ab8f45..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ /dev/null
@@ -1,3307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#include "mp4enc_lib.h"
-#include "bitstream_io.h"
-#include "rate_control.h"
-#include "m4venc_oscl.h"
-
-#ifndef INT32_MAX
-#define INT32_MAX 0x7fffffff
-#endif
-
-#ifndef SIZE_MAX
-#define SIZE_MAX ((size_t) -1)
-#endif
-
-/* Inverse normal zigzag */
-const static Int zigzag_i[NCOEFF_BLOCK] =
-{
- 0, 1, 8, 16, 9, 2, 3, 10,
- 17, 24, 32, 25, 18, 11, 4, 5,
- 12, 19, 26, 33, 40, 48, 41, 34,
- 27, 20, 13, 6, 7, 14, 21, 28,
- 35, 42, 49, 56, 57, 50, 43, 36,
- 29, 22, 15, 23, 30, 37, 44, 51,
- 58, 59, 52, 45, 38, 31, 39, 46,
- 53, 60, 61, 54, 47, 55, 62, 63
-};
-
-/* INTRA */
-const static Int mpeg_iqmat_def[NCOEFF_BLOCK] =
- { 8, 17, 18, 19, 21, 23, 25, 27,
- 17, 18, 19, 21, 23, 25, 27, 28,
- 20, 21, 22, 23, 24, 26, 28, 30,
- 21, 22, 23, 24, 26, 28, 30, 32,
- 22, 23, 24, 26, 28, 30, 32, 35,
- 23, 24, 26, 28, 30, 32, 35, 38,
- 25, 26, 28, 30, 32, 35, 38, 41,
- 27, 28, 30, 32, 35, 38, 41, 45
- };
-
-/* INTER */
-const static Int mpeg_nqmat_def[64] =
- { 16, 17, 18, 19, 20, 21, 22, 23,
- 17, 18, 19, 20, 21, 22, 23, 24,
- 18, 19, 20, 21, 22, 23, 24, 25,
- 19, 20, 21, 22, 23, 24, 26, 27,
- 20, 21, 22, 23, 25, 26, 27, 28,
- 21, 22, 23, 24, 26, 27, 28, 30,
- 22, 23, 24, 26, 27, 28, 30, 31,
- 23, 24, 25, 27, 28, 30, 31, 33
- };
-
-/* Profiles and levels */
-/* Simple profile(level 0-3) and Core profile (level 1-2) */
-/* {SPL0, SPL1, SPL2, SPL3, CPL1, CPL2, CPL2, CPL2} , SPL0: Simple Profile@Level0, CPL1: Core Profile@Level1, the last two are redundant for easy table manipulation */
-const static Int profile_level_code[8] =
-{
- 0x08, 0x01, 0x02, 0x03, 0x21, 0x22, 0x22, 0x22
-};
-
-const static Int profile_level_max_bitrate[8] =
-{
- 64000, 64000, 128000, 384000, 384000, 2000000, 2000000, 2000000
-};
-
-const static Int profile_level_max_packet_size[8] =
-{
- 2048, 2048, 4096, 8192, 4096, 8192, 8192, 8192
-};
-
-const static Int profile_level_max_mbsPerSec[8] =
-{
- 1485, 1485, 5940, 11880, 5940, 23760, 23760, 23760
-};
-
-const static Int profile_level_max_VBV_size[8] =
-{
- 163840, 163840, 655360, 655360, 262144, 1310720, 1310720, 1310720
-};
-
-
-/* Simple scalable profile (level 0-2) and Core scalable profile (level 1-3) */
-/* {SSPL0, SSPL1, SSPL2, SSPL2, CSPL1, CSPL2, CSPL3, CSPL3} , SSPL0: Simple Scalable Profile@Level0, CSPL1: Core Scalable Profile@Level1, the fourth is redundant for easy table manipulation */
-
-const static Int scalable_profile_level_code[8] =
-{
- 0x10, 0x11, 0x12, 0x12, 0xA1, 0xA2, 0xA3, 0xA3
-};
-
-const static Int scalable_profile_level_max_bitrate[8] =
-{
- 128000, 128000, 256000, 256000, 768000, 1500000, 4000000, 4000000
-};
-
-/* in bits */
-const static Int scalable_profile_level_max_packet_size[8] =
-{
- 2048, 2048, 4096, 4096, 4096, 4096, 16384, 16384
-};
-
-const static Int scalable_profile_level_max_mbsPerSec[8] =
-{
- 1485, 7425, 23760, 23760, 14850, 29700, 120960, 120960
-};
-
-const static Int scalable_profile_level_max_VBV_size[8] =
-{
- 163840, 655360, 655360, 655360, 1048576, 1310720, 1310720, 1310720
-};
-
-
-/* H263 profile 0 @ level 10-70 */
-const static Int h263Level[8] = {0, 10, 20, 30, 40, 50, 60, 70};
-const static float rBR_bound[8] = {0, 1, 2, 6, 32, 64, 128, 256};
-const static float max_h263_framerate[2] = {(float)30000 / (float)2002,
- (float)30000 / (float)1001
- };
-const static Int max_h263_width[2] = {176, 352};
-const static Int max_h263_height[2] = {144, 288};
-
-/* 6/2/2001, newly added functions to make PVEncodeVop more readable. */
-Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime);
-void DetermineVopType(VideoEncData *video, Int currLayer);
-Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status);
-Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized);
-
-#ifdef PRINT_RC_INFO
-extern FILE *facct;
-extern int tiTotalNumBitsGenerated;
-extern int iStuffBits;
-#endif
-
-#ifdef PRINT_EC
-extern FILE *fec;
-#endif
-
-
-/* ======================================================================== */
-/* Function : PVGetDefaultEncOption() */
-/* Date : 12/12/2005 */
-/* Purpose : */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetDefaultEncOption(VideoEncOptions *encOption, Int encUseCase)
-{
- VideoEncOptions defaultUseCase = {H263_MODE, profile_level_max_packet_size[SIMPLE_PROFILE_LEVEL0] >> 3,
- SIMPLE_PROFILE_LEVEL0, PV_OFF, 0, 1, 1000, 33, {144, 144}, {176, 176}, {15, 30}, {64000, 128000},
- {10, 10}, {12, 12}, {0, 0}, CBR_1, 0.0, PV_OFF, -1, 0, PV_OFF, 16, PV_OFF, 0, PV_ON
- };
-
- OSCL_UNUSED_ARG(encUseCase); // unused for now. Later we can add more defaults setting and use this
- // argument to select the right one.
- /* in the future we can create more meaningful use-cases */
- if (encOption == NULL)
- {
- return PV_FALSE;
- }
-
- M4VENC_MEMCPY(encOption, &defaultUseCase, sizeof(VideoEncOptions));
-
- return PV_TRUE;
-}
-
-/* ======================================================================== */
-/* Function : PVInitVideoEncoder() */
-/* Date : 08/22/2000 */
-/* Purpose : Initialization of MP4 Encoder and VO bitstream */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : 5/21/01, allocate only yChan and assign uChan & vChan */
-/* 12/12/05, add encoding option as input argument */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVInitVideoEncoder(VideoEncControls *encoderControl, VideoEncOptions *encOption)
-{
-
- Bool status = PV_TRUE;
- Int nLayers, idx, i, j;
- Int max = 0, max_width = 0, max_height = 0, pitch, offset;
- Int size = 0, nTotalMB = 0;
- VideoEncData *video;
- Vol *pVol;
- VideoEncParams *pEncParams;
- Int temp_w, temp_h, mbsPerSec;
-
- /******************************************/
- /* this part use to be PVSetEncode() */
- Int profile_table_index, *profile_level_table;
- Int profile_level = encOption->profile_level;
- Int PacketSize = encOption->packetSize << 3;
- Int timeInc, timeIncRes;
- float profile_max_framerate;
- VideoEncParams *encParams;
-
- if (encoderControl->videoEncoderData) /* this has been called */
- {
- if (encoderControl->videoEncoderInit) /* check if PVInitVideoEncoder() has been called */
- {
- PVCleanUpVideoEncoder(encoderControl);
- encoderControl->videoEncoderInit = 0;
- }
-
- M4VENC_FREE(encoderControl->videoEncoderData);
- encoderControl->videoEncoderData = NULL;
- }
- encoderControl->videoEncoderInit = 0; /* reset this value */
-
- video = (VideoEncData *)M4VENC_MALLOC(sizeof(VideoEncData)); /* allocate memory for encData */
-
- if (video == NULL)
- return PV_FALSE;
-
- M4VENC_MEMSET(video, 0, sizeof(VideoEncData));
-
- encoderControl->videoEncoderData = (void *) video; /* set up pointer in VideoEncData structure */
-
- video->encParams = (VideoEncParams *)M4VENC_MALLOC(sizeof(VideoEncParams));
- if (video->encParams == NULL)
- goto CLEAN_UP;
-
- M4VENC_MEMSET(video->encParams, 0, sizeof(VideoEncParams));
-
- encParams = video->encParams;
- encParams->nLayers = encOption->numLayers;
-
- /* Check whether the input packetsize is valid (Note: put code here (before any memory allocation) in order to avoid memory leak */
- if ((Int)profile_level < (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0)) /* non-scalable profile */
- {
- profile_level_table = (Int *)profile_level_max_packet_size;
- profile_table_index = (Int)profile_level;
- if (encParams->nLayers != 1)
- {
- goto CLEAN_UP;
- }
-
- encParams->LayerMaxMbsPerSec[0] = profile_level_max_mbsPerSec[profile_table_index];
-
- }
- else /* scalable profile */
- {
- profile_level_table = (Int *)scalable_profile_level_max_packet_size;
- profile_table_index = (Int)profile_level - (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0);
- if (encParams->nLayers < 2)
- {
- goto CLEAN_UP;
- }
- for (i = 0; i < encParams->nLayers; i++)
- {
- encParams->LayerMaxMbsPerSec[i] = scalable_profile_level_max_mbsPerSec[profile_table_index];
- }
-
- }
-
- /* cannot have zero size packet with these modes */
- if (PacketSize == 0)
- {
- if (encOption->encMode == DATA_PARTITIONING_MODE)
- {
- goto CLEAN_UP;
- }
- if (encOption->encMode == COMBINE_MODE_WITH_ERR_RES)
- {
- encOption->encMode = COMBINE_MODE_NO_ERR_RES;
- }
- }
-
- if (encOption->gobHeaderInterval == 0)
- {
- if (encOption->encMode == H263_MODE_WITH_ERR_RES)
- {
- encOption->encMode = H263_MODE;
- }
-
- if (encOption->encMode == SHORT_HEADER_WITH_ERR_RES)
- {
- encOption->encMode = SHORT_HEADER;
- }
- }
-
- if (PacketSize > profile_level_table[profile_table_index])
- goto CLEAN_UP;
-
- /* Initial Defaults for all Modes */
-
- encParams->SequenceStartCode = 1;
- encParams->GOV_Enabled = 0;
- encParams->RoundingType = 0;
- encParams->IntraDCVlcThr = PV_MAX(PV_MIN(encOption->intraDCVlcTh, 7), 0);
- encParams->ACDCPrediction = ((encOption->useACPred == PV_ON) ? TRUE : FALSE);
- encParams->RC_Type = encOption->rcType;
- encParams->Refresh = encOption->numIntraMB;
- encParams->ResyncMarkerDisable = 0; /* Enable Resync Marker */
-
- for (i = 0; i < encOption->numLayers; i++)
- {
-#ifdef NO_MPEG_QUANT
- encParams->QuantType[i] = 0;
-#else
- encParams->QuantType[i] = encOption->quantType[i]; /* H263 */
-#endif
- if (encOption->pQuant[i] >= 1 && encOption->pQuant[i] <= 31)
- {
- encParams->InitQuantPvop[i] = encOption->pQuant[i];
- }
- else
- {
- goto CLEAN_UP;
- }
- if (encOption->iQuant[i] >= 1 && encOption->iQuant[i] <= 31)
- {
- encParams->InitQuantIvop[i] = encOption->iQuant[i];
- }
- else
- {
- goto CLEAN_UP;
- }
- }
-
- encParams->HalfPel_Enabled = 1;
- encParams->SearchRange = encOption->searchRange; /* 4/16/2001 */
- encParams->FullSearch_Enabled = 0;
-#ifdef NO_INTER4V
- encParams->MV8x8_Enabled = 0;
-#else
- encParams->MV8x8_Enabled = 0;// comment out for now!! encOption->mv8x8Enable;
-#endif
- encParams->H263_Enabled = 0;
- encParams->GOB_Header_Interval = 0; // need to be reset to 0
- encParams->IntraPeriod = encOption->intraPeriod; /* Intra update period update default*/
- encParams->SceneChange_Det = encOption->sceneDetect;
- encParams->FineFrameSkip_Enabled = 0;
- encParams->NoFrameSkip_Enabled = encOption->noFrameSkipped;
- encParams->NoPreSkip_Enabled = encOption->noFrameSkipped;
- encParams->GetVolHeader[0] = 0;
- encParams->GetVolHeader[1] = 0;
- encParams->ResyncPacketsize = encOption->packetSize << 3;
- encParams->LayerMaxBitRate[0] = 0;
- encParams->LayerMaxBitRate[1] = 0;
- encParams->LayerMaxFrameRate[0] = (float)0.0;
- encParams->LayerMaxFrameRate[1] = (float)0.0;
- encParams->VBV_delay = encOption->vbvDelay; /* 2sec VBV buffer size */
-
- switch (encOption->encMode)
- {
-
- case SHORT_HEADER:
- case SHORT_HEADER_WITH_ERR_RES:
-
- /* From Table 6-26 */
- encParams->nLayers = 1;
- encParams->QuantType[0] = 0; /*H263 */
- encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
- encParams->DataPartitioning = 0; /* Combined Mode */
- encParams->ReversibleVLC = 0; /* Disable RVLC */
- encParams->RoundingType = 0;
- encParams->IntraDCVlcThr = 7; /* use_intra_dc_vlc = 0 */
- encParams->MV8x8_Enabled = 0;
-
- encParams->GOB_Header_Interval = encOption->gobHeaderInterval;
- encParams->H263_Enabled = 2;
- encParams->GOV_Enabled = 0;
- encParams->TimeIncrementRes = 30000; /* timeIncrementRes for H263 */
- break;
-
- case H263_MODE:
- case H263_MODE_WITH_ERR_RES:
-
- /* From Table 6-26 */
- encParams->nLayers = 1;
- encParams->QuantType[0] = 0; /*H263 */
- encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
- encParams->DataPartitioning = 0; /* Combined Mode */
- encParams->ReversibleVLC = 0; /* Disable RVLC */
- encParams->RoundingType = 0;
- encParams->IntraDCVlcThr = 7; /* use_intra_dc_vlc = 0 */
- encParams->MV8x8_Enabled = 0;
-
- encParams->H263_Enabled = 1;
- encParams->GOV_Enabled = 0;
- encParams->TimeIncrementRes = 30000; /* timeIncrementRes for H263 */
-
- break;
-#ifndef H263_ONLY
- case DATA_PARTITIONING_MODE:
-
- encParams->DataPartitioning = 1; /* Base Layer Data Partitioning */
- encParams->ResyncMarkerDisable = 0; /* Resync Marker */
-#ifdef NO_RVLC
- encParams->ReversibleVLC = 0;
-#else
- encParams->ReversibleVLC = (encOption->rvlcEnable == PV_ON); /* RVLC when Data Partitioning */
-#endif
- encParams->ResyncPacketsize = PacketSize;
- break;
-
- case COMBINE_MODE_WITH_ERR_RES:
-
- encParams->DataPartitioning = 0; /* Combined Mode */
- encParams->ResyncMarkerDisable = 0; /* Resync Marker */
- encParams->ReversibleVLC = 0; /* No RVLC */
- encParams->ResyncPacketsize = PacketSize;
- break;
-
- case COMBINE_MODE_NO_ERR_RES:
-
- encParams->DataPartitioning = 0; /* Combined Mode */
- encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
- encParams->ReversibleVLC = 0; /* No RVLC */
- break;
-#endif
- default:
- goto CLEAN_UP;
- }
- /* Set the constraints (maximum values) according to the input profile and level */
- /* Note that profile_table_index is already figured out above */
-
- /* base layer */
- encParams->profile_table_index = profile_table_index; /* Used to limit the profile and level in SetProfile_BufferSize() */
-
- /* check timeIncRes */
- timeIncRes = encOption->timeIncRes;
- timeInc = encOption->tickPerSrc;
-
- if ((timeIncRes >= 1) && (timeIncRes <= 65536) && (timeInc < timeIncRes) && (timeInc != 0))
- {
- if (!encParams->H263_Enabled)
- {
- encParams->TimeIncrementRes = timeIncRes;
- }
- else
- {
- encParams->TimeIncrementRes = 30000;
-// video->FrameRate = 30000/(float)1001; /* fix it to 29.97 fps */
- }
- video->FrameRate = timeIncRes / ((float)timeInc);
- }
- else
- {
- goto CLEAN_UP;
- }
-
- /* check frame dimension */
- if (encParams->H263_Enabled)
- {
- switch (encOption->encWidth[0])
- {
- case 128:
- if (encOption->encHeight[0] != 96) /* source_format = 1 */
- goto CLEAN_UP;
- break;
- case 176:
- if (encOption->encHeight[0] != 144) /* source_format = 2 */
- goto CLEAN_UP;
- break;
- case 352:
- if (encOption->encHeight[0] != 288) /* source_format = 2 */
- goto CLEAN_UP;
- break;
-
- case 704:
- if (encOption->encHeight[0] != 576) /* source_format = 2 */
- goto CLEAN_UP;
- break;
- case 1408:
- if (encOption->encHeight[0] != 1152) /* source_format = 2 */
- goto CLEAN_UP;
- break;
-
- default:
- goto CLEAN_UP;
- }
- }
- for (i = 0; i < encParams->nLayers; i++)
- {
- encParams->LayerHeight[i] = encOption->encHeight[i];
- encParams->LayerWidth[i] = encOption->encWidth[i];
- }
-
- /* check frame rate */
- for (i = 0; i < encParams->nLayers; i++)
- {
- encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
- }
-
- if (encParams->nLayers > 1)
- {
- if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
- encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
- goto CLEAN_UP;
- }
- /* set max frame rate */
- for (i = 0; i < encParams->nLayers; i++)
- {
-
- /* Make sure the maximum framerate is consistent with the given profile and level */
- nTotalMB = ((encParams->LayerWidth[i] + 15) / 16) * ((encParams->LayerHeight[i] + 15) / 16);
-
- if (nTotalMB > 0)
- profile_max_framerate = (float)encParams->LayerMaxMbsPerSec[i] / (float)nTotalMB;
-
- else
- profile_max_framerate = (float)30.0;
-
- encParams->LayerMaxFrameRate[i] = PV_MIN(profile_max_framerate, encParams->LayerFrameRate[i]);
- }
-
- /* check bit rate */
- /* set max bit rate */
- for (i = 0; i < encParams->nLayers; i++)
- {
- encParams->LayerBitRate[i] = encOption->bitRate[i];
- encParams->LayerMaxBitRate[i] = encOption->bitRate[i];
- }
- if (encParams->nLayers > 1)
- {
- if (encOption->bitRate[0] == encOption->bitRate[1] ||
- encOption->bitRate[0] == 0 || encOption->bitRate[1] == 0) /* 7/31/03 */
- goto CLEAN_UP;
- }
- /* check rate control and vbv delay*/
- encParams->RC_Type = encOption->rcType;
-
- if (encOption->vbvDelay == 0.0) /* set to default */
- {
- switch (encOption->rcType)
- {
- case CBR_1:
- case CBR_2:
- encParams->VBV_delay = (float)2.0; /* default 2sec VBV buffer size */
- break;
-
- case CBR_LOWDELAY:
- encParams->VBV_delay = (float)0.5; /* default 0.5sec VBV buffer size */
- break;
-
- case VBR_1:
- case VBR_2:
- encParams->VBV_delay = (float)10.0; /* default 10sec VBV buffer size */
- break;
- default:
- break;
- }
- }
- else /* force this value */
- {
- encParams->VBV_delay = encOption->vbvDelay;
- }
-
- /* check search range */
- if (encParams->H263_Enabled && encOption->searchRange > 16)
- {
- encParams->SearchRange = 16; /* 4/16/2001 */
- }
-
- /*****************************************/
- /* checking for conflict between options */
- /*****************************************/
-
- if (video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2 || video->encParams->RC_Type == CBR_LOWDELAY) /* if CBR */
- {
-#ifdef _PRINT_STAT
- if (video->encParams->NoFrameSkip_Enabled == PV_ON ||
- video->encParams->NoPreSkip_Enabled == PV_ON) /* don't allow frame skip*/
- printf("WARNING!!!! CBR with NoFrameSkip\n");
-#endif
- }
- else if (video->encParams->RC_Type == CONSTANT_Q) /* constant_Q */
- {
- video->encParams->NoFrameSkip_Enabled = PV_ON; /* no frame skip */
- video->encParams->NoPreSkip_Enabled = PV_ON; /* no frame skip */
-#ifdef _PRINT_STAT
- printf("Turn on NoFrameSkip\n");
-#endif
- }
-
- if (video->encParams->NoFrameSkip_Enabled == PV_ON) /* if no frame skip */
- {
- video->encParams->FineFrameSkip_Enabled = PV_OFF;
-#ifdef _PRINT_STAT
- printf("NoFrameSkip !!! may violate VBV_BUFFER constraint.\n");
- printf("Turn off FineFrameSkip\n");
-#endif
- }
-
- /******************************************/
- /******************************************/
-
- nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
-
- /* Find the maximum width*height for memory allocation of the VOPs */
- for (idx = 0; idx < nLayers; idx++)
- {
- temp_w = video->encParams->LayerWidth[idx];
- temp_h = video->encParams->LayerHeight[idx];
-
- if ((temp_w*temp_h) > max)
- {
- max = temp_w * temp_h;
- max_width = ((temp_w + 15) >> 4) << 4;
- max_height = ((temp_h + 15) >> 4) << 4;
- if (((uint64_t)max_width * max_height) > (uint64_t)INT32_MAX
- || temp_w > INT32_MAX - 15 || temp_h > INT32_MAX - 15) {
- goto CLEAN_UP;
- }
- nTotalMB = ((max_width * max_height) >> 8);
- }
-
- /* Check if the video size and framerate(MBsPerSec) are vald */
- mbsPerSec = (Int)(nTotalMB * video->encParams->LayerFrameRate[idx]);
- if (mbsPerSec > video->encParams->LayerMaxMbsPerSec[idx]) status = PV_FALSE;
- }
-
- /****************************************************/
- /* Set Profile and Video Buffer Size for each layer */
- /****************************************************/
- if (video->encParams->RC_Type == CBR_LOWDELAY) video->encParams->VBV_delay = 0.5; /* For CBR_LOWDELAY, we set 0.5sec buffer */
- status = SetProfile_BufferSize(video, video->encParams->VBV_delay, 1);
- if (status != PV_TRUE)
- goto CLEAN_UP;
-
- /****************************************/
- /* memory allocation and initialization */
- /****************************************/
-
- if (video == NULL) goto CLEAN_UP;
-
- /* cyclic reference for passing through both structures */
- video->videoEncControls = encoderControl;
-
- //video->currLayer = 0; /* Set current Layer to 0 */
- //video->currFrameNo = 0; /* Set current frame Number to 0 */
- video->nextModTime = 0;
- video->nextEncIVop = 0; /* Sets up very first frame to be I-VOP! */
- video->numVopsInGOP = 0; /* counter for Vops in Gop, 2/8/01 */
-
- //video->frameRate = video->encParams->LayerFrameRate[0]; /* Set current layer frame rate */
-
- video->QPMB = (UChar *) M4VENC_MALLOC(nTotalMB * sizeof(UChar)); /* Memory for MB quantizers */
- if (video->QPMB == NULL) goto CLEAN_UP;
-
-
- video->headerInfo.Mode = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for MB Modes */
- if (video->headerInfo.Mode == NULL) goto CLEAN_UP;
- video->headerInfo.CBP = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for CBP (Y and C) of each MB */
- if (video->headerInfo.CBP == NULL) goto CLEAN_UP;
-
- /* Allocating motion vector space and interpolation memory*/
-
- if ((size_t)nTotalMB > SIZE_MAX / sizeof(MOT *)) {
- goto CLEAN_UP;
- }
- video->mot = (MOT **)M4VENC_MALLOC(sizeof(MOT *) * nTotalMB);
- if (video->mot == NULL) goto CLEAN_UP;
-
- for (idx = 0; idx < nTotalMB; idx++)
- {
- video->mot[idx] = (MOT *)M4VENC_MALLOC(sizeof(MOT) * 8);
- if (video->mot[idx] == NULL)
- {
- goto CLEAN_UP;
- }
- }
-
- video->intraArray = (UChar *)M4VENC_MALLOC(sizeof(UChar) * nTotalMB);
- if (video->intraArray == NULL) goto CLEAN_UP;
-
- video->sliceNo = (UChar *) M4VENC_MALLOC(nTotalMB); /* Memory for Slice Numbers */
- if (video->sliceNo == NULL) goto CLEAN_UP;
- /* Allocating space for predDCAC[][8][16], Not that I intentionally */
- /* increase the dimension of predDCAC from [][6][15] to [][8][16] */
- /* so that compilers can generate faster code to indexing the */
- /* data inside (by using << instead of *). 04/14/2000. */
- /* 5/29/01, use decoder lib ACDC prediction memory scheme. */
- if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
- goto CLEAN_UP;
- }
- video->predDC = (typeDCStore *) M4VENC_MALLOC(nTotalMB * sizeof(typeDCStore));
- if (video->predDC == NULL) goto CLEAN_UP;
-
- if (!video->encParams->H263_Enabled)
- {
- if ((size_t)((max_width >> 4) + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
- goto CLEAN_UP;
- }
- video->predDCAC_col = (typeDCACStore *) M4VENC_MALLOC(((max_width >> 4) + 1) * sizeof(typeDCACStore));
- if (video->predDCAC_col == NULL) goto CLEAN_UP;
-
- /* element zero will be used for storing vertical (col) AC coefficients */
- /* the rest will be used for storing horizontal (row) AC coefficients */
- video->predDCAC_row = video->predDCAC_col + 1; /* ACDC */
-
- if ((size_t)nTotalMB > SIZE_MAX / sizeof(Int)) {
- goto CLEAN_UP;
- }
- video->acPredFlag = (Int *) M4VENC_MALLOC(nTotalMB * sizeof(Int)); /* Memory for acPredFlag */
- if (video->acPredFlag == NULL) goto CLEAN_UP;
- }
-
- video->outputMB = (MacroBlock *) M4VENC_MALLOC(sizeof(MacroBlock)); /* Allocating macroblock space */
- if (video->outputMB == NULL) goto CLEAN_UP;
- M4VENC_MEMSET(video->outputMB->block[0], 0, (sizeof(Short) << 6)*6);
-
- M4VENC_MEMSET(video->dataBlock, 0, sizeof(Short) << 7);
- /* Allocate (2*packetsize) working bitstreams */
-
- video->bitstream1 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 1*/
- if (video->bitstream1 == NULL) goto CLEAN_UP;
- video->bitstream2 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 2*/
- if (video->bitstream2 == NULL) goto CLEAN_UP;
- video->bitstream3 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 3*/
- if (video->bitstream3 == NULL) goto CLEAN_UP;
-
- /* allocate overrun buffer */
- // this buffer is used when user's buffer is too small to hold one frame.
- // It is not needed for slice-based encoding.
- if (nLayers == 1)
- {
- video->oBSize = encParams->BufferSize[0] >> 3;
- }
- else
- {
- video->oBSize = PV_MAX((encParams->BufferSize[0] >> 3), (encParams->BufferSize[1] >> 3));
- }
-
- if (video->oBSize > DEFAULT_OVERRUN_BUFFER_SIZE || encParams->RC_Type == CONSTANT_Q) // set limit
- {
- video->oBSize = DEFAULT_OVERRUN_BUFFER_SIZE;
- }
- video->overrunBuffer = (UChar*) M4VENC_MALLOC(sizeof(UChar) * video->oBSize);
- if (video->overrunBuffer == NULL) goto CLEAN_UP;
-
-
- video->currVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Current VOP */
- if (video->currVop == NULL) goto CLEAN_UP;
-
- /* add padding, 09/19/05 */
- if (video->encParams->H263_Enabled) /* make it conditional 11/28/05 */
- {
- pitch = max_width;
- offset = 0;
- }
- else
- {
- pitch = max_width + 32;
- offset = (pitch << 4) + 16;
- max_height += 32;
- }
- if (((uint64_t)pitch * max_height) > (uint64_t)INT32_MAX) {
- goto CLEAN_UP;
- }
- size = pitch * max_height;
-
- if (size > INT32_MAX - (size >> 1)
- || (size_t)(size + (size >> 1)) > SIZE_MAX / sizeof(PIXEL)) {
- goto CLEAN_UP;
- }
- video->currVop->allChan = video->currVop->yChan = (PIXEL *)M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for currVop Y */
- if (video->currVop->yChan == NULL) goto CLEAN_UP;
- video->currVop->uChan = video->currVop->yChan + size;/* Memory for currVop U */
- video->currVop->vChan = video->currVop->uChan + (size >> 2);/* Memory for currVop V */
-
- /* shift for the offset */
- if (offset)
- {
- video->currVop->yChan += offset; /* offset to the origin.*/
- video->currVop->uChan += (offset >> 2) + 4;
- video->currVop->vChan += (offset >> 2) + 4;
- }
-
- video->forwardRefVop = video->currVop; /* Initialize forwardRefVop */
- video->backwardRefVop = video->currVop; /* Initialize backwardRefVop */
-
- video->prevBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Previous Base Vop */
- if (video->prevBaseVop == NULL) goto CLEAN_UP;
- video->prevBaseVop->allChan = video->prevBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for prevBaseVop Y */
- if (video->prevBaseVop->yChan == NULL) goto CLEAN_UP;
- video->prevBaseVop->uChan = video->prevBaseVop->yChan + size; /* Memory for prevBaseVop U */
- video->prevBaseVop->vChan = video->prevBaseVop->uChan + (size >> 2); /* Memory for prevBaseVop V */
-
- if (offset)
- {
- video->prevBaseVop->yChan += offset; /* offset to the origin.*/
- video->prevBaseVop->uChan += (offset >> 2) + 4;
- video->prevBaseVop->vChan += (offset >> 2) + 4;
- }
-
-
- if (0) /* If B Frames */
- {
- video->nextBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Next Base Vop */
- if (video->nextBaseVop == NULL) goto CLEAN_UP;
- video->nextBaseVop->allChan = video->nextBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for nextBaseVop Y */
- if (video->nextBaseVop->yChan == NULL) goto CLEAN_UP;
- video->nextBaseVop->uChan = video->nextBaseVop->yChan + size; /* Memory for nextBaseVop U */
- video->nextBaseVop->vChan = video->nextBaseVop->uChan + (size >> 2); /* Memory for nextBaseVop V */
-
- if (offset)
- {
- video->nextBaseVop->yChan += offset; /* offset to the origin.*/
- video->nextBaseVop->uChan += (offset >> 2) + 4;
- video->nextBaseVop->vChan += (offset >> 2) + 4;
- }
- }
-
- if (nLayers > 1) /* If enhancement layers */
- {
- video->prevEnhanceVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Previous Enhancement Vop */
- if (video->prevEnhanceVop == NULL) goto CLEAN_UP;
- video->prevEnhanceVop->allChan = video->prevEnhanceVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for Previous Ehancement Y */
- if (video->prevEnhanceVop->yChan == NULL) goto CLEAN_UP;
- video->prevEnhanceVop->uChan = video->prevEnhanceVop->yChan + size; /* Memory for Previous Enhancement U */
- video->prevEnhanceVop->vChan = video->prevEnhanceVop->uChan + (size >> 2); /* Memory for Previous Enhancement V */
-
- if (offset)
- {
- video->prevEnhanceVop->yChan += offset; /* offset to the origin.*/
- video->prevEnhanceVop->uChan += (offset >> 2) + 4;
- video->prevEnhanceVop->vChan += (offset >> 2) + 4;
- }
- }
-
- video->numberOfLayers = nLayers; /* Number of Layers */
- video->sumMAD = 0;
-
-
- /* 04/09/01, for Vops in the use multipass processing */
- for (idx = 0; idx < nLayers; idx++)
- {
- video->pMP[idx] = (MultiPass *)M4VENC_MALLOC(sizeof(MultiPass));
- if (video->pMP[idx] == NULL) goto CLEAN_UP;
- M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
-
- video->pMP[idx]->encoded_frames = -1; /* forget about the very first I frame */
-
-
- /* RDInfo **pRDSamples */
- video->pMP[idx]->pRDSamples = (RDInfo **)M4VENC_MALLOC(30 * sizeof(RDInfo *));
- if (video->pMP[idx]->pRDSamples == NULL) goto CLEAN_UP;
- for (i = 0; i < 30; i++)
- {
- video->pMP[idx]->pRDSamples[i] = (RDInfo *)M4VENC_MALLOC(32 * sizeof(RDInfo));
- if (video->pMP[idx]->pRDSamples[i] == NULL) goto CLEAN_UP;
- for (j = 0; j < 32; j++) M4VENC_MEMSET(&(video->pMP[idx]->pRDSamples[i][j]), 0, sizeof(RDInfo));
- }
- video->pMP[idx]->frameRange = (Int)(video->encParams->LayerFrameRate[idx] * 1.0); /* 1.0s time frame*/
- video->pMP[idx]->frameRange = PV_MAX(video->pMP[idx]->frameRange, 5);
- video->pMP[idx]->frameRange = PV_MIN(video->pMP[idx]->frameRange, 30);
-
- video->pMP[idx]->framePos = -1;
-
- }
- /* /// End /////////////////////////////////////// */
-
-
- if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
- goto CLEAN_UP;
- }
- video->vol = (Vol **)M4VENC_MALLOC(nLayers * sizeof(Vol *)); /* Memory for VOL pointers */
-
- /* Memory allocation and Initialization of Vols and writing of headers */
- if (video->vol == NULL) goto CLEAN_UP;
-
- for (idx = 0; idx < nLayers; idx++)
- {
- video->volInitialize[idx] = 1;
- video->refTick[idx] = 0;
- video->relLayerCodeTime[idx] = 1000;
- video->vol[idx] = (Vol *)M4VENC_MALLOC(sizeof(Vol));
- if (video->vol[idx] == NULL) goto CLEAN_UP;
-
- pVol = video->vol[idx];
- pEncParams = video->encParams;
-
- M4VENC_MEMSET(video->vol[idx], 0, sizeof(Vol));
- /* Initialize some VOL parameters */
- pVol->volID = idx; /* Set VOL ID */
- pVol->shortVideoHeader = pEncParams->H263_Enabled; /*Short Header */
- pVol->GOVStart = pEncParams->GOV_Enabled; /* GOV Header */
- pVol->timeIncrementResolution = video->encParams->TimeIncrementRes;
- pVol->nbitsTimeIncRes = 1;
- while (pVol->timeIncrementResolution > (1 << pVol->nbitsTimeIncRes))
- {
- pVol->nbitsTimeIncRes++;
- }
-
- /* timing stuff */
- pVol->timeIncrement = 0;
- pVol->moduloTimeBase = 0;
- pVol->fixedVopRate = 0; /* No fixed VOP rate */
- pVol->stream = (BitstreamEncVideo *)M4VENC_MALLOC(sizeof(BitstreamEncVideo)); /* allocate BitstreamEncVideo Instance */
- if (pVol->stream == NULL) goto CLEAN_UP;
-
- pVol->width = pEncParams->LayerWidth[idx]; /* Layer Width */
- pVol->height = pEncParams->LayerHeight[idx]; /* Layer Height */
- // pVol->intra_acdcPredDisable = pEncParams->ACDCPrediction; /* ACDC Prediction */
- pVol->ResyncMarkerDisable = pEncParams->ResyncMarkerDisable; /* Resync Marker Mode */
- pVol->dataPartitioning = pEncParams->DataPartitioning; /* Data Partitioning */
- pVol->useReverseVLC = pEncParams->ReversibleVLC; /* RVLC */
- if (idx > 0) /* Scalability layers */
- {
- pVol->ResyncMarkerDisable = 1;
- pVol->dataPartitioning = 0;
- pVol->useReverseVLC = 0; /* No RVLC */
- }
- pVol->quantType = pEncParams->QuantType[idx]; /* Quantizer Type */
-
- /* no need to init Quant Matrices */
-
- pVol->scalability = 0; /* Vol Scalability */
- if (idx > 0)
- pVol->scalability = 1; /* Multiple layers => Scalability */
-
- /* Initialize Vol to Temporal scalability. It can change during encoding */
- pVol->scalType = 1;
- /* Initialize reference Vol ID to the base layer = 0 */
- pVol->refVolID = 0;
- /* Initialize layer resolution to same as the reference */
- pVol->refSampDir = 0;
- pVol->horSamp_m = 1;
- pVol->horSamp_n = 1;
- pVol->verSamp_m = 1;
- pVol->verSamp_n = 1;
- pVol->enhancementType = 0; /* We always enhance the entire region */
-
- pVol->nMBPerRow = (pVol->width + 15) / 16;
- pVol->nMBPerCol = (pVol->height + 15) / 16;
- pVol->nTotalMB = pVol->nMBPerRow * pVol->nMBPerCol;
-
- if (pVol->nTotalMB >= 1)
- pVol->nBitsForMBID = 1;
- if (pVol->nTotalMB >= 3)
- pVol->nBitsForMBID = 2;
- if (pVol->nTotalMB >= 5)
- pVol->nBitsForMBID = 3;
- if (pVol->nTotalMB >= 9)
- pVol->nBitsForMBID = 4;
- if (pVol->nTotalMB >= 17)
- pVol->nBitsForMBID = 5;
- if (pVol->nTotalMB >= 33)
- pVol->nBitsForMBID = 6;
- if (pVol->nTotalMB >= 65)
- pVol->nBitsForMBID = 7;
- if (pVol->nTotalMB >= 129)
- pVol->nBitsForMBID = 8;
- if (pVol->nTotalMB >= 257)
- pVol->nBitsForMBID = 9;
- if (pVol->nTotalMB >= 513)
- pVol->nBitsForMBID = 10;
- if (pVol->nTotalMB >= 1025)
- pVol->nBitsForMBID = 11;
- if (pVol->nTotalMB >= 2049)
- pVol->nBitsForMBID = 12;
- if (pVol->nTotalMB >= 4097)
- pVol->nBitsForMBID = 13;
- if (pVol->nTotalMB >= 8193)
- pVol->nBitsForMBID = 14;
- if (pVol->nTotalMB >= 16385)
- pVol->nBitsForMBID = 15;
- if (pVol->nTotalMB >= 32769)
- pVol->nBitsForMBID = 16;
- if (pVol->nTotalMB >= 65537)
- pVol->nBitsForMBID = 17;
- if (pVol->nTotalMB >= 131073)
- pVol->nBitsForMBID = 18;
-
- if (pVol->shortVideoHeader)
- {
- switch (pVol->width)
- {
- case 128:
- if (pVol->height == 96) /* source_format = 1 */
- {
- pVol->nGOBinVop = 6;
- pVol->nMBinGOB = 8;
- }
- else
- status = PV_FALSE;
- break;
-
- case 176:
- if (pVol->height == 144) /* source_format = 2 */
- {
- pVol->nGOBinVop = 9;
- pVol->nMBinGOB = 11;
- }
- else
- status = PV_FALSE;
- break;
- case 352:
- if (pVol->height == 288) /* source_format = 2 */
- {
- pVol->nGOBinVop = 18;
- pVol->nMBinGOB = 22;
- }
- else
- status = PV_FALSE;
- break;
-
- case 704:
- if (pVol->height == 576) /* source_format = 2 */
- {
- pVol->nGOBinVop = 18;
- pVol->nMBinGOB = 88;
- }
- else
- status = PV_FALSE;
- break;
- case 1408:
- if (pVol->height == 1152) /* source_format = 2 */
- {
- pVol->nGOBinVop = 18;
- pVol->nMBinGOB = 352;
- }
- else
- status = PV_FALSE;
- break;
-
- default:
- status = PV_FALSE;
- break;
- }
- }
- }
-
- /***************************************************/
- /* allocate and initialize rate control parameters */
- /***************************************************/
-
- /* BEGIN INITIALIZATION OF ANNEX L RATE CONTROL */
- if (video->encParams->RC_Type != CONSTANT_Q)
- {
- for (idx = 0; idx < nLayers; idx++) /* 12/25/00 */
- {
- video->rc[idx] =
- (rateControl *)M4VENC_MALLOC(sizeof(rateControl));
-
- if (video->rc[idx] == NULL) goto CLEAN_UP;
-
- M4VENC_MEMSET(video->rc[idx], 0, sizeof(rateControl));
- }
- if (PV_SUCCESS != RC_Initialize(video))
- {
- goto CLEAN_UP;
- }
- /* initialization for 2-pass rate control */
- }
- /* END INITIALIZATION OF ANNEX L RATE CONTROL */
-
- /********** assign platform dependent functions ***********************/
- /* 1/23/01 */
- /* This must be done at run-time not a compile time */
- video->functionPointer = (FuncPtr*) M4VENC_MALLOC(sizeof(FuncPtr));
- if (video->functionPointer == NULL) goto CLEAN_UP;
-
- video->functionPointer->ComputeMBSum = &ComputeMBSum_C;
- video->functionPointer->SAD_MB_HalfPel[0] = NULL;
- video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HalfPel_Cxh;
- video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HalfPel_Cyh;
- video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HalfPel_Cxhyh;
-
-#ifndef NO_INTER4V
- video->functionPointer->SAD_Blk_HalfPel = &SAD_Blk_HalfPel_C;
- video->functionPointer->SAD_Block = &SAD_Block_C;
-#endif
- video->functionPointer->SAD_Macroblock = &SAD_Macroblock_C;
- video->functionPointer->ChooseMode = &ChooseMode_C;
- video->functionPointer->GetHalfPelMBRegion = &GetHalfPelMBRegion_C;
-// video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING; /* 4/21/01 */
-
-
- encoderControl->videoEncoderInit = 1; /* init done! */
-
- return PV_TRUE;
-
-CLEAN_UP:
- PVCleanUpVideoEncoder(encoderControl);
-
- return PV_FALSE;
-}
-
-
-/* ======================================================================== */
-/* Function : PVCleanUpVideoEncoder() */
-/* Date : 08/22/2000 */
-/* Purpose : Deallocates allocated memory from InitVideoEncoder() */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : 5/21/01, free only yChan in Vop */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVCleanUpVideoEncoder(VideoEncControls *encoderControl)
-{
- Int idx, i;
- VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
- int nTotalMB;
- int max_width, offset;
-
-#ifdef PRINT_RC_INFO
- if (facct != NULL)
- {
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "TOTAL NUM BITS GENERATED %d\n", tiTotalNumBitsGenerated);
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "TOTAL NUMBER OF FRAMES CODED %d\n",
- video->encParams->rc[0]->totalFrameNumber);
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "Average BitRate %d\n",
- (tiTotalNumBitsGenerated / (90 / 30)));
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "TOTAL NUMBER OF STUFF BITS %d\n", (iStuffBits + 10740));
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "TOTAL NUMBER OF BITS TO NETWORK %d\n", (35800*90 / 30));;
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "SUM OF STUFF BITS AND GENERATED BITS %d\n",
- (tiTotalNumBitsGenerated + iStuffBits + 10740));
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fprintf(facct, "UNACCOUNTED DIFFERENCE %d\n",
- ((35800*90 / 30) - (tiTotalNumBitsGenerated + iStuffBits + 10740)));
- fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
- fclose(facct);
- }
-#endif
-
-#ifdef PRINT_EC
- fclose(fec);
-#endif
-
- if (video != NULL)
- {
-
- if (video->QPMB) M4VENC_FREE(video->QPMB);
- if (video->headerInfo.Mode)M4VENC_FREE(video->headerInfo.Mode);
- if (video->headerInfo.CBP)M4VENC_FREE(video->headerInfo.CBP);
-
-
- if (video->mot)
- {
- nTotalMB = video->vol[0]->nTotalMB;
- for (idx = 1; idx < video->currLayer; idx++)
- if (video->vol[idx]->nTotalMB > nTotalMB)
- nTotalMB = video->vol[idx]->nTotalMB;
- for (idx = 0; idx < nTotalMB; idx++)
- {
- if (video->mot[idx])
- M4VENC_FREE(video->mot[idx]);
- }
- M4VENC_FREE(video->mot);
- }
-
- if (video->intraArray) M4VENC_FREE(video->intraArray);
-
- if (video->sliceNo)M4VENC_FREE(video->sliceNo);
- if (video->acPredFlag)M4VENC_FREE(video->acPredFlag);
-// if(video->predDCAC)M4VENC_FREE(video->predDCAC);
- if (video->predDC) M4VENC_FREE(video->predDC);
- video->predDCAC_row = NULL;
- if (video->predDCAC_col) M4VENC_FREE(video->predDCAC_col);
- if (video->outputMB)M4VENC_FREE(video->outputMB);
-
- if (video->bitstream1)BitstreamCloseEnc(video->bitstream1);
- if (video->bitstream2)BitstreamCloseEnc(video->bitstream2);
- if (video->bitstream3)BitstreamCloseEnc(video->bitstream3);
-
- if (video->overrunBuffer) M4VENC_FREE(video->overrunBuffer);
-
- max_width = video->encParams->LayerWidth[0];
- max_width = (((max_width + 15) >> 4) << 4); /* 09/19/05 */
- if (video->encParams->H263_Enabled)
- {
- offset = 0;
- }
- else
- {
- offset = ((max_width + 32) << 4) + 16;
- }
-
- if (video->currVop)
- {
- if (video->currVop->allChan)
- {
- M4VENC_FREE(video->currVop->allChan);
- }
- M4VENC_FREE(video->currVop);
- }
-
- if (video->nextBaseVop)
- {
- if (video->nextBaseVop->allChan)
- {
- M4VENC_FREE(video->nextBaseVop->allChan);
- }
- M4VENC_FREE(video->nextBaseVop);
- }
-
- if (video->prevBaseVop)
- {
- if (video->prevBaseVop->allChan)
- {
- M4VENC_FREE(video->prevBaseVop->allChan);
- }
- M4VENC_FREE(video->prevBaseVop);
- }
- if (video->prevEnhanceVop)
- {
- if (video->prevEnhanceVop->allChan)
- {
- M4VENC_FREE(video->prevEnhanceVop->allChan);
- }
- M4VENC_FREE(video->prevEnhanceVop);
- }
-
- /* 04/09/01, for Vops in the use multipass processing */
- for (idx = 0; idx < video->encParams->nLayers; idx++)
- {
- if (video->pMP[idx])
- {
- if (video->pMP[idx]->pRDSamples)
- {
- for (i = 0; i < 30; i++)
- {
- if (video->pMP[idx]->pRDSamples[i])
- M4VENC_FREE(video->pMP[idx]->pRDSamples[i]);
- }
- M4VENC_FREE(video->pMP[idx]->pRDSamples);
- }
-
- M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
- M4VENC_FREE(video->pMP[idx]);
- }
- }
- /* // End /////////////////////////////////////// */
-
- if (video->vol)
- {
- for (idx = 0; idx < video->encParams->nLayers; idx++)
- {
- if (video->vol[idx])
- {
- if (video->vol[idx]->stream)
- M4VENC_FREE(video->vol[idx]->stream);
- M4VENC_FREE(video->vol[idx]);
- }
- }
- M4VENC_FREE(video->vol);
- }
-
- /***************************************************/
- /* stop rate control parameters */
- /***************************************************/
-
- /* ANNEX L RATE CONTROL */
- if (video->encParams->RC_Type != CONSTANT_Q)
- {
- RC_Cleanup(video->rc, video->encParams->nLayers);
-
- for (idx = 0; idx < video->encParams->nLayers; idx++)
- {
- if (video->rc[idx])
- M4VENC_FREE(video->rc[idx]);
- }
- }
-
- if (video->functionPointer) M4VENC_FREE(video->functionPointer);
-
- /* If application has called PVCleanUpVideoEncoder then we deallocate */
- /* If PVInitVideoEncoder class it, then we DO NOT deallocate */
- if (video->encParams)
- {
- M4VENC_FREE(video->encParams);
- }
-
- M4VENC_FREE(video);
- encoderControl->videoEncoderData = NULL; /* video */
- }
-
- encoderControl->videoEncoderInit = 0;
-
- return PV_TRUE;
-}
-
-/* ======================================================================== */
-/* Function : PVGetVolHeader() */
-/* Date : 7/17/2001, */
-/* Purpose : */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetVolHeader(VideoEncControls *encCtrl, UChar *volHeader, Int *size, Int layer)
-{
- VideoEncData *encData;
- PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
-
- encData->currLayer = layer; /* Set Layer */
- /*pv_status = */
- EncodeVOS_Start(encCtrl); /* Encode VOL Header */
-
- encData->encParams->GetVolHeader[layer] = 1; /* Set usage flag: Needed to support old method*/
-
- /* Copy bitstream to buffer and set the size */
-
- if (*size > encData->bitstream1->byteCount)
- {
- *size = encData->bitstream1->byteCount;
- M4VENC_MEMCPY(volHeader, encData->bitstream1->bitstreamBuffer, *size);
- }
- else
- return PV_FALSE;
-
- /* Reset bitstream1 buffer parameters */
- BitstreamEncReset(encData->bitstream1);
-
- return PV_TRUE;
-}
-
-/* ======================================================================== */
-/* Function : PVGetOverrunBuffer() */
-/* Purpose : Get the overrun buffer ` */
-/* In/out : */
-/* Return : Pointer to overrun buffer. */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF UChar* PVGetOverrunBuffer(VideoEncControls *encCtrl)
-{
- VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
- Int currLayer = video->currLayer;
- Vol *currVol = video->vol[currLayer];
-
- if (currVol->stream->bitstreamBuffer != video->overrunBuffer) // not used
- {
- return NULL;
- }
-
- return video->overrunBuffer;
-}
-
-
-
-
-/* ======================================================================== */
-/* Function : EncodeVideoFrame() */
-/* Date : 08/22/2000 */
-/* Purpose : Encode video frame and return bitstream */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* 02.14.2001 */
-/* Finishing new timestamp 32-bit input */
-/* Applications need to take care of wrap-around */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeVideoFrame(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, VideoEncFrameIO *vid_out,
- ULong *nextModTime, UChar *bstream, Int *size, Int *nLayer)
-{
- Bool status = PV_TRUE;
- PV_STATUS pv_status;
- VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
- VideoEncParams *encParams = video->encParams;
- Vol *currVol;
- Vop *tempForwRefVop = NULL;
- Int tempRefSelCode = 0;
- PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
- Int width_16, height_16;
- Int width, height;
- Vop *temp;
- Int encodeVop = 0;
- void PaddingEdge(Vop *padVop);
- Int currLayer = -1;
- //Int nLayers = encParams->nLayers;
-
- ULong modTime = vid_in->timestamp;
-
-#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
- Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
- static Int rand_idx = 0;
-#endif
-
- /*******************************************************/
- /* Determine Next Vop to encode, if any, and nLayer */
- /*******************************************************/
- //i = nLayers-1;
-
- if (video->volInitialize[0]) /* first vol to code */
- {
- video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
- }
-
- encodeVop = DetermineCodingLayer(video, nLayer, modTime);
- currLayer = *nLayer;
- if ((currLayer < 0) || (currLayer > encParams->nLayers - 1))
- return PV_FALSE;
-
- /******************************************/
- /* If post-skipping still effective --- return */
- /******************************************/
-
- if (!encodeVop) /* skip enh layer, no base layer coded --- return */
- {
-#ifdef _PRINT_STAT
- printf("No frame coded. Continue to next frame.");
-#endif
- /* expected next code time, convert back to millisec */
- *nextModTime = video->nextModTime;
-
-#ifdef ALLOW_VOP_NOT_CODED
- if (video->vol[0]->shortVideoHeader) /* Short Video Header = 1 */
- {
- *size = 0;
- *nLayer = -1;
- }
- else
- {
- *nLayer = 0;
- EncodeVopNotCoded(video, bstream, size, modTime);
- *size = video->vol[0]->stream->byteCount;
- }
-#else
- *size = 0;
- *nLayer = -1;
-#endif
- return status;
- }
-
-
-//ENCODE_VOP_AGAIN: /* 12/30/00 */
-
- /**************************************************************/
- /* Initialize Vol stream structure with application bitstream */
- /**************************************************************/
-
- currVol = video->vol[currLayer];
- currVol->stream->bitstreamBuffer = bstream;
- currVol->stream->bufferSize = *size;
- BitstreamEncReset(currVol->stream);
- BitstreamSetOverrunBuffer(currVol->stream, video->overrunBuffer, video->oBSize, video);
-
- /***********************************************************/
- /* Encode VOS and VOL Headers on first call for each layer */
- /***********************************************************/
-
- if (video->volInitialize[currLayer])
- {
- video->currVop->timeInc = 0;
- video->prevBaseVop->timeInc = 0;
- if (!video->encParams->GetVolHeader[currLayer])
- pv_status = EncodeVOS_Start(encCtrl);
- }
-
- /***************************************************/
- /* Copy Input Video Frame to Internal Video Buffer */
- /***************************************************/
- /* Determine Width and Height of Vop Layer */
-
- width = encParams->LayerWidth[currLayer]; /* Get input width */
- height = encParams->LayerHeight[currLayer]; /* Get input height */
- /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
-
- width_16 = ((width + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
- height_16 = ((height + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
-
- video->input = vid_in; /* point to the frame input */
-
- /*// End ////////////////////////////// */
-
-
- /**************************************/
- /* Determine VOP Type */
- /* 6/2/2001, separate function */
- /**************************************/
- DetermineVopType(video, currLayer);
-
- /****************************/
- /* Initialize VOP */
- /****************************/
- video->currVop->volID = currVol->volID;
- video->currVop->width = width_16;
- video->currVop->height = height_16;
- if (video->encParams->H263_Enabled) /* 11/28/05 */
- {
- video->currVop->pitch = width_16;
- }
- else
- {
- video->currVop->pitch = width_16 + 32;
- }
- video->currVop->timeInc = currVol->timeIncrement;
- video->currVop->vopCoded = 1;
- video->currVop->roundingType = 0;
- video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
-
- if (currLayer == 0
-#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
- || random_val[rand_idx] || video->volInitialize[currLayer]
-#endif
- )
- {
- tempForwRefVop = video->forwardRefVop; /* keep initial state */
- if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
-
- video->forwardRefVop = video->prevBaseVop;
- video->forwardRefVop->refSelectCode = 1;
- }
-#ifdef RANDOM_REFSELCODE
- else
- {
- tempForwRefVop = video->forwardRefVop; /* keep initial state */
- if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
-
- video->forwardRefVop = video->prevEnhanceVop;
- video->forwardRefVop->refSelectCode = 0;
- }
- rand_idx++;
- rand_idx %= 30;
-#endif
-
- video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
- video->currVop->gobNumber = 0;
- video->currVop->gobFrameID = video->currVop->predictionType;
- video->currVop->temporalRef = (modTime * 30 / 1001) % 256;
-
- video->currVop->temporalInterval = 0;
-
- if (video->currVop->predictionType == I_VOP)
- video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
- else
- video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
-
-
- /****************/
- /* Encode Vop */
- /****************/
- video->slice_coding = 0;
-
- pv_status = EncodeVop(video);
-#ifdef _PRINT_STAT
- if (video->currVop->predictionType == I_VOP)
- printf(" I-VOP ");
- else
- printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
-#endif
-
- /************************************/
- /* Update Skip Next Frame */
- /************************************/
- *nLayer = UpdateSkipNextFrame(video, nextModTime, size, pv_status);
- if (*nLayer == -1) /* skip current frame */
- {
- /* make sure that pointers are restored to the previous state */
- if (currLayer == 0)
- {
- video->forwardRefVop = tempForwRefVop; /* For P-Vop base only */
- video->forwardRefVop->refSelectCode = tempRefSelCode;
- }
-
- return status;
- }
-
- /* If I-VOP was encoded, reset IntraPeriod */
- if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
- video->nextEncIVop = encParams->IntraPeriod;
-
- /* Set HintTrack Information */
- if (currLayer != -1)
- {
- if (currVol->prevModuloTimeBase)
- video->hintTrackInfo.MTB = 1;
- else
- video->hintTrackInfo.MTB = 0;
- video->hintTrackInfo.LayerID = (UChar)currVol->volID;
- video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
- video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
- }
-
- /************************************************/
- /* Determine nLayer and timeInc for next encode */
- /* 12/27/00 always go by the highest layer*/
- /************************************************/
-
- /**********************************************************/
- /* Copy Reconstructed Buffer to Output Video Frame Buffer */
- /**********************************************************/
- vid_out->yChan = video->currVop->yChan;
- vid_out->uChan = video->currVop->uChan;
- vid_out->vChan = video->currVop->vChan;
- if (video->encParams->H263_Enabled)
- {
- vid_out->height = video->currVop->height; /* padded height */
- vid_out->pitch = video->currVop->width; /* padded width */
- }
- else
- {
- vid_out->height = video->currVop->height + 32; /* padded height */
- vid_out->pitch = video->currVop->width + 32; /* padded width */
- }
- //video_out->timestamp = video->modTime;
- vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
-
- /*// End /////////////////////// */
-
- /***********************************/
- /* Update Ouput bstream byte count */
- /***********************************/
-
- *size = currVol->stream->byteCount;
-
- /****************************************/
- /* Swap Vop Pointers for Base Layer */
- /****************************************/
- if (currLayer == 0)
- {
- temp = video->prevBaseVop;
- video->prevBaseVop = video->currVop;
- video->prevBaseVop->padded = 0; /* not padded */
- video->currVop = temp;
- video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
- video->forwardRefVop->refSelectCode = 1;
- }
- else
- {
- temp = video->prevEnhanceVop;
- video->prevEnhanceVop = video->currVop;
- video->prevEnhanceVop->padded = 0; /* not padded */
- video->currVop = temp;
- video->forwardRefVop = video->prevEnhanceVop;
- video->forwardRefVop->refSelectCode = 0;
- }
-
- /****************************************/
- /* Modify the intialize flag at the end.*/
- /****************************************/
- if (video->volInitialize[currLayer])
- video->volInitialize[currLayer] = 0;
-
- return status;
-}
-
-#ifndef NO_SLICE_ENCODE
-/* ======================================================================== */
-/* Function : PVEncodeFrameSet() */
-/* Date : 04/18/2000 */
-/* Purpose : Enter a video frame and perform front-end time check plus ME */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeFrameSet(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, ULong *nextModTime, Int *nLayer)
-{
- Bool status = PV_TRUE;
- VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
- VideoEncParams *encParams = video->encParams;
- Vol *currVol;
- PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
- Int width_16, height_16;
- Int width, height;
- Int encodeVop = 0;
- void PaddingEdge(Vop *padVop);
- Int currLayer = -1;
- //Int nLayers = encParams->nLayers;
-
- ULong modTime = vid_in->timestamp;
-
-#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
- Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
- static Int rand_idx = 0;
-#endif
- /*******************************************************/
- /* Determine Next Vop to encode, if any, and nLayer */
- /*******************************************************/
-
- video->modTime = modTime;
-
- //i = nLayers-1;
-
- if (video->volInitialize[0]) /* first vol to code */
- {
- video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
- }
-
-
- encodeVop = DetermineCodingLayer(video, nLayer, modTime);
-
- currLayer = *nLayer;
-
- /******************************************/
- /* If post-skipping still effective --- return */
- /******************************************/
-
- if (!encodeVop) /* skip enh layer, no base layer coded --- return */
- {
-#ifdef _PRINT_STAT
- printf("No frame coded. Continue to next frame.");
-#endif
- *nLayer = -1;
-
- /* expected next code time, convert back to millisec */
- *nextModTime = video->nextModTime;;
- return status;
- }
-
- /**************************************************************/
- /* Initialize Vol stream structure with application bitstream */
- /**************************************************************/
-
- currVol = video->vol[currLayer];
- currVol->stream->bufferSize = 0;
- BitstreamEncReset(currVol->stream);
-
- /***********************************************************/
- /* Encode VOS and VOL Headers on first call for each layer */
- /***********************************************************/
-
- if (video->volInitialize[currLayer])
- {
- video->currVop->timeInc = 0;
- video->prevBaseVop->timeInc = 0;
- }
-
- /***************************************************/
- /* Copy Input Video Frame to Internal Video Buffer */
- /***************************************************/
- /* Determine Width and Height of Vop Layer */
-
- width = encParams->LayerWidth[currLayer]; /* Get input width */
- height = encParams->LayerHeight[currLayer]; /* Get input height */
- /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
-
- width_16 = ((width + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
- height_16 = ((height + 15) / 16) * 16; /* Round up to nearest multiple of 16 */
-
- video->input = vid_in; /* point to the frame input */
-
- /*// End ////////////////////////////// */
-
-
- /**************************************/
- /* Determine VOP Type */
- /* 6/2/2001, separate function */
- /**************************************/
- DetermineVopType(video, currLayer);
-
- /****************************/
- /* Initialize VOP */
- /****************************/
- video->currVop->volID = currVol->volID;
- video->currVop->width = width_16;
- video->currVop->height = height_16;
- if (video->encParams->H263_Enabled) /* 11/28/05 */
- {
- video->currVop->pitch = width_16;
- }
- else
- {
- video->currVop->pitch = width_16 + 32;
- }
- video->currVop->timeInc = currVol->timeIncrement;
- video->currVop->vopCoded = 1;
- video->currVop->roundingType = 0;
- video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
-
- if (currLayer == 0
-#ifdef RANDOM_REFSELCODE /* add random selection of reference Vop */
- || random_val[rand_idx] || video->volInitialize[currLayer]
-#endif
- )
- {
- video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
- if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
-
- video->forwardRefVop = video->prevBaseVop;
- video->forwardRefVop->refSelectCode = 1;
- }
-#ifdef RANDOM_REFSELCODE
- else
- {
- video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
- if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
-
- video->forwardRefVop = video->prevEnhanceVop;
- video->forwardRefVop->refSelectCode = 0;
- }
- rand_idx++;
- rand_idx %= 30;
-#endif
-
- video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
- video->currVop->gobNumber = 0;
- video->currVop->gobFrameID = video->currVop->predictionType;
- video->currVop->temporalRef = ((modTime) * 30 / 1001) % 256;
-
- video->currVop->temporalInterval = 0;
-
- if (video->currVop->predictionType == I_VOP)
- video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
- else
- video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
-
- /****************/
- /* Encode Vop */
- /****************/
- video->slice_coding = 1;
-
- /*pv_status =*/
- EncodeVop(video);
-
-#ifdef _PRINT_STAT
- if (video->currVop->predictionType == I_VOP)
- printf(" I-VOP ");
- else
- printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
-#endif
-
- /* Set HintTrack Information */
- if (currVol->prevModuloTimeBase)
- video->hintTrackInfo.MTB = 1;
- else
- video->hintTrackInfo.MTB = 0;
-
- video->hintTrackInfo.LayerID = (UChar)currVol->volID;
- video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
- video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
-
- return status;
-}
-#endif /* NO_SLICE_ENCODE */
-
-#ifndef NO_SLICE_ENCODE
-/* ======================================================================== */
-/* Function : PVEncodePacket() */
-/* Date : 04/18/2002 */
-/* Purpose : Encode one packet and return bitstream */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeSlice(VideoEncControls *encCtrl, UChar *bstream, Int *size,
- Int *endofFrame, VideoEncFrameIO *vid_out, ULong *nextModTime)
-{
- PV_STATUS pv_status;
- VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
- VideoEncParams *encParams = video->encParams;
- Vol *currVol;
- PV_STATUS EncodeVOS_Start(VideoEncControls *encCtrl);
- Vop *temp;
- void PaddingEdge(Vop *padVop);
- Int currLayer = video->currLayer;
- Int pre_skip;
- Int pre_size;
- /**************************************************************/
- /* Initialize Vol stream structure with application bitstream */
- /**************************************************************/
-
- currVol = video->vol[currLayer];
- currVol->stream->bitstreamBuffer = bstream;
- pre_size = currVol->stream->byteCount;
- currVol->stream->bufferSize = pre_size + (*size);
-
- /***********************************************************/
- /* Encode VOS and VOL Headers on first call for each layer */
- /***********************************************************/
-
- if (video->volInitialize[currLayer])
- {
- if (!video->encParams->GetVolHeader[currLayer])
- pv_status = EncodeVOS_Start(encCtrl);
- }
-
- /****************/
- /* Encode Slice */
- /****************/
- pv_status = EncodeSlice(video);
-
- *endofFrame = 0;
-
- if (video->mbnum >= currVol->nTotalMB && !video->end_of_buf)
- {
- *endofFrame = 1;
-
- /************************************/
- /* Update Skip Next Frame */
- /************************************/
- pre_skip = UpdateSkipNextFrame(video, nextModTime, size, pv_status); /* modified such that no pre-skipped */
-
- if (pre_skip == -1) /* error */
- {
- *endofFrame = -1;
- /* make sure that pointers are restored to the previous state */
- if (currLayer == 0)
- {
- video->forwardRefVop = video->tempForwRefVop; /* For P-Vop base only */
- video->forwardRefVop->refSelectCode = video->tempRefSelCode;
- }
-
- return pv_status;
- }
-
- /* If I-VOP was encoded, reset IntraPeriod */
- if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
- video->nextEncIVop = encParams->IntraPeriod;
-
- /**********************************************************/
- /* Copy Reconstructed Buffer to Output Video Frame Buffer */
- /**********************************************************/
- vid_out->yChan = video->currVop->yChan;
- vid_out->uChan = video->currVop->uChan;
- vid_out->vChan = video->currVop->vChan;
- if (video->encParams->H263_Enabled)
- {
- vid_out->height = video->currVop->height; /* padded height */
- vid_out->pitch = video->currVop->width; /* padded width */
- }
- else
- {
- vid_out->height = video->currVop->height + 32; /* padded height */
- vid_out->pitch = video->currVop->width + 32; /* padded width */
- }
- //vid_out->timestamp = video->modTime;
- vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
-
- /*// End /////////////////////// */
-
- /****************************************/
- /* Swap Vop Pointers for Base Layer */
- /****************************************/
-
- if (currLayer == 0)
- {
- temp = video->prevBaseVop;
- video->prevBaseVop = video->currVop;
- video->prevBaseVop->padded = 0; /* not padded */
- video->currVop = temp;
- video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
- video->forwardRefVop->refSelectCode = 1;
- }
- else
- {
- temp = video->prevEnhanceVop;
- video->prevEnhanceVop = video->currVop;
- video->prevEnhanceVop->padded = 0; /* not padded */
- video->currVop = temp;
- video->forwardRefVop = video->prevEnhanceVop;
- video->forwardRefVop->refSelectCode = 0;
- }
- }
-
- /***********************************/
- /* Update Ouput bstream byte count */
- /***********************************/
-
- *size = currVol->stream->byteCount - pre_size;
-
- /****************************************/
- /* Modify the intialize flag at the end.*/
- /****************************************/
- if (video->volInitialize[currLayer])
- video->volInitialize[currLayer] = 0;
-
- return pv_status;
-}
-#endif /* NO_SLICE_ENCODE */
-
-
-/* ======================================================================== */
-/* Function : PVGetH263ProfileLevelID() */
-/* Date : 02/05/2003 */
-/* Purpose : Get H.263 Profile ID and level ID for profile 0 */
-/* In/out : Profile ID=0, levelID is what we want */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* Note : h263Level[8], rBR_bound[8], max_h263_framerate[2] */
-/* max_h263_width[2], max_h263_height[2] are global */
-/* */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVGetH263ProfileLevelID(VideoEncControls *encCtrl, Int *profileID, Int *levelID)
-{
- VideoEncData *encData;
- Int width, height;
- float bitrate_r, framerate;
-
-
- /* For this version, we only support H.263 profile 0 */
- *profileID = 0;
-
- *levelID = 0;
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- if (!encData->encParams->H263_Enabled) return PV_FALSE;
-
-
- /* get image width, height, bitrate and framerate */
- width = encData->encParams->LayerWidth[0];
- height = encData->encParams->LayerHeight[0];
- bitrate_r = (float)(encData->encParams->LayerBitRate[0]) / (float)64000.0;
- framerate = encData->encParams->LayerFrameRate[0];
- if (!width || !height || !(bitrate_r > 0 && framerate > 0)) return PV_FALSE;
-
- /* This is the most frequent case : level 10 */
- if (bitrate_r <= rBR_bound[1] && framerate <= max_h263_framerate[0] &&
- (width <= max_h263_width[0] && height <= max_h263_height[0]))
- {
- *levelID = h263Level[1];
- return PV_TRUE;
- }
- else if (bitrate_r > rBR_bound[4] ||
- (width > max_h263_width[1] || height > max_h263_height[1]) ||
- framerate > max_h263_framerate[1]) /* check the highest level 70 */
- {
- *levelID = h263Level[7];
- return PV_TRUE;
- }
- else /* search level 20, 30, 40 */
- {
-
- /* pick out level 20 */
- if (bitrate_r <= rBR_bound[2] &&
- ((width <= max_h263_width[0] && height <= max_h263_height[0] && framerate <= max_h263_framerate[1]) ||
- (width <= max_h263_width[1] && height <= max_h263_height[1] && framerate <= max_h263_framerate[0])))
- {
- *levelID = h263Level[2];
- return PV_TRUE;
- }
- else /* width, height and framerate are ok, now choose level 30 or 40 */
- {
- *levelID = (bitrate_r <= rBR_bound[3] ? h263Level[3] : h263Level[4]);
- return PV_TRUE;
- }
- }
-}
-
-/* ======================================================================== */
-/* Function : PVGetMPEG4ProfileLevelID() */
-/* Date : 26/06/2008 */
-/* Purpose : Get MPEG4 Level after initialized */
-/* In/out : profile_level according to interface */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVGetMPEG4ProfileLevelID(VideoEncControls *encCtrl, Int *profile_level, Int nLayer)
-{
- VideoEncData* video;
- Int i;
-
- video = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (nLayer == 0)
- {
- for (i = 0; i < 8; i++)
- {
- if (video->encParams->ProfileLevel[0] == profile_level_code[i])
- {
- break;
- }
- }
- *profile_level = i;
- }
- else
- {
- for (i = 0; i < 8; i++)
- {
- if (video->encParams->ProfileLevel[0] == scalable_profile_level_code[i])
- {
- break;
- }
- }
- *profile_level = i + SIMPLE_SCALABLE_PROFILE_LEVEL0;
- }
-
- return true;
-}
-
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVUpdateEncFrameRate */
-/* Date : 04/08/2002 */
-/* Purpose : Update target frame rates of the encoded base and enhance */
-/* layer(if any) while encoding operation is ongoing */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateEncFrameRate(VideoEncControls *encCtrl, float *frameRate)
-{
- VideoEncData *encData;
- Int i;// nTotalMB, mbPerSec;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- /* Update the framerates for all the layers */
- for (i = 0; i < encData->encParams->nLayers; i++)
- {
-
- /* New check: encoding framerate should be consistent with the given profile and level */
- //nTotalMB = (((encData->encParams->LayerWidth[i]+15)/16)*16)*(((encData->encParams->LayerHeight[i]+15)/16)*16)/(16*16);
- //mbPerSec = (Int)(nTotalMB * frameRate[i]);
- //if(mbPerSec > encData->encParams->LayerMaxMbsPerSec[i]) return PV_FALSE;
- if (frameRate[i] > encData->encParams->LayerMaxFrameRate[i]) return PV_FALSE; /* set by users or profile */
-
- encData->encParams->LayerFrameRate[i] = frameRate[i];
- }
-
- return RC_UpdateBXRCParams((void*) encData);
-
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVUpdateBitRate */
-/* Date : 04/08/2002 */
-/* Purpose : Update target bit rates of the encoded base and enhance */
-/* layer(if any) while encoding operation is ongoing */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateBitRate(VideoEncControls *encCtrl, Int *bitRate)
-{
- VideoEncData *encData;
- Int i;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- /* Update the bitrates for all the layers */
- for (i = 0; i < encData->encParams->nLayers; i++)
- {
- if (bitRate[i] > encData->encParams->LayerMaxBitRate[i]) /* set by users or profile */
- {
- return PV_FALSE;
- }
- encData->encParams->LayerBitRate[i] = bitRate[i];
- }
-
- return RC_UpdateBXRCParams((void*) encData);
-
-}
-#endif
-#ifndef LIMITED_API
-/* ============================================================================ */
-/* Function : PVUpdateVBVDelay() */
-/* Date : 4/23/2004 */
-/* Purpose : Update VBV buffer size(in delay) */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ============================================================================ */
-
-Bool PVUpdateVBVDelay(VideoEncControls *encCtrl, float delay)
-{
-
- VideoEncData *encData;
- Int total_bitrate, max_buffer_size;
- int index;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- /* Check whether the input delay is valid based on the given profile */
- total_bitrate = (encData->encParams->nLayers == 1 ? encData->encParams->LayerBitRate[0] :
- encData->encParams->LayerBitRate[1]);
- index = encData->encParams->profile_table_index;
- max_buffer_size = (encData->encParams->nLayers == 1 ? profile_level_max_VBV_size[index] :
- scalable_profile_level_max_VBV_size[index]);
-
- if (total_bitrate*delay > (float)max_buffer_size)
- return PV_FALSE;
-
- encData->encParams->VBV_delay = delay;
- return PV_TRUE;
-
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVUpdateIFrameInterval() */
-/* Date : 04/10/2002 */
-/* Purpose : updates the INTRA frame refresh interval while encoding */
-/* is ongoing */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateIFrameInterval(VideoEncControls *encCtrl, Int aIFramePeriod)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- encData->encParams->IntraPeriod = aIFramePeriod;
- return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVSetNumIntraMBRefresh() */
-/* Date : 08/05/2003 */
-/* Purpose : */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVUpdateNumIntraMBRefresh(VideoEncControls *encCtrl, Int numMB)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
-
- encData->encParams->Refresh = numMB;
-
- return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVIFrameRequest() */
-/* Date : 04/10/2002 */
-/* Purpose : encodes the next base frame as an I-Vop */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVIFrameRequest(VideoEncControls *encCtrl)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- encData->nextEncIVop = 1;
- return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVGetEncMemoryUsage() */
-/* Date : 10/17/2000 */
-/* Purpose : */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Int PVGetEncMemoryUsage(VideoEncControls *encCtrl)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
- return encData->encParams->MemoryUsage;
-}
-#endif
-
-/* ======================================================================== */
-/* Function : PVGetHintTrack() */
-/* Date : 1/17/2001, */
-/* Purpose : */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetHintTrack(VideoEncControls *encCtrl, MP4HintTrack *info)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
- info->MTB = encData->hintTrackInfo.MTB;
- info->LayerID = encData->hintTrackInfo.LayerID;
- info->CodeType = encData->hintTrackInfo.CodeType;
- info->RefSelCode = encData->hintTrackInfo.RefSelCode;
-
- return PV_TRUE;
-}
-
-/* ======================================================================== */
-/* Function : PVGetMaxVideoFrameSize() */
-/* Date : 7/17/2001, */
-/* Purpose : Function merely returns the maximum buffer size */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetMaxVideoFrameSize(VideoEncControls *encCtrl, Int *maxVideoFrameSize)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
-
-
- *maxVideoFrameSize = encData->encParams->BufferSize[0];
-
- if (encData->encParams->nLayers == 2)
- if (*maxVideoFrameSize < encData->encParams->BufferSize[1])
- *maxVideoFrameSize = encData->encParams->BufferSize[1];
- *maxVideoFrameSize >>= 3; /* Convert to Bytes */
-
- if (*maxVideoFrameSize <= 4000)
- *maxVideoFrameSize = 4000;
-
- return PV_TRUE;
-}
-#ifndef LIMITED_API
-/* ======================================================================== */
-/* Function : PVGetVBVSize() */
-/* Date : 4/15/2002 */
-/* Purpose : Function merely returns the maximum buffer size */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetVBVSize(VideoEncControls *encCtrl, Int *VBVSize)
-{
- VideoEncData *encData;
-
- encData = (VideoEncData *)encCtrl->videoEncoderData;
-
- if (encData == NULL)
- return PV_FALSE;
- if (encData->encParams == NULL)
- return PV_FALSE;
-
- *VBVSize = encData->encParams->BufferSize[0];
- if (encData->encParams->nLayers == 2)
- *VBVSize += encData->encParams->BufferSize[1];
-
- return PV_TRUE;
-
-}
-#endif
-/* ======================================================================== */
-/* Function : EncodeVOS_Start() */
-/* Date : 08/22/2000 */
-/* Purpose : Encodes the VOS,VO, and VOL or Short Headers */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-PV_STATUS EncodeVOS_Start(VideoEncControls *encoderControl)
-{
-
- VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
- Vol *currVol = video->vol[video->currLayer];
- PV_STATUS status = PV_SUCCESS;
- //int profile_level=0x01;
- BitstreamEncVideo *stream = video->bitstream1;
- int i, j;
-
- /********************************/
- /* Check for short_video_header */
- /********************************/
- if (currVol->shortVideoHeader == 1)
- return status;
- else
- {
- /* Short Video Header or M4V */
-
- /**************************/
- /* VisualObjectSequence ()*/
- /**************************/
- status = BitstreamPutGT16Bits(stream, 32, SESSION_START_CODE);
- /* Determine profile_level */
- status = BitstreamPutBits(stream, 8, video->encParams->ProfileLevel[video->currLayer]);
-
- /******************/
- /* VisualObject() */
- /******************/
-
- status = BitstreamPutGT16Bits(stream, 32, VISUAL_OBJECT_START_CODE);
- status = BitstreamPut1Bits(stream, 0x00); /* visual object identifier */
- status = BitstreamPutBits(stream, 4, 0x01); /* visual object Type == "video ID" */
- status = BitstreamPut1Bits(stream, 0x00); /* no video signal type */
-
- /*temp = */
- BitstreamMpeg4ByteAlignStuffing(stream);
-
-
- status = BitstreamPutGT16Bits(stream, 27, VO_START_CODE);/* byte align: should be 2 bits */
- status = BitstreamPutBits(stream, 5, 0x00);/* Video ID = 0 */
-
-
-
- /**********************/
- /* VideoObjectLayer() */
- /**********************/
- if (currVol->shortVideoHeader == 0)
- { /* M4V else Short Video Header */
- status = BitstreamPutGT16Bits(stream, VOL_START_CODE_LENGTH, VOL_START_CODE);
- status = BitstreamPutBits(stream, 4, currVol->volID);/* video_object_layer_id */
- status = BitstreamPut1Bits(stream, 0x00);/* Random Access = 0 */
-
- if (video->currLayer == 0)
- status = BitstreamPutBits(stream, 8, 0x01);/* Video Object Type Indication = 1 ... Simple Object Type */
- else
- status = BitstreamPutBits(stream, 8, 0x02);/* Video Object Type Indication = 2 ... Simple Scalable Object Type */
-
- status = BitstreamPut1Bits(stream, 0x00);/* is_object_layer_identifer = 0 */
-
-
- status = BitstreamPutBits(stream, 4, 0x01); /* aspect_ratio_info = 1 ... 1:1(Square) */
- status = BitstreamPut1Bits(stream, 0x00);/* vol_control_parameters = 0 */
- status = BitstreamPutBits(stream, 2, 0x00);/* video_object_layer_shape = 00 ... rectangular */
- status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
- status = BitstreamPutGT8Bits(stream, 16, currVol->timeIncrementResolution);/* vop_time_increment_resolution */
- status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
- status = BitstreamPut1Bits(stream, currVol->fixedVopRate);/* fixed_vop_rate = 0 */
-
- /* For Rectangular VO layer shape */
- status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
- status = BitstreamPutGT8Bits(stream, 13, currVol->width);/* video_object_layer_width */
- status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
- status = BitstreamPutGT8Bits(stream, 13, currVol->height);/* video_object_layer_height */
- status = BitstreamPut1Bits(stream, 0x01);/*marker bit */
-
- status = BitstreamPut1Bits(stream, 0x00);/*interlaced = 0 */
- status = BitstreamPut1Bits(stream, 0x01);/* obmc_disable = 1 */
- status = BitstreamPut1Bits(stream, 0x00);/* sprite_enable = 0 */
- status = BitstreamPut1Bits(stream, 0x00);/* not_8_bit = 0 */
- status = BitstreamPut1Bits(stream, currVol->quantType);/* quant_type */
-
- if (currVol->quantType)
- {
- status = BitstreamPut1Bits(stream, currVol->loadIntraQuantMat); /* Intra quant matrix */
- if (currVol->loadIntraQuantMat)
- {
- for (j = 63; j >= 1; j--)
- if (currVol->iqmat[*(zigzag_i+j)] != currVol->iqmat[*(zigzag_i+j-1)])
- break;
- if ((j == 1) && (currVol->iqmat[*(zigzag_i+j)] == currVol->iqmat[*(zigzag_i+j-1)]))
- j = 0;
- for (i = 0; i < j + 1; i++)
- BitstreamPutBits(stream, 8, currVol->iqmat[*(zigzag_i+i)]);
- if (j < 63)
- BitstreamPutBits(stream, 8, 0);
- }
- else
- {
- for (j = 0; j < 64; j++)
- currVol->iqmat[j] = mpeg_iqmat_def[j];
-
- }
- status = BitstreamPut1Bits(stream, currVol->loadNonIntraQuantMat); /* Non-Intra quant matrix */
- if (currVol->loadNonIntraQuantMat)
- {
- for (j = 63; j >= 1; j--)
- if (currVol->niqmat[*(zigzag_i+j)] != currVol->niqmat[*(zigzag_i+j-1)])
- break;
- if ((j == 1) && (currVol->niqmat[*(zigzag_i+j)] == currVol->niqmat[*(zigzag_i+j-1)]))
- j = 0;
- for (i = 0; i < j + 1; i++)
- BitstreamPutBits(stream, 8, currVol->niqmat[*(zigzag_i+i)]);
- if (j < 63)
- BitstreamPutBits(stream, 8, 0);
- }
- else
- {
- for (j = 0; j < 64; j++)
- currVol->niqmat[j] = mpeg_nqmat_def[j];
- }
- }
-
- status = BitstreamPut1Bits(stream, 0x01); /* complexity_estimation_disable = 1 */
- status = BitstreamPut1Bits(stream, currVol->ResyncMarkerDisable);/* Resync_marker_disable */
- status = BitstreamPut1Bits(stream, currVol->dataPartitioning);/* Data partitioned */
-
- if (currVol->dataPartitioning)
- status = BitstreamPut1Bits(stream, currVol->useReverseVLC); /* Reversible_vlc */
-
-
- if (currVol->scalability) /* Scalability*/
- {
-
- status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 1 */
- status = BitstreamPut1Bits(stream, currVol->scalType);/* hierarchy _type ... Spatial= 0 and Temporal = 1 */
- status = BitstreamPutBits(stream, 4, currVol->refVolID);/* ref_layer_id */
- status = BitstreamPut1Bits(stream, currVol->refSampDir);/* ref_layer_sampling_direc*/
- status = BitstreamPutBits(stream, 5, currVol->horSamp_n);/*hor_sampling_factor_n*/
- status = BitstreamPutBits(stream, 5, currVol->horSamp_m);/*hor_sampling_factor_m*/
- status = BitstreamPutBits(stream, 5, currVol->verSamp_n);/*vert_sampling_factor_n*/
- status = BitstreamPutBits(stream, 5, currVol->verSamp_m);/*vert_sampling_factor_m*/
- status = BitstreamPut1Bits(stream, currVol->enhancementType);/* enhancement_type*/
- }
- else /* No Scalability */
- status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 0 */
-
- /*temp = */
- BitstreamMpeg4ByteAlignStuffing(stream); /* Byte align Headers for VOP */
- }
- }
-
- return status;
-}
-
-/* ======================================================================== */
-/* Function : VOS_End() */
-/* Date : 08/22/2000 */
-/* Purpose : Visual Object Sequence End */
-/* In/out : */
-/* Return : PV_TRUE if successed, PV_FALSE if failed. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-PV_STATUS VOS_End(VideoEncControls *encoderControl)
-{
- PV_STATUS status = PV_SUCCESS;
- VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
- Vol *currVol = video->vol[video->currLayer];
- BitstreamEncVideo *stream = currVol->stream;
-
-
- status = BitstreamPutBits(stream, SESSION_END_CODE, 32);
-
- return status;
-}
-
-/* ======================================================================== */
-/* Function : DetermineCodingLayer */
-/* Date : 06/02/2001 */
-/* Purpose : Find layer to code based on current mod time, assuming that
- it's time to encode enhanced layer. */
-/* In/out : */
-/* Return : Number of layer to code. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime)
-{
- Vol **vol = video->vol;
- VideoEncParams *encParams = video->encParams;
- Int numLayers = encParams->nLayers;
- UInt modTimeRef = video->modTimeRef;
- float *LayerFrameRate = encParams->LayerFrameRate;
- UInt frameNum[4], frameTick;
- ULong frameModTime, nextFrmModTime;
-#ifdef REDUCE_FRAME_VARIANCE /* To limit how close 2 frames can be */
- float frameInterval;
-#endif
- float srcFrameInterval;
- Int frameInc;
- Int i, extra_skip;
- Int encodeVop = 0;
-
- i = numLayers - 1;
-
- if (modTime - video->nextModTime > ((ULong)(-1)) >> 1) /* next time wrapped around */
- return 0; /* not time to code it yet */
-
- video->relLayerCodeTime[i] -= 1000;
- video->nextEncIVop--; /* number of Vops in highest layer resolution. */
- video->numVopsInGOP++;
-
- /* from this point frameModTime and nextFrmModTime are internal */
-
- frameNum[i] = (UInt)((modTime - modTimeRef) * LayerFrameRate[i] + 500) / 1000;
- if (video->volInitialize[i])
- {
- video->prevFrameNum[i] = frameNum[i] - 1;
- }
- else if (frameNum[i] <= video->prevFrameNum[i])
- {
- return 0; /* do not encode this frame */
- }
-
- /**** this part computes expected next frame *******/
- frameModTime = (ULong)(((frameNum[i] * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
- nextFrmModTime = (ULong)((((frameNum[i] + 1) * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
-
- srcFrameInterval = 1000 / video->FrameRate;
-
- video->nextModTime = nextFrmModTime - (ULong)(srcFrameInterval / 2.) - 1; /* between current and next frame */
-
-#ifdef REDUCE_FRAME_VARIANCE /* To limit how close 2 frames can be */
- frameInterval = 1000 / LayerFrameRate[i]; /* next rec. time */
- delta = (Int)(frameInterval / 4); /* empirical number */
- if (video->nextModTime - modTime < (ULong)delta) /* need to move nextModTime further. */
- {
- video->nextModTime += ((delta - video->nextModTime + modTime)); /* empirical formula */
- }
-#endif
- /****************************************************/
-
- /* map frame no.to tick from modTimeRef */
- /*frameTick = (frameNum[i]*vol[i]->timeIncrementResolution) ;
- frameTick = (UInt)((frameTick + (encParams->LayerFrameRate[i]/2))/encParams->LayerFrameRate[i]);*/
- /* 11/16/01, change frameTick to be the closest tick from the actual modTime */
- /* 12/12/02, add (double) to prevent large number wrap-around */
- frameTick = (Int)(((double)(modTime - modTimeRef) * vol[i]->timeIncrementResolution + 500) / 1000);
-
- /* find timeIncrement to be put in the bitstream */
- /* refTick is second boundary reference. */
- vol[i]->timeIncrement = frameTick - video->refTick[i];
-
-
- vol[i]->moduloTimeBase = 0;
- while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
- {
- vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
- vol[i]->moduloTimeBase++;
- /* do not update refTick and modTimeRef yet, do it after encoding!! */
- }
-
- if (video->relLayerCodeTime[i] <= 0) /* no skipping */
- {
- encodeVop = 1;
- video->currLayer = *nLayer = i;
- video->relLayerCodeTime[i] += 1000;
-
- /* takes care of more dropped frame than expected */
- extra_skip = -1;
- frameInc = (frameNum[i] - video->prevFrameNum[i]);
- extra_skip += frameInc;
-
- if (extra_skip > 0)
- { /* update rc->Nr, rc->B, (rc->Rr)*/
- video->nextEncIVop -= extra_skip;
- video->numVopsInGOP += extra_skip;
- if (encParams->RC_Type != CONSTANT_Q)
- {
- RC_UpdateBuffer(video, i, extra_skip);
- }
- }
-
- }
- /* update frame no. */
- video->prevFrameNum[i] = frameNum[i];
-
- /* go through all lower layer */
- for (i = (numLayers - 2); i >= 0; i--)
- {
-
- video->relLayerCodeTime[i] -= 1000;
-
- /* find timeIncrement to be put in the bitstream */
- vol[i]->timeIncrement = frameTick - video->refTick[i];
-
- if (video->relLayerCodeTime[i] <= 0) /* time to encode base */
- {
- /* 12/27/00 */
- encodeVop = 1;
- video->currLayer = *nLayer = i;
- video->relLayerCodeTime[i] +=
- (Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]);
-
- vol[i]->moduloTimeBase = 0;
- while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
- {
- vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
- vol[i]->moduloTimeBase++;
- /* do not update refTick and modTimeRef yet, do it after encoding!! */
- }
-
- /* takes care of more dropped frame than expected */
- frameNum[i] = (UInt)((frameModTime - modTimeRef) * encParams->LayerFrameRate[i] + 500) / 1000;
- if (video->volInitialize[i])
- video->prevFrameNum[i] = frameNum[i] - 1;
-
- extra_skip = -1;
- frameInc = (frameNum[i] - video->prevFrameNum[i]);
- extra_skip += frameInc;
-
- if (extra_skip > 0)
- { /* update rc->Nr, rc->B, (rc->Rr)*/
- if (encParams->RC_Type != CONSTANT_Q)
- {
- RC_UpdateBuffer(video, i, extra_skip);
- }
- }
- /* update frame no. */
- video->prevFrameNum[i] = frameNum[i];
- }
- }
-
-#ifdef _PRINT_STAT
- if (encodeVop)
- printf(" TI: %d ", vol[*nLayer]->timeIncrement);
-#endif
-
- return encodeVop;
-}
-
-/* ======================================================================== */
-/* Function : DetermineVopType */
-/* Date : 06/02/2001 */
-/* Purpose : The name says it all. */
-/* In/out : */
-/* Return : void . */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-void DetermineVopType(VideoEncData *video, Int currLayer)
-{
- VideoEncParams *encParams = video->encParams;
-// Vol *currVol = video->vol[currLayer];
-
- if (encParams->IntraPeriod == 0) /* I-VOPs only */
- {
- if (video->currLayer > 0)
- video->currVop->predictionType = P_VOP;
- else
- {
- video->currVop->predictionType = I_VOP;
- if (video->numVopsInGOP >= 132)
- video->numVopsInGOP = 0;
- }
- }
- else if (encParams->IntraPeriod == -1) /* IPPPPP... */
- {
-
- /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
- if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
- video->currVop->predictionType = P_VOP;
-
- if (video->currLayer == 0)
- {
- if (/*video->numVopsInGOP>=132 || */video->volInitialize[currLayer])
- {
- video->currVop->predictionType = I_VOP;
- video->numVopsInGOP = 0; /* force INTRA update every 132 base frames*/
- video->nextEncIVop = 1;
- }
- else if (video->nextEncIVop == 0 || video->currVop->predictionType == I_VOP)
- {
- video->numVopsInGOP = 0;
- video->nextEncIVop = 1;
- }
- }
- }
- else /* IntraPeriod>0 : IPPPPPIPPPPPI... */
- {
-
- /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
- if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
- video->currVop->predictionType = P_VOP;
-
- if (currLayer == 0)
- {
- if (video->nextEncIVop <= 0 || video->currVop->predictionType == I_VOP)
- {
- video->nextEncIVop = encParams->IntraPeriod;
- video->currVop->predictionType = I_VOP;
- video->numVopsInGOP = 0;
- }
- }
- }
-
- return ;
-}
-
-/* ======================================================================== */
-/* Function : UpdateSkipNextFrame */
-/* Date : 06/02/2001 */
-/* Purpose : From rate control frame skipping decision, update timing
- related parameters. */
-/* In/out : */
-/* Return : Current coded layer. */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status)
-{
- Int currLayer = video->currLayer;
- Int nLayer = currLayer;
- VideoEncParams *encParams = video->encParams;
- Int numLayers = encParams->nLayers;
- Vol *currVol = video->vol[currLayer];
- Vol **vol = video->vol;
- Int num_skip, extra_skip;
- Int i;
- UInt newRefTick, deltaModTime;
- UInt temp;
-
- if (encParams->RC_Type != CONSTANT_Q)
- {
- if (video->volInitialize[0] && currLayer == 0) /* always encode the first frame */
- {
- RC_ResetSkipNextFrame(video, currLayer);
- //return currLayer; 09/15/05
- }
- else
- {
- if (RC_GetSkipNextFrame(video, currLayer) < 0 || status == PV_END_OF_BUF) /* Skip Current Frame */
- {
-
-#ifdef _PRINT_STAT
- printf("Skip current frame");
-#endif
- currVol->moduloTimeBase = currVol->prevModuloTimeBase;
-
- /*********************/
- /* prepare to return */
- /*********************/
- *size = 0; /* Set Bitstream buffer to zero */
-
- /* Determine nLayer and modTime for next encode */
-
- *modTime = video->nextModTime;
- nLayer = -1;
-
- return nLayer; /* return immediately without updating RefTick & modTimeRef */
- /* If I-VOP was attempted, then ensure next base is I-VOP */
- /*if((encParams->IntraPeriod>0) && (video->currVop->predictionType == I_VOP))
- video->nextEncIVop = 0; commented out by 06/05/01 */
-
- }
- else if ((num_skip = RC_GetSkipNextFrame(video, currLayer)) > 0)
- {
-
-#ifdef _PRINT_STAT
- printf("Skip next %d frames", num_skip);
-#endif
- /* to keep the Nr of enh layer the same */
- /* adjust relLayerCodeTime only, do not adjust layerCodeTime[numLayers-1] */
- extra_skip = 0;
- for (i = 0; i < currLayer; i++)
- {
- if (video->relLayerCodeTime[i] <= 1000)
- {
- extra_skip = 1;
- break;
- }
- }
-
- for (i = currLayer; i < numLayers; i++)
- {
- video->relLayerCodeTime[i] += (num_skip + extra_skip) *
- ((Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]));
- }
- }
- }/* first frame */
- }
- /***** current frame is encoded, now update refTick ******/
-
- video->refTick[currLayer] += vol[currLayer]->prevModuloTimeBase * vol[currLayer]->timeIncrementResolution;
-
- /* Reset layerCodeTime every I-VOP to prevent overflow */
- if (currLayer == 0)
- {
- /* 12/12/02, fix for weird targer frame rate of 9.99 fps or 3.33 fps */
- if (((encParams->IntraPeriod != 0) /*&& (video->currVop->predictionType==I_VOP)*/) ||
- ((encParams->IntraPeriod == 0) && (video->numVopsInGOP == 0)))
- {
- newRefTick = video->refTick[0];
-
- for (i = 1; i < numLayers; i++)
- {
- if (video->refTick[i] < newRefTick)
- newRefTick = video->refTick[i];
- }
-
- /* check to make sure that the update is integer multiple of frame number */
- /* how many msec elapsed from last modTimeRef */
- deltaModTime = (newRefTick / vol[0]->timeIncrementResolution) * 1000;
-
- for (i = numLayers - 1; i >= 0; i--)
- {
- temp = (UInt)(deltaModTime * encParams->LayerFrameRate[i]); /* 12/12/02 */
- if (temp % 1000)
- newRefTick = 0;
-
- }
- if (newRefTick > 0)
- {
- video->modTimeRef += deltaModTime;
- for (i = numLayers - 1; i >= 0; i--)
- {
- video->prevFrameNum[i] -= (UInt)(deltaModTime * encParams->LayerFrameRate[i]) / 1000;
- video->refTick[i] -= newRefTick;
- }
- }
- }
- }
-
- *modTime = video->nextModTime;
-
- return nLayer;
-}
-
-
-#ifndef ORIGINAL_VERSION
-
-/* ======================================================================== */
-/* Function : SetProfile_BufferSize */
-/* Date : 04/08/2002 */
-/* Purpose : Set profile and video buffer size, copied from Jim's code */
-/* in PVInitVideoEncoder(.), since we have different places */
-/* to reset profile and video buffer size */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* */
-/* ======================================================================== */
-
-Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized)
-{
- Int i, j, start, end;
-// Int BaseMBsPerSec = 0, EnhMBsPerSec = 0;
- Int nTotalMB = 0;
- Int idx, temp_w, temp_h, max = 0, max_width, max_height;
-
- Int nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
-
- Int total_bitrate = 0, base_bitrate;
- Int total_packet_size = 0, base_packet_size;
- Int total_MBsPerSec = 0, base_MBsPerSec;
- Int total_VBV_size = 0, base_VBV_size, enhance_VBV_size = 0;
- float total_framerate, base_framerate;
- float upper_bound_ratio;
- Int bFound = 0;
- Int k = 0, width16, height16, index;
- Int lowest_level;
-
-#define MIN_BUFF 16000 /* 16k minimum buffer size */
-#define BUFF_CONST 2.0 /* 2000ms */
-#define UPPER_BOUND_RATIO 8.54 /* upper_bound = 1.4*(1.1+bound/10)*bitrate/framerate */
-
-#define QCIF_WIDTH 176
-#define QCIF_HEIGHT 144
-
- index = video->encParams->profile_table_index;
-
- /* Calculate "nTotalMB" */
- /* Find the maximum width*height for memory allocation of the VOPs */
- for (idx = 0; idx < nLayers; idx++)
- {
- temp_w = video->encParams->LayerWidth[idx];
- temp_h = video->encParams->LayerHeight[idx];
-
- if ((temp_w*temp_h) > max)
- {
- max = temp_w * temp_h;
- max_width = temp_w;
- max_height = temp_h;
- nTotalMB = ((max_width + 15) >> 4) * ((max_height + 15) >> 4);
- }
- }
- upper_bound_ratio = (video->encParams->RC_Type == CBR_LOWDELAY ? (float)5.0 : (float)UPPER_BOUND_RATIO);
-
-
- /* Get the basic information: bitrate, packet_size, MBs/s and VBV_size */
- base_bitrate = video->encParams->LayerBitRate[0];
- if (video->encParams->LayerMaxBitRate[0] != 0) /* video->encParams->LayerMaxBitRate[0] == 0 means it has not been set */
- {
- base_bitrate = PV_MAX(base_bitrate, video->encParams->LayerMaxBitRate[0]);
- }
- else /* if the max is not set, set it to the specified profile/level */
- {
- video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[index];
- }
-
- base_framerate = video->encParams->LayerFrameRate[0];
- if (video->encParams->LayerMaxFrameRate[0] != 0)
- {
- base_framerate = PV_MAX(base_framerate, video->encParams->LayerMaxFrameRate[0]);
- }
- else /* if the max is not set, set it to the specified profile/level */
- {
- video->encParams->LayerMaxFrameRate[0] = (float)profile_level_max_mbsPerSec[index] / nTotalMB;
- }
-
- base_packet_size = video->encParams->ResyncPacketsize;
- base_MBsPerSec = (Int)(base_framerate * nTotalMB);
- base_VBV_size = PV_MAX((Int)(base_bitrate * delay),
- (Int)(upper_bound_ratio * base_bitrate / base_framerate));
- base_VBV_size = PV_MAX(base_VBV_size, MIN_BUFF);
-
- /* if the buffer is larger than maximum buffer size, we'll clip it */
- if (base_VBV_size > profile_level_max_VBV_size[5])
- base_VBV_size = profile_level_max_VBV_size[5];
-
-
- /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
- if (nLayers == 1 && base_VBV_size > profile_level_max_VBV_size[index])
- return FALSE;
-
-
- if (nLayers == 2)
- {
- total_bitrate = video->encParams->LayerBitRate[1];
- if (video->encParams->LayerMaxBitRate[1] != 0)
- {
- total_bitrate = PV_MIN(total_bitrate, video->encParams->LayerMaxBitRate[1]);
- }
- else /* if the max is not set, set it to the specified profile/level */
- {
- video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[index];
- }
-
- total_framerate = video->encParams->LayerFrameRate[1];
- if (video->encParams->LayerMaxFrameRate[1] != 0)
- {
- total_framerate = PV_MIN(total_framerate, video->encParams->LayerMaxFrameRate[1]);
- }
- else /* if the max is not set, set it to the specified profile/level */
- {
- video->encParams->LayerMaxFrameRate[1] = (float)scalable_profile_level_max_mbsPerSec[index] / nTotalMB;
- }
-
- total_packet_size = video->encParams->ResyncPacketsize;
- total_MBsPerSec = (Int)(total_framerate * nTotalMB);
-
- enhance_VBV_size = PV_MAX((Int)((total_bitrate - base_bitrate) * delay),
- (Int)(upper_bound_ratio * (total_bitrate - base_bitrate) / (total_framerate - base_framerate)));
- enhance_VBV_size = PV_MAX(enhance_VBV_size, MIN_BUFF);
-
- total_VBV_size = base_VBV_size + enhance_VBV_size;
-
- /* if the buffer is larger than maximum buffer size, we'll clip it */
- if (total_VBV_size > scalable_profile_level_max_VBV_size[6])
- {
- total_VBV_size = scalable_profile_level_max_VBV_size[6];
- enhance_VBV_size = total_VBV_size - base_VBV_size;
- }
-
- /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
- if (total_VBV_size > scalable_profile_level_max_VBV_size[index])
- return FALSE;
- }
-
-
- if (!bInitialized) /* Has been initialized --> profile @ level has been figured out! */
- {
- video->encParams->BufferSize[0] = base_VBV_size;
- if (nLayers > 1)
- video->encParams->BufferSize[1] = enhance_VBV_size;
-
- return PV_TRUE;
- }
-
-
- /* Profile @ level determination */
- if (nLayers == 1)
- {
- /* BASE ONLY : Simple Profile(SP) Or Core Profile(CP) */
- if (base_bitrate > profile_level_max_bitrate[index] ||
- base_packet_size > profile_level_max_packet_size[index] ||
- base_MBsPerSec > profile_level_max_mbsPerSec[index] ||
- base_VBV_size > profile_level_max_VBV_size[index])
-
- return PV_FALSE; /* Beyond the bound of Core Profile @ Level2 */
-
- /* For H263/Short header, determine k*16384 */
- width16 = ((video->encParams->LayerWidth[0] + 15) >> 4) << 4;
- height16 = ((video->encParams->LayerHeight[0] + 15) >> 4) << 4;
- if (video->encParams->H263_Enabled)
- {
- k = 4;
- if (width16 == 2*QCIF_WIDTH && height16 == 2*QCIF_HEIGHT) /* CIF */
- k = 16;
-
- else if (width16 == 4*QCIF_WIDTH && height16 == 4*QCIF_HEIGHT) /* 4CIF */
- k = 32;
-
- else if (width16 == 8*QCIF_WIDTH && height16 == 8*QCIF_HEIGHT) /* 16CIF */
- k = 64;
-
- video->encParams->maxFrameSize = k * 16384;
-
- /* Make sure the buffer size is limited to the top profile and level: the Core profile and level 2 */
- if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[5]*1001.0 / 30000.0))
- base_VBV_size = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[5] * 1001.0 / 30000.0);
-
- if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[index]*1001.0 / 30000.0))
- return PV_FALSE;
- }
-
- /* Search the appropriate profile@level index */
- if (!video->encParams->H263_Enabled &&
- (video->encParams->IntraDCVlcThr != 0 || video->encParams->SearchRange > 16))
- {
- lowest_level = 1; /* cannot allow SPL0 */
- }
- else
- {
- lowest_level = 0; /* SPL0 */
- }
-
- for (i = lowest_level; i <= index; i++)
- {
- if (i != 4 && /* skip Core Profile@Level1 because the parameters in it are smaller than those in Simple Profile@Level3 */
- base_bitrate <= profile_level_max_bitrate[i] &&
- base_packet_size <= profile_level_max_packet_size[i] &&
- base_MBsPerSec <= profile_level_max_mbsPerSec[i] &&
- base_VBV_size <= (video->encParams->H263_Enabled ? (Int)(k*16384 + 4*(float)profile_level_max_bitrate[i]*1001.0 / 30000.0) :
- profile_level_max_VBV_size[i]))
- break;
- }
- if (i > index) return PV_FALSE; /* Nothing found!! */
-
- /* Found out the actual profile @ level : index "i" */
- if (i == 0)
- {
- /* For Simple Profile @ Level 0, we need to do one more check: image size <= QCIF */
- if (width16 > QCIF_WIDTH || height16 > QCIF_HEIGHT)
- i = 1; /* image size > QCIF, then set SP level1 */
- }
-
- video->encParams->ProfileLevel[0] = profile_level_code[i];
- video->encParams->BufferSize[0] = base_VBV_size;
-
- if (video->encParams->LayerMaxBitRate[0] == 0)
- video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[i];
-
- if (video->encParams->LayerMaxFrameRate[0] == 0)
- video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[i] / nTotalMB);
-
- /* For H263/Short header, one special constraint for VBV buffer size */
- if (video->encParams->H263_Enabled)
- video->encParams->BufferSize[0] = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[i] * 1001.0 / 30000.0);
-
- }
- else
- {
- /* SCALABALE MODE: Simple Scalable Profile(SSP) Or Core Scalable Profile(CSP) */
-
- if (total_bitrate > scalable_profile_level_max_bitrate[index] ||
- total_packet_size > scalable_profile_level_max_packet_size[index] ||
- total_MBsPerSec > scalable_profile_level_max_mbsPerSec[index] ||
- total_VBV_size > scalable_profile_level_max_VBV_size[index])
-
- return PV_FALSE; /* Beyond given profile and level */
-
- /* One-time check: Simple Scalable Profile or Core Scalable Profile */
- if (total_bitrate <= scalable_profile_level_max_bitrate[2] &&
- total_packet_size <= scalable_profile_level_max_packet_size[2] &&
- total_MBsPerSec <= scalable_profile_level_max_mbsPerSec[2] &&
- total_VBV_size <= scalable_profile_level_max_VBV_size[2])
-
- {
- start = 0;
- end = index;
- }
-
- else
- {
- start = 4;
- end = index;
- }
-
-
- /* Search the scalable profile */
- for (i = start; i <= end; i++)
- {
- if (total_bitrate <= scalable_profile_level_max_bitrate[i] &&
- total_packet_size <= scalable_profile_level_max_packet_size[i] &&
- total_MBsPerSec <= scalable_profile_level_max_mbsPerSec[i] &&
- total_VBV_size <= scalable_profile_level_max_VBV_size[i])
-
- break;
- }
- if (i > end) return PV_FALSE;
-
- /* Search the base profile */
- if (i == 0)
- {
- j = 0;
- bFound = 1;
- }
- else bFound = 0;
-
- for (j = start; !bFound && j <= i; j++)
- {
- if (base_bitrate <= profile_level_max_bitrate[j] &&
- base_packet_size <= profile_level_max_packet_size[j] &&
- base_MBsPerSec <= profile_level_max_mbsPerSec[j] &&
- base_VBV_size <= profile_level_max_VBV_size[j])
-
- {
- bFound = 1;
- break;
- }
- }
-
- if (!bFound) // && start == 4)
- return PV_FALSE; /* mis-match in the profiles between base layer and enhancement layer */
-
- /* j for base layer, i for enhancement layer */
- video->encParams->ProfileLevel[0] = profile_level_code[j];
- video->encParams->ProfileLevel[1] = scalable_profile_level_code[i];
- video->encParams->BufferSize[0] = base_VBV_size;
- video->encParams->BufferSize[1] = enhance_VBV_size;
-
- if (video->encParams->LayerMaxBitRate[0] == 0)
- video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[j];
-
- if (video->encParams->LayerMaxBitRate[1] == 0)
- video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[i];
-
- if (video->encParams->LayerMaxFrameRate[0] == 0)
- video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[j] / nTotalMB);
-
- if (video->encParams->LayerMaxFrameRate[1] == 0)
- video->encParams->LayerMaxFrameRate[1] = PV_MIN(30, (float)scalable_profile_level_max_mbsPerSec[i] / nTotalMB);
-
-
- } /* end of: if(nLayers == 1) */
-
-
- if (!video->encParams->H263_Enabled && (video->encParams->ProfileLevel[0] == 0x08)) /* SPL0 restriction*/
- {
- /* PV only allow frame-based rate control, no QP change from one MB to another
- if(video->encParams->ACDCPrediction == TRUE && MB-based rate control)
- return PV_FALSE */
- }
-
- return PV_TRUE;
-}
-
-#endif /* #ifndef ORIGINAL_VERSION */
-
-
-
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp b/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp
deleted file mode 100644
index aa79d37..0000000
--- a/media/libstagefright/codecs/m4v_h263/fuzzer/Android.bp
+++ /dev/null
@@ -1,60 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
- name: "mpeg4_dec_fuzzer",
- host_supported: true,
- srcs: [
- "mpeg4_h263_dec_fuzzer.cpp",
- ],
- static_libs: [
- "libstagefright_m4vh263dec",
- "liblog",
- ],
- cflags: [
- "-DOSCL_IMPORT_REF=",
- "-DMPEG4",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
-
-cc_fuzz {
- name: "h263_dec_fuzzer",
- host_supported: true,
- srcs: [
- "mpeg4_h263_dec_fuzzer.cpp",
- ],
- static_libs: [
- "libstagefright_m4vh263dec",
- "liblog",
- ],
- cflags: [
- "-DOSCL_IMPORT_REF=",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
diff --git a/media/libstagefright/codecs/m4v_h263/fuzzer/README.md b/media/libstagefright/codecs/m4v_h263/fuzzer/README.md
deleted file mode 100644
index c2a4f69..0000000
--- a/media/libstagefright/codecs/m4v_h263/fuzzer/README.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# Fuzzer for libstagefright_m4vh263dec decoder
-
-## Plugin Design Considerations
-The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
-codec and tries to achieve the following:
-
-##### Maximize code coverage
-Dict files (dictionary files) are created for MPEG4 and H263 to ensure that the required start
-bytes are present in every input file that goes to the fuzzer.
-This ensures that decoder does not reject any input file in the first check
-
-##### Maximize utilization of input data
-The plugin feeds the entire input data to the codec using a loop.
- * If the decode operation was successful, the input is advanced by the number of bytes consumed
- in the decode call.
- * If the decode operation was un-successful, the input is advanced by 1 byte so that the fuzzer
- can proceed to feed the next frame.
-
-This ensures that the plugin tolerates any kind of input (empty, huge, malformed, etc)
-and doesnt `exit()` on any input and thereby increasing the chance of identifying vulnerabilities.
-
-##### Other considerations
- * Two fuzzer binaries - mpeg4_dec_fuzzer and h263_dec_fuzzer are generated based on the presence
- of a flag - 'MPEG4'
- * The number of decode calls are kept to a maximum of 100 so that the fuzzer does not timeout.
-
-## Build
-
-This describes steps to build mpeg4_dec_fuzzer and h263_dec_fuzzer binary.
-
-### Android
-#### Steps to build
-Build the fuzzer
-```
- $ mm -j$(nproc) mpeg4_dec_fuzzer
- $ mm -j$(nproc) h263_dec_fuzzer
-```
-
-#### Steps to run
-Create a directory CORPUS_DIR and copy some MPEG4 or H263 files to that folder
-Push this directory to device.
-
-To run on device
-```
- $ adb sync data
- $ adb shell /data/fuzz/arm64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
- $ adb shell /data/fuzz/arm64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
-```
-To run on host
-```
- $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
- $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
-```
-
-## References:
- * http://llvm.org/docs/LibFuzzer.html
- * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index 316d63c..61b248b 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -1,88 +1,3 @@
-cc_library_static {
- name: "libstagefright_mp3dec",
- vendor_available: true,
- min_sdk_version: "29",
-
- host_supported:true,
- srcs: [
- "src/pvmp3_normalize.cpp",
- "src/pvmp3_alias_reduction.cpp",
- "src/pvmp3_crc.cpp",
- "src/pvmp3_decode_header.cpp",
- "src/pvmp3_decode_huff_cw.cpp",
- "src/pvmp3_getbits.cpp",
- "src/pvmp3_dequantize_sample.cpp",
- "src/pvmp3_framedecoder.cpp",
- "src/pvmp3_get_main_data_size.cpp",
- "src/pvmp3_get_side_info.cpp",
- "src/pvmp3_get_scale_factors.cpp",
- "src/pvmp3_mpeg2_get_scale_data.cpp",
- "src/pvmp3_mpeg2_get_scale_factors.cpp",
- "src/pvmp3_mpeg2_stereo_proc.cpp",
- "src/pvmp3_huffman_decoding.cpp",
- "src/pvmp3_huffman_parsing.cpp",
- "src/pvmp3_tables.cpp",
- "src/pvmp3_imdct_synth.cpp",
- "src/pvmp3_mdct_6.cpp",
- "src/pvmp3_dct_6.cpp",
- "src/pvmp3_poly_phase_synthesis.cpp",
- "src/pvmp3_equalizer.cpp",
- "src/pvmp3_seek_synch.cpp",
- "src/pvmp3_stereo_proc.cpp",
- "src/pvmp3_reorder.cpp",
-
- "src/pvmp3_polyphase_filter_window.cpp",
- "src/pvmp3_mdct_18.cpp",
- "src/pvmp3_dct_9.cpp",
- "src/pvmp3_dct_16.cpp",
- ],
-
- arch: {
- arm: {
- exclude_srcs: [
- "src/pvmp3_polyphase_filter_window.cpp",
- "src/pvmp3_mdct_18.cpp",
- "src/pvmp3_dct_9.cpp",
- "src/pvmp3_dct_16.cpp",
- ],
- srcs: [
- "src/asm/pvmp3_polyphase_filter_window_gcc.s",
- "src/asm/pvmp3_mdct_18_gcc.s",
- "src/asm/pvmp3_dct_9_gcc.s",
- "src/asm/pvmp3_dct_16_gcc.s",
- ],
-
- instruction_set: "arm",
- },
- },
-
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-
- include_dirs: ["frameworks/av/media/libstagefright/include"],
-
- export_include_dirs: [
- "include",
- "src",
- ],
-
- cflags: [
- "-DOSCL_UNUSED_ARG(x)=(void)(x)",
- "-Werror",
- ],
-
- target: {
- darwin: {
- enabled: false,
- },
- },
-}
-
-//###############################################################################
cc_library_shared {
name: "libstagefright_soft_mp3dec",
@@ -90,11 +5,6 @@
srcs: ["SoftMP3.cpp"],
- local_include_dirs: [
- "src",
- "include",
- ],
-
version_script: "exports.lds",
sanitize: {
@@ -107,34 +17,3 @@
static_libs: ["libstagefright_mp3dec"],
}
-//###############################################################################
-cc_test {
- name: "libstagefright_mp3dec_test",
- gtest: false,
-
- srcs: [
- "test/mp3dec_test.cpp",
- "test/mp3reader.cpp",
- ],
-
- cflags: ["-Wall", "-Werror"],
-
- local_include_dirs: [
- "src",
- "include",
- ],
-
- sanitize: {
- misc_undefined: [
- "signed-integer-overflow",
- ],
- cfi: true,
- },
-
- static_libs: [
- "libstagefright_mp3dec",
- "libsndfile",
- ],
-
- shared_libs: ["libaudioutils"],
-}
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 80083f7..15cde20 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -23,7 +23,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
-#include "include/pvmp3decoder_api.h"
+#include <pvmp3decoder_api.h>
namespace android {
@@ -307,6 +307,20 @@
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
mSawInputEos = true;
+ if (mIsFirst && !inHeader->nFilledLen) {
+ ALOGV("empty first EOS");
+ outHeader->nFilledLen = 0;
+ outHeader->nTimeStamp = inHeader->nTimeStamp;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mSignalledOutputEos = true;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ notifyFillBufferDone(outHeader);
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ notifyEmptyBufferDone(inHeader);
+ return;
+ }
}
mConfig->pInputBuffer =
diff --git a/media/libstagefright/codecs/mp3dec/TEST_MAPPING b/media/libstagefright/codecs/mp3dec/TEST_MAPPING
deleted file mode 100644
index b237d65..0000000
--- a/media/libstagefright/codecs/mp3dec/TEST_MAPPING
+++ /dev/null
@@ -1,7 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
-{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- // { "name": "Mp3DecoderTest"}
- ]
-}
diff --git a/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp b/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp
deleted file mode 100644
index 2f0eda7..0000000
--- a/media/libstagefright/codecs/mp3dec/fuzzer/Android.bp
+++ /dev/null
@@ -1,32 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
- name: "mp3_dec_fuzzer",
- host_supported: true,
-
- static_libs: [
- "libstagefright_mp3dec",
- ],
-
- srcs: [
- "mp3_dec_fuzzer.cpp",
- ],
-}
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_6.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_6.cpp
deleted file mode 100644
index 1f8018a..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_6.cpp
+++ /dev/null
@@ -1,152 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
- PacketVideo Corp.
- MP3 Decoder Library
-
- Filename: pvmp3_dct6.cpp
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-Input
- Int32 vec[] vector of 6 32-bit integers
-Returns
- Int32 vec[] dct computation in-place
-
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- Returns the dct of length 6 of the input vector
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include "pvmp3_audio_type_defs.h"
-#include "pv_mp3dec_fxd_op.h"
-#include "pvmp3_mdct_6.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-#define Qfmt30(a) (Int32)((a)*((Int32)1<<30) + ((a)>=0?0.5F:-0.5F))
-
-#define cos_pi_6 Qfmt30( 0.86602540378444f)
-#define cos_2_pi_6 Qfmt30( 0.5f)
-#define cos_7_pi_12 Qfmt30( -0.25881904510252f)
-#define cos_3_pi_12 Qfmt30( 0.70710678118655f)
-#define cos_11_pi_12 Qfmt30( -0.96592582628907f)
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-void pvmp3_dct_6(int32 vec[])
-{
-
- Int32 tmp0;
- Int32 tmp1;
- Int32 tmp2;
- Int32 tmp3;
- Int32 tmp4;
- Int32 tmp5;
-
-
- /* split input vector */
-
- tmp0 = vec[5] + vec[0];
- tmp5 = vec[5] - vec[0];
- tmp1 = vec[4] + vec[1];
- tmp4 = vec[4] - vec[1];
- tmp2 = vec[3] + vec[2];
- tmp3 = vec[3] - vec[2];
-
- vec[0] = tmp0 + tmp2 ;
- vec[2] = fxp_mul32_Q30(tmp0 - tmp2, cos_pi_6);
- vec[4] = (vec[0] >> 1) - tmp1;
- vec[0] += tmp1;
-
- tmp0 = fxp_mul32_Q30(tmp3, cos_7_pi_12);
- tmp0 = fxp_mac32_Q30(tmp4, -cos_3_pi_12, tmp0);
- vec[1] = fxp_mac32_Q30(tmp5, cos_11_pi_12, tmp0);
-
- vec[3] = fxp_mul32_Q30((tmp3 + tmp4 - tmp5), cos_3_pi_12);
- tmp0 = fxp_mul32_Q30(tmp3, cos_11_pi_12);
- tmp0 = fxp_mac32_Q30(tmp4, cos_3_pi_12, tmp0);
- vec[5] = fxp_mac32_Q30(tmp5, cos_7_pi_12, tmp0);
-
-}
-
-
-
-
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
deleted file mode 100644
index a5c7f5e..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ /dev/null
@@ -1,830 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
- PacketVideo Corp.
- MP3 Decoder Library
-
- Filename: pvmp3_framedecoder.cpp
-
- Functions:
- pvmp3_framedecoder
- pvmp3_InitDecoder
- pvmp3_resetDecoder
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-Input
- pExt = pointer to the external interface structure. See the file
- pvmp3decoder_api.h for a description of each field.
- Data type of pointer to a tPVMP3DecoderExternal
- structure.
-
- pMem = void pointer to hide the internal implementation of the library
- It is cast back to a tmp3dec_file structure. This structure
- contains information that needs to persist between calls to
- this function, or is too big to be placed on the stack, even
- though the data is only needed during execution of this function
- Data type void pointer, internally pointer to a tmp3dec_file
- structure.
-
-
- Outputs:
- status = ERROR condition. see structure ERROR_CODE
-
- Pointers and Buffers Modified:
- pMem contents are modified.
- pExt: (more detail in the file pvmp3decoder_api.h)
- inputBufferUsedLength - number of array elements used up by the stream.
- samplingRate - sampling rate in samples per sec
- bitRate - bit rate in bits per second, varies frame to frame.
-
-
-
-------------------------------------------------------------------------------
- FUNCTIONS DESCRIPTION
-
- pvmp3_framedecoder
- frame decoder library driver
- pvmp3_InitDecoder
- Decoder Initialization
- pvmp3_resetDecoder
- Reset Decoder
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
- [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
- ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-
-#include "pvmp3_framedecoder.h"
-#include "pvmp3_dec_defs.h"
-#include "pvmp3_poly_phase_synthesis.h"
-#include "pvmp3_tables.h"
-#include "pvmp3_imdct_synth.h"
-#include "pvmp3_alias_reduction.h"
-#include "pvmp3_reorder.h"
-#include "pvmp3_dequantize_sample.h"
-#include "pvmp3_stereo_proc.h"
-#include "pvmp3_mpeg2_stereo_proc.h"
-#include "pvmp3_get_side_info.h"
-#include "pvmp3_get_scale_factors.h"
-#include "pvmp3_mpeg2_get_scale_factors.h"
-#include "pvmp3_decode_header.h"
-#include "pvmp3_get_main_data_size.h"
-#include "s_tmp3dec_file.h"
-#include "pvmp3_getbits.h"
-#include "mp3_mem_funcs.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
- void *pMem)
-{
-
- ERROR_CODE errorCode = NO_DECODING_ERROR;
-
- int32 crc_error_count = 0;
- uint32 sent_crc = 0;
- uint32 computed_crc = 0;
-
- tmp3dec_chan *pChVars[CHAN];
- tmp3dec_file *pVars = (tmp3dec_file *)pMem;
-
- mp3Header info_data;
- mp3Header *info = &info_data;
-
- pVars->inputStream.pBuffer = pExt->pInputBuffer;
-
-
- pVars->inputStream.usedBits = pExt->inputBufferUsedLength << 3;
- pVars->inputStream.inputBufferCurrentLength = pExt->inputBufferCurrentLength;
-
-
- errorCode = pvmp3_decode_header(&pVars->inputStream,
- info,
- &computed_crc);
-
- if (errorCode != NO_DECODING_ERROR)
- {
- pExt->outputFrameSize = 0;
- return errorCode;
- }
-
- pVars->num_channels = (info->mode == MPG_MD_MONO) ? 1 : 2;
- pExt->num_channels = pVars->num_channels;
-
- int32 outputFrameSize = (info->version_x == MPEG_1) ?
- 2 * SUBBANDS_NUMBER * FILTERBANK_BANDS :
- SUBBANDS_NUMBER * FILTERBANK_BANDS;
-
- outputFrameSize = (info->mode == MPG_MD_MONO) ? outputFrameSize : outputFrameSize << 1;
-
-
- /*
- * Check if output buffer has enough room to hold output PCM
- */
- if (pExt->outputFrameSize >= outputFrameSize)
- {
- pExt->outputFrameSize = outputFrameSize;
- }
- else
- {
- pExt->outputFrameSize = 0;
- return OUTPUT_BUFFER_TOO_SMALL;
- }
-
-
- pChVars[ LEFT] = &pVars->perChan[ LEFT];
- pChVars[RIGHT] = &pVars->perChan[RIGHT];
-
-
-
-
- if (info->error_protection)
- {
- /*
- * Get crc content
- */
- sent_crc = getUpTo17bits(&pVars->inputStream, 16);
- }
-
-
- if (info->layer_description == 3)
- {
- int32 gr;
- int32 ch;
- uint32 main_data_end;
- int32 bytes_to_discard;
- int16 *ptrOutBuffer = pExt->pOutputBuffer;
-
- /*
- * Side Information must be extracted from the bitstream and store for use
- * during the decoded of the associated frame
- */
-
- errorCode = pvmp3_get_side_info(&pVars->inputStream,
- &pVars->sideInfo,
- info,
- &computed_crc);
-
- if (errorCode != NO_DECODING_ERROR)
- {
- pExt->outputFrameSize = 0;
- return errorCode;
- }
-
- /*
- * If CRC was sent, check that matches the one got while parsing data
- * disable crc if this is the desired mode
- */
- if (info->error_protection)
- {
- if ((computed_crc != sent_crc) && pExt->crcEnabled)
- {
- crc_error_count++;
- }
- }
-
- /*
- * main data (scalefactors, Huffman coded, etc,) are not necessarily located
- * adjacent to the side-info. Beginning of main data is located using
- * field "main_data_begin" of the current frame. The length does not include
- * header and side info.
- * "main_data_begin" points to the first bit of main data of a frame. It is a negative
- * offset in bytes from the first byte of the sync word
- * main_data_begin = 0 <===> main data start rigth after side info.
- */
-
- int32 temp = pvmp3_get_main_data_size(info, pVars);
-
-
- /*
- * Check if available data holds a full frame, if not flag an error
- */
-
- if ((uint32)pVars->predicted_frame_size > pVars->inputStream.inputBufferCurrentLength)
- {
- pExt->outputFrameSize = 0;
- return NO_ENOUGH_MAIN_DATA_ERROR;
- }
-
- /*
- * Fill in internal circular buffer
- */
- fillMainDataBuf(pVars, temp);
-
-
- main_data_end = pVars->mainDataStream.usedBits >> 3; /* in bytes */
- if ((main_data_end << 3) < pVars->mainDataStream.usedBits)
- {
- main_data_end++;
- pVars->mainDataStream.usedBits = main_data_end << 3;
- }
-
-
- // force signed computation; buffer sizes and offsets are all going to be
- // well within the constraints of 32-bit signed math.
- bytes_to_discard = pVars->frame_start
- - ((int32)pVars->sideInfo.main_data_begin)
- - ((int32)main_data_end);
-
-
- if (main_data_end > BUFSIZE) /* check overflow on the buffer */
- {
- pVars->frame_start -= BUFSIZE;
-
- pVars->mainDataStream.usedBits -= (BUFSIZE << 3);
- }
-
- pVars->frame_start += temp;
-
-
- if (bytes_to_discard < 0 || crc_error_count)
- {
- /*
- * Not enough data to decode, then we should avoid reading this
- * data ( getting/ignoring sido info and scale data)
- * Main data could be located in the previous frame, so an unaccounted
- * frame can cause incorrect processing
- * Just run the polyphase filter to "clean" the history buffer
- */
- errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
-
- /*
- * Clear the input to these filters
- */
-
- pv_memset((void*)pChVars[RIGHT]->work_buf_int32,
- 0,
- SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[RIGHT]->work_buf_int32[0]));
-
- pv_memset((void*)pChVars[LEFT]->work_buf_int32,
- 0,
- SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[LEFT]->work_buf_int32[0]));
-
- /* clear circular buffers, to avoid any glitch */
- pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
- 0,
- 480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
- pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
- 0,
- 480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
-
- pChVars[ LEFT]->used_freq_lines = 575;
- pChVars[RIGHT]->used_freq_lines = 575;
-
- }
- else
- {
- pVars->mainDataStream.usedBits += (bytes_to_discard << 3);
- }
-
- /*
- * if (fr_ps->header->version_x == MPEG_1), use 2 granules, otherwise just 1
- */
- for (gr = 0; gr < (1 + !(info->version_x)); gr++)
- {
- if (errorCode != NO_ENOUGH_MAIN_DATA_ERROR)
- {
- for (ch = 0; ch < pVars->num_channels; ch++)
- {
- int32 part2_start = pVars->mainDataStream.usedBits;
-
- if (info->version_x == MPEG_1)
- {
-
- pvmp3_get_scale_factors(&pVars->scaleFactors[ch],
- &pVars->sideInfo,
- gr,
- ch,
- &pVars->mainDataStream);
- }
- else
- {
- int32 * tmp = pVars->Scratch_mem;
- pvmp3_mpeg2_get_scale_factors(&pVars->scaleFactors[ch],
- &pVars->sideInfo,
- gr,
- ch,
- info,
- (uint32 *)tmp,
- &pVars->mainDataStream);
- }
-
- pChVars[ch]->used_freq_lines = pvmp3_huffman_parsing(pChVars[ch]->work_buf_int32,
- &pVars->sideInfo.ch[ch].gran[gr],
- pVars,
- part2_start,
- info);
-
-
- pvmp3_dequantize_sample(pChVars[ch]->work_buf_int32,
- &pVars->scaleFactors[ch],
- &pVars->sideInfo.ch[ch].gran[gr],
- pChVars[ch]->used_freq_lines,
- info);
-
-
-
-
- } /* for (ch=0; ch<stereo; ch++) */
-
- if (pVars->num_channels == 2)
- {
-
- int32 used_freq_lines = (pChVars[ LEFT]->used_freq_lines >
- pChVars[RIGHT]->used_freq_lines) ?
- pChVars[ LEFT]->used_freq_lines :
- pChVars[RIGHT]->used_freq_lines;
-
- pChVars[ LEFT]->used_freq_lines = used_freq_lines;
- pChVars[RIGHT]->used_freq_lines = used_freq_lines;
-
- if (info->version_x == MPEG_1)
- {
- pvmp3_stereo_proc(pChVars[ LEFT]->work_buf_int32,
- pChVars[RIGHT]->work_buf_int32,
- &pVars->scaleFactors[RIGHT],
- &pVars->sideInfo.ch[LEFT].gran[gr],
- used_freq_lines,
- info);
- }
- else
- {
- int32 * tmp = pVars->Scratch_mem;
- pvmp3_mpeg2_stereo_proc(pChVars[ LEFT]->work_buf_int32,
- pChVars[RIGHT]->work_buf_int32,
- &pVars->scaleFactors[RIGHT],
- &pVars->sideInfo.ch[ LEFT].gran[gr],
- &pVars->sideInfo.ch[RIGHT].gran[gr],
- (uint32 *)tmp,
- used_freq_lines,
- info);
- }
- }
-
- } /* if ( errorCode != NO_ENOUGH_MAIN_DATA_ERROR) */
-
- for (ch = 0; ch < pVars->num_channels; ch++)
- {
-
- pvmp3_reorder(pChVars[ch]->work_buf_int32,
- &pVars->sideInfo.ch[ch].gran[gr],
- &pChVars[ ch]->used_freq_lines,
- info,
- pVars->Scratch_mem);
-
- pvmp3_alias_reduction(pChVars[ch]->work_buf_int32,
- &pVars->sideInfo.ch[ch].gran[gr],
- &pChVars[ ch]->used_freq_lines,
- info);
-
-
- /*
- * IMDCT
- */
- /* set mxposition
- * In case of mixed blocks, # of bands with long
- * blocks (2 or 4) else 0
- */
- uint16 mixedBlocksLongBlocks = 0; /* 0 = long or short, 2=mixed, 4=mixed 2.5@8000 */
- if (pVars->sideInfo.ch[ch].gran[gr].mixed_block_flag &&
- pVars->sideInfo.ch[ch].gran[gr].window_switching_flag)
- {
- if ((info->version_x == MPEG_2_5) && (info->sampling_frequency == 2))
- {
- mixedBlocksLongBlocks = 4; /* mpeg2.5 @ 8 KHz */
- }
- else
- {
- mixedBlocksLongBlocks = 2;
- }
- }
-
- pvmp3_imdct_synth(pChVars[ch]->work_buf_int32,
- pChVars[ch]->overlap,
- pVars->sideInfo.ch[ch].gran[gr].block_type,
- mixedBlocksLongBlocks,
- pChVars[ ch]->used_freq_lines,
- pVars->Scratch_mem);
-
-
- /*
- * Polyphase synthesis
- */
-
- pvmp3_poly_phase_synthesis(pChVars[ch],
- pVars->num_channels,
- pExt->equalizerType,
- &ptrOutBuffer[ch]);
-
-
- }/* end ch loop */
-
- ptrOutBuffer += pVars->num_channels * SUBBANDS_NUMBER * FILTERBANK_BANDS;
- } /* for (gr=0;gr<Max_gr;gr++) */
-
- /* skip ancillary data */
- if (info->bitrate_index > 0)
- { /* if not free-format */
-
- int32 ancillary_data_lenght = pVars->predicted_frame_size << 3;
-
- ancillary_data_lenght -= pVars->inputStream.usedBits;
-
- /* skip ancillary data */
- if (ancillary_data_lenght > 0)
- {
- pVars->inputStream.usedBits += ancillary_data_lenght;
- }
-
- }
-
- /*
- * This overrides a possible NO_ENOUGH_MAIN_DATA_ERROR
- */
- errorCode = NO_DECODING_ERROR;
-
- }
- else
- {
- /*
- * The info on the header leads to an unsupported layer, more data
- * will not fix this, so this is a bad frame,
- */
-
- pExt->outputFrameSize = 0;
- return UNSUPPORTED_LAYER;
- }
-
- pExt->inputBufferUsedLength = pVars->inputStream.usedBits >> 3;
- pExt->totalNumberOfBitsUsed += pVars->inputStream.usedBits;
- pExt->version = info->version_x;
- pExt->samplingRate = mp3_s_freq[info->version_x][info->sampling_frequency];
- pExt->bitRate = mp3_bitrate[pExt->version][info->bitrate_index];
-
-
- /*
- * Always verify buffer overrun condition
- */
-
- if (pExt->inputBufferUsedLength > pExt->inputBufferCurrentLength)
- {
- pExt->outputFrameSize = 0;
- errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
- }
-
- return errorCode;
-
-}
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-__inline void fillDataBuf(tmp3Bits *pMainData,
- uint32 val) /* val to write into the buffer */
-{
- pMainData->pBuffer[module(pMainData->offset++, BUFSIZE)] = (uint8)val;
-}
-
-
-void fillMainDataBuf(void *pMem, int32 temp)
-{
- tmp3dec_file *pVars = (tmp3dec_file *)pMem;
-
-
- int32 offset = (pVars->inputStream.usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
- /*
- * Check if input circular buffer boundaries need to be enforced
- */
- if ((offset + temp) < BUFSIZE)
- {
- uint8 * ptr = pVars->inputStream.pBuffer + offset;
-
- offset = pVars->mainDataStream.offset;
-
- /*
- * Check if main data circular buffer boundaries need to be enforced
- */
- if ((offset + temp) < BUFSIZE)
- {
- pv_memcpy((pVars->mainDataStream.pBuffer + offset), ptr, temp*sizeof(uint8));
- pVars->mainDataStream.offset += temp;
- }
- else
- {
- int32 tmp1 = *(ptr++);
- for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--) /* read main data. */
- {
- int32 tmp2 = *(ptr++);
- fillDataBuf(&pVars->mainDataStream, tmp1);
- fillDataBuf(&pVars->mainDataStream, tmp2);
- tmp1 = *(ptr++);
- }
-
- if (temp&1)
- {
- fillDataBuf(&pVars->mainDataStream, tmp1);
- }
-
- /* adjust circular buffer counter */
- pVars->mainDataStream.offset = module(pVars->mainDataStream.offset, BUFSIZE);
- }
- }
- else
- {
- for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--) /* read main data. */
- {
- fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++ , BUFSIZE)));
- fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++ , BUFSIZE)));
- }
- if (temp&1)
- {
- fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset , BUFSIZE)));
- }
- }
-
-
- pVars->inputStream.usedBits += (temp) << INBUF_ARRAY_INDEX_SHIFT;
-}
-
-
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint32 pvmp3_decoderMemRequirements(void)
-{
- uint32 size;
-
- size = (uint32) sizeof(tmp3dec_file);
- return (size);
-}
-
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-#include "pvmp3_decode_huff_cw.h"
-
-void pvmp3_InitDecoder(tPVMP3DecoderExternal *pExt,
- void *pMem)
-{
-
- tmp3dec_file *pVars;
- huffcodetab *pHuff;
-
- pVars = (tmp3dec_file *)pMem;
- memset(pVars, 0, sizeof(*pVars));
-
- pExt->totalNumberOfBitsUsed = 0;
- pExt->inputBufferCurrentLength = 0;
- pExt->inputBufferUsedLength = 0;
-
- pVars->inputStream.pBuffer = pExt->pInputBuffer;
-
- /*
- * Initialize huffman decoding table
- */
-
- pHuff = pVars->ht;
- pHuff[0].linbits = 0;
- pHuff[0].pdec_huff_tab = pvmp3_decode_huff_cw_tab0;
- pHuff[1].linbits = 0;
- pHuff[1].pdec_huff_tab = pvmp3_decode_huff_cw_tab1;
- pHuff[2].linbits = 0;
- pHuff[2].pdec_huff_tab = pvmp3_decode_huff_cw_tab2;
- pHuff[3].linbits = 0;
- pHuff[3].pdec_huff_tab = pvmp3_decode_huff_cw_tab3;
- pHuff[4].linbits = 0;
- pHuff[4].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 4 is not used */
- pHuff[5].linbits = 4;
- pHuff[5].pdec_huff_tab = pvmp3_decode_huff_cw_tab5;
- pHuff[6].linbits = 0;
- pHuff[6].pdec_huff_tab = pvmp3_decode_huff_cw_tab6;
- pHuff[7].linbits = 0;
- pHuff[7].pdec_huff_tab = pvmp3_decode_huff_cw_tab7;
- pHuff[8].linbits = 0;
- pHuff[8].pdec_huff_tab = pvmp3_decode_huff_cw_tab8;
- pHuff[9].linbits = 0;
- pHuff[9].pdec_huff_tab = pvmp3_decode_huff_cw_tab9;
- pHuff[10].linbits = 0;
- pHuff[10].pdec_huff_tab = pvmp3_decode_huff_cw_tab10;
- pHuff[11].linbits = 0;
- pHuff[11].pdec_huff_tab = pvmp3_decode_huff_cw_tab11;
- pHuff[12].linbits = 0;
- pHuff[12].pdec_huff_tab = pvmp3_decode_huff_cw_tab12;
- pHuff[13].linbits = 0;
- pHuff[13].pdec_huff_tab = pvmp3_decode_huff_cw_tab13;
- pHuff[14].linbits = 0;
- pHuff[14].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 14 is not used */
- pHuff[15].linbits = 0;
- pHuff[15].pdec_huff_tab = pvmp3_decode_huff_cw_tab15;
- pHuff[16].linbits = 1;
- pHuff[16].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[17].linbits = 2;
- pHuff[17].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[18].linbits = 3;
- pHuff[18].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[19].linbits = 4;
- pHuff[19].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[20].linbits = 6;
- pHuff[20].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[21].linbits = 8;
- pHuff[21].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[22].linbits = 10;
- pHuff[22].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[23].linbits = 13;
- pHuff[23].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
- pHuff[24].linbits = 4;
- pHuff[24].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[25].linbits = 5;
- pHuff[25].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[26].linbits = 6;
- pHuff[26].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[27].linbits = 7;
- pHuff[27].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[28].linbits = 8;
- pHuff[28].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[29].linbits = 9;
- pHuff[29].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[30].linbits = 11;
- pHuff[30].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[31].linbits = 13;
- pHuff[31].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
- pHuff[32].linbits = 0;
- pHuff[32].pdec_huff_tab = pvmp3_decode_huff_cw_tab32;
- pHuff[33].linbits = 0;
- pHuff[33].pdec_huff_tab = pvmp3_decode_huff_cw_tab33;
-
- /*
- * Initialize polysynthesis circular buffer mechanism
- */
- /* clear buffers */
-
- pvmp3_resetDecoder(pMem);
-
-}
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-
-void pvmp3_resetDecoder(void *pMem)
-{
-
- tmp3dec_file *pVars;
- tmp3dec_chan *pChVars[CHAN];
-
- pVars = (tmp3dec_file *)pMem;
- pChVars[ LEFT] = &pVars->perChan[ LEFT];
- pChVars[RIGHT] = &pVars->perChan[RIGHT];
-
- pVars->frame_start = 0;
-
- pVars->mainDataStream.offset = 0;
-
- pVars->mainDataStream.pBuffer = pVars->mainDataBuffer;
- pVars->mainDataStream.usedBits = 0;
-
-
- pVars->inputStream.usedBits = 0; // in bits
-
-
- pChVars[ LEFT]->used_freq_lines = 575;
- pChVars[RIGHT]->used_freq_lines = 575;
-
-
- /*
- * Initialize polysynthesis circular buffer mechanism
- */
-
- pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
- 0,
- 480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
- pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
- 0,
- 480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
-
-
- pv_memset((void*)pChVars[ LEFT]->overlap,
- 0,
- SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ LEFT]->overlap[0]));
-
-
- pv_memset((void*)pChVars[ RIGHT]->overlap,
- 0,
- SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ RIGHT]->overlap[0]));
-
-
-
-
-
- /*
- * Clear all the structures
- */
-
-
- pv_memset((void*)&pVars->scaleFactors[RIGHT],
- 0,
- sizeof(mp3ScaleFactors));
-
- pv_memset((void*)&pVars->scaleFactors[LEFT],
- 0,
- sizeof(mp3ScaleFactors));
-
- pv_memset((void*)&pVars->sideInfo,
- 0,
- sizeof(mp3SideInfo));
-
- pv_memset((void*)&pVars->sideInfo,
- 0,
- sizeof(mp3SideInfo));
-
-}
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
deleted file mode 100644
index d644207..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
+++ /dev/null
@@ -1,285 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
- PacketVideo Corp.
- MP3 Decoder Library
-
- Filename: pvmp3_get_side_info.cpp
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-Input
- mp3SideInfo *si,
- mp3Header *info, mp3 header information
- uint32 *crc initialized crc value (if enabled)
-
-
- Returns
-
- mp3SideInfo *si, side information
-
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- acquires side information
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
- [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
- ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include "pvmp3_get_side_info.h"
-#include "pvmp3_crc.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-ERROR_CODE pvmp3_get_side_info(tmp3Bits *inputStream,
- mp3SideInfo *si,
- mp3Header *info,
- uint32 *crc)
-{
- int32 ch, gr;
- uint32 tmp;
-
- int stereo = (info->mode == MPG_MD_MONO) ? 1 : 2;
-
- if (info->version_x == MPEG_1)
- {
- if (stereo == 1)
- {
- tmp = getbits_crc(inputStream, 14, crc, info->error_protection);
- si->main_data_begin = (tmp << 18) >> 23; /* 9 */
- si->private_bits = (tmp << 27) >> 27; /* 5 */
- }
- else
- {
- tmp = getbits_crc(inputStream, 12, crc, info->error_protection);
- si->main_data_begin = (tmp << 20) >> 23; /* 9 */
- si->private_bits = (tmp << 29) >> 29; /* 3 */
-
- }
-
- for (ch = 0; ch < stereo; ch++)
- {
- tmp = getbits_crc(inputStream, 4, crc, info->error_protection);
- si->ch[ch].scfsi[0] = (tmp << 28) >> 31; /* 1 */
- si->ch[ch].scfsi[1] = (tmp << 29) >> 31; /* 1 */
- si->ch[ch].scfsi[2] = (tmp << 30) >> 31; /* 1 */
- si->ch[ch].scfsi[3] = tmp & 1; /* 1 */
- }
-
- for (gr = 0; gr < 2 ; gr++)
- {
- for (ch = 0; ch < stereo; ch++)
- {
- si->ch[ch].gran[gr].part2_3_length = getbits_crc(inputStream, 12, crc, info->error_protection);
- tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
-
- si->ch[ch].gran[gr].big_values = (tmp << 10) >> 23; /* 9 */
- si->ch[ch].gran[gr].global_gain = (int32)((tmp << 19) >> 24) - 210; /* 8 */
- si->ch[ch].gran[gr].scalefac_compress = (tmp << 27) >> 28; /* 4 */
- si->ch[ch].gran[gr].window_switching_flag = tmp & 1; /* 1 */
-
- if (si->ch[ch].gran[gr].window_switching_flag)
- {
- tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
-
- si->ch[ch].gran[gr].block_type = (tmp << 10) >> 30; /* 2 */;
- si->ch[ch].gran[gr].mixed_block_flag = (tmp << 12) >> 31; /* 1 */;
-
- si->ch[ch].gran[gr].table_select[0] = (tmp << 13) >> 27; /* 5 */;
- si->ch[ch].gran[gr].table_select[1] = (tmp << 18) >> 27; /* 5 */;
-
- si->ch[ch].gran[gr].subblock_gain[0] = (tmp << 23) >> 29; /* 3 */;
- si->ch[ch].gran[gr].subblock_gain[1] = (tmp << 26) >> 29; /* 3 */;
- si->ch[ch].gran[gr].subblock_gain[2] = (tmp << 29) >> 29; /* 3 */;
-
- /* Set region_count parameters since they are implicit in this case. */
-
- if (si->ch[ch].gran[gr].block_type == 0)
- {
- return(SIDE_INFO_ERROR);
- }
- else if ((si->ch[ch].gran[gr].block_type == 2)
- && (si->ch[ch].gran[gr].mixed_block_flag == 0))
- {
- si->ch[ch].gran[gr].region0_count = 8; /* MI 9; */
- si->ch[ch].gran[gr].region1_count = 12;
- }
- else
- {
- si->ch[ch].gran[gr].region0_count = 7; /* MI 8; */
- si->ch[ch].gran[gr].region1_count = 13;
- }
- }
- else
- {
- tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
-
- si->ch[ch].gran[gr].table_select[0] = (tmp << 10) >> 27; /* 5 */;
- si->ch[ch].gran[gr].table_select[1] = (tmp << 15) >> 27; /* 5 */;
- si->ch[ch].gran[gr].table_select[2] = (tmp << 20) >> 27; /* 5 */;
-
- si->ch[ch].gran[gr].region0_count = (tmp << 25) >> 28; /* 4 */;
- si->ch[ch].gran[gr].region1_count = (tmp << 29) >> 29; /* 3 */;
-
- si->ch[ch].gran[gr].block_type = 0;
- }
-
- tmp = getbits_crc(inputStream, 3, crc, info->error_protection);
- si->ch[ch].gran[gr].preflag = (tmp << 29) >> 31; /* 1 */
- si->ch[ch].gran[gr].scalefac_scale = (tmp << 30) >> 31; /* 1 */
- si->ch[ch].gran[gr].count1table_select = tmp & 1; /* 1 */
- }
- }
- }
- else /* Layer 3 LSF */
- {
- si->main_data_begin = getbits_crc(inputStream, 8, crc, info->error_protection);
- si->private_bits = getbits_crc(inputStream, stereo, crc, info->error_protection);
-
- for (ch = 0; ch < stereo; ch++)
- {
- tmp = getbits_crc(inputStream, 21, crc, info->error_protection);
- si->ch[ch].gran[0].part2_3_length = (tmp << 11) >> 20; /* 12 */
- si->ch[ch].gran[0].big_values = (tmp << 23) >> 23; /* 9 */
-
- tmp = getbits_crc(inputStream, 18, crc, info->error_protection);
- si->ch[ch].gran[0].global_gain = ((tmp << 14) >> 24) - 210; /* 8 */
- si->ch[ch].gran[0].scalefac_compress = (tmp << 22) >> 23; /* 9 */
- si->ch[ch].gran[0].window_switching_flag = tmp & 1; /* 1 */
-
- if (si->ch[ch].gran[0].window_switching_flag)
- {
-
- tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
-
- si->ch[ch].gran[0].block_type = (tmp << 10) >> 30; /* 2 */;
- si->ch[ch].gran[0].mixed_block_flag = (tmp << 12) >> 31; /* 1 */;
-
- si->ch[ch].gran[0].table_select[0] = (tmp << 13) >> 27; /* 5 */;
- si->ch[ch].gran[0].table_select[1] = (tmp << 18) >> 27; /* 5 */;
-
- si->ch[ch].gran[0].subblock_gain[0] = (tmp << 23) >> 29; /* 3 */;
- si->ch[ch].gran[0].subblock_gain[1] = (tmp << 26) >> 29; /* 3 */;
- si->ch[ch].gran[0].subblock_gain[2] = (tmp << 29) >> 29; /* 3 */;
-
- /* Set region_count parameters since they are implicit in this case. */
-
- if (si->ch[ch].gran[0].block_type == 0)
- {
- return(SIDE_INFO_ERROR);
- }
- else if ((si->ch[ch].gran[0].block_type == 2)
- && (si->ch[ch].gran[0].mixed_block_flag == 0))
- {
- si->ch[ch].gran[0].region0_count = 8; /* MI 9; */
- si->ch[ch].gran[0].region1_count = 12;
- }
- else
- {
- si->ch[ch].gran[0].region0_count = 7; /* MI 8; */
- si->ch[ch].gran[0].region1_count = 13;
- }
- }
- else
- {
- tmp = getbits_crc(inputStream, 22, crc, info->error_protection);
-
- si->ch[ch].gran[0].table_select[0] = (tmp << 10) >> 27; /* 5 */;
- si->ch[ch].gran[0].table_select[1] = (tmp << 15) >> 27; /* 5 */;
- si->ch[ch].gran[0].table_select[2] = (tmp << 20) >> 27; /* 5 */;
-
- si->ch[ch].gran[0].region0_count = (tmp << 25) >> 28; /* 4 */;
- si->ch[ch].gran[0].region1_count = (tmp << 29) >> 29; /* 3 */;
-
- si->ch[ch].gran[0].block_type = 0;
- }
-
- tmp = getbits_crc(inputStream, 2, crc, info->error_protection);
- si->ch[ch].gran[0].scalefac_scale = tmp >> 1; /* 1 */
- si->ch[ch].gran[0].count1table_select = tmp & 1; /* 1 */
-
- }
- }
- return (NO_DECODING_ERROR);
-}
-
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
deleted file mode 100644
index 8ff7953..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
+++ /dev/null
@@ -1,257 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
- PacketVideo Corp.
- MP3 Decoder Library
-
- Filename: pvmp3_getbits.cpp
-
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-
- tmp3Bits *inputStream, structure holding the input stream parameters
- int32 neededBits number of bits to read from the bit stream
-
- Outputs:
-
- word parsed from teh bitstream, with size neededBits-bits,
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
- [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
- ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
-
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "pvmp3_getbits.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint32 getNbits(tmp3Bits *ptBitStream,
- int32 neededBits) /* number of bits to read from the bitstream (up to 25) */
-{
-
- uint32 offset;
- uint32 bitIndex;
- uint8 Elem; /* Needs to be same type as pInput->pBuffer */
- uint8 Elem1;
- uint8 Elem2;
- uint8 Elem3;
- uint32 returnValue = 0;
-
- if (!neededBits)
- {
- return (returnValue);
- }
-
- offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
- Elem = *(ptBitStream->pBuffer + module(offset , BUFSIZE));
- Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
- Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
- Elem3 = *(ptBitStream->pBuffer + module(offset + 3, BUFSIZE));
-
-
- returnValue = (((uint32)(Elem)) << 24) |
- (((uint32)(Elem1)) << 16) |
- (((uint32)(Elem2)) << 8) |
- ((uint32)(Elem3));
-
- /* Remove extra high bits by shifting up */
- bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
- /* This line is faster than to mask off the high bits. */
- returnValue <<= bitIndex;
-
- /* Move the field down. */
- returnValue >>= (32 - neededBits);
-
- ptBitStream->usedBits += neededBits;
-
- return (returnValue);
-}
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint16 getUpTo9bits(tmp3Bits *ptBitStream,
- int32 neededBits) /* number of bits to read from the bit stream 2 to 9 */
-{
-
- uint32 offset;
- uint32 bitIndex;
- uint8 Elem; /* Needs to be same type as pInput->pBuffer */
- uint8 Elem1;
- uint16 returnValue;
-
- offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
- Elem = *(ptBitStream->pBuffer + module(offset , BUFSIZE));
- Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
-
-
- returnValue = (((uint16)(Elem)) << 8) |
- ((uint16)(Elem1));
-
- /* Remove extra high bits by shifting up */
- bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
- ptBitStream->usedBits += neededBits;
- /* This line is faster than to mask off the high bits. */
- returnValue = (returnValue << (bitIndex));
-
- /* Move the field down. */
-
- return (uint16)(returnValue >> (16 - neededBits));
-
-}
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint32 getUpTo17bits(tmp3Bits *ptBitStream,
- int32 neededBits) /* number of bits to read from the bit stream 2 to 8 */
-{
-
- uint32 offset;
- uint32 bitIndex;
- uint8 Elem; /* Needs to be same type as pInput->pBuffer */
- uint8 Elem1;
- uint8 Elem2;
- uint32 returnValue;
-
- offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
- Elem = *(ptBitStream->pBuffer + module(offset , BUFSIZE));
- Elem1 = *(ptBitStream->pBuffer + module(offset + 1, BUFSIZE));
- Elem2 = *(ptBitStream->pBuffer + module(offset + 2, BUFSIZE));
-
-
- returnValue = (((uint32)(Elem)) << 16) |
- (((uint32)(Elem1)) << 8) |
- ((uint32)(Elem2));
-
- /* Remove extra high bits by shifting up */
- bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
-
- ptBitStream->usedBits += neededBits;
- /* This line is faster than to mask off the high bits. */
- returnValue = 0xFFFFFF & (returnValue << (bitIndex));
-
- /* Move the field down. */
-
- return (uint32)(returnValue >> (24 - neededBits));
-
-}
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint8 get1bit(tmp3Bits *ptBitStream) /* number of bits to read from the bit stream */
-{
-
- uint32 offset;
- uint32 bitIndex;
- uint8 returnValue;
-
- offset = (ptBitStream->usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
- returnValue = *(ptBitStream->pBuffer + module(offset , BUFSIZE));
-
- /* Remove extra high bits by shifting up */
- bitIndex = module(ptBitStream->usedBits, INBUF_BIT_WIDTH);
- ptBitStream->usedBits++;
-
- /* This line is faster than to mask off the high bits. */
- returnValue = (returnValue << (bitIndex));
-
- return (uint8)(returnValue >> 7);
-
-}
-
-
-
-
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h b/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
deleted file mode 100644
index b058b00..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
+++ /dev/null
@@ -1,112 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- PacketVideo Corp.
- MP3 Decoder Library
-
- Filename: pvmp3_getbits.h
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef PVMP3_GETBITS_H
-#define PVMP3_GETBITS_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "pvmp3_dec_defs.h"
-#include "s_mp3bits.h"
-#include "pvmp3_audio_type_defs.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here.
-----------------------------------------------------------------------------*/
-#define INBUF_ARRAY_INDEX_SHIFT (3)
-#define INBUF_BIT_WIDTH (1<<(INBUF_ARRAY_INDEX_SHIFT))
-#define INBUF_BIT_MODULO_MASK ((INBUF_BIT_WIDTH)-1)
-
-
-/*----------------------------------------------------------------------------
-; EXTERNAL VARIABLES REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; SIMPLE TYPEDEF'S
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; ENUMERATED TYPEDEF'S
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; STRUCTURES TYPEDEF'S
-----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
- uint32 getNbits(tmp3Bits *pMainData,
- int32 neededBits);
-
- uint16 getUpTo9bits(tmp3Bits *pMainData,
- int32 neededBits);
-
- uint32 getUpTo17bits(tmp3Bits *pMainData,
- int32 neededBits);
-
- uint8 get1bit(tmp3Bits *pMainData);
-
-#ifdef __cplusplus
-}
-#endif
-
-/*----------------------------------------------------------------------------
-; GLOBAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; END
-----------------------------------------------------------------------------*/
-
-#endif
-
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.cpp
deleted file mode 100644
index 8d80e8f..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
- PacketVideo Corp.
- MP3 Decoder Library
- Filename: mdct_18.cpp
-
- Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-Input
- int32 vec[], input vector of length 6
- int32 *history input for overlap and add, vector updated with
- next overlap and add values
-Returns
- none mdct computation in-place
-
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- Returns the mdct of length 6 of the input vector, as well as the overlap
- vector for next iteration ( on history[])
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include "pv_mp3dec_fxd_op.h"
-#include "pvmp3_mdct_6.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-#define QFORMAT 29
-#define Qfmt29(a) (int32)((a)*((int32)1<<QFORMAT) + ((a)>=0?0.5F:-0.5F))
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-/*
- * (1./(2*cos((pi/(2*N))*(2*i+1)))), N = 12, i = [0:N/2-1]
- */
-
-const int32 cosTerms_1_ov_cos_phi_N6[6] =
-{
-
- Qfmt29(0.50431448029008f), Qfmt29(0.54119610014620f),
- Qfmt29(0.63023620700513f), Qfmt29(0.82133981585229f),
- Qfmt29(1.30656296487638f), Qfmt29(3.83064878777019f)
-};
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-
-void pvmp3_mdct_6(int32 vec[], int32 *history)
-{
- int32 i;
- int32 tmp;
- int32 tmp1;
- int32 tmp2;
-
- int32 *pt_vec = vec;
- int32 *pt_vec_o = vec;
- const int32 *pt_cos = cosTerms_1_ov_cos_phi_N6;
-
- for (i = 2; i != 0; i--)
- {
- tmp = *(pt_vec++);
- tmp1 = *(pt_vec++);
- tmp2 = *(pt_vec++);
- *(pt_vec_o++) = fxp_mul32_Q29(tmp, *(pt_cos++));
- *(pt_vec_o++) = fxp_mul32_Q29(tmp1, *(pt_cos++));
- *(pt_vec_o++) = fxp_mul32_Q29(tmp2, *(pt_cos++));
- }
-
-
- pvmp3_dct_6(vec); // Even terms
-
-
- tmp = -(vec[0] + vec[1]);
- history[3] = tmp;
- history[2] = tmp;
- tmp = -(vec[1] + vec[2]);
- vec[0] = vec[3] + vec[4];
- vec[1] = vec[4] + vec[5];
- history[4] = tmp;
- history[1] = tmp;
- tmp = -(vec[2] + vec[3]);
- vec[4] = -vec[1];
- history[5] = tmp;
- history[0] = tmp;
-
- vec[2] = vec[5];
- vec[3] = -vec[5];
- vec[5] = -vec[0];
-
-}
-
diff --git a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml b/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
deleted file mode 100644
index 7ff9732..0000000
--- a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2020 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<configuration description="Test module config for Mp3 Decoder unit test">
- <option name="test-suite-tag" value="Mp3DecoderTest" />
- <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
- <option name="cleanup" value="true" />
- <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
- <option name="push-file"
- key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest.zip?unzip=true"
- value="/data/local/tmp/Mp3DecoderTestRes/" />
- </target_preparer>
-
- <test class="com.android.tradefed.testtype.GTest" >
- <option name="native-test-device-path" value="/data/local/tmp" />
- <option name="module-name" value="Mp3DecoderTest" />
- <option name="native-test-flag" value="-P /data/local/tmp/Mp3DecoderTestRes/" />
- </test>
-</configuration>
diff --git a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
deleted file mode 100644
index 99553ec..0000000
--- a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Mp3DecoderTest"
-
-#include <utils/Log.h>
-
-#include <audio_utils/sndfile.h>
-#include <stdio.h>
-
-#include "mp3reader.h"
-#include "pvmp3decoder_api.h"
-
-#include "Mp3DecoderTestEnvironment.h"
-
-#define OUTPUT_FILE "/data/local/tmp/mp3Decode.out"
-
-constexpr int32_t kInputBufferSize = 1024 * 10;
-constexpr int32_t kOutputBufferSize = 4608 * 2;
-constexpr int32_t kMaxCount = 10;
-constexpr int32_t kNumFrameReset = 150;
-
-static Mp3DecoderTestEnvironment *gEnv = nullptr;
-
-class Mp3DecoderTest : public ::testing::TestWithParam<string> {
- public:
- Mp3DecoderTest() : mConfig(nullptr) {}
-
- ~Mp3DecoderTest() {
- if (mConfig) {
- delete mConfig;
- mConfig = nullptr;
- }
- }
-
- virtual void SetUp() override {
- mConfig = new tPVMP3DecoderExternal{};
- ASSERT_NE(mConfig, nullptr) << "Failed to initialize config. No Memory available";
- mConfig->equalizerType = flat;
- mConfig->crcEnabled = false;
- }
-
- tPVMP3DecoderExternal *mConfig;
- Mp3Reader mMp3Reader;
-
- ERROR_CODE DecodeFrames(void *decoderbuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
- int32_t frameCount = INT32_MAX);
- SNDFILE *openOutputFile(SF_INFO *sfInfo);
-};
-
-ERROR_CODE Mp3DecoderTest::DecodeFrames(void *decoderBuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
- int32_t frameCount) {
- uint8_t inputBuf[kInputBufferSize];
- int16_t outputBuf[kOutputBufferSize];
- uint32_t bytesRead;
- ERROR_CODE decoderErr;
- while (frameCount > 0) {
- bool success = mMp3Reader.getFrame(inputBuf, &bytesRead);
- if (!success) {
- break;
- }
- mConfig->inputBufferCurrentLength = bytesRead;
- mConfig->inputBufferMaxLength = 0;
- mConfig->inputBufferUsedLength = 0;
- mConfig->pInputBuffer = inputBuf;
- mConfig->pOutputBuffer = outputBuf;
- mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
- decoderErr = pvmp3_framedecoder(mConfig, decoderBuf);
- if (decoderErr != NO_DECODING_ERROR) break;
- sf_writef_short(outFileHandle, outputBuf, mConfig->outputFrameSize / sfInfo.channels);
- frameCount--;
- }
- return decoderErr;
-}
-
-SNDFILE *Mp3DecoderTest::openOutputFile(SF_INFO *sfInfo) {
- memset(sfInfo, 0, sizeof(SF_INFO));
- sfInfo->channels = mMp3Reader.getNumChannels();
- sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
- sfInfo->samplerate = mMp3Reader.getSampleRate();
- SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
- return outFileHandle;
-}
-
-TEST_F(Mp3DecoderTest, MultiCreateMp3DecoderTest) {
- size_t memRequirements = pvmp3_decoderMemRequirements();
- ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
- void *decoderBuf = malloc(memRequirements);
- ASSERT_NE(decoderBuf, nullptr)
- << "Failed to allocate decoder memory of size " << memRequirements;
- for (int count = 0; count < kMaxCount; count++) {
- pvmp3_InitDecoder(mConfig, decoderBuf);
- ALOGV("Decoder created successfully");
- }
- if (decoderBuf) {
- free(decoderBuf);
- decoderBuf = nullptr;
- }
-}
-
-TEST_P(Mp3DecoderTest, DecodeTest) {
- size_t memRequirements = pvmp3_decoderMemRequirements();
- ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
- void *decoderBuf = malloc(memRequirements);
- ASSERT_NE(decoderBuf, nullptr)
- << "Failed to allocate decoder memory of size " << memRequirements;
-
- pvmp3_InitDecoder(mConfig, decoderBuf);
- ALOGV("Decoder created successfully");
- string inputFile = gEnv->getRes() + GetParam();
- bool status = mMp3Reader.init(inputFile.c_str());
- ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
-
- // Open the output file.
- SF_INFO sfInfo;
- SNDFILE *outFileHandle = openOutputFile(&sfInfo);
- ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
-
- ERROR_CODE decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
- ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
- ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
- ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
- mMp3Reader.close();
- sf_close(outFileHandle);
- if (decoderBuf) {
- free(decoderBuf);
- decoderBuf = nullptr;
- }
-}
-
-TEST_P(Mp3DecoderTest, ResetDecoderTest) {
- size_t memRequirements = pvmp3_decoderMemRequirements();
- ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
- void *decoderBuf = malloc(memRequirements);
- ASSERT_NE(decoderBuf, nullptr)
- << "Failed to allocate decoder memory of size " << memRequirements;
-
- pvmp3_InitDecoder(mConfig, decoderBuf);
- ALOGV("Decoder created successfully.");
- string inputFile = gEnv->getRes() + GetParam();
- bool status = mMp3Reader.init(inputFile.c_str());
- ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
-
- // Open the output file.
- SF_INFO sfInfo;
- SNDFILE *outFileHandle = openOutputFile(&sfInfo);
- ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
-
- ERROR_CODE decoderErr;
- decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo, kNumFrameReset);
- ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
- ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
- ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
- pvmp3_resetDecoder(decoderBuf);
- // Decode the same file.
- decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
- ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
- ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
- ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
- mMp3Reader.close();
- sf_close(outFileHandle);
- if (decoderBuf) {
- free(decoderBuf);
- decoderBuf = nullptr;
- }
-}
-
-INSTANTIATE_TEST_SUITE_P(Mp3DecoderTestAll, Mp3DecoderTest,
- ::testing::Values(("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3"),
- ("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3"),
- ("bbb_mp3_stereo_192kbps_48000hz.mp3")));
-
-int main(int argc, char **argv) {
- gEnv = new Mp3DecoderTestEnvironment();
- ::testing::AddGlobalTestEnvironment(gEnv);
- ::testing::InitGoogleTest(&argc, argv);
- int status = gEnv->initFromOptions(argc, argv);
- if (status == 0) {
- status = RUN_ALL_TESTS();
- ALOGV("Test result = %d\n", status);
- }
- return status;
-}
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index 4f61aa8..5bb1879 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -58,6 +58,8 @@
mInputBufferCount(0),
mDecoder(NULL),
mHeader(NULL),
+ mNumChannels(1),
+ mSamplingRate(kRate),
mCodecDelay(0),
mSeekPreRoll(0),
mAnchorTimeUs(0),
@@ -169,11 +171,11 @@
}
opusParams->nAudioBandWidth = 0;
- opusParams->nSampleRate = kRate;
+ opusParams->nSampleRate = mSamplingRate;
opusParams->nBitRate = 0;
if (!isConfigured()) {
- opusParams->nChannels = 1;
+ opusParams->nChannels = mNumChannels;
} else {
opusParams->nChannels = mHeader->channels;
}
@@ -274,7 +276,8 @@
if (opusParams->nPortIndex != 0) {
return OMX_ErrorUndefined;
}
-
+ mNumChannels = opusParams->nChannels;
+ mSamplingRate = opusParams->nSampleRate;
return OMX_ErrorNone;
}
@@ -496,6 +499,8 @@
*(reinterpret_cast<int64_t*>(inHeader->pBuffer +
inHeader->nOffset)),
kRate);
+ mSamplingRate = kRate;
+ mNumChannels = mHeader->channels;
notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
mOutputPortSettingsChange = AWAITING_DISABLED;
}
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index 91cafa1..00058c8 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -70,6 +70,8 @@
OpusMSDecoder *mDecoder;
OpusHeader *mHeader;
+ int32_t mNumChannels;
+ int32_t mSamplingRate;
int64_t mCodecDelay;
int64_t mSeekPreRoll;
int64_t mSamplesToDiscard;
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 32b2075..b63353c 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -2,6 +2,7 @@
name: "libstagefright_flacdec",
vendor_available: true,
min_sdk_version: "29",
+ host_supported: true,
srcs: [
"FLACDecoder.cpp",
@@ -33,6 +34,13 @@
],
header_libs: [
- "libmedia_headers",
+ "libstagefright_foundation_headers",
+ "libstagefright_headers",
],
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
}
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index cef0bc6..f5e9532 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -433,7 +433,7 @@
if (mBuffer == nullptr) {
mBufferDataSize = 0;
mBufferLen = 0;
- ALOGE("decodeOneFrame: failed to allocate memory for input buffer");
+ ALOGE("addDataToBuffer: failed to allocate memory for input buffer");
return NO_MEMORY;
}
mBufferLen = mBufferDataSize + inBufferLen;
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 8a7c3eb..0a4e598 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -166,7 +166,7 @@
}
s.append("\n");
}
- write(fd, s.string(), s.size());
+ (void)write(fd, s.string(), s.size());
}
} // namespace android
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 7752bda..f242b19 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -33,7 +33,7 @@
#include <media/stagefright/foundation/hexdump.h>
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -646,7 +646,7 @@
return s;
}
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
// static
sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
int32_t what = parcel.readInt32();
@@ -813,7 +813,7 @@
}
}
}
-#endif // __ANDROID_VNDK__
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
sp<AMessage> AMessage::changesFrom(const sp<const AMessage> &other, bool deep) const {
if (other == NULL) {
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index 4bd186c..b1ed077 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -27,7 +27,7 @@
#include "ADebug.h"
#include "AString.h"
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -365,7 +365,7 @@
return !strcasecmp(mData + mSize - suffixLen, suffix);
}
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
// static
AString AString::FromParcel(const Parcel &parcel) {
size_t size = static_cast<size_t>(parcel.readInt32());
@@ -380,17 +380,21 @@
}
return err;
}
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
AString AStringPrintf(const char *format, ...) {
va_list ap;
va_start(ap, format);
char *buffer;
- vasprintf(&buffer, format, ap);
+ int bufferSize = vasprintf(&buffer, format, ap);
va_end(ap);
+ if(bufferSize < 0) {
+ return AString();
+ }
+
AString result(buffer);
free(buffer);
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 682758a..39670a2 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -3,6 +3,7 @@
export_include_dirs: ["include"],
vendor_available: true,
host_supported: true,
+ min_sdk_version: "29",
}
cc_defaults {
@@ -11,6 +12,7 @@
vndk: {
enabled: true,
},
+ host_supported: true,
double_loadable: true,
include_dirs: [
"frameworks/av/include",
@@ -84,6 +86,14 @@
"-DNO_IMEMORY",
],
},
+ apex: {
+ exclude_shared_libs: [
+ "libbinder",
+ ],
+ },
+ darwin: {
+ enabled: false,
+ },
},
clang: true,
@@ -100,11 +110,13 @@
cc_library {
name: "libstagefright_foundation",
defaults: ["libstagefright_foundation_defaults"],
+ min_sdk_version: "29",
}
cc_library_static {
name: "libstagefright_foundation_without_imemory",
defaults: ["libstagefright_foundation_defaults"],
+ min_sdk_version: "29",
cflags: [
"-Wno-multichar",
diff --git a/media/libstagefright/foundation/MediaBuffer.cpp b/media/libstagefright/foundation/MediaBuffer.cpp
index 8e245dc..68df21f 100644
--- a/media/libstagefright/foundation/MediaBuffer.cpp
+++ b/media/libstagefright/foundation/MediaBuffer.cpp
@@ -51,12 +51,12 @@
mRangeLength(size),
mOwnsData(true),
mMetaData(new MetaDataBase) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
if (size < kSharedMemThreshold
|| std::atomic_load_explicit(&mUseSharedMemory, std::memory_order_seq_cst) == 0) {
#endif
mData = malloc(size);
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
} else {
ALOGV("creating memoryDealer");
size_t newSize = 0;
diff --git a/media/libstagefright/foundation/MediaBufferGroup.cpp b/media/libstagefright/foundation/MediaBufferGroup.cpp
index 3c25047..fc98f28 100644
--- a/media/libstagefright/foundation/MediaBufferGroup.cpp
+++ b/media/libstagefright/foundation/MediaBufferGroup.cpp
@@ -62,7 +62,7 @@
mInternal->mGrowthLimit = buffers;
}
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
if (buffer_size >= kSharedMemoryThreshold) {
ALOGD("creating MemoryDealer");
// Using a single MemoryDealer is efficient for a group of shared memory objects.
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index a08fed1..c216bc5 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -20,6 +20,7 @@
const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC = "image/vnd.android.heic";
+const char *MEDIA_MIMETYPE_IMAGE_AVIF = "image/avif";
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index 8174597..7f48cfd 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MetaData.h>
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -48,7 +48,7 @@
MetaData::~MetaData() {
}
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
/* static */
sp<MetaData> MetaData::createFromParcel(const Parcel &parcel) {
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/libstagefright/foundation/MetaDataBase.cpp
index 4b439c6..3f050ea 100644
--- a/media/libstagefright/foundation/MetaDataBase.cpp
+++ b/media/libstagefright/foundation/MetaDataBase.cpp
@@ -28,7 +28,7 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MetaDataBase.h>
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
#include <binder/Parcel.h>
#endif
@@ -452,7 +452,7 @@
}
}
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
status_t MetaDataBase::writeToParcel(Parcel &parcel) {
status_t ret;
size_t numItems = mInternalData->mItems.size();
@@ -532,7 +532,7 @@
ALOGW("no metadata in parcel");
return UNKNOWN_ERROR;
}
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
} // namespace android
diff --git a/media/libstagefright/foundation/TEST_MAPPING b/media/libstagefright/foundation/TEST_MAPPING
index 0d6a6da..a70c352 100644
--- a/media/libstagefright/foundation/TEST_MAPPING
+++ b/media/libstagefright/foundation/TEST_MAPPING
@@ -1,9 +1,13 @@
// mappings for frameworks/av/media/libstagefright/foundation
{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- //{ "name": "OpusHeaderTest" },
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "OpusHeaderTest" }
+ ],
+ "presubmit": [
{ "name": "sf_foundation_test" },
{ "name": "MetaDataBaseUnitTest"}
]
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
index b5d6666..31e58ba 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
@@ -63,7 +63,7 @@
AMessage();
AMessage(uint32_t what, const sp<const AHandler> &handler);
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
// Construct an AMessage from a parcel.
// nestingAllowed determines how many levels AMessage can be nested inside
// AMessage. The default value here is arbitrarily set to 255.
@@ -88,7 +88,7 @@
// All items in the AMessage must have types that are recognized by
// FromParcel(); otherwise, TRESPASS error will occur.
void writeToParcel(Parcel *parcel) const;
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
void setWhat(uint32_t what);
uint32_t what() const;
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
index deef0d4..517774b 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
@@ -89,7 +89,7 @@
void tolower();
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
static AString FromParcel(const Parcel &parcel);
status_t writeToParcel(Parcel *parcel) const;
#endif
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 1f9e636..e96243e 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -22,6 +22,7 @@
extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
extern const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC;
+extern const char *MEDIA_MIMETYPE_IMAGE_AVIF;
extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
diff --git a/media/libstagefright/foundation/tests/OpusHeader/Android.bp b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
index c1251a8..ed3298c 100644
--- a/media/libstagefright/foundation/tests/OpusHeader/Android.bp
+++ b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
@@ -16,6 +16,7 @@
cc_test {
name: "OpusHeaderTest",
+ test_suites: ["device-tests"],
gtest: true,
srcs: [
diff --git a/media/libstagefright/foundation/tests/TypeTraits_test.cpp b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
index 1e2049d..d5383d1 100644
--- a/media/libstagefright/foundation/tests/TypeTraits_test.cpp
+++ b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
@@ -30,7 +30,7 @@
enum IA : int32_t { };
};
-// =========== basic sanity tests for type-support templates
+// =========== basic tests for type-support templates
TEST_F(TypeTraitsTest, StaticTests) {
// ============ is_integral_or_enum
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
new file mode 100644
index 0000000..d77f405
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "ColorUtilsTest",
+ gtest: true,
+
+ srcs: [
+ "ColorUtilsTest.cpp",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libutils",
+ "libmediandk",
+ ],
+
+ static_libs: [
+ "libstagefright_foundation",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
new file mode 100644
index 0000000..0d802b4
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
@@ -0,0 +1,773 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtilsTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+const size_t kHDRBufferSize = 25;
+const uint16_t kHDRInfoTestValue1 = 420;
+const uint16_t kHDRInfoTestValue2 = 42069;
+
+using namespace android;
+
+typedef ColorAspects CA;
+
+class ColorRangeTest : public ::testing::TestWithParam</* ColorRange */ CA::Range> {
+ public:
+ ColorRangeTest() { mRange = GetParam(); };
+
+ CA::Range mRange;
+};
+
+class ColorTransferTest : public ::testing::TestWithParam</* ColorTransfer */ CA::Transfer> {
+ public:
+ ColorTransferTest() { mTransfer = GetParam(); };
+
+ CA::Transfer mTransfer;
+};
+
+class ColorStandardTest : public ::testing::TestWithParam<std::pair<
+ /* Primaries */ CA::Primaries,
+ /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+ public:
+ ColorStandardTest() {
+ mPrimaries = GetParam().first;
+ mMatrixCoeffs = GetParam().second;
+ };
+
+ CA::Primaries mPrimaries;
+ CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class IsoToPlatformAspectsTest : public ::testing::TestWithParam<std::tuple<
+ /* Primaries */ CA::Primaries,
+ /* Transfer */ CA::Transfer,
+ /* MatrixCoeffs */ CA::MatrixCoeffs,
+ /* Standard */ int32_t,
+ /* Transfer */ int32_t>> {
+ public:
+ IsoToPlatformAspectsTest() {
+ mPrimaries = std::get<0>(GetParam());
+ mTransfer = std::get<1>(GetParam());
+ mMatrixCoeffs = std::get<2>(GetParam());
+ mPlatformStandard = std::get<3>(GetParam());
+ mPlatformTransfer = std::get<4>(GetParam());
+ };
+
+ CA::Primaries mPrimaries;
+ CA::Transfer mTransfer;
+ CA::MatrixCoeffs mMatrixCoeffs;
+ int32_t mPlatformStandard;
+ int32_t mPlatformTransfer;
+};
+
+class ColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+ /* Primaries */ CA::Primaries,
+ /* ColorTransfer */ CA::Transfer,
+ /* MatrixCoeffs */ CA::MatrixCoeffs,
+ /* ColorRange */ CA::Range,
+ /* ColorStandard */ CA::Standard>> {
+ public:
+ ColorAspectsTest() {
+ mPrimaries = std::get<0>(GetParam());
+ mTransfer = std::get<1>(GetParam());
+ mMatrixCoeffs = std::get<2>(GetParam());
+ mRange = std::get<3>(GetParam());
+ mStandard = std::get<4>(GetParam());
+ };
+
+ CA::Primaries mPrimaries;
+ CA::Transfer mTransfer;
+ CA::MatrixCoeffs mMatrixCoeffs;
+ CA::Range mRange;
+ CA::Standard mStandard;
+};
+
+class DefaultColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+ /* Width */ int32_t,
+ /* Height */ int32_t,
+ /* Primaries */ CA::Primaries,
+ /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+ public:
+ DefaultColorAspectsTest() {
+ mWidth = std::get<0>(GetParam());
+ mHeight = std::get<1>(GetParam());
+ mPrimaries = std::get<2>(GetParam());
+ mMatrixCoeffs = std::get<3>(GetParam());
+ };
+
+ int32_t mWidth;
+ int32_t mHeight;
+ CA::Primaries mPrimaries;
+ CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class DataSpaceTest : public ::testing::TestWithParam<std::tuple<
+ /* ColorRange */ CA::Range,
+ /* Primaries */ CA::Primaries,
+ /* ColorTransfer */ CA::Transfer,
+ /* MatrixCoeffs */ CA::MatrixCoeffs,
+ /* v0_android_dataspace */ android_dataspace,
+ /* android_dataspace */ android_dataspace>> {
+ public:
+ DataSpaceTest() {
+ mRange = std::get<0>(GetParam());
+ mPrimaries = std::get<1>(GetParam());
+ mTransfer = std::get<2>(GetParam());
+ mMatrixCoeffs = std::get<3>(GetParam());
+ mDataSpaceV0 = std::get<4>(GetParam());
+ mDataSpace = std::get<5>(GetParam());
+ };
+
+ CA::Range mRange;
+ CA::Primaries mPrimaries;
+ CA::Transfer mTransfer;
+ CA::MatrixCoeffs mMatrixCoeffs;
+ android_dataspace mDataSpaceV0;
+ android_dataspace mDataSpace;
+};
+
+TEST_P(ColorRangeTest, WrapColorRangeTest) {
+ int32_t range = ColorUtils::wrapColorAspectsIntoColorRange(mRange);
+ CA::Range unwrappedRange;
+ status_t status = ColorUtils::unwrapColorAspectsFromColorRange(range, &unwrappedRange);
+ ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorRange failed";
+ EXPECT_EQ(unwrappedRange, mRange) << "Returned ColorRange doesn't match";
+ ALOGV("toString test: Range: %s", asString(mRange, "default"));
+}
+
+TEST_P(ColorTransferTest, WrapColorTransferTest) {
+ int32_t transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(mTransfer);
+ CA::Transfer unwrappedTransfer;
+ status_t status = ColorUtils::unwrapColorAspectsFromColorTransfer(transfer, &unwrappedTransfer);
+ ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorTransfer failed";
+ EXPECT_EQ(unwrappedTransfer, mTransfer) << "Returned ColorTransfer doesn't match";
+ ALOGV("toString test: Transfer: %s", asString(mTransfer, "default"));
+}
+
+TEST_P(ColorStandardTest, WrapColorStandardTest) {
+ int32_t standard = ColorUtils::wrapColorAspectsIntoColorStandard(mPrimaries, mMatrixCoeffs);
+ CA::Primaries unwrappedPrimaries;
+ CA::MatrixCoeffs unwrappedMatrixCoeffs;
+ status_t status = ColorUtils::unwrapColorAspectsFromColorStandard(standard, &unwrappedPrimaries,
+ &unwrappedMatrixCoeffs);
+ ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorStandard failed";
+ EXPECT_EQ(unwrappedPrimaries, mPrimaries) << "Returned primaries doesn't match";
+ EXPECT_EQ(unwrappedMatrixCoeffs, mMatrixCoeffs) << "Returned matrixCoeffs doesn't match";
+}
+
+TEST_P(ColorAspectsTest, PlatformAspectsTest) {
+ CA aspects;
+ aspects.mRange = mRange;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+ int32_t range = -1;
+ int32_t standard = -1;
+ int32_t transfer = -1;
+ status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(aspects, &range,
+ &standard, &transfer);
+ ASSERT_EQ(status, OK) << "Conversion of ColorAspects to PlatformAspects failed";
+
+ CA returnedAspects;
+ status = ColorUtils::convertPlatformColorAspectsToCodecAspects(range, standard, transfer,
+ returnedAspects);
+ ASSERT_EQ(status, OK) << "Conversion of PlatformAspects to ColorAspects failed";
+ EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+ << "range mismatch for conversion between PlatformAspects";
+ EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+ << "primaries mismatch for conversion between PlatformAspects";
+ EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+ << "transfer mismatch for conversion between PlatformAspects";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+ << "matrixCoeffs mismatch for conversion between PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, IsoAspectsTest) {
+ CA aspects;
+ aspects.mRange = mRange;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+ int32_t primaries = -1;
+ int32_t colorTransfer = -1;
+ int32_t matrixCoeffs = -1;
+ bool fullRange = false;
+ ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &primaries, &colorTransfer,
+ &matrixCoeffs, &fullRange);
+
+ CA returnedAspects;
+ ColorUtils::convertIsoColorAspectsToCodecAspects(primaries, colorTransfer, matrixCoeffs,
+ fullRange, returnedAspects);
+ EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+ << "range mismatch for conversion between IsoAspects";
+ EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+ << "primaries mismatch for conversion between IsoAspects";
+ EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+ << "transfer mismatch for conversion between IsoAspects";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+ << "matrixCoeffs mismatch for conversion between IsoAspects";
+}
+
+TEST_P(IsoToPlatformAspectsTest, IsoAspectsToPlatformAspectsTest) {
+ CA aspects;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+ int32_t isoPrimaries = -1;
+ int32_t isoTransfer = -1;
+ int32_t isoMatrixCoeffs = -1;
+ bool fullrange = false;
+ ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &isoPrimaries, &isoTransfer,
+ &isoMatrixCoeffs, &fullrange);
+
+ int32_t range = -1;
+ int32_t standard = -1;
+ int32_t transfer = -1;
+ ColorUtils::convertIsoColorAspectsToPlatformAspects(isoPrimaries, isoTransfer, isoMatrixCoeffs,
+ fullrange, &range, &standard, &transfer);
+ if (fullrange) {
+ EXPECT_EQ(range, ColorUtils::kColorRangeFull)
+ << "range incorrect converting to PlatformAspects";
+ }
+ EXPECT_EQ(standard, mPlatformStandard) << "standard incorrect converting to PlatformAspects";
+ EXPECT_EQ(transfer, mPlatformTransfer) << "transfer incorrect converting to PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, PackColorAspectsTest) {
+ CA aspects;
+ aspects.mRange = mRange;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+ uint32_t packedColorAspects = ColorUtils::packToU32(aspects);
+
+ CA unpackedAspects = ColorUtils::unpackToColorAspects(packedColorAspects);
+ EXPECT_EQ(unpackedAspects.mRange, mRange) << "range mismatch after unpacking";
+ EXPECT_EQ(unpackedAspects.mPrimaries, mPrimaries) << "primaries mismatch after unpacking";
+ EXPECT_EQ(unpackedAspects.mTransfer, mTransfer) << "transfer mismatch after unpacking";
+ EXPECT_EQ(unpackedAspects.mMatrixCoeffs, mMatrixCoeffs)
+ << "matrixCoeffs mismatch after unpacking";
+ ALOGV("toString test: Standard: %s", asString(mStandard, "default"));
+}
+
+TEST_P(DefaultColorAspectsTest, DefaultColorAspectsTest) {
+ CA aspects;
+ aspects.mRange = CA::RangeUnspecified;
+ aspects.mPrimaries = CA::PrimariesUnspecified;
+ aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+ aspects.mTransfer = CA::TransferUnspecified;
+
+ ColorUtils::setDefaultCodecColorAspectsIfNeeded(aspects, mWidth, mHeight);
+ EXPECT_EQ(aspects.mRange, CA::RangeLimited) << "range not set to default";
+ EXPECT_EQ(aspects.mPrimaries, mPrimaries) << "primaries not set to default";
+ EXPECT_EQ(aspects.mMatrixCoeffs, mMatrixCoeffs) << "matrixCoeffs not set to default";
+ EXPECT_EQ(aspects.mTransfer, CA::TransferSMPTE170M) << "transfer not set to default";
+}
+
+TEST_P(DataSpaceTest, DataSpaceTest) {
+ CA aspects;
+ aspects.mRange = mRange;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+ android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, false);
+ EXPECT_EQ(dataSpace, mDataSpace) << "Returned incorrect dataspace";
+
+ bool status = ColorUtils::convertDataSpaceToV0(dataSpace);
+ ASSERT_TRUE(status) << "Returned v0 dataspace is not aspect-only";
+ EXPECT_EQ(dataSpace, mDataSpaceV0) << "Returned incorrect v0 dataspace";
+}
+
+TEST(ColorUtilsUnitTest, AspectsChangedTest) {
+ CA origAspects;
+ origAspects.mRange = CA::Range::RangeFull;
+ origAspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+ origAspects.mTransfer = CA::Transfer::TransferLinear;
+ origAspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+ CA aspects;
+ aspects.mRange = CA::Range::RangeFull;
+ aspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+ aspects.mTransfer = CA::Transfer::TransferLinear;
+ aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+ bool status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+ ASSERT_FALSE(status) << "ColorAspects comparison check failed";
+
+ aspects.mRange = CA::Range::RangeLimited;
+ status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+ ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+ EXPECT_EQ(aspects.mRange, CA::Range::RangeUnspecified) << "range should have been unspecified";
+ aspects.mRange = CA::Range::RangeFull;
+
+ aspects.mTransfer = CA::Transfer::TransferSRGB;
+ status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+ ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+ EXPECT_EQ(aspects.mTransfer, CA::Transfer::TransferUnspecified)
+ << "transfer should have been unspecified";
+ aspects.mTransfer = CA::Transfer::TransferLinear;
+
+ aspects.mPrimaries = CA::Primaries::PrimariesBT2020;
+ status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+ ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+ EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+ << "primaries should have been unspecified";
+ EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+ << "matrixCoeffs should have been unspecified";
+
+ aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixSMPTE240M;
+ status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+ ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+ EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+ << "primaries should have been unspecified";
+ EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+ << "matrixCoeffs should have been unspecified";
+}
+
+TEST(ColorUtilsUnitTest, ColorConfigFromFormatTest) {
+ int range = -1;
+ int standard = -1;
+ int transfer = -1;
+ sp<AMessage> format = new AMessage();
+ ASSERT_NE(format, nullptr) << "failed to create AMessage";
+ ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+ EXPECT_EQ(range | standard | transfer, 0) << "color config didn't default to 0";
+
+ format->setInt32(KEY_COLOR_RANGE, CA::Range::RangeFull);
+ format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+ format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+ ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+ EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+ EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+ EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+ range = standard = transfer = -1;
+ sp<AMessage> copyFormat = new AMessage();
+ ASSERT_NE(copyFormat, nullptr) << "failed to create AMessage";
+ ColorUtils::copyColorConfig(format, copyFormat);
+ bool status = copyFormat->findInt32(KEY_COLOR_RANGE, &range);
+ ASSERT_TRUE(status) << "ColorConfig range entry missing";
+ status = copyFormat->findInt32(KEY_COLOR_STANDARD, &standard);
+ ASSERT_TRUE(status) << "ColorConfig standard entry missing";
+ status = copyFormat->findInt32(KEY_COLOR_TRANSFER, &transfer);
+ ASSERT_TRUE(status) << "ColorConfig transfer entry missing";
+ EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+ EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+ EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+ range = standard = transfer = -1;
+ ColorUtils::getColorConfigFromFormat(copyFormat, &range, &standard, &transfer);
+ EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+ EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+ EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+}
+
+TEST_P(ColorAspectsTest, FormatTest) {
+ CA aspects;
+ sp<AMessage> format = new AMessage();
+ ASSERT_NE(format, nullptr) << "failed to create AMessage";
+ ColorUtils::setColorAspectsIntoFormat(aspects, format, true);
+
+ CA returnedAspects;
+ ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+ EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+ EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+ EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+
+ aspects.mRange = mRange;
+ aspects.mPrimaries = mPrimaries;
+ aspects.mTransfer = mTransfer;
+ aspects.mMatrixCoeffs = mMatrixCoeffs;
+ ColorUtils::setColorAspectsIntoFormat(aspects, format);
+
+ memset(&returnedAspects, 0, sizeof(returnedAspects));
+ ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+ EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+ EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+ EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+}
+
+TEST(ColorUtilsUnitTest, HDRStaticInfoTest) {
+ sp<AMessage> format = new AMessage();
+ ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+ HDRStaticInfo returnedInfoHDR;
+ bool status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+ ASSERT_FALSE(status) << "HDR info should be absent in empty format";
+
+ HDRStaticInfo infoHDR;
+ infoHDR.sType1.mMaxDisplayLuminance = kHDRInfoTestValue2;
+ infoHDR.sType1.mMinDisplayLuminance = kHDRInfoTestValue1;
+ infoHDR.sType1.mMaxContentLightLevel = kHDRInfoTestValue2;
+ infoHDR.sType1.mMaxFrameAverageLightLevel = kHDRInfoTestValue1;
+ infoHDR.sType1.mR.x = kHDRInfoTestValue1;
+ infoHDR.sType1.mR.y = kHDRInfoTestValue2;
+ infoHDR.sType1.mG.x = kHDRInfoTestValue1;
+ infoHDR.sType1.mG.y = kHDRInfoTestValue2;
+ infoHDR.sType1.mB.x = kHDRInfoTestValue1;
+ infoHDR.sType1.mB.y = kHDRInfoTestValue2;
+ infoHDR.sType1.mW.x = kHDRInfoTestValue1;
+ infoHDR.sType1.mW.y = kHDRInfoTestValue2;
+ ColorUtils::setHDRStaticInfoIntoFormat(infoHDR, format);
+
+ status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+ ASSERT_TRUE(status) << "Failed to get HDR info from format";
+ ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+
+ AMediaFormat *mediaFormat = AMediaFormat_new();
+ ASSERT_NE(mediaFormat, nullptr) << "Unable to create AMediaFormat";
+ ColorUtils::setHDRStaticInfoIntoAMediaFormat(infoHDR, mediaFormat);
+ memset(&returnedInfoHDR, 0, sizeof(returnedInfoHDR));
+ status = ColorUtils::getHDRStaticInfoFromFormat(mediaFormat->mFormat, &returnedInfoHDR);
+ AMediaFormat_delete(mediaFormat);
+ ASSERT_TRUE(status) << "Failed to get HDR info from media format";
+ ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+}
+
+TEST(ColorUtilsUnitTest, SanityTest) {
+ CA::Primaries unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+ CA::MatrixCoeffs unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixOther + 1);
+ int32_t colorStandard =
+ ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, CA::MatrixUnspecified);
+ EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+ << "Standard unspecified expected";
+ colorStandard =
+ ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesOther, unmappedMatrixCoeffs);
+ EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+ << "Standard unspecified expected";
+ colorStandard = ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesBT601_6_525,
+ CA::MatrixBT2020);
+ EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+ << "Standard greater than extended start expected";
+ unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesBT2020 + 1);
+ unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixBT2020Constant + 1);
+ colorStandard =
+ ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, unmappedMatrixCoeffs);
+ EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+ << "Standard greater than extended start expected";
+
+ CA aspects;
+ int32_t colorRange = -1;
+ colorStandard = -1;
+ int32_t colorTransfer = -1;
+ aspects.mPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+ status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(
+ aspects, &colorRange, &colorStandard, &colorTransfer);
+ EXPECT_NE(status, OK) << "invalid colorAspects value accepted";
+
+ int32_t colorPrimaries = -1;
+ colorTransfer = -1;
+ int32_t colorMatrixCoeffs = -1;
+ bool fullRange = false;
+ aspects.mPrimaries = CA::PrimariesOther;
+ aspects.mTransfer = CA::TransferOther;
+ aspects.mMatrixCoeffs = CA::MatrixOther;
+ ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &colorPrimaries, &colorTransfer,
+ &colorMatrixCoeffs, &fullRange);
+ CA returnedAspects;
+ ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+ colorMatrixCoeffs, fullRange, returnedAspects);
+ EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+ << "expected unspecified Primaries";
+ EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+ << "expected unspecified Transfer";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+ << "expected unspecified MatrixCoeffs";
+
+ // invalid values, other value equals 0xFF
+ colorPrimaries = CA::PrimariesOther;
+ colorTransfer = CA::TransferOther;
+ colorMatrixCoeffs = CA::MatrixOther;
+ fullRange = false;
+ memset(&returnedAspects, 0, sizeof(returnedAspects));
+ ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+ colorMatrixCoeffs, fullRange, returnedAspects);
+ EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+ << "expected unspecified Primaries";
+ EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+ << "expected unspecified Transfer";
+ EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+ << "expected unspecified MatrixCoeffs";
+
+ CA::Primaries primaries = CA::PrimariesUnspecified;
+ CA::MatrixCoeffs matrixCoeffs = CA::MatrixUnspecified;
+ status = ColorUtils::unwrapColorAspectsFromColorStandard(ColorUtils::kColorStandardVendorStart,
+ &primaries, &matrixCoeffs);
+ EXPECT_EQ(status, OK) << "unwrapping aspects from color standard failed";
+
+ primaries = CA::PrimariesUnspecified;
+ matrixCoeffs = CA::MatrixUnspecified;
+ status = ColorUtils::unwrapColorAspectsFromColorStandard(
+ ColorUtils::kColorStandardVendorStart * 4, &primaries, &matrixCoeffs);
+ EXPECT_NE(status, OK) << "invalid color standard accepted";
+
+ colorRange = ColorUtils::wrapColorAspectsIntoColorRange((CA::Range)(CA::RangeOther + 1));
+ EXPECT_EQ(colorRange, ColorUtils::kColorRangeUnspecified) << "expected unspecified color range";
+
+ CA::Range range;
+ status = ColorUtils::unwrapColorAspectsFromColorRange(
+ ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1, &range);
+ EXPECT_NE(status, OK) << "invalid range value accepted";
+ EXPECT_EQ(range, CA::RangeOther) << "returned unexpected range value";
+
+ colorTransfer =
+ ColorUtils::wrapColorAspectsIntoColorTransfer((CA::Transfer)(CA::TransferOther + 1));
+ EXPECT_EQ(colorTransfer, ColorUtils::kColorTransferUnspecified)
+ << "expected unspecified color transfer";
+
+ CA::Transfer transfer;
+ status = ColorUtils::unwrapColorAspectsFromColorTransfer(
+ ColorUtils::kColorTransferVendorStart + CA::TransferOther + 1, &transfer);
+ EXPECT_NE(status, OK) << "invalid transfer value accepted";
+ EXPECT_EQ(transfer, CA::TransferOther) << "expected other color transfer";
+}
+
+TEST(ColorUtilsUnitTest, HDRInfoSanityTest) {
+ HDRStaticInfo hdrInfo;
+ sp<AMessage> format = new AMessage();
+ ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+ bool boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+ EXPECT_FALSE(boolStatus) << "HDRStaticInfo should not be present";
+
+ sp<ABuffer> invalidSizeHDRInfoBuffer = new ABuffer(kHDRBufferSize - 1);
+ ASSERT_NE(invalidSizeHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+ format->setBuffer(KEY_HDR_STATIC_INFO, invalidSizeHDRInfoBuffer);
+ memset(&hdrInfo, 0, sizeof(hdrInfo));
+ boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+ EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+ sp<ABuffer> invalidHDRInfoBuffer = new ABuffer(kHDRBufferSize);
+ ASSERT_NE(invalidHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+ uint8_t *data = invalidHDRInfoBuffer->data();
+ *data = HDRStaticInfo::kType1 + 1;
+ format->setBuffer(KEY_HDR_STATIC_INFO, invalidHDRInfoBuffer);
+ memset(&hdrInfo, 0, sizeof(hdrInfo));
+ boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+ EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+ CA aspects;
+ format->setInt32(KEY_COLOR_RANGE, ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1);
+ format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+ format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+ ColorUtils::getColorAspectsFromFormat(format, aspects);
+ EXPECT_EQ(aspects.mRange, CA::RangeOther) << "unexpected range";
+}
+
+TEST(ColorUtilsUnitTest, DataSpaceSanityTest) {
+ CA aspects;
+ aspects.mRange = CA::RangeUnspecified;
+ aspects.mPrimaries = CA::PrimariesUnspecified;
+ aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+ aspects.mTransfer = CA::TransferUnspecified;
+ android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+ EXPECT_EQ(dataSpace, 0) << "expected invalid dataspace";
+ aspects.mPrimaries = CA::PrimariesUnspecified;
+ aspects.mMatrixCoeffs = CA::MatrixBT2020Constant;
+ dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+ EXPECT_NE(dataSpace, 0) << "unexpected value";
+}
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorRangeTest,
+ ::testing::Values(
+ // ColorRange
+ CA::Range::RangeLimited, CA::Range::RangeFull,
+ CA::Range::RangeUnspecified, CA::Range::RangeOther));
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorTransferTest,
+ ::testing::Values(
+ // ColorTransfer
+ CA::Transfer::TransferUnspecified, CA::Transfer::TransferLinear,
+ CA::Transfer::TransferSRGB, CA::Transfer::TransferSMPTE170M,
+ CA::Transfer::TransferGamma22, CA::Transfer::TransferGamma28,
+ CA::Transfer::TransferST2084, CA::Transfer::TransferHLG,
+ CA::Transfer::TransferSMPTE240M, CA::Transfer::TransferXvYCC,
+ CA::Transfer::TransferBT1361, CA::Transfer::TransferST428,
+ CA::Transfer::TransferOther));
+
+INSTANTIATE_TEST_SUITE_P(
+ ColorUtilsUnitTest, ColorStandardTest,
+ ::testing::Values(
+ // Primaries, MatrixCoeffs
+ std::make_pair(CA::Primaries::PrimariesUnspecified,
+ CA::MatrixCoeffs::MatrixUnspecified),
+ std::make_pair(CA::Primaries::PrimariesBT709_5,
+ CA::MatrixCoeffs::MatrixBT709_5),
+ std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+ CA::MatrixCoeffs::MatrixBT601_6),
+ std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+ CA::MatrixCoeffs::MatrixBT709_5),
+ std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+ CA::MatrixCoeffs::MatrixBT601_6),
+ std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+ CA::MatrixCoeffs::MatrixSMPTE240M),
+ std::make_pair(CA::Primaries::PrimariesBT2020,
+ CA::MatrixCoeffs::MatrixBT2020),
+ std::make_pair(CA::Primaries::PrimariesBT2020,
+ CA::MatrixCoeffs::MatrixBT2020Constant),
+ std::make_pair(CA::Primaries::PrimariesBT470_6M,
+ CA::MatrixCoeffs::MatrixBT470_6M),
+ std::make_pair(CA::Primaries::PrimariesGenericFilm,
+ CA::MatrixCoeffs::MatrixBT2020)));
+
+INSTANTIATE_TEST_SUITE_P(
+ ColorUtilsUnitTest, ColorAspectsTest,
+ ::testing::Values(
+ // Primaries, ColorTransfer, MatrixCoeffs, ColorRange, ColorStandard
+ std::make_tuple(CA::Primaries::PrimariesUnspecified,
+ CA::Transfer::TransferUnspecified,
+ CA::MatrixCoeffs::MatrixUnspecified, CA::Range::RangeFull,
+ CA::Standard::StandardUnspecified),
+ std::make_tuple(CA::Primaries::PrimariesBT709_5, CA::Transfer::TransferLinear,
+ CA::MatrixCoeffs::MatrixBT709_5, CA::Range::RangeFull,
+ CA::Standard::StandardBT709),
+ std::make_tuple(CA::Primaries::PrimariesBT601_6_625, CA::Transfer::TransferSRGB,
+ CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+ CA::Standard::StandardUnspecified),
+ std::make_tuple(CA::Primaries::PrimariesBT601_6_625,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+ CA::Range::RangeFull, CA::Standard::StandardUnspecified),
+ std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma22,
+ CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+ CA::Standard::StandardUnspecified),
+ std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma28,
+ CA::MatrixCoeffs::MatrixSMPTE240M, CA::Range::RangeFull,
+ CA::Standard::StandardBT470M),
+ std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferST2084,
+ CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+ CA::Standard::StandardBT601_525),
+ std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferHLG,
+ CA::MatrixCoeffs::MatrixBT2020Constant, CA::Range::RangeFull,
+ CA::Standard::StandardBT601_525),
+ std::make_tuple(CA::Primaries::PrimariesBT470_6M, CA::Transfer::TransferLinear,
+ CA::MatrixCoeffs::MatrixBT470_6M, CA::Range::RangeFull,
+ CA::Standard::StandardUnspecified),
+ std::make_tuple(CA::Primaries::PrimariesGenericFilm, CA::Transfer::TransferLinear,
+ CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+ CA::Standard::StandardBT601_625)));
+
+INSTANTIATE_TEST_SUITE_P(
+ ColorUtilsUnitTest, IsoToPlatformAspectsTest,
+ ::testing::Values(
+ // Iso: Primaries, Transfer, MatrixCoeffs; Platform: Standard, Transfer
+ std::make_tuple(CA::PrimariesUnspecified, CA::TransferUnspecified,
+ CA::MatrixUnspecified, ColorUtils::kColorStandardUnspecified,
+ ColorUtils::kColorTransferUnspecified),
+ std::make_tuple(CA::PrimariesBT709_5, CA::TransferLinear, CA::MatrixBT709_5,
+ ColorUtils::kColorStandardBT709, ColorUtils::kColorTransferLinear),
+ std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSRGB, CA::MatrixBT601_6,
+ ColorUtils::kColorStandardBT601_625,
+ ColorUtils::kColorTransferSRGB),
+ std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSMPTE170M, CA::MatrixBT709_5,
+ ColorUtils::kColorStandardBT601_625_Unadjusted,
+ ColorUtils::kColorTransferSMPTE_170M),
+ std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma22, CA::MatrixBT601_6,
+ ColorUtils::kColorStandardBT601_525,
+ ColorUtils::kColorTransferGamma22),
+ std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma28, CA::MatrixSMPTE240M,
+ ColorUtils::kColorStandardBT601_525_Unadjusted,
+ ColorUtils::kColorTransferGamma28),
+ std::make_tuple(CA::PrimariesBT2020, CA::TransferST2084, CA::MatrixBT2020,
+ ColorUtils::kColorStandardBT2020, ColorUtils::kColorTransferST2084),
+ std::make_tuple(CA::PrimariesBT2020, CA::TransferHLG, CA::MatrixBT2020Constant,
+ ColorUtils::kColorStandardBT2020Constant,
+ ColorUtils::kColorTransferHLG),
+ std::make_tuple(CA::PrimariesBT470_6M, CA::TransferUnspecified, CA::MatrixBT470_6M,
+ ColorUtils::kColorStandardBT470M,
+ ColorUtils::kColorTransferUnspecified),
+ std::make_tuple(CA::PrimariesGenericFilm, CA::TransferLinear, CA::MatrixBT2020,
+ ColorUtils::kColorStandardFilm, ColorUtils::kColorTransferLinear)));
+
+INSTANTIATE_TEST_SUITE_P(
+ ColorUtilsUnitTest, DefaultColorAspectsTest,
+ ::testing::Values(
+ // Width, Height, Primaries, MatrixCoeffs
+ std::make_tuple(3840, 3840, CA::PrimariesBT2020, CA::MatrixBT2020),
+ std::make_tuple(720, 576, CA::PrimariesBT601_6_625, CA::MatrixBT601_6),
+ std::make_tuple(480, 360, CA::PrimariesBT601_6_525, CA::MatrixBT601_6),
+ std::make_tuple(480, 1920, CA::PrimariesBT709_5, CA::MatrixBT709_5)));
+
+INSTANTIATE_TEST_SUITE_P(
+ ColorUtilsUnitTest, DataSpaceTest,
+ ::testing::Values(
+ // ColorRange, Primaries, ColorTransfer, MatrixCoeffs, v0_android_dataspace,
+ // android_dataspace
+ std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSRGB, CA::MatrixCoeffs::MatrixBT709_5,
+ HAL_DATASPACE_V0_SRGB, HAL_DATASPACE_SRGB),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+ HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+ std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferLinear, CA::MatrixCoeffs::MatrixBT709_5,
+ HAL_DATASPACE_V0_SRGB_LINEAR, HAL_DATASPACE_SRGB_LINEAR),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+ HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+ HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+ std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT601_6_625,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+ HAL_DATASPACE_V0_JFIF, HAL_DATASPACE_JFIF),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+ HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+ HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixSMPTE240M,
+ HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT2020,
+ HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+ CA::Transfer::TransferSMPTE170M,
+ CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+ HAL_DATASPACE_BT601_525),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+ HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+ CA::Transfer::TransferSMPTE170M,
+ CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+ HAL_DATASPACE_BT601_525),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+ CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+ HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+ std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+ CA::Transfer::TransferSMPTE170M,
+ CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+ HAL_DATASPACE_BT601_525)));
diff --git a/media/libstagefright/id3/Android.bp b/media/libstagefright/id3/Android.bp
index 02de2c0..e34504d 100644
--- a/media/libstagefright/id3/Android.bp
+++ b/media/libstagefright/id3/Android.bp
@@ -1,5 +1,6 @@
cc_library_static {
name: "libstagefright_id3",
+ min_sdk_version: "29",
srcs: ["ID3.cpp"],
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index e97f6eb..5509512 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -813,10 +813,6 @@
baseSize = U32_AT(&mParent.mData[mOffset + 4]);
}
- if (baseSize == 0) {
- return;
- }
-
// Prevent integer overflow when adding
if (SIZE_MAX - 10 <= baseSize) {
return;
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
index e4454c1..d82d26e 100644
--- a/media/libstagefright/id3/TEST_MAPPING
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -1,9 +1,13 @@
// frameworks/av/media/libstagefright/id3
{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- //{ "name": "ID3Test" },
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "ID3Test" }
+ ],
+ "presubmit-large": [
// this doesn't seem to run any tests.
// but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
// does run he 32 and 64 bit tests, but not the instant tests
diff --git a/media/libstagefright/id3/test/Android.bp b/media/libstagefright/id3/test/Android.bp
index 9d26eec..acf38e2 100644
--- a/media/libstagefright/id3/test/Android.bp
+++ b/media/libstagefright/id3/test/Android.bp
@@ -16,6 +16,7 @@
cc_test {
name: "ID3Test",
+ test_suites: ["device-tests"],
gtest: true,
srcs: ["ID3Test.cpp"],
diff --git a/media/libstagefright/include/HevcUtils.h b/media/libstagefright/include/HevcUtils.h
index d2a86eb..6a4a168 100644
--- a/media/libstagefright/include/HevcUtils.h
+++ b/media/libstagefright/include/HevcUtils.h
@@ -94,6 +94,8 @@
// Note that this method does not write the start code.
bool write(size_t index, uint8_t* dest, size_t size);
status_t makeHvcc(uint8_t *hvcc, size_t *hvccSize, size_t nalSizeLength);
+ void FindHEVCDimensions(
+ const sp<ABuffer> &SpsBuffer, int32_t *width, int32_t *height);
Info getInfo() const { return mInfo; }
static bool IsHevcIDR(const uint8_t *data, size_t size);
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index cc40f76..4c97b4d 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -147,6 +147,7 @@
kWhatReleaseCodecInstance = 'relC',
kWhatForceStateTransition = 'fstt',
kWhatCheckIfStuck = 'Cstk',
+ kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
};
enum {
@@ -273,6 +274,7 @@
bool mExplicitShutdown;
bool mIsLegacyVP9Decoder;
bool mIsStreamCorruptFree;
+ bool mIsLowLatency;
// If "mKeepComponentAllocated" we only transition back to Loaded state
// and do not release the component instance.
@@ -500,6 +502,7 @@
status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
+ status_t setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels);
status_t setupFlacCodec(
bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
AudioEncoding encoding);
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..16e7d89 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -23,7 +23,6 @@
#include <media/stagefright/MediaBuffer.h>
#include <camera/android/hardware/ICamera.h>
#include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
#include <camera/CameraParameters.h>
#include <gui/BufferItemConsumer.h>
#include <utils/List.h>
@@ -40,17 +39,6 @@
class CameraSource : public MediaSource, public MediaBufferObserver {
public:
/**
- * Factory method to create a new CameraSource using the current
- * settings (such as video size, frame rate, color format, etc)
- * from the default camera.
- *
- * @param clientName The package/process name of the client application.
- * This is used for permissions checking.
- * @return NULL on error.
- */
- static CameraSource *Create(const String16 &clientName);
-
- /**
* Factory method to create a new CameraSource.
*
* @param camera the video input frame data source. If it is NULL,
@@ -89,8 +77,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface,
- bool storeMetaDataInVideoBuffers = true);
+ const sp<IGraphicBufferProducer>& surface);
virtual ~CameraSource();
@@ -132,26 +119,6 @@
protected:
/**
- * The class for listening to BnCameraRecordingProxyListener. This is used to receive video
- * buffers in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA
- * mode. When a frame is available, CameraSource::dataCallbackTimestamp() will be called.
- */
- class ProxyListener: public BnCameraRecordingProxyListener {
- public:
- ProxyListener(const sp<CameraSource>& source);
- virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
- const sp<IMemory> &data);
- virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
- native_handle_t* handle);
- virtual void recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles);
-
- private:
- sp<CameraSource> mSource;
- };
-
- /**
* The class for listening to BufferQueue's onFrameAvailable. This is used to receive video
* buffers in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode. When a frame is available,
* CameraSource::processBufferQueueFrame() will be called.
@@ -213,32 +180,15 @@
CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
Size videoSize, int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface,
- bool storeMetaDataInVideoBuffers);
+ const sp<IGraphicBufferProducer>& surface);
virtual status_t startCameraRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& frame);
- virtual void releaseRecordingFrameHandle(native_handle_t* handle);
- // stagefright recorder not using this for now
- virtual void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles);
// Returns true if need to skip the current frame.
// Called from dataCallbackTimestamp.
virtual bool skipCurrentFrame(int64_t /*timestampUs*/) {return false;}
- // Callback called when still camera raw data is available.
- virtual void dataCallback(int32_t /*msgType*/, const sp<IMemory>& /*data*/) {}
-
- virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
- const sp<IMemory> &data);
-
- virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
- native_handle_t* handle);
-
- virtual void recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles);
-
// Process a buffer item received in BufferQueueListener.
virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -261,9 +211,6 @@
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
- // The mode video buffers are received from camera. One of VIDEO_BUFFER_MODE_*.
- int32_t mVideoBufferMode;
-
static const uint32_t kDefaultVideoBufferCount = 32;
/**
@@ -297,12 +244,12 @@
status_t init(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
- Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+ Size videoSize, int32_t frameRate);
status_t initWithCameraAccess(
const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
- Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+ Size videoSize, int32_t frameRate);
// Initialize the buffer queue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
status_t initBufferQueue(uint32_t width, uint32_t height, uint32_t format,
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 533e33b..3c311cf 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -45,8 +45,7 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenTimeLapseFrameCaptureUs,
- bool storeMetaDataInVideoBuffers = true);
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
virtual ~CameraSourceTimeLapse();
@@ -122,8 +121,7 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenTimeLapseFrameCaptureUs,
- bool storeMetaDataInVideoBuffers = true);
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
// Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
// It only handles the case when mLastReadBufferCopy is signalled. Otherwise
@@ -137,33 +135,6 @@
// frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
virtual bool skipCurrentFrame(int64_t timestampUs);
- // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
- // timestamp and set mSkipCurrentFrame.
- // Then it calls the base CameraSource::dataCallbackTimestamp()
- // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and
- // VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode.
- virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
- const sp<IMemory> &data);
-
- // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
- // timestamp and set mSkipCurrentFrame.
- // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
- // CameraSource::recordingFrameHandleCallbackTimestampBatch()
- // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
- // the metadata is VideoNativeHandleMetadata.
- virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
- native_handle_t* handle);
-
- // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
- // timestamp and set mSkipCurrentFrame.
- // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
- // CameraSource::recordingFrameHandleCallbackTimestampBatch()
- // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
- // the metadata is VideoNativeHandleMetadata.
- virtual void recordingFrameHandleCallbackTimestampBatch(
- const std::vector<int64_t>& timestampsUs,
- const std::vector<native_handle_t*>& handles);
-
// Process a buffer item received in CameraSource::BufferQueueListener.
// This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -187,9 +158,6 @@
// Wrapper to enter threadTimeLapseEntry()
static void *ThreadTimeLapseWrapper(void *me);
- // Creates a copy of source_data into a new memory of final type MemoryBase.
- sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
-
CameraSourceTimeLapse(const CameraSourceTimeLapse &);
CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
};
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 34a7d55..2582ed0 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -27,6 +27,7 @@
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/ALooper.h>
#include <mutex>
+#include <queue>
namespace android {
@@ -45,7 +46,7 @@
// Returns INVALID_OPERATION if there is no source or track.
virtual status_t start(MetaData *param = NULL);
- virtual status_t stop() { return reset(); }
+ virtual status_t stop();
virtual status_t pause();
virtual bool reachedEOS();
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -85,9 +86,10 @@
friend struct AHandlerReflector<MPEG4Writer>;
enum {
- kWhatSwitch = 'swch',
- kWhatHandleIOError = 'ioer',
- kWhatHandleFallocateError = 'faer'
+ kWhatSwitch = 'swch',
+ kWhatIOError = 'ioer',
+ kWhatFallocateError = 'faer',
+ kWhatNoIOErrorSoFar = 'noie'
};
int mFd;
@@ -124,12 +126,19 @@
bool mWriteSeekErr;
bool mFallocateErr;
bool mPreAllocationEnabled;
+ status_t mResetStatus;
+ // Queue to hold top long write durations
+ std::priority_queue<std::chrono::microseconds, std::vector<std::chrono::microseconds>,
+ std::greater<std::chrono::microseconds>> mWriteDurationPQ;
+ const uint8_t kWriteDurationsCount = 5;
sp<ALooper> mLooper;
sp<AHandlerReflector<MPEG4Writer> > mReflector;
Mutex mLock;
+ // Serialize reset calls from client of MPEG4Writer and MP4WtrCtrlHlpLooper.
std::mutex mResetMutex;
+ // Serialize preallocation calls from different track threads.
std::mutex mFallocMutex;
bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
@@ -148,6 +157,7 @@
int64_t estimateMoovBoxSize(int32_t bitRate);
int64_t estimateFileLevelMetaSize(MetaData *params);
void writeCachedBoxToFile(const char *type);
+ void printWriteDurations();
struct Chunk {
Track *mTrack; // Owner
@@ -231,7 +241,7 @@
status_t stopWriterThread();
static void *ThreadWrapper(void *me);
void threadFunc();
- void setupAndStartLooper();
+ status_t setupAndStartLooper();
void stopAndReleaseLooper();
// Buffer a single chunk to be written out later.
@@ -287,7 +297,8 @@
bool exceedsFileDurationLimit();
bool approachingFileSizeLimit();
bool isFileStreamable() const;
- void trackProgressStatus(size_t trackId, int64_t timeUs, status_t err = OK);
+ void trackProgressStatus(uint32_t trackId, int64_t timeUs, status_t err = OK);
+ status_t validateAllTracksId(bool akKey4BitTrackIds);
void writeCompositionMatrix(int32_t degrees);
void writeMvhdBox(int64_t durationUs);
void writeMoovBox(int64_t durationUs);
@@ -296,7 +307,7 @@
void writeGeoDataBox();
void writeLatitude(int degreex10000);
void writeLongitude(int degreex10000);
- void finishCurrentSession();
+ status_t finishCurrentSession();
void addDeviceMeta();
void writeHdlr(const char *handlerType);
@@ -310,7 +321,7 @@
*/
bool preAllocate(uint64_t wantSize);
/*
- * Truncate file as per the size used for meta data and actual data in a session.
+ * Truncate file as per the size used for metadata and actual data in a session.
*/
bool truncatePreAllocation();
@@ -327,9 +338,9 @@
void writeFileLevelMetaBox();
void sendSessionSummary();
- void release();
+ status_t release();
status_t switchFd();
- status_t reset(bool stopSource = true);
+ status_t reset(bool stopSource = true, bool waitForAnyPreviousCallToComplete = true);
static uint32_t getMpeg4Time();
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 177a9e9..c7d7765 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -58,6 +58,7 @@
private:
Mutex mAdapterLock;
+ std::mutex mBufferGatingMutex;
// Make sure the read() wait for the incoming buffer.
Condition mBufferReadCond;
// Make sure the pushBuffer() wait for the current buffer consumed.
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index 9145b63..2c03f27 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -46,7 +46,7 @@
explicit MediaBuffer(size_t size);
explicit MediaBuffer(const sp<ABuffer> &buffer);
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
MediaBuffer(const sp<IMemory> &mem) :
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -97,7 +97,7 @@
}
virtual int remoteRefcount() const {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
// Either document why it is safe in this case or address the
@@ -114,7 +114,7 @@
// returns old value
int addRemoteRefcount(int32_t value) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
// Either document why it is safe in this case or address the
@@ -132,7 +132,7 @@
}
static bool isDeadObject(const sp<IMemory> &memory) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
// Either document why it is safe in this case or address the
@@ -235,7 +235,7 @@
};
inline SharedControl *getSharedControl() const {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
// Either document why it is safe in this case or address the
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index c6b6639..a28d479 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -146,7 +146,7 @@
// object.
status_t release();
- status_t releaseAsync();
+ status_t releaseAsync(const sp<AMessage> ¬ify);
status_t flush();
@@ -373,12 +373,15 @@
AString mOwnerName;
sp<MediaCodecInfo> mCodecInfo;
sp<AReplyToken> mReplyID;
+ std::string mLastReplyOrigin;
+ std::vector<sp<AMessage>> mDeferredMessages;
uint32_t mFlags;
status_t mStickyError;
sp<Surface> mSurface;
SoftwareRenderer *mSoftRenderer;
- mediametrics_handle_t mMetricsHandle;
+ mediametrics_handle_t mMetricsHandle = 0;
+ nsecs_t mLifetimeStartNs = 0;
void initMediametrics();
void updateMediametrics();
void flushMediametrics();
@@ -389,6 +392,7 @@
sp<AMessage> mInputFormat;
sp<AMessage> mCallback;
sp<AMessage> mOnFrameRenderedNotification;
+ sp<AMessage> mAsyncReleaseCompleteNotification;
sp<ResourceManagerServiceProxy> mResourceManagerProxy;
@@ -421,6 +425,10 @@
sp<ICrypto> mCrypto;
+ int32_t mTunneledInputWidth;
+ int32_t mTunneledInputHeight;
+ bool mTunneled;
+
sp<IDescrambler> mDescrambler;
List<sp<ABuffer> > mCSD;
@@ -433,13 +441,17 @@
std::shared_ptr<BufferChannelBase> mBufferChannel;
- MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid);
+ MediaCodec(
+ const sp<ALooper> &looper, pid_t pid, uid_t uid,
+ std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
+ std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo = nullptr);
static sp<CodecBase> GetCodecBase(const AString &name, const char *owner = nullptr);
static status_t PostAndAwaitResponse(
const sp<AMessage> &msg, sp<AMessage> *response);
+ void PostReplyWithError(const sp<AMessage> &msg, int32_t err);
void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
status_t init(const AString &name);
@@ -450,6 +462,7 @@
size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
status_t onQueueInputBuffer(const sp<AMessage> &msg);
status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
+ BufferInfo *peekNextPortBuffer(int32_t portIndex);
ssize_t dequeuePortBuffer(int32_t portIndex);
status_t getBufferAndFormat(
@@ -481,6 +494,7 @@
status_t onSetParameters(const sp<AMessage> ¶ms);
status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
+ void handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer);
bool isExecuting() const;
uint64_t getGraphicBufferSize();
@@ -489,6 +503,9 @@
bool hasPendingBuffer(int portIndex);
bool hasPendingBuffer();
+ void postPendingRepliesAndDeferredMessages(std::string origin, status_t err = OK);
+ void postPendingRepliesAndDeferredMessages(std::string origin, const sp<AMessage> &response);
+
/* called to get the last codec error when the sticky flag is set.
* if no such codec error is found, returns UNKNOWN_ERROR.
*/
@@ -521,6 +538,9 @@
class ReleaseSurface;
std::unique_ptr<ReleaseSurface> mReleaseSurface;
+ std::list<sp<AMessage>> mLeftover;
+ status_t handleLeftover(size_t index);
+
sp<BatteryChecker> mBatteryChecker;
void statsBufferSent(int64_t presentationUs);
@@ -571,6 +591,10 @@
Histogram mLatencyHist;
+ std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
+ std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
+ friend class MediaTestHelper;
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
index f53b23e..bf85d7e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
@@ -19,7 +19,6 @@
#define MEDIA_CODEC_LIST_WRITER_H_
#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaCodecListWriter.h>
#include <media/MediaCodecInfo.h>
#include <utils/Errors.h>
@@ -65,6 +64,7 @@
std::vector<sp<MediaCodecInfo>> mCodecInfos;
friend struct MediaCodecList;
+ friend class MediaTestHelper;
};
/**
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 7c75f74..a1b9465 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -117,8 +117,6 @@
status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
int64_t timeUs, uint32_t flags) ;
- void notify(int msg, int ext1, int ext2);
-
private:
const OutputFormat mFormat;
sp<MediaWriter> mWriter;
@@ -130,11 +128,9 @@
UNINITIALIZED,
INITIALIZED,
STARTED,
- STOPPED,
- ERROR
+ STOPPED
};
State mState;
- status_t mError;
DISALLOW_EVIL_CONSTRUCTORS(MediaMuxer);
};
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index fd2c171..17b1abf 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -21,7 +21,7 @@
#include <utils/RefBase.h>
#include <media/stagefright/MediaSource.h>
#include <media/IMediaRecorderClient.h>
-#include <media/stagefright/MediaMuxer.h>
+#include <media/mediarecorder.h>
namespace android {
@@ -46,7 +46,6 @@
virtual void setListener(const sp<IMediaRecorderClient>& listener) {
mListener = listener;
}
- virtual void setMuxerListener(const wp<MediaMuxer>& muxer) { mMuxer = muxer; }
virtual status_t dump(int /*fd*/, const Vector<String16>& /*args*/) {
return OK;
@@ -65,17 +64,20 @@
int64_t mMaxFileSizeLimitBytes;
int64_t mMaxFileDurationLimitUs;
sp<IMediaRecorderClient> mListener;
- wp<MediaMuxer> mMuxer;
void notify(int msg, int ext1, int ext2) {
- ALOG(LOG_VERBOSE, "MediaWriter", "notify msg:%d, ext1:%d, ext2:%d", msg, ext1, ext2);
+ if (msg == MEDIA_RECORDER_TRACK_EVENT_INFO || msg == MEDIA_RECORDER_TRACK_EVENT_ERROR) {
+ uint32_t trackId = (ext1 >> 28) & 0xf;
+ int type = ext1 & 0xfffffff;
+ ALOG(LOG_VERBOSE, "MediaWriter", "Track event err/info msg:%d, trackId:%u, type:%d,"
+ "val:%d", msg, trackId, type, ext2);
+ } else {
+ ALOG(LOG_VERBOSE, "MediaWriter", "Recorder event msg:%d, ext1:%d, ext2:%d",
+ msg, ext1, ext2);
+ }
if (mListener != nullptr) {
mListener->notify(msg, ext1, ext2);
}
- sp<MediaMuxer> muxer = mMuxer.promote();
- if (muxer != nullptr) {
- muxer->notify(msg, ext1, ext2);
- }
}
private:
MediaWriter(const MediaWriter &);
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 847093d..940bd86 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -62,6 +62,7 @@
kKeyDVCC = 'dvcc', // raw data
kKeyAV1C = 'av1c', // raw data
kKeyThumbnailHVCC = 'thvc', // raw data
+ kKeyThumbnailAV1C = 'tav1', // raw data
kKeyD263 = 'd263', // raw data
kKeyOpusHeader = 'ohdr', // raw data
kKeyOpusCodecDelay = 'ocod', // uint64_t (codec delay in ns)
@@ -224,6 +225,8 @@
kKeyExifSize = 'exsz', // int64_t, Exif data size
kKeyExifTiffOffset = 'thdr', // int32_t, if > 0, buffer contains exif data block with
// tiff hdr at specified offset
+ kKeyXmpOffset = 'xmof', // int64_t, XMP data offset
+ kKeyXmpSize = 'xmsz', // int64_t, XMP data size
kKeyPcmBigEndian = 'pcmb', // bool (int32_t)
// Key for ALAC Magic Cookie
@@ -239,17 +242,28 @@
kKeyHapticChannelCount = 'hapC',
+ /* MediaRecorder.h, error notifications can represent track ids with 4 bits only.
+ * | track id | reserved | error or info type |
+ * 31 28 16 0
+ */
+ kKey4BitTrackIds = '4bid',
+
// Treat empty track as malformed for MediaRecorder.
kKeyEmptyTrackMalFormed = 'nemt', // bool (int32_t)
- kKeySps = 'sSps', // int32_t, indicates that a buffer is sps (value ignored).
- kKeyPps = 'sPps', // int32_t, indicates that a buffer is pps (value ignored).
+ kKeyVps = 'sVps', // int32_t, indicates that a buffer has vps.
+ kKeySps = 'sSps', // int32_t, indicates that a buffer has sps.
+ kKeyPps = 'sPps', // int32_t, indicates that a buffer has pps.
kKeySelfID = 'sfid', // int32_t, source ID to identify itself on RTP protocol.
kKeyPayloadType = 'pTyp', // int32_t, SDP negotiated payload type.
kKeyRtpExtMap = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
kKeyRtpCvoDegrees = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
kKeyRtpDscp = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
kKeySocketNetwork = 'sNet', // int64_t, socket will be bound to network handle.
+
+ // Slow-motion markers
+ kKeySlowMotionMarkers = 'slmo', // raw data, byte array following spec for
+ // MediaFormat#KEY_SLOW_MOTION_MARKERS
};
enum {
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 227cead..d8f2b00 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -47,12 +47,14 @@
SAMPLE_FLAG_ENCRYPTED = 2,
};
+ typedef IMediaExtractor::EntryPoint EntryPoint;
+
// identical to IMediaExtractor::GetTrackMetaDataFlags
enum GetTrackFormatFlags {
kIncludeExtensiveMetaData = 1, // reads sample table and possibly stream headers
};
- NuMediaExtractor();
+ explicit NuMediaExtractor(EntryPoint entryPoint);
status_t setDataSource(
const sp<MediaHTTPService> &httpService,
@@ -128,6 +130,8 @@
uint32_t mTrackFlags; // bitmask of "TrackFlags"
};
+ const EntryPoint mEntryPoint;
+
mutable Mutex mLock;
sp<DataSource> mDataSource;
@@ -139,6 +143,8 @@
int64_t mTotalBitrate; // in bits/sec
int64_t mDurationUs;
+ void setEntryPointToRemoteMediaExtractor();
+
ssize_t fetchAllTrackSamples(
int64_t seekTimeUs = -1ll,
MediaSource::ReadOptions::SeekMode mode =
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/libstagefright/include/media/stagefright/ProcessInfo.h
index 0be1a52..b8a3c10 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfo.h
@@ -20,6 +20,9 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/ProcessInfoInterface.h>
+#include <map>
+#include <mutex>
+#include <utils/Condition.h>
namespace android {
@@ -28,11 +31,20 @@
virtual bool getPriority(int pid, int* priority);
virtual bool isValidPid(int pid);
+ virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+ virtual void removeProcessInfoOverride(int pid);
protected:
virtual ~ProcessInfo();
private:
+ struct ProcessInfoOverride {
+ int procState;
+ int oomScore;
+ };
+ std::mutex mOverrideLock;
+ std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
+
DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
};
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
index b39112a..9260181 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
@@ -24,6 +24,8 @@
struct ProcessInfoInterface : public RefBase {
virtual bool getPriority(int pid, int* priority) = 0;
virtual bool isValidPid(int pid) = 0;
+ virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+ virtual void removeProcessInfoOverride(int pid);
protected:
virtual ~ProcessInfoInterface() {}
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
index 2ce7bc7..25125f2 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -42,6 +42,7 @@
virtual uint32_t flags() const;
virtual status_t setMediaCas(const HInterfaceToken &casToken);
virtual String8 name();
+ virtual status_t setEntryPoint(EntryPoint entryPoint);
private:
MediaExtractor *mExtractor;
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 2b9b759..1673120 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -33,7 +33,7 @@
const MetaDataBase *meta, sp<AMessage> *format);
status_t convertMetaDataToMessage(
const sp<MetaData> &meta, sp<AMessage> *format);
-void convertMessageToMetaData(
+status_t convertMessageToMetaData(
const sp<AMessage> &format, sp<MetaData> &meta);
// Returns a pointer to the next NAL start code in buffer of size |length| starting at |data|, or
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index fbb2d0c..5d697f7 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -1,12 +1,11 @@
-cc_library_static {
- name: "libstagefright_mpeg2support",
+cc_defaults {
+ name: "libstagefright_mpeg2support_defaults",
srcs: [
"AnotherPacketSource.cpp",
"ATSParser.cpp",
"CasManager.cpp",
"ESQueue.cpp",
- "HlsSampleDecryptor.cpp",
],
include_dirs: [
@@ -28,7 +27,6 @@
},
shared_libs: [
- "libcrypto",
"libhidlmemory",
"android.hardware.cas.native@1.0",
"android.hidl.memory@1.0",
@@ -36,9 +34,10 @@
],
header_libs: [
- "libmedia_headers",
+ "libmedia_datasource_headers",
"libaudioclient_headers",
"media_ndk_headers",
+ "libstagefright_foundation_headers",
],
export_include_dirs: ["."],
@@ -48,4 +47,39 @@
],
min_sdk_version: "29",
+
+ host_supported: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
+
+cc_library_static {
+ name: "libstagefright_mpeg2support",
+ defaults: [
+ "libstagefright_mpeg2support_defaults",
+ ],
+ cflags: [
+ "-DENABLE_CRYPTO",
+ ],
+ shared_libs: [
+ "libcrypto",
+ ],
+ srcs: [
+ "HlsSampleDecryptor.cpp",
+ ],
+}
+
+cc_library_static {
+ name: "libstagefright_mpeg2support_nocrypto",
+ defaults: [
+ "libstagefright_mpeg2support_defaults",
+ ],
+ apex_available: [
+ "com.android.media",
+ ],
}
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 62e3a4b..27a94fd 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -60,21 +60,23 @@
mIsAudio = false;
mIsVideo = false;
+ const char *mime;
- if (meta == NULL) {
+ // Do not use meta if no mime.
+ if (meta == NULL || !meta->findCString(kKeyMIMEType, &mime)) {
return;
}
mFormat = meta;
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
if (!strncasecmp("audio/", mime, 6)) {
mIsAudio = true;
- } else if (!strncasecmp("video/", mime, 6)) {
+ } else if (!strncasecmp("video/", mime, 6)) {
mIsVideo = true;
+ } else if (!strncasecmp("text/", mime, 5) || !strncasecmp("application/", mime, 12)) {
+ return;
} else {
- CHECK(!strncasecmp("text/", mime, 5) || !strncasecmp("application/", mime, 12));
+ ALOGW("Unsupported mime type: %s", mime);
}
}
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 4bb21fa..192ba77 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -36,7 +36,7 @@
#include <inttypes.h>
#include <netinet/in.h>
-#ifndef __ANDROID_APEX__
+#ifdef ENABLE_CRYPTO
#include "HlsSampleDecryptor.h"
#endif
@@ -55,10 +55,10 @@
// Create the decryptor anyway since we don't know the use-case unless key is provided
// Won't decrypt if key info not available (e.g., scanner/extractor just parsing ts files)
mSampleDecryptor = isSampleEncrypted() ?
-#ifdef __ANDROID_APEX__
- new SampleDecryptor
-#else
+#ifdef ENABLE_CRYPTO
new HlsSampleDecryptor
+#else
+ new SampleDecryptor
#endif
: NULL;
}
@@ -172,29 +172,26 @@
return 0;
}
- unsigned bsmod __unused = bits.getBits(3);
+ bits.skipBits(3); // bsmod
unsigned acmod = bits.getBits(3);
- unsigned cmixlev __unused = 0;
- unsigned surmixlev __unused = 0;
- unsigned dsurmod __unused = 0;
if ((acmod & 1) > 0 && acmod != 1) {
if (bits.numBitsLeft() < 2) {
return 0;
}
- cmixlev = bits.getBits(2);
+ bits.skipBits(2); //cmixlev
}
if ((acmod & 4) > 0) {
if (bits.numBitsLeft() < 2) {
return 0;
}
- surmixlev = bits.getBits(2);
+ bits.skipBits(2); //surmixlev
}
if (acmod == 2) {
if (bits.numBitsLeft() < 2) {
return 0;
}
- dsurmod = bits.getBits(2);
+ bits.skipBits(2); //dsurmod
}
if (bits.numBitsLeft() < 1) {
@@ -269,7 +266,7 @@
samplingRate = samplingRateTable2[fscod2];
} else {
samplingRate = samplingRateTable[fscod];
- unsigned numblkscod __unused = bits.getBits(2);
+ bits.skipBits(2); // numblkscod
}
unsigned acmod = bits.getBits(3);
@@ -1087,7 +1084,7 @@
}
unsigned numAUs = bits.getBits(8);
bits.skipBits(8);
- unsigned quantization_word_length __unused = bits.getBits(2);
+ bits.skipBits(2); // quantization_word_length
unsigned audio_sampling_frequency = bits.getBits(3);
unsigned num_channels = bits.getBits(3);
@@ -1433,7 +1430,13 @@
if (mSampleDecryptor != NULL && (nalType == 1 || nalType == 5)) {
uint8_t *nalData = mBuffer->data() + pos.nalOffset;
size_t newSize = mSampleDecryptor->processNal(nalData, pos.nalSize);
- // Note: the data can shrink due to unescaping
+ // Note: the data can shrink due to unescaping, but it can never grow
+ if (newSize > pos.nalSize) {
+ // don't log unless verbose, since this can get called a lot if
+ // the caller is trying to resynchronize
+ ALOGV("expected sample size < %u, got %zu", pos.nalSize, newSize);
+ return NULL;
+ }
memcpy(accessUnit->data() + dstOffset + 4,
nalData,
newSize);
diff --git a/media/libstagefright/mpeg2ts/TEST_MAPPING b/media/libstagefright/mpeg2ts/TEST_MAPPING
index b25d732..9f4bbdf 100644
--- a/media/libstagefright/mpeg2ts/TEST_MAPPING
+++ b/media/libstagefright/mpeg2ts/TEST_MAPPING
@@ -1,7 +1,9 @@
// frameworks/av/media/libstagefright/mpeg2ts
{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- //{ "name": "Mpeg2tsUnitTest" }
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "Mpeg2tsUnitTest" }
]
}
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index eef9ce3..eb15039 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -22,7 +22,7 @@
#include <media/openmax/OMX_AsString.h>
#include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
#include <media/stagefright/omx/OmxGraphicBufferSource.h>
#include <media/stagefright/omx/1.0/WOmxNode.h>
@@ -41,21 +41,21 @@
constexpr size_t kMaxNodeInstances = (1 << 16);
Omx::Omx() :
- mMaster(new OMXMaster()),
+ mStore(new OMXStore()),
mParser() {
(void)mParser.parseXmlFilesInSearchDirs();
(void)mParser.parseXmlPath(mParser.defaultProfilingResultsXmlPath);
}
Omx::~Omx() {
- delete mMaster;
+ delete mStore;
}
Return<void> Omx::listNodes(listNodes_cb _hidl_cb) {
std::list<::android::IOMX::ComponentInfo> list;
char componentName[256];
for (OMX_U32 index = 0;
- mMaster->enumerateComponents(
+ mStore->enumerateComponents(
componentName, sizeof(componentName), index) == OMX_ErrorNone;
++index) {
list.push_back(::android::IOMX::ComponentInfo());
@@ -63,7 +63,7 @@
info.mName = componentName;
::android::Vector<::android::String8> roles;
OMX_ERRORTYPE err =
- mMaster->getRolesOfComponent(componentName, &roles);
+ mStore->getRolesOfComponent(componentName, &roles);
if (err == OMX_ErrorNone) {
for (OMX_U32 i = 0; i < roles.size(); ++i) {
info.mRoles.push_back(roles[i]);
@@ -101,7 +101,7 @@
this, new LWOmxObserver(observer), name.c_str());
OMX_COMPONENTTYPE *handle;
- OMX_ERRORTYPE err = mMaster->makeComponentInstance(
+ OMX_ERRORTYPE err = mStore->makeComponentInstance(
name.c_str(), &OMXNodeInstance::kCallbacks,
instance.get(), &handle);
@@ -208,7 +208,7 @@
OMX_ERRORTYPE err = OMX_ErrorNone;
if (instance->handle() != NULL) {
- err = mMaster->destroyComponentInstance(
+ err = mStore->destroyComponentInstance(
static_cast<OMX_COMPONENTTYPE*>(instance->handle()));
}
return StatusFromOMXError(err);
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 67f478e..b5c1166 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -54,6 +54,24 @@
});
}
+ if (!nodes.empty()) {
+ auto anyNode = nodes.cbegin();
+ std::string::const_iterator first = anyNode->cbegin();
+ std::string::const_iterator last = anyNode->cend();
+ for (const std::string &name : nodes) {
+ std::string::const_iterator it1 = first;
+ for (std::string::const_iterator it2 = name.cbegin();
+ it1 != last && it2 != name.cend() && tolower(*it1) == tolower(*it2);
+ ++it1, ++it2) {
+ }
+ last = it1;
+ }
+ mPrefix = std::string(first, last);
+ LOG(INFO) << "omx common prefix: '" << mPrefix.c_str() << "'";
+ } else {
+ LOG(INFO) << "omx common prefix: no nodes";
+ }
+
MediaCodecsXmlParser parser;
parser.parseXmlFilesInSearchDirs(xmlNames, searchDirs);
if (profilingResultsXmlPath != nullptr) {
@@ -112,8 +130,6 @@
mRoleList[i] = std::move(role);
++i;
}
-
- mPrefix = parser.getCommonPrefix();
}
OmxStore::~OmxStore() {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index 7d217eb..f7bf3ba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -67,7 +67,7 @@
int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
Message tMsg;
tMsg.type = Message::Type::EVENT;
- tMsg.fence = native_handle_create(0, 0);
+ tMsg.fence.setTo(native_handle_create(0, 0), /* shouldOwn = */ true);
tMsg.data.eventData.event = uint32_t(OMX_EventDataSpaceChanged);
tMsg.data.eventData.data1 = dataSpace;
tMsg.data.eventData.data2 = aspects;
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 78b4f19..7c372cd 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -7,7 +7,7 @@
double_loadable: true,
srcs: [
- "OMXMaster.cpp",
+ "OMXStore.cpp",
"OMXNodeInstance.cpp",
"OMXUtils.cpp",
"OmxGraphicBufferSource.cpp",
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
deleted file mode 100644
index 094b1f5..0000000
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "OMXMaster"
-#include <android-base/properties.h>
-#include <utils/Log.h>
-
-#include <media/stagefright/omx/OMXMaster.h>
-#include <media/stagefright/omx/SoftOMXPlugin.h>
-#include <media/stagefright/foundation/ADebug.h>
-
-#include <vndksupport/linker.h>
-
-#include <dlfcn.h>
-#include <fcntl.h>
-
-namespace android {
-
-OMXMaster::OMXMaster() {
-
- pid_t pid = getpid();
- char filename[20];
- snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
- int fd = open(filename, O_RDONLY);
- if (fd < 0) {
- ALOGW("couldn't determine process name");
- strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
- } else {
- ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
- if (len < 2) {
- ALOGW("couldn't determine process name");
- strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
- } else {
- // the name is newline terminated, so erase the newline
- mProcessName[len - 1] = 0;
- }
- close(fd);
- }
-
- addVendorPlugin();
- addPlatformPlugin();
-}
-
-OMXMaster::~OMXMaster() {
- clearPlugins();
-}
-
-void OMXMaster::addVendorPlugin() {
- addPlugin("libstagefrighthw.so");
-}
-
-void OMXMaster::addPlatformPlugin() {
- addPlugin("libstagefright_softomx_plugin.so");
-}
-
-void OMXMaster::addPlugin(const char *libname) {
- if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
- return;
- }
-
- void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
-
- if (libHandle == NULL) {
- return;
- }
-
- typedef OMXPluginBase *(*CreateOMXPluginFunc)();
- CreateOMXPluginFunc createOMXPlugin =
- (CreateOMXPluginFunc)dlsym(
- libHandle, "createOMXPlugin");
- if (!createOMXPlugin)
- createOMXPlugin = (CreateOMXPluginFunc)dlsym(
- libHandle, "_ZN7android15createOMXPluginEv");
-
- OMXPluginBase *plugin = nullptr;
- if (createOMXPlugin) {
- plugin = (*createOMXPlugin)();
- }
-
- if (plugin) {
- mPlugins.push_back({ plugin, libHandle });
- addPlugin(plugin);
- } else {
- android_unload_sphal_library(libHandle);
- }
-}
-
-void OMXMaster::addPlugin(OMXPluginBase *plugin) {
- Mutex::Autolock autoLock(mLock);
-
- OMX_U32 index = 0;
-
- char name[128];
- OMX_ERRORTYPE err;
- while ((err = plugin->enumerateComponents(
- name, sizeof(name), index++)) == OMX_ErrorNone) {
- String8 name8(name);
-
- if (mPluginByComponentName.indexOfKey(name8) >= 0) {
- ALOGE("A component of name '%s' already exists, ignoring this one.",
- name8.string());
-
- continue;
- }
-
- mPluginByComponentName.add(name8, plugin);
- }
-
- if (err != OMX_ErrorNoMore) {
- ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
- "components", err, mPluginByComponentName.size());
- }
-}
-
-void OMXMaster::clearPlugins() {
- Mutex::Autolock autoLock(mLock);
-
- mPluginByComponentName.clear();
- mPluginByInstance.clear();
-
- typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
- for (const Plugin &plugin : mPlugins) {
- DestroyOMXPluginFunc destroyOMXPlugin =
- (DestroyOMXPluginFunc)dlsym(
- plugin.mLibHandle, "destroyOMXPlugin");
- if (destroyOMXPlugin)
- destroyOMXPlugin(plugin.mOmx);
- else
- delete plugin.mOmx;
-
- android_unload_sphal_library(plugin.mLibHandle);
- }
-
- mPlugins.clear();
-}
-
-OMX_ERRORTYPE OMXMaster::makeComponentInstance(
- const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component) {
- ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
- Mutex::Autolock autoLock(mLock);
-
- *component = NULL;
-
- ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
- if (index < 0) {
- return OMX_ErrorInvalidComponentName;
- }
-
- OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
- OMX_ERRORTYPE err =
- plugin->makeComponentInstance(name, callbacks, appData, component);
-
- if (err != OMX_ErrorNone) {
- return err;
- }
-
- mPluginByInstance.add(*component, plugin);
-
- return err;
-}
-
-OMX_ERRORTYPE OMXMaster::destroyComponentInstance(
- OMX_COMPONENTTYPE *component) {
- Mutex::Autolock autoLock(mLock);
-
- ssize_t index = mPluginByInstance.indexOfKey(component);
-
- if (index < 0) {
- return OMX_ErrorBadParameter;
- }
-
- OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
- mPluginByInstance.removeItemsAt(index);
-
- return plugin->destroyComponentInstance(component);
-}
-
-OMX_ERRORTYPE OMXMaster::enumerateComponents(
- OMX_STRING name,
- size_t size,
- OMX_U32 index) {
- Mutex::Autolock autoLock(mLock);
-
- size_t numComponents = mPluginByComponentName.size();
-
- if (index >= numComponents) {
- return OMX_ErrorNoMore;
- }
-
- const String8 &name8 = mPluginByComponentName.keyAt(index);
-
- CHECK(size >= 1 + name8.size());
- strcpy(name, name8.string());
-
- return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OMXMaster::getRolesOfComponent(
- const char *name,
- Vector<String8> *roles) {
- Mutex::Autolock autoLock(mLock);
-
- roles->clear();
-
- ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
- if (index < 0) {
- return OMX_ErrorInvalidComponentName;
- }
-
- OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
- return plugin->getRolesOfComponent(name, roles);
-}
-
-} // namespace android
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5b2f6de..bebd516 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -22,7 +22,7 @@
#include <inttypes.h>
#include <media/stagefright/omx/OMXNodeInstance.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
#include <media/stagefright/omx/OMXUtils.h>
#include <android/IOMXBufferSource.h>
@@ -214,6 +214,80 @@
}
}
+template <typename T>
+inline static T *asSetting(void *setting /* nonnull */, size_t size) {
+ // no need to check internally stored size as that is outside of sanitizing
+ // the underlying buffer's size is the one passed into this method.
+ if (size < sizeof(T)) {
+ return nullptr;
+ }
+
+ return (T *)setting;
+}
+
+inline static void sanitize(OMX_CONFIG_CONTAINERNODEIDTYPE *s) {
+ s->cNodeName = 0;
+}
+
+inline static void sanitize(OMX_CONFIG_METADATAITEMTYPE *s) {
+ s->sLanguageCountry = 0;
+}
+
+inline static void sanitize(OMX_PARAM_PORTDEFINITIONTYPE *s) {
+ switch (s->eDomain) {
+ case OMX_PortDomainAudio:
+ s->format.audio.cMIMEType = 0;
+ break;
+ case OMX_PortDomainVideo:
+ s->format.video.cMIMEType = 0;
+ break;
+ case OMX_PortDomainImage:
+ s->format.image.cMIMEType = 0;
+ break;
+ default:
+ break;
+ }
+}
+
+template <typename T>
+static bool sanitizeAs(void *setting, size_t size) {
+ T *s = asSetting<T>(setting, size);
+ if (s) {
+ sanitize(s);
+ return true;
+ }
+ return false;
+}
+
+static void sanitizeSetting(OMX_INDEXTYPE index, void *setting, size_t size) {
+ if (size < 8 || setting == nullptr) {
+ return;
+ }
+
+ bool ok = true;
+
+ // there are 3 standard OMX settings that contain pointer members
+ switch ((OMX_U32)index) {
+ case OMX_IndexConfigCounterNodeID:
+ ok = sanitizeAs<OMX_CONFIG_CONTAINERNODEIDTYPE>(setting, size);
+ break;
+ case OMX_IndexConfigMetadataItem:
+ ok = sanitizeAs<OMX_CONFIG_METADATAITEMTYPE>(setting, size);
+ break;
+ case OMX_IndexParamPortDefinition:
+ ok = sanitizeAs<OMX_PARAM_PORTDEFINITIONTYPE>(setting, size);
+ break;
+ }
+
+ if (!ok) {
+ // cannot effectively sanitize - we should not be here as IOMX.cpp
+ // should guard against size being too small. Nonetheless, log and
+ // clear result.
+ android_errorWriteLog(0x534e4554, "120781925");
+ memset(setting, 0, size);
+ }
+}
+
////////////////////////////////////////////////////////////////////////////////
// This provides the underlying Thread used by CallbackDispatcher.
@@ -608,7 +682,7 @@
}
status_t OMXNodeInstance::getParameter(
- OMX_INDEXTYPE index, void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -625,6 +699,7 @@
if (err != OMX_ErrorNoMore) {
CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
}
+ sanitizeSetting(index, params, size);
return StatusFromOMXError(err);
}
@@ -650,11 +725,12 @@
OMX_ERRORTYPE err = OMX_SetParameter(
mHandle, index, const_cast<void *>(params));
CLOG_IF_ERROR(setParameter, err, "%s(%#x)", asString(extIndex), index);
+ sanitizeSetting(index, const_cast<void *>(params), size);
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getConfig(
- OMX_INDEXTYPE index, void *params, size_t /* size */) {
+ OMX_INDEXTYPE index, void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -671,6 +747,8 @@
if (err != OMX_ErrorNoMore) {
CLOG_IF_ERROR(getConfig, err, "%s(%#x)", asString(extIndex), index);
}
+
+ sanitizeSetting(index, params, size);
return StatusFromOMXError(err);
}
@@ -692,6 +770,7 @@
OMX_ERRORTYPE err = OMX_SetConfig(
mHandle, index, const_cast<void *>(params));
CLOG_IF_ERROR(setConfig, err, "%s(%#x)", asString(extIndex), index);
+ sanitizeSetting(index, const_cast<void *>(params), size);
return StatusFromOMXError(err);
}
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
new file mode 100644
index 0000000..e8fee42
--- /dev/null
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXStore"
+#include <android-base/properties.h>
+#include <utils/Log.h>
+
+#include <media/stagefright/omx/OMXStore.h>
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <vndksupport/linker.h>
+
+#include <dlfcn.h>
+#include <fcntl.h>
+
+namespace android {
+
+OMXStore::OMXStore() {
+
+ pid_t pid = getpid();
+ char filename[20];
+ snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
+ int fd = open(filename, O_RDONLY);
+ if (fd < 0) {
+ ALOGW("couldn't determine process name");
+ strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+ } else {
+ ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
+ if (len < 2) {
+ ALOGW("couldn't determine process name");
+ strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+ } else {
+ // the name is newline terminated, so erase the newline
+ mProcessName[len - 1] = 0;
+ }
+ close(fd);
+ }
+
+ addVendorPlugin();
+ addPlatformPlugin();
+}
+
+OMXStore::~OMXStore() {
+ clearPlugins();
+}
+
+void OMXStore::addVendorPlugin() {
+ addPlugin("libstagefrighthw.so");
+}
+
+void OMXStore::addPlatformPlugin() {
+ addPlugin("libstagefright_softomx_plugin.so");
+}
+
+void OMXStore::addPlugin(const char *libname) {
+ if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
+ return;
+ }
+
+ void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
+
+ if (libHandle == NULL) {
+ return;
+ }
+
+ typedef OMXPluginBase *(*CreateOMXPluginFunc)();
+ CreateOMXPluginFunc createOMXPlugin =
+ (CreateOMXPluginFunc)dlsym(
+ libHandle, "createOMXPlugin");
+ if (!createOMXPlugin)
+ createOMXPlugin = (CreateOMXPluginFunc)dlsym(
+ libHandle, "_ZN7android15createOMXPluginEv");
+
+ OMXPluginBase *plugin = nullptr;
+ if (createOMXPlugin) {
+ plugin = (*createOMXPlugin)();
+ }
+
+ if (plugin) {
+ mPlugins.push_back({ plugin, libHandle });
+ addPlugin(plugin);
+ } else {
+ android_unload_sphal_library(libHandle);
+ }
+}
+
+void OMXStore::addPlugin(OMXPluginBase *plugin) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_U32 index = 0;
+
+ char name[128];
+ OMX_ERRORTYPE err;
+ while ((err = plugin->enumerateComponents(
+ name, sizeof(name), index++)) == OMX_ErrorNone) {
+ String8 name8(name);
+
+ if (mPluginByComponentName.indexOfKey(name8) >= 0) {
+ ALOGE("A component of name '%s' already exists, ignoring this one.",
+ name8.string());
+
+ continue;
+ }
+
+ mPluginByComponentName.add(name8, plugin);
+ }
+
+ if (err != OMX_ErrorNoMore) {
+ ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
+ "components", err, mPluginByComponentName.size());
+ }
+}
+
+void OMXStore::clearPlugins() {
+ Mutex::Autolock autoLock(mLock);
+
+ mPluginByComponentName.clear();
+ mPluginByInstance.clear();
+
+ typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
+ for (const Plugin &plugin : mPlugins) {
+ DestroyOMXPluginFunc destroyOMXPlugin =
+ (DestroyOMXPluginFunc)dlsym(
+ plugin.mLibHandle, "destroyOMXPlugin");
+ if (destroyOMXPlugin)
+ destroyOMXPlugin(plugin.mOmx);
+ else
+ delete plugin.mOmx;
+
+ android_unload_sphal_library(plugin.mLibHandle);
+ }
+
+ mPlugins.clear();
+}
+
+OMX_ERRORTYPE OMXStore::makeComponentInstance(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
+ Mutex::Autolock autoLock(mLock);
+
+ *component = NULL;
+
+ ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+ if (index < 0) {
+ return OMX_ErrorInvalidComponentName;
+ }
+
+ OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+ OMX_ERRORTYPE err =
+ plugin->makeComponentInstance(name, callbacks, appData, component);
+
+ if (err != OMX_ErrorNone) {
+ return err;
+ }
+
+ mPluginByInstance.add(*component, plugin);
+
+ return err;
+}
+
+OMX_ERRORTYPE OMXStore::destroyComponentInstance(
+ OMX_COMPONENTTYPE *component) {
+ Mutex::Autolock autoLock(mLock);
+
+ ssize_t index = mPluginByInstance.indexOfKey(component);
+
+ if (index < 0) {
+ return OMX_ErrorBadParameter;
+ }
+
+ OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
+ mPluginByInstance.removeItemsAt(index);
+
+ return plugin->destroyComponentInstance(component);
+}
+
+OMX_ERRORTYPE OMXStore::enumerateComponents(
+ OMX_STRING name,
+ size_t size,
+ OMX_U32 index) {
+ Mutex::Autolock autoLock(mLock);
+
+ size_t numComponents = mPluginByComponentName.size();
+
+ if (index >= numComponents) {
+ return OMX_ErrorNoMore;
+ }
+
+ const String8 &name8 = mPluginByComponentName.keyAt(index);
+
+ CHECK(size >= 1 + name8.size());
+ strcpy(name, name8.string());
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMXStore::getRolesOfComponent(
+ const char *name,
+ Vector<String8> *roles) {
+ Mutex::Autolock autoLock(mLock);
+
+ roles->clear();
+
+ ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+ if (index < 0) {
+ return OMX_ErrorInvalidComponentName;
+ }
+
+ OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+ return plugin->getRolesOfComponent(name, roles);
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 1b8493a..49b2dec 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -172,6 +172,8 @@
"audio_decoder.ac4", "audio_encoder.ac4" },
{ MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
"image_decoder.heic", "image_encoder.heic" },
+ { MEDIA_MIMETYPE_IMAGE_AVIF,
+ "image_decoder.avif", "image_encoder.avif" },
};
static const size_t kNumMimeToRole =
@@ -354,7 +356,7 @@
DescribeColorFormat2Params describeParams;
InitOMXParams(&describeParams);
describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
- // reasonable dummy values
+ // reasonable initial values (that will be overwritten)
describeParams.nFrameWidth = 128;
describeParams.nFrameHeight = 128;
describeParams.nStride = 128;
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index ddb459f..44415aa 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -17,6 +17,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SimpleSoftOMXComponent"
#include <utils/Log.h>
+#include <OMX_Core.h>
+#include <OMX_Audio.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AudioExt.h>
#include <media/stagefright/omx/SimpleSoftOMXComponent.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -74,7 +78,7 @@
OMX_U32 portIndex;
- switch (index) {
+ switch ((int)index) {
case OMX_IndexParamPortDefinition:
{
const OMX_PARAM_PORTDEFINITIONTYPE *portDefs =
@@ -108,6 +112,19 @@
break;
}
+ case OMX_IndexParamAudioAndroidAacDrcPresentation:
+ {
+ if (mState == OMX_StateInvalid) {
+ return false;
+ }
+ const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
+ (const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
+ if (!isValidOMXParam(aacPresParams)) {
+ return false;
+ }
+ return true;
+ }
+
default:
return false;
}
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index 5a46b26..84ae511 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -27,7 +27,7 @@
namespace android {
-struct OMXMaster;
+struct OMXStore;
struct OMXNodeInstance;
namespace hardware {
@@ -51,7 +51,7 @@
using ::android::sp;
using ::android::wp;
-using ::android::OMXMaster;
+using ::android::OMXStore;
using ::android::OMXNodeInstance;
struct Omx : public IOmx, public hidl_death_recipient {
@@ -73,7 +73,7 @@
status_t freeNode(sp<OMXNodeInstance> const& instance);
protected:
- OMXMaster* mMaster;
+ OMXStore* mStore;
Mutex mLock;
KeyedVector<wp<IBase>, sp<OMXNodeInstance> > mLiveNodes;
KeyedVector<OMXNodeInstance*, wp<IBase> > mNode2Observer;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
deleted file mode 100644
index 93eaef1..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_MASTER_H_
-
-#define OMX_MASTER_H_
-
-#include <media/hardware/OMXPluginBase.h>
-
-#include <utils/threads.h>
-#include <utils/KeyedVector.h>
-#include <utils/List.h>
-#include <utils/String8.h>
-
-namespace android {
-
-struct OMXMaster : public OMXPluginBase {
- OMXMaster();
- virtual ~OMXMaster();
-
- virtual OMX_ERRORTYPE makeComponentInstance(
- const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component);
-
- virtual OMX_ERRORTYPE destroyComponentInstance(
- OMX_COMPONENTTYPE *component);
-
- virtual OMX_ERRORTYPE enumerateComponents(
- OMX_STRING name,
- size_t size,
- OMX_U32 index);
-
- virtual OMX_ERRORTYPE getRolesOfComponent(
- const char *name,
- Vector<String8> *roles);
-
-private:
- char mProcessName[16];
- Mutex mLock;
- struct Plugin {
- OMXPluginBase *mOmx;
- void *mLibHandle;
- };
- List<Plugin> mPlugins;
- KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
- KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
-
- void addVendorPlugin();
- void addPlatformPlugin();
- void addPlugin(const char *libname);
- void addPlugin(OMXPluginBase *plugin);
- void clearPlugins();
-
- OMXMaster(const OMXMaster &);
- OMXMaster &operator=(const OMXMaster &);
-};
-
-} // namespace android
-
-#endif // OMX_MASTER_H_
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
index a761ef6..5f32c9e 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
@@ -33,7 +33,7 @@
class GraphicBuffer;
class IOMXBufferSource;
class IOMXObserver;
-struct OMXMaster;
+struct OMXStore;
class OMXBuffer;
using IHidlMemory = hidl::memory::V1_0::IMemory;
using hardware::media::omx::V1_0::implementation::Omx;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
new file mode 100644
index 0000000..5d6c3ed
--- /dev/null
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMX_STORE_H_
+
+#define OMX_STORE_H_
+
+#include <media/hardware/OMXPluginBase.h>
+
+#include <utils/threads.h>
+#include <utils/KeyedVector.h>
+#include <utils/List.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct OMXStore : public OMXPluginBase {
+ OMXStore();
+ virtual ~OMXStore();
+
+ virtual OMX_ERRORTYPE makeComponentInstance(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ virtual OMX_ERRORTYPE destroyComponentInstance(
+ OMX_COMPONENTTYPE *component);
+
+ virtual OMX_ERRORTYPE enumerateComponents(
+ OMX_STRING name,
+ size_t size,
+ OMX_U32 index);
+
+ virtual OMX_ERRORTYPE getRolesOfComponent(
+ const char *name,
+ Vector<String8> *roles);
+
+private:
+ char mProcessName[16];
+ Mutex mLock;
+ struct Plugin {
+ OMXPluginBase *mOmx;
+ void *mLibHandle;
+ };
+ List<Plugin> mPlugins;
+ KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
+ KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
+
+ void addVendorPlugin();
+ void addPlatformPlugin();
+ void addPlugin(const char *libname);
+ void addPlugin(OMXPluginBase *plugin);
+ void clearPlugins();
+
+ OMXStore(const OMXStore &);
+ OMXStore &operator=(const OMXStore &);
+};
+
+} // namespace android
+
+#endif // OMX_STORE_H_
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 7893148..039991c 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -211,6 +211,17 @@
status_t err = getPortDefinition(portIndex, &def);
EXPECT_SUCCESS(err, "getPortDefinition");
+ switch (def.eDomain) {
+ case OMX_PortDomainVideo:
+ EXPECT(def.format.video.cMIMEType == 0, "portDefinition video MIME");
+ break;
+ case OMX_PortDomainAudio:
+ EXPECT(def.format.audio.cMIMEType == 0, "portDefinition audio MIME");
+ break;
+ default:
+ break;
+ }
+
for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
Buffer buffer;
buffer.mFlags = 0;
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
new file mode 100644
index 0000000..c17f84e
--- /dev/null
+++ b/media/libstagefright/renderfright/Android.bp
@@ -0,0 +1,111 @@
+cc_defaults {
+ name: "renderfright_defaults",
+ cflags: [
+ "-DLOG_TAG=\"renderfright\"",
+ "-Wall",
+ "-Werror",
+ "-Wthread-safety",
+ "-Wunused",
+ "-Wunreachable-code",
+ ],
+}
+
+cc_defaults {
+ name: "librenderfright_defaults",
+ defaults: ["renderfright_defaults"],
+ cflags: [
+ "-DGL_GLEXT_PROTOTYPES",
+ "-DEGL_EGLEXT_PROTOTYPES",
+ ],
+ shared_libs: [
+ "libbase",
+ "libcutils",
+ "libEGL",
+ "libGLESv1_CM",
+ "libGLESv2",
+ "libgui",
+ "liblog",
+ "libnativewindow",
+ "libprocessgroup",
+ "libsync",
+ "libui",
+ "libutils",
+ ],
+ local_include_dirs: ["include"],
+ export_include_dirs: ["include"],
+}
+
+filegroup {
+ name: "librenderfright_sources",
+ srcs: [
+ "Description.cpp",
+ "Mesh.cpp",
+ "RenderEngine.cpp",
+ "Texture.cpp",
+ ],
+}
+
+filegroup {
+ name: "librenderfright_gl_sources",
+ srcs: [
+ "gl/GLESRenderEngine.cpp",
+ "gl/GLExtensions.cpp",
+ "gl/GLFramebuffer.cpp",
+ "gl/GLImage.cpp",
+ "gl/GLShadowTexture.cpp",
+ "gl/GLShadowVertexGenerator.cpp",
+ "gl/GLSkiaShadowPort.cpp",
+ "gl/GLVertexBuffer.cpp",
+ "gl/ImageManager.cpp",
+ "gl/Program.cpp",
+ "gl/ProgramCache.cpp",
+ "gl/filters/BlurFilter.cpp",
+ "gl/filters/GenericProgram.cpp",
+ ],
+}
+
+filegroup {
+ name: "librenderfright_threaded_sources",
+ srcs: [
+ "threaded/RenderEngineThreaded.cpp",
+ ],
+}
+
+cc_library_static {
+ name: "librenderfright",
+ defaults: ["librenderfright_defaults"],
+ vendor_available: true,
+ vndk: {
+ enabled: true,
+ },
+ double_loadable: true,
+ clang: true,
+ cflags: [
+ "-fvisibility=hidden",
+ "-Werror=format",
+ ],
+ srcs: [
+ ":librenderfright_sources",
+ ":librenderfright_gl_sources",
+ ":librenderfright_threaded_sources",
+ ],
+ lto: {
+ thin: true,
+ },
+}
+
+cc_library_static {
+ name: "librenderfright_mocks",
+ defaults: ["librenderfright_defaults"],
+ srcs: [
+ "mock/Framebuffer.cpp",
+ "mock/Image.cpp",
+ "mock/RenderEngine.cpp",
+ ],
+ static_libs: [
+ "libgtest",
+ "libgmock",
+ ],
+ local_include_dirs: ["include"],
+ export_include_dirs: ["include"],
+}
diff --git a/media/libstagefright/renderfright/Description.cpp b/media/libstagefright/renderfright/Description.cpp
new file mode 100644
index 0000000..b9cea10
--- /dev/null
+++ b/media/libstagefright/renderfright/Description.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/private/Description.h>
+
+#include <stdint.h>
+
+#include <utils/TypeHelpers.h>
+
+namespace android {
+namespace renderengine {
+
+Description::TransferFunction Description::dataSpaceToTransferFunction(ui::Dataspace dataSpace) {
+ ui::Dataspace transfer = static_cast<ui::Dataspace>(dataSpace & ui::Dataspace::TRANSFER_MASK);
+ switch (transfer) {
+ case ui::Dataspace::TRANSFER_ST2084:
+ return Description::TransferFunction::ST2084;
+ case ui::Dataspace::TRANSFER_HLG:
+ return Description::TransferFunction::HLG;
+ case ui::Dataspace::TRANSFER_LINEAR:
+ return Description::TransferFunction::LINEAR;
+ default:
+ return Description::TransferFunction::SRGB;
+ }
+}
+
+bool Description::hasInputTransformMatrix() const {
+ const mat4 identity;
+ return inputTransformMatrix != identity;
+}
+
+bool Description::hasOutputTransformMatrix() const {
+ const mat4 identity;
+ return outputTransformMatrix != identity;
+}
+
+bool Description::hasColorMatrix() const {
+ const mat4 identity;
+ return colorMatrix != identity;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Mesh.cpp b/media/libstagefright/renderfright/Mesh.cpp
new file mode 100644
index 0000000..ed2f45f
--- /dev/null
+++ b/media/libstagefright/renderfright/Mesh.cpp
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace renderengine {
+
+// Allocates interleaved vertex storage for `vertexCount` vertices, where each
+// vertex packs position, texture, crop, shadow-color and shadow-params
+// attributes back to back (total floats per vertex = mStride). On any
+// size-overflow the mesh degrades to a harmless 1-float buffer with all
+// counts zeroed rather than over/under-allocating.
+Mesh::Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+           size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize,
+           size_t indexCount)
+      : mVertexCount(vertexCount),
+        mVertexSize(vertexSize),
+        mTexCoordsSize(texCoordSize),
+        mCropCoordsSize(cropCoordsSize),
+        mShadowColorSize(shadowColorSize),
+        mShadowParamsSize(shadowParamsSize),
+        mPrimitive(primitive),
+        mIndexCount(indexCount) {
+    // Empty mesh: keep one dummy float so data() pointers stay valid.
+    if (vertexCount == 0) {
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mStride = 0;
+        return;
+    }
+    size_t stride = vertexSize + texCoordSize + cropCoordsSize + shadowColorSize + shadowParamsSize;
+    size_t remainder = (stride * vertexCount) / vertexCount;
+    // Since all of the input parameters are unsigned, if stride is less than
+    // either vertexSize or texCoordSize, it must have overflowed. remainder
+    // will be equal to stride as long as stride * vertexCount doesn't overflow.
+    if ((stride < vertexSize) || (remainder != stride)) {
+        ALOGE("Overflow in Mesh(..., %zu, %zu, %zu, %zu, %zu, %zu)", vertexCount, vertexSize,
+              texCoordSize, cropCoordsSize, shadowColorSize, shadowParamsSize);
+        // Reset to the same safe 1-float state as the empty mesh.
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mVertexCount = 0;
+        mVertexSize = 0;
+        mTexCoordsSize = 0;
+        mCropCoordsSize = 0;
+        mShadowColorSize = 0;
+        mShadowParamsSize = 0;
+        mStride = 0;
+        return;
+    }
+
+    mVertices.resize(stride * vertexCount);
+    mStride = stride;
+    mIndices.resize(indexCount);
+}
+
+Mesh::Primitive Mesh::getPrimitive() const {
+    return mPrimitive;
+}
+
+// Attribute accessors: the vertex buffer is interleaved, so each attribute's
+// pointer is the buffer base plus the sum of the sizes of the attributes that
+// precede it within one vertex (position, tex, crop, shadow color, shadow
+// params — in that order).
+float const* Mesh::getPositions() const {
+    return mVertices.data();
+}
+float* Mesh::getPositions() {
+    return mVertices.data();
+}
+
+float const* Mesh::getTexCoords() const {
+    return mVertices.data() + mVertexSize;
+}
+float* Mesh::getTexCoords() {
+    return mVertices.data() + mVertexSize;
+}
+
+float const* Mesh::getCropCoords() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+float* Mesh::getCropCoords() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+
+float const* Mesh::getShadowColor() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+float* Mesh::getShadowColor() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+
+float const* Mesh::getShadowParams() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+float* Mesh::getShadowParams() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+
+uint16_t const* Mesh::getIndices() const {
+    return mIndices.data();
+}
+
+uint16_t* Mesh::getIndices() {
+    return mIndices.data();
+}
+
+// Size accessors, all in units of floats (see getByteStride for bytes).
+size_t Mesh::getVertexCount() const {
+    return mVertexCount;
+}
+
+size_t Mesh::getVertexSize() const {
+    return mVertexSize;
+}
+
+size_t Mesh::getTexCoordsSize() const {
+    return mTexCoordsSize;
+}
+
+size_t Mesh::getShadowColorSize() const {
+    return mShadowColorSize;
+}
+
+size_t Mesh::getShadowParamsSize() const {
+    return mShadowParamsSize;
+}
+
+// Stride between consecutive vertices, in bytes.
+size_t Mesh::getByteStride() const {
+    return mStride * sizeof(float);
+}
+
+// Stride between consecutive vertices, in floats.
+size_t Mesh::getStride() const {
+    return mStride;
+}
+
+size_t Mesh::getIndexCount() const {
+    return mIndexCount;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/RenderEngine.cpp b/media/libstagefright/renderfright/RenderEngine.cpp
new file mode 100644
index 0000000..c3fbb60
--- /dev/null
+++ b/media/libstagefright/renderfright/RenderEngine.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/RenderEngine.h>
+
+#include <cutils/properties.h>
+#include <log/log.h>
+#include <private/gui/SyncFeatures.h>
+#include "gl/GLESRenderEngine.h"
+#include "threaded/RenderEngineThreaded.h"
+
+namespace android {
+namespace renderengine {
+
+// Factory entry point: instantiates either the plain GLES backend or the
+// threaded wrapper around it, as requested by the caller. The choice can be
+// overridden at runtime via the debug.renderengine.backend system property
+// ("gles" or "threaded"), which takes precedence over args.
+std::unique_ptr<RenderEngine> RenderEngine::create(const RenderEngineCreationArgs& args) {
+    RenderEngineType renderEngineType = args.renderEngineType;
+
+    // Keep the ability to override by PROPERTIES:
+    char prop[PROPERTY_VALUE_MAX];
+    property_get(PROPERTY_DEBUG_RENDERENGINE_BACKEND, prop, "");
+    if (strcmp(prop, "gles") == 0) {
+        renderEngineType = RenderEngineType::GLES;
+    }
+    if (strcmp(prop, "threaded") == 0) {
+        renderEngineType = RenderEngineType::THREADED;
+    }
+
+    switch (renderEngineType) {
+        case RenderEngineType::THREADED:
+            // The factory lambda runs on the render thread, so the GL context
+            // ends up owned by that thread.
+            ALOGD("Threaded RenderEngine with GLES Backend");
+            return renderengine::threaded::RenderEngineThreaded::create(
+                    [args]() { return android::renderengine::gl::GLESRenderEngine::create(args); });
+        case RenderEngineType::GLES:
+        default:
+            ALOGD("RenderEngine with GLES Backend");
+            return renderengine::gl::GLESRenderEngine::create(args);
+    }
+}
+
+RenderEngine::~RenderEngine() = default;
+
+namespace impl {
+
+// Base implementation: just records the creation args for subclasses to read.
+RenderEngine::RenderEngine(const RenderEngineCreationArgs& args) : mArgs(args) {}
+
+RenderEngine::~RenderEngine() = default;
+
+// Both sync queries delegate to the process-wide SyncFeatures singleton,
+// which probes the EGL sync extensions once.
+bool RenderEngine::useNativeFenceSync() const {
+    return SyncFeatures::getInstance().useNativeFenceSync();
+}
+
+bool RenderEngine::useWaitSync() const {
+    return SyncFeatures::getInstance().useWaitSync();
+}
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Texture.cpp b/media/libstagefright/renderfright/Texture.cpp
new file mode 100644
index 0000000..154cde8
--- /dev/null
+++ b/media/libstagefright/renderfright/Texture.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Texture.h>
+
+namespace android {
+namespace renderengine {
+
+// Default texture: no GL name, 2D target, zero dimensions, nearest filtering.
+Texture::Texture()
+      : mTextureName(0), mTextureTarget(TEXTURE_2D), mWidth(0), mHeight(0), mFiltering(false) {}
+
+Texture::Texture(Target textureTarget, uint32_t textureName)
+      : mTextureName(textureName),
+        mTextureTarget(textureTarget),
+        mWidth(0),
+        mHeight(0),
+        mFiltering(false) {}
+
+// Rebinds this wrapper to a different GL texture name/target; dimensions,
+// matrix and filtering are intentionally left untouched.
+void Texture::init(Target textureTarget, uint32_t textureName) {
+    mTextureName = textureName;
+    mTextureTarget = textureTarget;
+}
+
+// Does not delete the underlying GL texture; ownership of the GL name stays
+// with the caller.
+Texture::~Texture() {}
+
+// `matrix` must point to 16 floats (column-major, per the mat4 ctor).
+void Texture::setMatrix(float const* matrix) {
+    mTextureMatrix = mat4(matrix);
+}
+
+void Texture::setFiltering(bool enabled) {
+    mFiltering = enabled;
+}
+
+void Texture::setDimensions(size_t width, size_t height) {
+    mWidth = width;
+    mHeight = height;
+}
+
+uint32_t Texture::getTextureName() const {
+    return mTextureName;
+}
+
+uint32_t Texture::getTextureTarget() const {
+    return mTextureTarget;
+}
+
+const mat4& Texture::getMatrix() const {
+    return mTextureMatrix;
+}
+
+bool Texture::getFiltering() const {
+    return mFiltering;
+}
+
+size_t Texture::getWidth() const {
+    return mWidth;
+}
+
+size_t Texture::getHeight() const {
+    return mHeight;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
new file mode 100644
index 0000000..824bdd9
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
@@ -0,0 +1,1772 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <sched.h>
+#include <cmath>
+#include <fstream>
+#include <sstream>
+#include <unordered_set>
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <android-base/stringprintf.h>
+#include <cutils/compiler.h>
+#include <cutils/properties.h>
+#include <gui/DebugEGLImageTracker.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/Texture.h>
+#include <renderengine/private/Description.h>
+#include <sync/sync.h>
+#include <ui/ColorSpace.h>
+#include <ui/DebugUtils.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <utils/KeyedVector.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+#include "GLFramebuffer.h"
+#include "GLImage.h"
+#include "GLShadowVertexGenerator.h"
+#include "Program.h"
+#include "ProgramCache.h"
+#include "filters/BlurFilter.h"
+
+bool checkGlError(const char* op, int lineNumber) {
+ bool errorFound = false;
+ GLint error = glGetError();
+ while (error != GL_NO_ERROR) {
+ errorFound = true;
+ error = glGetError();
+ ALOGV("after %s() (line # %d) glError (0x%x)\n", op, lineNumber, error);
+ }
+ return errorFound;
+}
+
+static constexpr bool outputDebugPPMs = false;
+
+// Debug helper: reads back the current framebuffer with glReadPixels and
+// writes it as a binary PPM (P6) file named "<basename>.ppm". Rows are
+// written bottom-up because GL's origin is the lower-left corner.
+void writePPM(const char* basename, GLuint width, GLuint height) {
+    ALOGV("writePPM #%s: %d x %d", basename, width, height);
+
+    std::vector<GLubyte> pixels(width * height * 4);
+    std::vector<GLubyte> outBuffer(width * height * 3);
+
+    // TODO(courtneygo): We can now have float formats, need
+    // to remove this code or update to support.
+    // Make returned pixels fit in uint32_t, one byte per component
+    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
+    if (checkGlError(__FUNCTION__, __LINE__)) {
+        return;
+    }
+
+    std::string filename(basename);
+    filename.append(".ppm");
+    std::ofstream file(filename.c_str(), std::ios::binary);
+    if (!file.is_open()) {
+        ALOGE("Unable to open file: %s", filename.c_str());
+        ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+              "surfaceflinger to write debug images");
+        return;
+    }
+
+    // PPM header: magic, width, height, max channel value.
+    file << "P6\n";
+    file << width << "\n";
+    file << height << "\n";
+    file << 255 << "\n";
+
+    // Repack RGBA rows (bottom-up) into tightly-packed RGB.
+    auto ptr = reinterpret_cast<char*>(pixels.data());
+    auto outPtr = reinterpret_cast<char*>(outBuffer.data());
+    for (int y = height - 1; y >= 0; y--) {
+        char* data = ptr + y * width * sizeof(uint32_t);
+
+        for (GLuint x = 0; x < width; x++) {
+            // Only copy R, G and B components
+            outPtr[0] = data[0];
+            outPtr[1] = data[1];
+            outPtr[2] = data[2];
+            data += sizeof(uint32_t);
+            outPtr += 3;
+        }
+    }
+    file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+}
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+using base::StringAppendF;
+using ui::Dataspace;
+
+// Runs eglChooseConfig with `attrs` and returns the first matching config
+// whose `attribute` equals `wanted`. If `attribute` is EGL_NONE the first
+// config returned by EGL is accepted as-is. Returns NAME_NOT_FOUND when no
+// config matches.
+static status_t selectConfigForAttribute(EGLDisplay dpy, EGLint const* attrs, EGLint attribute,
+                                         EGLint wanted, EGLConfig* outConfig) {
+    EGLint numConfigs = -1, n = 0;
+    eglGetConfigs(dpy, nullptr, 0, &numConfigs);
+    std::vector<EGLConfig> configs(numConfigs, EGL_NO_CONFIG_KHR);
+    eglChooseConfig(dpy, attrs, configs.data(), configs.size(), &n);
+    configs.resize(n);
+
+    if (!configs.empty()) {
+        if (attribute != EGL_NONE) {
+            // Scan for the config whose attribute matches exactly.
+            for (EGLConfig config : configs) {
+                EGLint value = 0;
+                eglGetConfigAttrib(dpy, config, attribute, &value);
+                if (wanted == value) {
+                    *outConfig = config;
+                    return NO_ERROR;
+                }
+            }
+        } else {
+            // just pick the first one
+            *outConfig = configs[0];
+            return NO_ERROR;
+        }
+    }
+
+    return NAME_NOT_FOUND;
+}
+
+// Picks an EGLConfig for the given pixel format and renderable type
+// (EGL_OPENGL_ES2/3_BIT). With a renderable type, a full attribute list is
+// built (recordable, window+pbuffer, 8-bit or 10-bit channels for
+// RGBA_1010102); with renderableType == 0 it falls back to matching only
+// EGL_NATIVE_VISUAL_ID, which is the emulator-friendly path.
+static status_t selectEGLConfig(EGLDisplay display, EGLint format, EGLint renderableType,
+                                EGLConfig* config) {
+    // select our EGLConfig. It must support EGL_RECORDABLE_ANDROID if
+    // it is to be used with WIFI displays
+    status_t err;
+    EGLint wantedAttribute;
+    EGLint wantedAttributeValue;
+
+    std::vector<EGLint> attribs;
+    if (renderableType) {
+        const ui::PixelFormat pixelFormat = static_cast<ui::PixelFormat>(format);
+        const bool is1010102 = pixelFormat == ui::PixelFormat::RGBA_1010102;
+
+        // Default to 8 bits per channel.
+        const EGLint tmpAttribs[] = {
+                EGL_RENDERABLE_TYPE,
+                renderableType,
+                EGL_RECORDABLE_ANDROID,
+                EGL_TRUE,
+                EGL_SURFACE_TYPE,
+                EGL_WINDOW_BIT | EGL_PBUFFER_BIT,
+                EGL_FRAMEBUFFER_TARGET_ANDROID,
+                EGL_TRUE,
+                EGL_RED_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_GREEN_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_BLUE_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_ALPHA_SIZE,
+                is1010102 ? 2 : 8,
+                EGL_NONE,
+        };
+        std::copy(tmpAttribs, tmpAttribs + (sizeof(tmpAttribs) / sizeof(EGLint)),
+                  std::back_inserter(attribs));
+        wantedAttribute = EGL_NONE;
+        wantedAttributeValue = EGL_NONE;
+    } else {
+        // if no renderable type specified, fallback to a simplified query
+        wantedAttribute = EGL_NATIVE_VISUAL_ID;
+        wantedAttributeValue = format;
+    }
+
+    err = selectConfigForAttribute(display, attribs.data(), wantedAttribute, wantedAttributeValue,
+                                   config);
+    if (err == NO_ERROR) {
+        // Warn if EGL handed us a software/slow config.
+        EGLint caveat;
+        if (eglGetConfigAttrib(display, *config, EGL_CONFIG_CAVEAT, &caveat))
+            ALOGW_IF(caveat == EGL_SLOW_CONFIG, "EGL_SLOW_CONFIG selected!");
+    }
+
+    return err;
+}
+
+// Performs full EGL bring-up and returns a ready GLESRenderEngine:
+// initializes the default display, picks a config (unless no-config contexts
+// are supported), creates the unprotected context plus an optional protected
+// context, makes a stub pbuffer current when surfaceless contexts are
+// unavailable, then constructs the engine for the detected GLES version.
+// Fatal (aborts) on any unrecoverable EGL failure.
+std::unique_ptr<GLESRenderEngine> GLESRenderEngine::create(const RenderEngineCreationArgs& args) {
+    // initialize EGL for the default display
+    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    if (!eglInitialize(display, nullptr, nullptr)) {
+        LOG_ALWAYS_FATAL("failed to initialize EGL");
+    }
+
+    const auto eglVersion = eglQueryString(display, EGL_VERSION);
+    if (!eglVersion) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_VERSION) failed");
+    }
+
+    const auto eglExtensions = eglQueryString(display, EGL_EXTENSIONS);
+    if (!eglExtensions) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_EXTENSIONS) failed");
+    }
+
+    GLExtensions& extensions = GLExtensions::getInstance();
+    extensions.initWithEGLStrings(eglVersion, eglExtensions);
+
+    // The code assumes that ES2 or later is available if this extension is
+    // supported.
+    EGLConfig config = EGL_NO_CONFIG;
+    if (!extensions.hasNoConfigContext()) {
+        config = chooseEglConfig(display, args.pixelFormat, /*logConfig*/ true);
+    }
+
+    bool useContextPriority =
+            extensions.hasContextPriority() && args.contextPriority == ContextPriority::HIGH;
+    // The protected context shares with nothing; the unprotected context is
+    // created second and shares with the protected one if present.
+    EGLContext protectedContext = EGL_NO_CONTEXT;
+    if (args.enableProtectedContext && extensions.hasProtectedContent()) {
+        protectedContext = createEglContext(display, config, nullptr, useContextPriority,
+                                            Protection::PROTECTED);
+        ALOGE_IF(protectedContext == EGL_NO_CONTEXT, "Can't create protected context");
+    }
+
+    EGLContext ctxt = createEglContext(display, config, protectedContext, useContextPriority,
+                                       Protection::UNPROTECTED);
+
+    // if can't create a GL context, we can only abort.
+    LOG_ALWAYS_FATAL_IF(ctxt == EGL_NO_CONTEXT, "EGLContext creation failed");
+
+    // Without EGL_KHR_surfaceless_context, a 1x1 stub pbuffer is needed to
+    // make the context current before any FBO is bound.
+    EGLSurface stub = EGL_NO_SURFACE;
+    if (!extensions.hasSurfacelessContext()) {
+        stub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                           Protection::UNPROTECTED);
+        LOG_ALWAYS_FATAL_IF(stub == EGL_NO_SURFACE, "can't create stub pbuffer");
+    }
+    EGLBoolean success = eglMakeCurrent(display, stub, stub, ctxt);
+    LOG_ALWAYS_FATAL_IF(!success, "can't make stub pbuffer current");
+    extensions.initWithGLStrings(glGetString(GL_VENDOR), glGetString(GL_RENDERER),
+                                 glGetString(GL_VERSION), glGetString(GL_EXTENSIONS));
+
+    EGLSurface protectedStub = EGL_NO_SURFACE;
+    if (protectedContext != EGL_NO_CONTEXT && !extensions.hasSurfacelessContext()) {
+        protectedStub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                                    Protection::PROTECTED);
+        ALOGE_IF(protectedStub == EGL_NO_SURFACE, "can't create protected stub pbuffer");
+    }
+
+    // now figure out what version of GL did we actually get
+    GlesVersion version = parseGlesVersion(extensions.getVersion());
+
+    LOG_ALWAYS_FATAL_IF(args.supportsBackgroundBlur && version < GLES_VERSION_3_0,
+        "Blurs require OpenGL ES 3.0. Please unset ro.surface_flinger.supports_background_blur");
+
+    // initialize the renderer while GL is current
+    std::unique_ptr<GLESRenderEngine> engine;
+    switch (version) {
+        case GLES_VERSION_1_0:
+        case GLES_VERSION_1_1:
+            LOG_ALWAYS_FATAL("SurfaceFlinger requires OpenGL ES 2.0 minimum to run.");
+            break;
+        case GLES_VERSION_2_0:
+        case GLES_VERSION_3_0:
+            engine = std::make_unique<GLESRenderEngine>(args, display, config, ctxt, stub,
+                                                        protectedContext, protectedStub);
+            break;
+    }
+
+    ALOGI("OpenGL ES informations:");
+    ALOGI("vendor    : %s", extensions.getVendor());
+    ALOGI("renderer  : %s", extensions.getRenderer());
+    ALOGI("version   : %s", extensions.getVersion());
+    ALOGI("extensions: %s", extensions.getExtensions());
+    ALOGI("GL_MAX_TEXTURE_SIZE = %zu", engine->getMaxTextureSize());
+    ALOGI("GL_MAX_VIEWPORT_DIMS = %zu", engine->getMaxViewportDims());
+
+    return engine;
+}
+
+// Chooses an EGLConfig with graceful degradation: try ES3 first, then ES2,
+// then a bare native-visual-id query (emulator path). Aborts if nothing
+// matches at all. Optionally logs the chosen config's channel sizes.
+EGLConfig GLESRenderEngine::chooseEglConfig(EGLDisplay display, int format, bool logConfig) {
+    status_t err;
+    EGLConfig config;
+
+    // First try to get an ES3 config
+    err = selectEGLConfig(display, format, EGL_OPENGL_ES3_BIT, &config);
+    if (err != NO_ERROR) {
+        // If ES3 fails, try to get an ES2 config
+        err = selectEGLConfig(display, format, EGL_OPENGL_ES2_BIT, &config);
+        if (err != NO_ERROR) {
+            // If ES2 still doesn't work, probably because we're on the emulator.
+            // try a simplified query
+            ALOGW("no suitable EGLConfig found, trying a simpler query");
+            err = selectEGLConfig(display, format, 0, &config);
+            if (err != NO_ERROR) {
+                // this EGL is too lame for android
+                LOG_ALWAYS_FATAL("no suitable EGLConfig found, giving up");
+            }
+        }
+    }
+
+    if (logConfig) {
+        // print some debugging info
+        EGLint r, g, b, a;
+        eglGetConfigAttrib(display, config, EGL_RED_SIZE, &r);
+        eglGetConfigAttrib(display, config, EGL_GREEN_SIZE, &g);
+        eglGetConfigAttrib(display, config, EGL_BLUE_SIZE, &b);
+        eglGetConfigAttrib(display, config, EGL_ALPHA_SIZE, &a);
+        ALOGI("EGL information:");
+        ALOGI("vendor    : %s", eglQueryString(display, EGL_VENDOR));
+        ALOGI("version   : %s", eglQueryString(display, EGL_VERSION));
+        ALOGI("extensions: %s", eglQueryString(display, EGL_EXTENSIONS));
+        ALOGI("Client API: %s", eglQueryString(display, EGL_CLIENT_APIS) ?: "Not Supported");
+        ALOGI("EGLSurface: %d-%d-%d-%d, config=%p", r, g, b, a, config);
+    }
+
+    return config;
+}
+
+// Constructor: runs with the unprotected context current (made current by
+// create()). Queries GL limits, primes the protected context's pixel-store
+// state, precomputes the color-management matrices, and sets up the
+// GPU-completion tracer, blur filter, image-manager thread and a 1x1
+// placeholder EGLImage used to unbind external textures.
+GLESRenderEngine::GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display,
+                                   EGLConfig config, EGLContext ctxt, EGLSurface stub,
+                                   EGLContext protectedContext, EGLSurface protectedStub)
+      : renderengine::impl::RenderEngine(args),
+        mEGLDisplay(display),
+        mEGLConfig(config),
+        mEGLContext(ctxt),
+        mStubSurface(stub),
+        mProtectedEGLContext(protectedContext),
+        mProtectedStubSurface(protectedStub),
+        mVpWidth(0),
+        mVpHeight(0),
+        mFramebufferImageCacheSize(args.imageCacheSize),
+        mUseColorManagement(args.useColorManagement) {
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &mMaxTextureSize);
+    glGetIntegerv(GL_MAX_VIEWPORT_DIMS, mMaxViewportDims);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+    glPixelStorei(GL_PACK_ALIGNMENT, 4);
+
+    // Initialize protected EGL Context.
+    if (mProtectedEGLContext != EGL_NO_CONTEXT) {
+        // Pixel-store state is per-context, so mirror it in the protected
+        // context, then switch back to the default one.
+        EGLBoolean success = eglMakeCurrent(display, mProtectedStubSurface, mProtectedStubSurface,
+                                            mProtectedEGLContext);
+        ALOGE_IF(!success, "can't make protected context current");
+        glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+        glPixelStorei(GL_PACK_ALIGNMENT, 4);
+        success = eglMakeCurrent(display, mStubSurface, mStubSurface, mEGLContext);
+        LOG_ALWAYS_FATAL_IF(!success, "can't make default context current");
+    }
+
+    // mColorBlindnessCorrection = M;
+
+    if (mUseColorManagement) {
+        const ColorSpace srgb(ColorSpace::sRGB());
+        const ColorSpace displayP3(ColorSpace::DisplayP3());
+        const ColorSpace bt2020(ColorSpace::BT2020());
+
+        // no chromatic adaptation needed since all color spaces use D65 for their white points.
+        mSrgbToXyz = mat4(srgb.getRGBtoXYZ());
+        mDisplayP3ToXyz = mat4(displayP3.getRGBtoXYZ());
+        mBt2020ToXyz = mat4(bt2020.getRGBtoXYZ());
+        mXyzToSrgb = mat4(srgb.getXYZtoRGB());
+        mXyzToDisplayP3 = mat4(displayP3.getXYZtoRGB());
+        mXyzToBt2020 = mat4(bt2020.getXYZtoRGB());
+
+        // Compute sRGB to Display P3 and BT2020 transform matrix.
+        // NOTE: For now, we are limiting output wide color space support to
+        // Display-P3 and BT2020 only.
+        mSrgbToDisplayP3 = mXyzToDisplayP3 * mSrgbToXyz;
+        mSrgbToBt2020 = mXyzToBt2020 * mSrgbToXyz;
+
+        // Compute Display P3 to sRGB and BT2020 transform matrix.
+        mDisplayP3ToSrgb = mXyzToSrgb * mDisplayP3ToXyz;
+        mDisplayP3ToBt2020 = mXyzToBt2020 * mDisplayP3ToXyz;
+
+        // Compute BT2020 to sRGB and Display P3 transform matrix
+        mBt2020ToSrgb = mXyzToSrgb * mBt2020ToXyz;
+        mBt2020ToDisplayP3 = mXyzToDisplayP3 * mBt2020ToXyz;
+    }
+
+    // Opt-in GPU completion tracing via debug property.
+    char value[PROPERTY_VALUE_MAX];
+    property_get("debug.egl.traceGpuCompletion", value, "0");
+    if (atoi(value)) {
+        mTraceGpuCompletion = true;
+        mFlushTracer = std::make_unique<FlushTracer>(this);
+    }
+
+    if (args.supportsBackgroundBlur) {
+        mBlurFilter = new BlurFilter(*this);
+        checkErrors("BlurFilter creation");
+    }
+
+    mImageManager = std::make_unique<ImageManager>(this);
+    mImageManager->initThread();
+    mDrawingBuffer = createFramebuffer();
+    // 1x1 placeholder buffer/image bound to textures when their real buffer
+    // is released, so stale EGLImages can be destroyed safely.
+    sp<GraphicBuffer> buf =
+            new GraphicBuffer(1, 1, PIXEL_FORMAT_RGBA_8888, 1,
+                              GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, "placeholder");
+
+    const status_t err = buf->initCheck();
+    if (err != OK) {
+        ALOGE("Error allocating placeholder buffer: %d", err);
+        return;
+    }
+    mPlaceholderBuffer = buf.get();
+    EGLint attributes[] = {
+            EGL_NONE,
+    };
+    mPlaceholderImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                          mPlaceholderBuffer, attributes);
+    ALOGE_IF(mPlaceholderImage == EGL_NO_IMAGE_KHR, "Failed to create placeholder image: %#x",
+             eglGetError());
+}
+
+// Teardown order matters: the ImageManager thread must be stopped before the
+// caches it touches are cleared, and all EGLImages must be destroyed before
+// the display is terminated.
+GLESRenderEngine::~GLESRenderEngine() {
+    // Destroy the image manager first.
+    mImageManager = nullptr;
+    std::lock_guard<std::mutex> lock(mRenderingMutex);
+    unbindFrameBuffer(mDrawingBuffer.get());
+    mDrawingBuffer = nullptr;
+    while (!mFramebufferImageCache.empty()) {
+        EGLImageKHR expired = mFramebufferImageCache.front().second;
+        mFramebufferImageCache.pop_front();
+        eglDestroyImageKHR(mEGLDisplay, expired);
+        DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+    }
+    eglDestroyImageKHR(mEGLDisplay, mPlaceholderImage);
+    mImageCache.clear();
+    eglMakeCurrent(mEGLDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+    eglTerminate(mEGLDisplay);
+}
+
+// Factory for the GL-backed framebuffer abstraction.
+std::unique_ptr<Framebuffer> GLESRenderEngine::createFramebuffer() {
+    return std::make_unique<GLFramebuffer>(*this);
+}
+
+// Factory for the GL-backed EGLImage wrapper.
+std::unique_ptr<Image> GLESRenderEngine::createImage() {
+    return std::make_unique<GLImage>(*this);
+}
+
+Framebuffer* GLESRenderEngine::getFramebufferForDrawing() {
+    return mDrawingBuffer.get();
+}
+
+// Pre-compiles the shader program cache for the context that is currently in
+// use (protected or default).
+void GLESRenderEngine::primeCache() const {
+    ProgramCache::getInstance().primeCache(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
+                                           mArgs.useColorManagement,
+                                           mArgs.precacheToneMapperShaderOnly);
+}
+
+// Issues a glFlush and returns a native fence fd that signals when the
+// flushed work completes. Returns an invalid fd when the
+// EGL_ANDROID_native_fence_sync extension is unavailable or fence creation
+// fails; callers are expected to fall back to finish() in that case.
+base::unique_fd GLESRenderEngine::flush() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasNativeFenceSync()) {
+        return base::unique_fd();
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL native fence sync: %#x", eglGetError());
+        return base::unique_fd();
+    }
+
+    // native fence fd will not be populated until flush() is done.
+    glFlush();
+
+    // get the fence fd
+    base::unique_fd fenceFd(eglDupNativeFenceFDANDROID(mEGLDisplay, sync));
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (fenceFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
+        ALOGW("failed to dup EGL native fence sync: %#x", eglGetError());
+    }
+
+    // Only trace if we have a valid fence, as current usage falls back to
+    // calling finish() if the fence fd is invalid.
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer) && fenceFd.get() >= 0) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    return fenceFd;
+}
+
+// Blocks (up to the waitSync timeout) until all submitted GL work completes,
+// using an EGL fence sync. Returns false when fence syncs are unsupported or
+// the wait fails.
+bool GLESRenderEngine::finish() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasFenceSync()) {
+        ALOGW("no synchronization support");
+        return false;
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer)) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    // EGL_SYNC_FLUSH_COMMANDS_BIT ensures pending commands are flushed before
+    // the wait begins.
+    return waitSync(sync, EGL_SYNC_FLUSH_COMMANDS_BIT_KHR);
+}
+
+// CPU-waits on `sync` with a 2-second timeout, then destroys it. Returns
+// true only if the sync signaled; logs whether the failure was a timeout or
+// an EGL error.
+bool GLESRenderEngine::waitSync(EGLSyncKHR sync, EGLint flags) {
+    EGLint result = eglClientWaitSyncKHR(mEGLDisplay, sync, flags, 2000000000 /*2 sec*/);
+    // Capture the error before destroying the sync, which may clobber it.
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (result != EGL_CONDITION_SATISFIED_KHR) {
+        if (result == EGL_TIMEOUT_EXPIRED_KHR) {
+            ALOGW("fence wait timed out");
+        } else {
+            ALOGW("error waiting on EGL fence: %#x", error);
+        }
+        return false;
+    }
+
+    return true;
+}
+
+// Queues a GPU-side wait on a native fence fd (no CPU blocking). Takes
+// ownership of the fd, which is transferred to the EGLSync on success.
+// Returns false if the required extensions are missing or any EGL call
+// fails; callers then fall back to a CPU wait.
+bool GLESRenderEngine::waitFence(base::unique_fd fenceFd) {
+    if (!GLExtensions::getInstance().hasNativeFenceSync() ||
+        !GLExtensions::getInstance().hasWaitSync()) {
+        return false;
+    }
+
+    // release the fd and transfer the ownership to EGLSync
+    EGLint attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fenceFd.release(), EGL_NONE};
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGE("failed to create EGL native fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    // XXX: The spec draft is inconsistent as to whether this should return an
+    // EGLint or void. Ignore the return value for now, as it's not strictly
+    // needed.
+    eglWaitSyncKHR(mEGLDisplay, sync, 0);
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (error != EGL_SUCCESS) {
+        ALOGE("failed to wait for EGL native fence sync: %#x", error);
+        return false;
+    }
+
+    return true;
+}
+
+// Clears the current render target to a solid color; blending is disabled so
+// the color is written as-is.
+void GLESRenderEngine::clearWithColor(float red, float green, float blue, float alpha) {
+    ATRACE_CALL();
+    glDisable(GL_BLEND);
+    glClearColor(red, green, blue, alpha);
+    glClear(GL_COLOR_BUFFER_BIT);
+}
+
+// Fills every rect of `region` with a solid color by tessellating each rect
+// into two triangles (6 vertices per rect) and drawing them as one mesh.
+void GLESRenderEngine::fillRegionWithColor(const Region& region, float red, float green, float blue,
+                                           float alpha) {
+    size_t c;
+    Rect const* r = region.getArray(&c);
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLES)
+                        .setVertices(c * 6 /* count */, 2 /* size */)
+                        .build();
+    Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
+    for (size_t i = 0; i < c; i++, r++) {
+        // Triangle 1: top-left, bottom-left, bottom-right.
+        position[i * 6 + 0].x = r->left;
+        position[i * 6 + 0].y = r->top;
+        position[i * 6 + 1].x = r->left;
+        position[i * 6 + 1].y = r->bottom;
+        position[i * 6 + 2].x = r->right;
+        position[i * 6 + 2].y = r->bottom;
+        // Triangle 2: top-left, bottom-right, top-right.
+        position[i * 6 + 3].x = r->left;
+        position[i * 6 + 3].y = r->top;
+        position[i * 6 + 4].x = r->right;
+        position[i * 6 + 4].y = r->bottom;
+        position[i * 6 + 5].x = r->right;
+        position[i * 6 + 5].y = r->top;
+    }
+    setupFillWithColor(red, green, blue, alpha);
+    drawMesh(mesh);
+}
+
+// Enables scissoring restricted to `region` (framebuffer coordinates).
+void GLESRenderEngine::setScissor(const Rect& region) {
+    glScissor(region.left, region.top, region.getWidth(), region.getHeight());
+    glEnable(GL_SCISSOR_TEST);
+}
+
+void GLESRenderEngine::disableScissor() {
+    glDisable(GL_SCISSOR_TEST);
+}
+
+// Allocates `count` GL texture names into `names`.
+void GLESRenderEngine::genTextures(size_t count, uint32_t* names) {
+    glGenTextures(count, names);
+}
+
+// Deletes `count` GL texture names, first dropping each texture's entry in
+// the texture->buffer view map so it cannot go stale. Fix: the loop index
+// was a signed `int` compared against the unsigned `size_t count`
+// (sign-compare warning; truncation for counts above INT_MAX) — use size_t.
+void GLESRenderEngine::deleteTextures(size_t count, uint32_t const* names) {
+    for (size_t i = 0; i < count; ++i) {
+        mTextureView.erase(names[i]);
+    }
+    glDeleteTextures(count, names);
+}
+
+// Binds `image`'s EGLImage to the external-OES texture `texName`. If the
+// image has no backing EGLImage, the texture is bound but left untargeted.
+void GLESRenderEngine::bindExternalTextureImage(uint32_t texName, const Image& image) {
+    ATRACE_CALL();
+    const GLImage& glImage = static_cast<const GLImage&>(image);
+    const GLenum target = GL_TEXTURE_EXTERNAL_OES;
+
+    glBindTexture(target, texName);
+    if (glImage.getEGLImage() != EGL_NO_IMAGE_KHR) {
+        glEGLImageTargetTexture2DOES(target, static_cast<GLeglImageOES>(glImage.getEGLImage()));
+    }
+}
+
+// Binds `buffer` to external texture `texName`, creating and caching an
+// EGLImage for it on demand, then waits for `bufferFence` (GPU-side when
+// EGL_KHR_wait_sync is available, otherwise a CPU wait). Returns BAD_VALUE
+// for a null buffer, NO_INIT if no EGLImage could be created, or a fence
+// error code; NO_ERROR on success.
+status_t GLESRenderEngine::bindExternalTextureBuffer(uint32_t texName,
+                                                     const sp<GraphicBuffer>& buffer,
+                                                     const sp<Fence>& bufferFence) {
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
+    ATRACE_CALL();
+
+    bool found = false;
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+        found = (cachedImage != mImageCache.end());
+    }
+
+    // If we couldn't find the image in the cache at this time, then either
+    // SurfaceFlinger messed up registering the buffer ahead of time or we got
+    // backed up creating other EGLImages.
+    if (!found) {
+        // Synchronous cache: blocks on the ImageManager until done.
+        status_t cacheResult = mImageManager->cache(buffer);
+        if (cacheResult != NO_ERROR) {
+            return cacheResult;
+        }
+    }
+
+    // Whether or not we needed to cache, re-check mImageCache to make sure that
+    // there's an EGLImage. The current threading model guarantees that we don't
+    // destroy a cached image until it's really not needed anymore (i.e. this
+    // function should not be called), so the only possibility is that something
+    // terrible went wrong and we should just bind something and move on.
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+
+        if (cachedImage == mImageCache.end()) {
+            // We failed creating the image if we got here, so bail out.
+            ALOGE("Failed to create an EGLImage when rendering");
+            bindExternalTextureImage(texName, *createImage());
+            return NO_INIT;
+        }
+
+        bindExternalTextureImage(texName, *cachedImage->second);
+        // Remember which buffer this texture now views.
+        mTextureView.insert_or_assign(texName, buffer->getId());
+    }
+
+    // Wait for the new buffer to be ready.
+    if (bufferFence != nullptr && bufferFence->isValid()) {
+        if (GLExtensions::getInstance().hasWaitSync()) {
+            base::unique_fd fenceFd(bufferFence->dup());
+            if (fenceFd == -1) {
+                ALOGE("error dup'ing fence fd: %d", errno);
+                return -errno;
+            }
+            if (!waitFence(std::move(fenceFd))) {
+                ALOGE("failed to wait on fence fd");
+                return UNKNOWN_ERROR;
+            }
+        } else {
+            // No GPU-side wait available: block this thread on the fence.
+            status_t err = bufferFence->waitForever("RenderEngine::bindExternalTextureBuffer");
+            if (err != NO_ERROR) {
+                ALOGE("error waiting for fence: %d", err);
+                return err;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+// Asynchronously creates and caches an EGLImage for `buffer` on the
+// ImageManager thread (fire-and-forget; no completion barrier).
+void GLESRenderEngine::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+    mImageManager->cacheAsync(buffer, nullptr);
+}
+
+// Test-only variant: returns a barrier the test can wait on to observe when
+// the async cache operation has finished.
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::cacheExternalTextureBufferForTesting(
+        const sp<GraphicBuffer>& buffer) {
+    auto barrier = std::make_shared<ImageManager::Barrier>();
+    mImageManager->cacheAsync(buffer, barrier);
+    return barrier;
+}
+
// Synchronously creates an EGLImage for |buffer| and stores it in mImageCache
// keyed by the buffer id. Runs on the ImageManager thread.
// Returns BAD_VALUE for a null buffer, NO_INIT if EGLImage creation failed,
// and NO_ERROR if an image was created or one already existed.
status_t GLESRenderEngine::cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer) {
    if (buffer == nullptr) {
        return BAD_VALUE;
    }

    {
        std::lock_guard<std::mutex> lock(mRenderingMutex);
        if (mImageCache.count(buffer->getId()) > 0) {
            // If there's already an image then fail fast here.
            return NO_ERROR;
        }
    }
    ATRACE_CALL();

    // Create the image without holding a lock so that we don't block anything.
    std::unique_ptr<Image> newImage = createImage();

    // Protected buffers must be flagged so the image is created with
    // EGL protected-content semantics.
    bool created = newImage->setNativeWindowBuffer(buffer->getNativeBuffer(),
                                                   buffer->getUsage() & GRALLOC_USAGE_PROTECTED);
    if (!created) {
        ALOGE("Failed to create image. size=%ux%u st=%u usage=%#" PRIx64 " fmt=%d",
              buffer->getWidth(), buffer->getHeight(), buffer->getStride(), buffer->getUsage(),
              buffer->getPixelFormat());
        return NO_INIT;
    }

    // Re-take the lock before inserting: another thread may have cached the
    // same buffer while we were creating the image.
    {
        std::lock_guard<std::mutex> lock(mRenderingMutex);
        if (mImageCache.count(buffer->getId()) > 0) {
            // In theory it's possible for another thread to recache the image,
            // so bail out if another thread won.
            return NO_ERROR;
        }
        mImageCache.insert(std::make_pair(buffer->getId(), std::move(newImage)));
    }

    return NO_ERROR;
}
+
// Asynchronously destroys the cached EGLImage for |bufferId| on the
// ImageManager thread. Fire-and-forget: no completion barrier is used.
void GLESRenderEngine::unbindExternalTextureBuffer(uint64_t bufferId) {
    mImageManager->releaseAsync(bufferId, nullptr);
}
+
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::unbindExternalTextureBufferForTesting(
+ uint64_t bufferId) {
+ auto barrier = std::make_shared<ImageManager::Barrier>();
+ mImageManager->releaseAsync(bufferId, barrier);
+ return barrier;
+}
+
+void GLESRenderEngine::unbindExternalTextureBufferInternal(uint64_t bufferId) {
+ std::unique_ptr<Image> image;
+ {
+ std::lock_guard<std::mutex> lock(mRenderingMutex);
+ const auto& cachedImage = mImageCache.find(bufferId);
+
+ if (cachedImage != mImageCache.end()) {
+ ALOGV("Destroying image for buffer: %" PRIu64, bufferId);
+ // Move the buffer out of cache first, so that we can destroy
+ // without holding the cache's lock.
+ image = std::move(cachedImage->second);
+ mImageCache.erase(bufferId);
+ return;
+ }
+ }
+ ALOGV("Failed to find image for buffer: %" PRIu64, bufferId);
+}
+
+FloatRect GLESRenderEngine::setupLayerCropping(const LayerSettings& layer, Mesh& mesh) {
+ // Translate win by the rounded corners rect coordinates, to have all values in
+ // layer coordinate space.
+ FloatRect cropWin = layer.geometry.boundaries;
+ const FloatRect& roundedCornersCrop = layer.geometry.roundedCornersCrop;
+ cropWin.left -= roundedCornersCrop.left;
+ cropWin.right -= roundedCornersCrop.left;
+ cropWin.top -= roundedCornersCrop.top;
+ cropWin.bottom -= roundedCornersCrop.top;
+ Mesh::VertexArray<vec2> cropCoords(mesh.getCropCoordArray<vec2>());
+ cropCoords[0] = vec2(cropWin.left, cropWin.top);
+ cropCoords[1] = vec2(cropWin.left, cropWin.top + cropWin.getHeight());
+ cropCoords[2] = vec2(cropWin.right, cropWin.top + cropWin.getHeight());
+ cropCoords[3] = vec2(cropWin.right, cropWin.top);
+
+ setupCornerRadiusCropSize(roundedCornersCrop.getWidth(), roundedCornersCrop.getHeight());
+ return cropWin;
+}
+
// Draws |mesh| for a layer with rounded corners in up to three scissored
// passes: the top and bottom strips (corner-radius tall) keep blending on so
// the corner shader can antialias, while the middle strip — which has no
// rounded pixels — is drawn with blending disabled as an optimization.
void GLESRenderEngine::handleRoundedCorners(const DisplaySettings& display,
                                            const LayerSettings& layer, const Mesh& mesh) {
    // We separate the layer into 3 parts essentially, such that we only turn on blending for the
    // top rectangle and the bottom rectangle, and turn off blending for the middle rectangle.
    FloatRect bounds = layer.geometry.roundedCornersCrop;

    // Explicitly compute the transform from the clip rectangle to the physical
    // display. Normally, this is done in glViewport but we explicitly compute
    // it here so that we can get the scissor bounds correct.
    const Rect& source = display.clip;
    const Rect& destination = display.physicalDisplay;
    // Here we compute the following transform:
    // 1. Translate the top left corner of the source clip to (0, 0)
    // 2. Rotate the clip rectangle about the origin in accordance with the
    // orientation flag
    // 3. Translate the top left corner back to the origin.
    // 4. Scale the clip rectangle to the destination rectangle dimensions
    // 5. Translate the top left corner to the destination rectangle's top left
    // corner.
    const mat4 translateSource = mat4::translate(vec4(-source.left, -source.top, 0, 1));
    mat4 rotation;
    int displacementX = 0;
    int displacementY = 0;
    float destinationWidth = static_cast<float>(destination.getWidth());
    float destinationHeight = static_cast<float>(destination.getHeight());
    float sourceWidth = static_cast<float>(source.getWidth());
    float sourceHeight = static_cast<float>(source.getHeight());
    const float rot90InRadians = 2.0f * static_cast<float>(M_PI) / 4.0f;
    switch (display.orientation) {
        case ui::Transform::ROT_90:
            rotation = mat4::rotate(rot90InRadians, vec3(0, 0, 1));
            displacementX = source.getHeight();
            std::swap(sourceHeight, sourceWidth);
            break;
        case ui::Transform::ROT_180:
            rotation = mat4::rotate(rot90InRadians * 2.0f, vec3(0, 0, 1));
            displacementY = source.getHeight();
            displacementX = source.getWidth();
            break;
        case ui::Transform::ROT_270:
            rotation = mat4::rotate(rot90InRadians * 3.0f, vec3(0, 0, 1));
            displacementY = source.getWidth();
            std::swap(sourceHeight, sourceWidth);
            break;
        default:
            break;
    }

    const mat4 intermediateTranslation = mat4::translate(vec4(displacementX, displacementY, 0, 1));
    const mat4 scale = mat4::scale(
            vec4(destinationWidth / sourceWidth, destinationHeight / sourceHeight, 1, 1));
    const mat4 translateDestination =
            mat4::translate(vec4(destination.left, destination.top, 0, 1));
    const mat4 globalTransform =
            translateDestination * scale * intermediateTranslation * rotation * translateSource;

    // Map the crop corners through the full transform, then re-normalize to a
    // min/max rect since rotation may have swapped the corner ordering.
    const mat4 transformMatrix = globalTransform * layer.geometry.positionTransform;
    const vec4 leftTopCoordinate(bounds.left, bounds.top, 1.0, 1.0);
    const vec4 rightBottomCoordinate(bounds.right, bounds.bottom, 1.0, 1.0);
    const vec4 leftTopCoordinateInBuffer = transformMatrix * leftTopCoordinate;
    const vec4 rightBottomCoordinateInBuffer = transformMatrix * rightBottomCoordinate;
    bounds = FloatRect(std::min(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
                       std::min(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]),
                       std::max(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
                       std::max(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]));

    // Finally, we cut the layer into 3 parts, with top and bottom parts having rounded corners
    // and the middle part without rounded corners.
    const int32_t radius = ceil(layer.geometry.roundedCornersRadius);
    const Rect topRect(bounds.left, bounds.top, bounds.right, bounds.top + radius);
    setScissor(topRect);
    drawMesh(mesh);
    const Rect bottomRect(bounds.left, bounds.bottom - radius, bounds.right, bounds.bottom);
    setScissor(bottomRect);
    drawMesh(mesh);

    // The middle part of the layer can turn off blending.
    if (topRect.bottom < bottomRect.top) {
        const Rect middleRect(bounds.left, bounds.top + radius, bounds.right,
                              bounds.bottom - radius);
        setScissor(middleRect);
        mState.cornerRadius = 0.0;
        disableBlending();
        drawMesh(mesh);
    }
    disableScissor();
}
+
+status_t GLESRenderEngine::bindFrameBuffer(Framebuffer* framebuffer) {
+ ATRACE_CALL();
+ GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(framebuffer);
+ EGLImageKHR eglImage = glFramebuffer->getEGLImage();
+ uint32_t textureName = glFramebuffer->getTextureName();
+ uint32_t framebufferName = glFramebuffer->getFramebufferName();
+
+ // Bind the texture and turn our EGLImage into a texture
+ glBindTexture(GL_TEXTURE_2D, textureName);
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)eglImage);
+
+ // Bind the Framebuffer to render into
+ glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
+ glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureName, 0);
+
+ uint32_t glStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+ ALOGE_IF(glStatus != GL_FRAMEBUFFER_COMPLETE_OES, "glCheckFramebufferStatusOES error %d",
+ glStatus);
+
+ return glStatus == GL_FRAMEBUFFER_COMPLETE_OES ? NO_ERROR : BAD_VALUE;
+}
+
// Restores the default (id 0) framebuffer as the render target. The
// Framebuffer argument is unused; unbinding does not need it.
void GLESRenderEngine::unbindFrameBuffer(Framebuffer* /*framebuffer*/) {
    ATRACE_CALL();

    // back to main framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
+
+bool GLESRenderEngine::cleanupPostRender(CleanupMode mode) {
+ ATRACE_CALL();
+
+ if (mPriorResourcesCleaned ||
+ (mLastDrawFence != nullptr && mLastDrawFence->getStatus() != Fence::Status::Signaled)) {
+ // If we don't have a prior frame needing cleanup, then don't do anything.
+ return false;
+ }
+
+ // This is a bit of a band-aid fix for FrameCaptureProcessor, as we should
+ // not need to keep memory around if we don't need to do so.
+ if (mode == CleanupMode::CLEAN_ALL) {
+ // TODO: SurfaceFlinger memory utilization may benefit from resetting
+ // texture bindings as well. Assess if it does and there's no performance regression
+ // when rebinding the same image data to the same texture, and if so then its mode
+ // behavior can be tweaked.
+ if (mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+ for (auto [textureName, bufferId] : mTextureView) {
+ if (bufferId && mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+ glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureName);
+ glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES,
+ static_cast<GLeglImageOES>(mPlaceholderImage));
+ mTextureView[textureName] = std::nullopt;
+ checkErrors();
+ }
+ }
+ }
+ {
+ std::lock_guard<std::mutex> lock(mRenderingMutex);
+ mImageCache.clear();
+ }
+ }
+
+ // Bind the texture to placeholder so that backing image data can be freed.
+ GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(getFramebufferForDrawing());
+ glFramebuffer->allocateBuffers(1, 1, mPlaceholderDrawBuffer);
+ // Release the cached fence here, so that we don't churn reallocations when
+ // we could no-op repeated calls of this method instead.
+ mLastDrawFence = nullptr;
+ mPriorResourcesCleaned = true;
+ return true;
+}
+
// Drains and logs all pending GL errors with no context tag.
void GLESRenderEngine::checkErrors() const {
    checkErrors(nullptr);
}
+
+void GLESRenderEngine::checkErrors(const char* tag) const {
+ do {
+ // there could be more than one error flag
+ GLenum error = glGetError();
+ if (error == GL_NO_ERROR) break;
+ if (tag == nullptr) {
+ ALOGE("GL error 0x%04x", int(error));
+ } else {
+ ALOGE("GL error: %s -> 0x%04x", tag, int(error));
+ }
+ } while (true);
+}
+
// Protected (secure) rendering is available iff a protected EGL context
// exists; mProtectedEGLContext stays EGL_NO_CONTEXT when creation failed.
bool GLESRenderEngine::supportsProtectedContent() const {
    return mProtectedEGLContext != EGL_NO_CONTEXT;
}
+
+bool GLESRenderEngine::useProtectedContext(bool useProtectedContext) {
+ if (useProtectedContext == mInProtectedContext) {
+ return true;
+ }
+ if (useProtectedContext && mProtectedEGLContext == EGL_NO_CONTEXT) {
+ return false;
+ }
+ const EGLSurface surface = useProtectedContext ? mProtectedStubSurface : mStubSurface;
+ const EGLContext context = useProtectedContext ? mProtectedEGLContext : mEGLContext;
+ const bool success = eglMakeCurrent(mEGLDisplay, surface, surface, context) == EGL_TRUE;
+ if (success) {
+ mInProtectedContext = useProtectedContext;
+ }
+ return success;
+}
// Returns an EGLImage wrapping |nativeBuffer| for use as a framebuffer
// target. When |useFramebufferCache| is set, a bounded cache keyed by buffer
// id is consulted first; on miss the new image is inserted, evicting the
// oldest entry (FIFO) once the cache is full. Returns EGL_NO_IMAGE_KHR on
// creation failure.
EGLImageKHR GLESRenderEngine::createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer,
                                                             bool isProtected,
                                                             bool useFramebufferCache) {
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(nativeBuffer);
    if (useFramebufferCache) {
        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
        for (const auto& image : mFramebufferImageCache) {
            if (image.first == graphicBuffer->getId()) {
                return image.second;
            }
        }
    }
    // Protected buffers require the EGL_PROTECTED_CONTENT_EXT attribute; the
    // attribute list is terminated early (EGL_NONE) for unprotected buffers.
    EGLint attributes[] = {
            isProtected ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
            isProtected ? EGL_TRUE : EGL_NONE,
            EGL_NONE,
    };
    // Note: the image is created outside the cache lock; a racing caller may
    // create a duplicate image for the same buffer id.
    EGLImageKHR image = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          nativeBuffer, attributes);
    if (useFramebufferCache) {
        if (image != EGL_NO_IMAGE_KHR) {
            std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
            if (mFramebufferImageCache.size() >= mFramebufferImageCacheSize) {
                EGLImageKHR expired = mFramebufferImageCache.front().second;
                mFramebufferImageCache.pop_front();
                eglDestroyImageKHR(mEGLDisplay, expired);
                DEBUG_EGL_IMAGE_TRACKER_DESTROY();
            }
            mFramebufferImageCache.push_back({graphicBuffer->getId(), image});
        }
    }

    if (image != EGL_NO_IMAGE_KHR) {
        DEBUG_EGL_IMAGE_TRACKER_CREATE();
    }
    return image;
}
+
// Renders |layers| (in list order) into |buffer| on the GPU.
// - display: global settings (physical rect, clip, dataspace, transform).
// - buffer: output GraphicBuffer bound as the framebuffer target.
// - useFramebufferCache: allow caching the output buffer's EGLImage.
// - bufferFence: must signal before |buffer| is safe to write.
// - drawFence: out-param; receives a fence signaling render completion, or
//   stays invalid if fence creation failed (a blocking finish() is used).
// Layers with backgroundBlurRadius > 0 trigger intermediate blur passes via
// mBlurFilter before the final target is bound. Returns NO_ERROR on success.
status_t GLESRenderEngine::drawLayers(const DisplaySettings& display,
                                      const std::vector<const LayerSettings*>& layers,
                                      const sp<GraphicBuffer>& buffer,
                                      const bool useFramebufferCache, base::unique_fd&& bufferFence,
                                      base::unique_fd* drawFence) {
    ATRACE_CALL();
    if (layers.empty()) {
        ALOGV("Drawing empty layer stack");
        return NO_ERROR;
    }

    if (bufferFence.get() >= 0) {
        // Duplicate the fence for passing to waitFence.
        base::unique_fd bufferFenceDup(dup(bufferFence.get()));
        if (bufferFenceDup < 0 || !waitFence(std::move(bufferFenceDup))) {
            // GPU-side wait unavailable or failed; fall back to a CPU wait.
            ATRACE_NAME("Waiting before draw");
            sync_wait(bufferFence.get(), -1);
        }
    }

    if (buffer == nullptr) {
        ALOGE("No output buffer provided. Aborting GPU composition.");
        return BAD_VALUE;
    }

    std::unique_ptr<BindNativeBufferAsFramebuffer> fbo;
    // Gathering layers that requested blur, we'll need them to decide when to render to an
    // offscreen buffer, and when to render to the native buffer.
    std::deque<const LayerSettings*> blurLayers;
    if (CC_LIKELY(mBlurFilter != nullptr)) {
        for (auto layer : layers) {
            if (layer->backgroundBlurRadius > 0) {
                blurLayers.push_back(layer);
            }
        }
    }
    const auto blurLayersSize = blurLayers.size();

    if (blurLayersSize == 0) {
        // No blur: bind the output buffer as the render target right away.
        fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
                                                              buffer.get()->getNativeBuffer(),
                                                              useFramebufferCache);
        if (fbo->getStatus() != NO_ERROR) {
            ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
                  buffer->handle);
            checkErrors();
            return fbo->getStatus();
        }
        setViewportAndProjection(display.physicalDisplay, display.clip);
    } else {
        // Blur requested: render into the blur filter's offscreen target
        // first; the output buffer is bound later, after the blur passes.
        setViewportAndProjection(display.physicalDisplay, display.clip);
        auto status =
                mBlurFilter->setAsDrawTarget(display, blurLayers.front()->backgroundBlurRadius);
        if (status != NO_ERROR) {
            ALOGE("Failed to prepare blur filter! Aborting GPU composition for buffer (%p).",
                  buffer->handle);
            checkErrors();
            return status;
        }
    }

    // clear the entire buffer, sometimes when we reuse buffers we'd persist
    // ghost images otherwise.
    // we also require a full transparent framebuffer for overlays. This is
    // probably not quite efficient on all GPUs, since we could filter out
    // opaque layers.
    clearWithColor(0.0, 0.0, 0.0, 0.0);

    setOutputDataSpace(display.outputDataspace);
    setDisplayMaxLuminance(display.maxLuminance);

    const mat4 projectionMatrix =
            ui::Transform(display.orientation).asMatrix4() * mState.projectionMatrix;
    if (!display.clearRegion.isEmpty()) {
        glDisable(GL_BLEND);
        fillRegionWithColor(display.clearRegion, 0.0, 0.0, 0.0, 1.0);
    }

    // A single quad mesh is reused for every layer; only its vertex data is
    // rewritten per layer.
    Mesh mesh = Mesh::Builder()
                        .setPrimitive(Mesh::TRIANGLE_FAN)
                        .setVertices(4 /* count */, 2 /* size */)
                        .setTexCoords(2 /* size */)
                        .setCropCoords(2 /* size */)
                        .build();
    for (auto const layer : layers) {
        if (blurLayers.size() > 0 && blurLayers.front() == layer) {
            blurLayers.pop_front();

            auto status = mBlurFilter->prepare();
            if (status != NO_ERROR) {
                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
                      buffer->handle);
                checkErrors("Can't render first blur pass");
                return status;
            }

            if (blurLayers.size() == 0) {
                // Done blurring, time to bind the native FBO and render our blur onto it.
                fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
                                                                      buffer.get()
                                                                              ->getNativeBuffer(),
                                                                      useFramebufferCache);
                status = fbo->getStatus();
                setViewportAndProjection(display.physicalDisplay, display.clip);
            } else {
                // There's still something else to blur, so let's keep rendering to our FBO
                // instead of to the display.
                status = mBlurFilter->setAsDrawTarget(display,
                                                      blurLayers.front()->backgroundBlurRadius);
            }
            if (status != NO_ERROR) {
                ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
                      buffer->handle);
                checkErrors("Can't bind native framebuffer");
                return status;
            }

            status = mBlurFilter->render(blurLayersSize > 1);
            if (status != NO_ERROR) {
                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
                      buffer->handle);
                checkErrors("Can't render blur filter");
                return status;
            }
        }

        mState.maxMasteringLuminance = layer->source.buffer.maxMasteringLuminance;
        mState.maxContentLuminance = layer->source.buffer.maxContentLuminance;
        mState.projectionMatrix = projectionMatrix * layer->geometry.positionTransform;

        // Rewrite the quad's positions from this layer's bounds.
        const FloatRect bounds = layer->geometry.boundaries;
        Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
        position[0] = vec2(bounds.left, bounds.top);
        position[1] = vec2(bounds.left, bounds.bottom);
        position[2] = vec2(bounds.right, bounds.bottom);
        position[3] = vec2(bounds.right, bounds.top);

        setupLayerCropping(*layer, mesh);
        setColorTransform(display.colorTransform * layer->colorTransform);

        bool usePremultipliedAlpha = true;
        bool disableTexture = true;
        bool isOpaque = false;
        if (layer->source.buffer.buffer != nullptr) {
            // Buffer-backed layer: bind its GraphicBuffer as an external
            // texture and set up sampling state.
            disableTexture = false;
            isOpaque = layer->source.buffer.isOpaque;

            sp<GraphicBuffer> gBuf = layer->source.buffer.buffer;
            bindExternalTextureBuffer(layer->source.buffer.textureName, gBuf,
                                      layer->source.buffer.fence);

            usePremultipliedAlpha = layer->source.buffer.usePremultipliedAlpha;
            Texture texture(Texture::TEXTURE_EXTERNAL, layer->source.buffer.textureName);
            mat4 texMatrix = layer->source.buffer.textureTransform;

            texture.setMatrix(texMatrix.asArray());
            texture.setFiltering(layer->source.buffer.useTextureFiltering);

            texture.setDimensions(gBuf->getWidth(), gBuf->getHeight());
            setSourceY410BT2020(layer->source.buffer.isY410BT2020);

            renderengine::Mesh::VertexArray<vec2> texCoords(mesh.getTexCoordArray<vec2>());
            texCoords[0] = vec2(0.0, 0.0);
            texCoords[1] = vec2(0.0, 1.0);
            texCoords[2] = vec2(1.0, 1.0);
            texCoords[3] = vec2(1.0, 0.0);
            setupLayerTexturing(texture);
        }

        const half3 solidColor = layer->source.solidColor;
        const half4 color = half4(solidColor.r, solidColor.g, solidColor.b, layer->alpha);
        // Buffer sources will have a black solid color ignored in the shader,
        // so in that scenario the solid color passed here is arbitrary.
        setupLayerBlending(usePremultipliedAlpha, isOpaque, disableTexture, color,
                           layer->geometry.roundedCornersRadius);
        if (layer->disableBlending) {
            glDisable(GL_BLEND);
        }
        setSourceDataSpace(layer->sourceDataspace);

        if (layer->shadow.length > 0.0f) {
            handleShadow(layer->geometry.boundaries, layer->geometry.roundedCornersRadius,
                         layer->shadow);
        }
        // We only want to do a special handling for rounded corners when having rounded corners
        // is the only reason it needs to turn on blending, otherwise, we handle it like the
        // usual way since it needs to turn on blending anyway.
        else if (layer->geometry.roundedCornersRadius > 0.0 && color.a >= 1.0f && isOpaque) {
            handleRoundedCorners(display, *layer, mesh);
        } else {
            drawMesh(mesh);
        }

        // Cleanup if there's a buffer source
        if (layer->source.buffer.buffer != nullptr) {
            disableBlending();
            setSourceY410BT2020(false);
            disableTexturing();
        }
    }

    if (drawFence != nullptr) {
        *drawFence = flush();
    }
    // If flush failed or we don't support native fences, we need to force the
    // gl command stream to be executed.
    if (drawFence == nullptr || drawFence->get() < 0) {
        bool success = finish();
        if (!success) {
            ALOGE("Failed to flush RenderEngine commands");
            checkErrors();
            // Chances are, something illegal happened (either the caller passed
            // us bad parameters, or we messed up our shader generation).
            return INVALID_OPERATION;
        }
        mLastDrawFence = nullptr;
    } else {
        // The caller takes ownership of drawFence, so we need to duplicate the
        // fd here.
        mLastDrawFence = new Fence(dup(drawFence->get()));
    }
    mPriorResourcesCleaned = false;

    checkErrors();
    return NO_ERROR;
}
+
// Sets the GL viewport to |viewport| and builds an orthographic projection
// covering |clip|, cached in mState for the next program bind.
void GLESRenderEngine::setViewportAndProjection(Rect viewport, Rect clip) {
    ATRACE_CALL();
    mVpWidth = viewport.getWidth();
    mVpHeight = viewport.getHeight();

    // We pass the top left corner instead of the bottom left corner,
    // because we're rendering off-screen first.
    glViewport(viewport.left, viewport.top, mVpWidth, mVpHeight);

    mState.projectionMatrix = mat4::ortho(clip.left, clip.right, clip.top, clip.bottom, 0, 1);
}
+
+void GLESRenderEngine::setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+ const half4& color, float cornerRadius) {
+ mState.isPremultipliedAlpha = premultipliedAlpha;
+ mState.isOpaque = opaque;
+ mState.color = color;
+ mState.cornerRadius = cornerRadius;
+
+ if (disableTexture) {
+ mState.textureEnabled = false;
+ }
+
+ if (color.a < 1.0f || !opaque || cornerRadius > 0.0f) {
+ glEnable(GL_BLEND);
+ glBlendFunc(premultipliedAlpha ? GL_ONE : GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+ } else {
+ glDisable(GL_BLEND);
+ }
+}
+
// Flags the current source buffer as Y410 BT2020 content for the shader.
void GLESRenderEngine::setSourceY410BT2020(bool enable) {
    mState.isY410BT2020 = enable;
}

// Records the dataspace of the layer currently being drawn; consumed by the
// color-management path in drawMesh.
void GLESRenderEngine::setSourceDataSpace(Dataspace source) {
    mDataSpace = source;
}

// Records the dataspace of the output buffer being rendered into; consumed
// by the color-management path in drawMesh.
void GLESRenderEngine::setOutputDataSpace(Dataspace dataspace) {
    mOutputDataSpace = dataspace;
}

// Stores the target display's maximum luminance in the shader state
// (presumably used for HDR luminance scaling — confirm in Description).
void GLESRenderEngine::setDisplayMaxLuminance(const float maxLuminance) {
    mState.displayMaxLuminance = maxLuminance;
}
+
+void GLESRenderEngine::setupLayerTexturing(const Texture& texture) {
+ GLuint target = texture.getTextureTarget();
+ glBindTexture(target, texture.getTextureName());
+ GLenum filter = GL_NEAREST;
+ if (texture.getFiltering()) {
+ filter = GL_LINEAR;
+ }
+ glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ glTexParameteri(target, GL_TEXTURE_MAG_FILTER, filter);
+ glTexParameteri(target, GL_TEXTURE_MIN_FILTER, filter);
+
+ mState.texture = texture;
+ mState.textureEnabled = true;
+}
+
// Sets the color matrix applied to subsequent draws.
void GLESRenderEngine::setColorTransform(const mat4& colorTransform) {
    mState.colorMatrix = colorTransform;
}

// Disables texture sampling in the shader state for subsequent draws.
void GLESRenderEngine::disableTexturing() {
    mState.textureEnabled = false;
}

// Disables GL blending for subsequent draws.
void GLESRenderEngine::disableBlending() {
    glDisable(GL_BLEND);
}

// Prepares state for drawing an untextured solid color; blending is turned
// off so the color is written as-is.
void GLESRenderEngine::setupFillWithColor(float r, float g, float b, float a) {
    mState.isPremultipliedAlpha = true;
    mState.isOpaque = false;
    mState.color = half4(r, g, b, a);
    mState.textureEnabled = false;
    glDisable(GL_BLEND);
}

// Stores the crop rectangle dimensions used by the rounded-corner shader.
void GLESRenderEngine::setupCornerRadiusCropSize(float width, float height) {
    mState.cropSize = half2(width, height);
}
+
// Issues the GL draw for |mesh| using the current shader state (mState).
// When color management is enabled, the input/output transform matrices and
// transfer functions are derived from the current source and output
// dataspaces on a local copy of the state before binding the program.
void GLESRenderEngine::drawMesh(const Mesh& mesh) {
    ATRACE_CALL();
    if (mesh.getTexCoordsSize()) {
        glEnableVertexAttribArray(Program::texCoords);
        glVertexAttribPointer(Program::texCoords, mesh.getTexCoordsSize(), GL_FLOAT, GL_FALSE,
                              mesh.getByteStride(), mesh.getTexCoords());
    }

    glVertexAttribPointer(Program::position, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
                          mesh.getByteStride(), mesh.getPositions());

    // Crop coordinates are only consumed by the rounded-corner shader.
    if (mState.cornerRadius > 0.0f) {
        glEnableVertexAttribArray(Program::cropCoords);
        glVertexAttribPointer(Program::cropCoords, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
                              mesh.getByteStride(), mesh.getCropCoords());
    }

    if (mState.drawShadows) {
        glEnableVertexAttribArray(Program::shadowColor);
        glVertexAttribPointer(Program::shadowColor, mesh.getShadowColorSize(), GL_FLOAT, GL_FALSE,
                              mesh.getByteStride(), mesh.getShadowColor());

        glEnableVertexAttribArray(Program::shadowParams);
        glVertexAttribPointer(Program::shadowParams, mesh.getShadowParamsSize(), GL_FLOAT, GL_FALSE,
                              mesh.getByteStride(), mesh.getShadowParams());
    }

    // Work on a copy so dataspace-derived tweaks don't leak into mState.
    Description managedState = mState;
    // By default, DISPLAY_P3 is the only supported wide color output. However,
    // when HDR content is present, hardware composer may be able to handle
    // BT2020 data space, in that case, the output data space is set to be
    // BT2020_HLG or BT2020_PQ respectively. In GPU fall back we need
    // to respect this and convert non-HDR content to HDR format.
    if (mUseColorManagement) {
        Dataspace inputStandard = static_cast<Dataspace>(mDataSpace & Dataspace::STANDARD_MASK);
        Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
        Dataspace outputStandard =
                static_cast<Dataspace>(mOutputDataSpace & Dataspace::STANDARD_MASK);
        Dataspace outputTransfer =
                static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
        bool needsXYZConversion = needsXYZTransformMatrix();

        // NOTE: if the input standard of the input dataspace is not STANDARD_DCI_P3 or
        // STANDARD_BT2020, it will be treated as STANDARD_BT709
        if (inputStandard != Dataspace::STANDARD_DCI_P3 &&
            inputStandard != Dataspace::STANDARD_BT2020) {
            inputStandard = Dataspace::STANDARD_BT709;
        }

        if (needsXYZConversion) {
            // The supported input color spaces are standard RGB, Display P3 and BT2020.
            switch (inputStandard) {
                case Dataspace::STANDARD_DCI_P3:
                    managedState.inputTransformMatrix = mDisplayP3ToXyz;
                    break;
                case Dataspace::STANDARD_BT2020:
                    managedState.inputTransformMatrix = mBt2020ToXyz;
                    break;
                default:
                    managedState.inputTransformMatrix = mSrgbToXyz;
                    break;
            }

            // The supported output color spaces are BT2020, Display P3 and standard RGB.
            switch (outputStandard) {
                case Dataspace::STANDARD_BT2020:
                    managedState.outputTransformMatrix = mXyzToBt2020;
                    break;
                case Dataspace::STANDARD_DCI_P3:
                    managedState.outputTransformMatrix = mXyzToDisplayP3;
                    break;
                default:
                    managedState.outputTransformMatrix = mXyzToSrgb;
                    break;
            }
        } else if (inputStandard != outputStandard) {
            // At this point, the input data space and output data space could be both
            // HDR data spaces, but they match each other, we do nothing in this case.
            // In addition to the case above, the input data space could be
            // - scRGB linear
            // - scRGB non-linear
            // - sRGB
            // - Display P3
            // - BT2020
            // The output data spaces could be
            // - sRGB
            // - Display P3
            // - BT2020
            switch (outputStandard) {
                case Dataspace::STANDARD_BT2020:
                    if (inputStandard == Dataspace::STANDARD_BT709) {
                        managedState.outputTransformMatrix = mSrgbToBt2020;
                    } else if (inputStandard == Dataspace::STANDARD_DCI_P3) {
                        managedState.outputTransformMatrix = mDisplayP3ToBt2020;
                    }
                    break;
                case Dataspace::STANDARD_DCI_P3:
                    if (inputStandard == Dataspace::STANDARD_BT709) {
                        managedState.outputTransformMatrix = mSrgbToDisplayP3;
                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
                        managedState.outputTransformMatrix = mBt2020ToDisplayP3;
                    }
                    break;
                default:
                    if (inputStandard == Dataspace::STANDARD_DCI_P3) {
                        managedState.outputTransformMatrix = mDisplayP3ToSrgb;
                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
                        managedState.outputTransformMatrix = mBt2020ToSrgb;
                    }
                    break;
            }
        }

        // we need to convert the RGB value to linear space and convert it back when:
        // - there is a color matrix that is not an identity matrix, or
        // - there is an output transform matrix that is not an identity matrix, or
        // - the input transfer function doesn't match the output transfer function.
        if (managedState.hasColorMatrix() || managedState.hasOutputTransformMatrix() ||
            inputTransfer != outputTransfer) {
            managedState.inputTransferFunction =
                    Description::dataSpaceToTransferFunction(inputTransfer);
            managedState.outputTransferFunction =
                    Description::dataSpaceToTransferFunction(outputTransfer);
        }
    }

    ProgramCache::getInstance().useProgram(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
                                           managedState);

    if (mState.drawShadows) {
        glDrawElements(mesh.getPrimitive(), mesh.getIndexCount(), GL_UNSIGNED_SHORT,
                       mesh.getIndices());
    } else {
        glDrawArrays(mesh.getPrimitive(), 0, mesh.getVertexCount());
    }

    // Debug-only dump of the rendered frame to /data.
    if (mUseColorManagement && outputDebugPPMs) {
        static uint64_t managedColorFrameCount = 0;
        std::ostringstream out;
        out << "/data/texture_out" << managedColorFrameCount++;
        writePPM(out.str().c_str(), mVpWidth, mVpHeight);
    }

    if (mesh.getTexCoordsSize()) {
        glDisableVertexAttribArray(Program::texCoords);
    }

    if (mState.cornerRadius > 0.0f) {
        glDisableVertexAttribArray(Program::cropCoords);
    }

    if (mState.drawShadows) {
        glDisableVertexAttribArray(Program::shadowColor);
        glDisableVertexAttribArray(Program::shadowParams);
    }
}
+
// Returns the cached maximum supported texture dimension.
size_t GLESRenderEngine::getMaxTextureSize() const {
    return mMaxTextureSize;
}
+
+size_t GLESRenderEngine::getMaxViewportDims() const {
+ return mMaxViewportDims[0] < mMaxViewportDims[1] ? mMaxViewportDims[0] : mMaxViewportDims[1];
+}
+
// Appends RenderEngine debug state to |result|: EGL/GL version and extension
// strings, protected-context status, program cache sizes, the last dataspace
// conversion, and the ids of every cached EGLImage (both caches are read
// under their respective locks).
void GLESRenderEngine::dump(std::string& result) {
    const GLExtensions& extensions = GLExtensions::getInstance();
    ProgramCache& cache = ProgramCache::getInstance();

    StringAppendF(&result, "EGL implementation : %s\n", extensions.getEGLVersion());
    StringAppendF(&result, "%s\n", extensions.getEGLExtensions());
    StringAppendF(&result, "GLES: %s, %s, %s\n", extensions.getVendor(), extensions.getRenderer(),
                  extensions.getVersion());
    StringAppendF(&result, "%s\n", extensions.getExtensions());
    StringAppendF(&result, "RenderEngine supports protected context: %d\n",
                  supportsProtectedContent());
    StringAppendF(&result, "RenderEngine is in protected context: %d\n", mInProtectedContext);
    StringAppendF(&result, "RenderEngine program cache size for unprotected context: %zu\n",
                  cache.getSize(mEGLContext));
    StringAppendF(&result, "RenderEngine program cache size for protected context: %zu\n",
                  cache.getSize(mProtectedEGLContext));
    StringAppendF(&result, "RenderEngine last dataspace conversion: (%s) to (%s)\n",
                  dataspaceDetails(static_cast<android_dataspace>(mDataSpace)).c_str(),
                  dataspaceDetails(static_cast<android_dataspace>(mOutputDataSpace)).c_str());
    {
        std::lock_guard<std::mutex> lock(mRenderingMutex);
        StringAppendF(&result, "RenderEngine image cache size: %zu\n", mImageCache.size());
        StringAppendF(&result, "Dumping buffer ids...\n");
        for (const auto& [id, unused] : mImageCache) {
            StringAppendF(&result, "0x%" PRIx64 "\n", id);
        }
    }
    {
        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
        StringAppendF(&result, "RenderEngine framebuffer image cache size: %zu\n",
                      mFramebufferImageCache.size());
        StringAppendF(&result, "Dumping buffer ids...\n");
        for (const auto& [id, unused] : mFramebufferImageCache) {
            StringAppendF(&result, "0x%" PRIx64 "\n", id);
        }
    }
}
+
+GLESRenderEngine::GlesVersion GLESRenderEngine::parseGlesVersion(const char* str) {
+ int major, minor;
+ if (sscanf(str, "OpenGL ES-CM %d.%d", &major, &minor) != 2) {
+ if (sscanf(str, "OpenGL ES %d.%d", &major, &minor) != 2) {
+ ALOGW("Unable to parse GL_VERSION string: \"%s\"", str);
+ return GLES_VERSION_1_0;
+ }
+ }
+
+ if (major == 1 && minor == 0) return GLES_VERSION_1_0;
+ if (major == 1 && minor >= 1) return GLES_VERSION_1_1;
+ if (major == 2 && minor >= 0) return GLES_VERSION_2_0;
+ if (major == 3 && minor >= 0) return GLES_VERSION_3_0;
+
+ ALOGW("Unrecognized OpenGL ES version: %d.%d", major, minor);
+ return GLES_VERSION_1_0;
+}
+
// Creates an EGL context targeting the highest GLES version the config
// advertises, optionally with IMG high priority and/or EGL protected
// content. When |config| is EGL_NO_CONFIG, GLES 3 is requested speculatively
// and creation falls back to GLES 2 on failure. Returns EGL_NO_CONTEXT on
// failure.
EGLContext GLESRenderEngine::createEglContext(EGLDisplay display, EGLConfig config,
                                              EGLContext shareContext, bool useContextPriority,
                                              Protection protection) {
    EGLint renderableType = 0;
    if (config == EGL_NO_CONFIG) {
        renderableType = EGL_OPENGL_ES3_BIT;
    } else if (!eglGetConfigAttrib(display, config, EGL_RENDERABLE_TYPE, &renderableType)) {
        LOG_ALWAYS_FATAL("can't query EGLConfig RENDERABLE_TYPE");
    }
    // Pick the highest client version the config supports.
    EGLint contextClientVersion = 0;
    if (renderableType & EGL_OPENGL_ES3_BIT) {
        contextClientVersion = 3;
    } else if (renderableType & EGL_OPENGL_ES2_BIT) {
        contextClientVersion = 2;
    } else if (renderableType & EGL_OPENGL_ES_BIT) {
        contextClientVersion = 1;
    } else {
        LOG_ALWAYS_FATAL("no supported EGL_RENDERABLE_TYPEs");
    }

    // Attribute list: version, then optional priority and protected-content
    // pairs, terminated with EGL_NONE (at most 7 entries, hence reserve(7)).
    std::vector<EGLint> contextAttributes;
    contextAttributes.reserve(7);
    contextAttributes.push_back(EGL_CONTEXT_CLIENT_VERSION);
    contextAttributes.push_back(contextClientVersion);
    if (useContextPriority) {
        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_LEVEL_IMG);
        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_HIGH_IMG);
    }
    if (protection == Protection::PROTECTED) {
        contextAttributes.push_back(EGL_PROTECTED_CONTENT_EXT);
        contextAttributes.push_back(EGL_TRUE);
    }
    contextAttributes.push_back(EGL_NONE);

    EGLContext context = eglCreateContext(display, config, shareContext, contextAttributes.data());

    if (contextClientVersion == 3 && context == EGL_NO_CONTEXT) {
        // eglGetConfigAttrib indicated we can create GLES 3 context, but we failed, thus
        // EGL_NO_CONTEXT so that we can abort.
        if (config != EGL_NO_CONFIG) {
            return context;
        }
        // If |config| is EGL_NO_CONFIG, we speculatively try to create GLES 3 context, so we should
        // try to fall back to GLES 2.
        contextAttributes[1] = 2;
        context = eglCreateContext(display, config, shareContext, contextAttributes.data());
    }

    return context;
}
+
+EGLSurface GLESRenderEngine::createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+ int hwcFormat, Protection protection) {
+ EGLConfig stubConfig = config;
+ if (stubConfig == EGL_NO_CONFIG) {
+ stubConfig = chooseEglConfig(display, hwcFormat, /*logConfig*/ true);
+ }
+ std::vector<EGLint> attributes;
+ attributes.reserve(7);
+ attributes.push_back(EGL_WIDTH);
+ attributes.push_back(1);
+ attributes.push_back(EGL_HEIGHT);
+ attributes.push_back(1);
+ if (protection == Protection::PROTECTED) {
+ attributes.push_back(EGL_PROTECTED_CONTENT_EXT);
+ attributes.push_back(EGL_TRUE);
+ }
+ attributes.push_back(EGL_NONE);
+
+ return eglCreatePbufferSurface(display, stubConfig, attributes.data());
+}
+
+bool GLESRenderEngine::isHdrDataSpace(const Dataspace dataSpace) const {
+ const Dataspace standard = static_cast<Dataspace>(dataSpace & Dataspace::STANDARD_MASK);
+ const Dataspace transfer = static_cast<Dataspace>(dataSpace & Dataspace::TRANSFER_MASK);
+ return standard == Dataspace::STANDARD_BT2020 &&
+ (transfer == Dataspace::TRANSFER_ST2084 || transfer == Dataspace::TRANSFER_HLG);
+}
+
+// For convenience, we want to convert the input color space to XYZ color space first,
+// and then convert from XYZ color space to output color space when
+// - SDR and HDR contents are mixed, either SDR content will be converted to HDR or
+// HDR content will be tone-mapped to SDR; Or,
+// - there are HDR PQ and HLG contents presented at the same time, where we want to convert
+// HLG content to PQ content.
+// In either case above, we need to operate the Y value in XYZ color space. Thus, when either
+// input data space or output data space is HDR data space, and the input transfer function
+// doesn't match the output transfer function, we would enable an intermediate transfrom to
+// XYZ color space.
+bool GLESRenderEngine::needsXYZTransformMatrix() const {
+ const bool isInputHdrDataSpace = isHdrDataSpace(mDataSpace);
+ const bool isOutputHdrDataSpace = isHdrDataSpace(mOutputDataSpace);
+ const Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
+ const Dataspace outputTransfer =
+ static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
+
+ return (isInputHdrDataSpace || isOutputHdrDataSpace) && inputTransfer != outputTransfer;
+}
+
+bool GLESRenderEngine::isImageCachedForTesting(uint64_t bufferId) {
+ std::lock_guard<std::mutex> lock(mRenderingMutex);
+ const auto& cachedImage = mImageCache.find(bufferId);
+ return cachedImage != mImageCache.end();
+}
+
+bool GLESRenderEngine::isTextureNameKnownForTesting(uint32_t texName) {
+ const auto& entry = mTextureView.find(texName);
+ return entry != mTextureView.end();
+}
+
+std::optional<uint64_t> GLESRenderEngine::getBufferIdForTextureNameForTesting(uint32_t texName) {
+ const auto& entry = mTextureView.find(texName);
+ return entry != mTextureView.end() ? entry->second : std::nullopt;
+}
+
+bool GLESRenderEngine::isFramebufferImageCachedForTesting(uint64_t bufferId) {
+ std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+ return std::any_of(mFramebufferImageCache.cbegin(), mFramebufferImageCache.cend(),
+ [=](std::pair<uint64_t, EGLImageKHR> image) {
+ return image.first == bufferId;
+ });
+}
+
+// FlushTracer implementation
+GLESRenderEngine::FlushTracer::FlushTracer(GLESRenderEngine* engine) : mEngine(engine) {
+ mThread = std::thread(&GLESRenderEngine::FlushTracer::loop, this);
+}
+
+GLESRenderEngine::FlushTracer::~FlushTracer() {
+ {
+ std::lock_guard<std::mutex> lock(mMutex);
+ mRunning = false;
+ }
+ mCondition.notify_all();
+ if (mThread.joinable()) {
+ mThread.join();
+ }
+}
+
+void GLESRenderEngine::FlushTracer::queueSync(EGLSyncKHR sync) {
+ std::lock_guard<std::mutex> lock(mMutex);
+ char name[64];
+ const uint64_t frameNum = mFramesQueued++;
+ snprintf(name, sizeof(name), "Queueing sync for frame: %lu",
+ static_cast<unsigned long>(frameNum));
+ ATRACE_NAME(name);
+ mQueue.push({sync, frameNum});
+ ATRACE_INT("GPU Frames Outstanding", mQueue.size());
+ mCondition.notify_one();
+}
+
+void GLESRenderEngine::FlushTracer::loop() {
+ while (mRunning) {
+ QueueEntry entry;
+ {
+ std::lock_guard<std::mutex> lock(mMutex);
+
+ mCondition.wait(mMutex,
+ [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+
+ if (!mRunning) {
+ // if mRunning is false, then FlushTracer is being destroyed, so
+ // bail out now.
+ break;
+ }
+ entry = mQueue.front();
+ mQueue.pop();
+ }
+ {
+ char name[64];
+ snprintf(name, sizeof(name), "waiting for frame %lu",
+ static_cast<unsigned long>(entry.mFrameNum));
+ ATRACE_NAME(name);
+ mEngine->waitSync(entry.mSync, 0);
+ }
+ }
+}
+
+void GLESRenderEngine::handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+ const ShadowSettings& settings) {
+ ATRACE_CALL();
+ const float casterZ = settings.length / 2.0f;
+ const GLShadowVertexGenerator shadows(casterRect, casterCornerRadius, casterZ,
+ settings.casterIsTranslucent, settings.ambientColor,
+ settings.spotColor, settings.lightPos,
+ settings.lightRadius);
+
+ // setup mesh for both shadows
+ Mesh mesh = Mesh::Builder()
+ .setPrimitive(Mesh::TRIANGLES)
+ .setVertices(shadows.getVertexCount(), 2 /* size */)
+ .setShadowAttrs()
+ .setIndices(shadows.getIndexCount())
+ .build();
+
+ Mesh::VertexArray<vec2> position = mesh.getPositionArray<vec2>();
+ Mesh::VertexArray<vec4> shadowColor = mesh.getShadowColorArray<vec4>();
+ Mesh::VertexArray<vec3> shadowParams = mesh.getShadowParamsArray<vec3>();
+ shadows.fillVertices(position, shadowColor, shadowParams);
+ shadows.fillIndices(mesh.getIndicesArray());
+
+ mState.cornerRadius = 0.0f;
+ mState.drawShadows = true;
+ setupLayerTexturing(mShadowTexture.getTexture());
+ drawMesh(mesh);
+ mState.drawShadows = false;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.h b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
new file mode 100644
index 0000000..2c6eae2
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_GLESRENDERENGINE_H_
+#define SF_GLESRENDERENGINE_H_
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <android-base/thread_annotations.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/private/Description.h>
+#include <sys/types.h>
+#include "GLShadowTexture.h"
+#include "ImageManager.h"
+
+#define EGL_NO_CONFIG ((EGLConfig)0)
+
+namespace android {
+
+namespace renderengine {
+
+class Mesh;
+class Texture;
+
+namespace gl {
+
+class GLImage;
+class BlurFilter;
+
+class GLESRenderEngine : public impl::RenderEngine {
+public:
+ static std::unique_ptr<GLESRenderEngine> create(const RenderEngineCreationArgs& args);
+
+ GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display, EGLConfig config,
+ EGLContext ctxt, EGLSurface stub, EGLContext protectedContext,
+ EGLSurface protectedStub);
+ ~GLESRenderEngine() override EXCLUDES(mRenderingMutex);
+
+ void primeCache() const override;
+ void genTextures(size_t count, uint32_t* names) override;
+ void deleteTextures(size_t count, uint32_t const* names) override;
+ void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+ status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+ const sp<Fence>& fence) EXCLUDES(mRenderingMutex);
+ void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) EXCLUDES(mRenderingMutex);
+ void unbindExternalTextureBuffer(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+ status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+ void unbindFrameBuffer(Framebuffer* framebuffer) override;
+
+ bool isProtected() const override { return mInProtectedContext; }
+ bool supportsProtectedContent() const override;
+ bool useProtectedContext(bool useProtectedContext) override;
+ status_t drawLayers(const DisplaySettings& display,
+ const std::vector<const LayerSettings*>& layers,
+ const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+ base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+ bool cleanupPostRender(CleanupMode mode) override;
+
+ EGLDisplay getEGLDisplay() const { return mEGLDisplay; }
+ // Creates an output image for rendering to
+ EGLImageKHR createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+ bool useFramebufferCache)
+ EXCLUDES(mFramebufferImageCacheMutex);
+
+ // Test-only methods
+ // Returns true iff mImageCache contains an image keyed by bufferId
+ bool isImageCachedForTesting(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+ // Returns true iff texName was previously generated by RenderEngine and was
+ // not destroyed.
+ bool isTextureNameKnownForTesting(uint32_t texName);
+ // Returns the buffer ID of the content bound to texName, or nullopt if no
+ // such mapping exists.
+ std::optional<uint64_t> getBufferIdForTextureNameForTesting(uint32_t texName);
+ // Returns true iff mFramebufferImageCache contains an image keyed by bufferId
+ bool isFramebufferImageCachedForTesting(uint64_t bufferId)
+ EXCLUDES(mFramebufferImageCacheMutex);
+ // These are wrappers around public methods above, but exposing Barrier
+ // objects so that tests can block.
+ std::shared_ptr<ImageManager::Barrier> cacheExternalTextureBufferForTesting(
+ const sp<GraphicBuffer>& buffer);
+ std::shared_ptr<ImageManager::Barrier> unbindExternalTextureBufferForTesting(uint64_t bufferId);
+
+protected:
+ Framebuffer* getFramebufferForDrawing() override;
+ void dump(std::string& result) override EXCLUDES(mRenderingMutex)
+ EXCLUDES(mFramebufferImageCacheMutex);
+ size_t getMaxTextureSize() const override;
+ size_t getMaxViewportDims() const override;
+
+private:
+ enum GlesVersion {
+ GLES_VERSION_1_0 = 0x10000,
+ GLES_VERSION_1_1 = 0x10001,
+ GLES_VERSION_2_0 = 0x20000,
+ GLES_VERSION_3_0 = 0x30000,
+ };
+
+ static EGLConfig chooseEglConfig(EGLDisplay display, int format, bool logConfig);
+ static GlesVersion parseGlesVersion(const char* str);
+ static EGLContext createEglContext(EGLDisplay display, EGLConfig config,
+ EGLContext shareContext, bool useContextPriority,
+ Protection protection);
+ static EGLSurface createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+ int hwcFormat, Protection protection);
+ std::unique_ptr<Framebuffer> createFramebuffer();
+ std::unique_ptr<Image> createImage();
+ void checkErrors() const;
+ void checkErrors(const char* tag) const;
+ void setScissor(const Rect& region);
+ void disableScissor();
+ bool waitSync(EGLSyncKHR sync, EGLint flags);
+ status_t cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer)
+ EXCLUDES(mRenderingMutex);
+ void unbindExternalTextureBufferInternal(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+
+ // A data space is considered HDR data space if it has BT2020 color space
+ // with PQ or HLG transfer function.
+ bool isHdrDataSpace(const ui::Dataspace dataSpace) const;
+ bool needsXYZTransformMatrix() const;
+ // Defines the viewport, and sets the projection matrix to the projection
+ // defined by the clip.
+ void setViewportAndProjection(Rect viewport, Rect clip);
+ // Evicts stale images from the buffer cache.
+ void evictImages(const std::vector<LayerSettings>& layers);
+ // Computes the cropping window for the layer and sets up cropping
+ // coordinates for the mesh.
+ FloatRect setupLayerCropping(const LayerSettings& layer, Mesh& mesh);
+
+ // We do a special handling for rounded corners when it's possible to turn off blending
+ // for the majority of the layer. The rounded corners needs to turn on blending such that
+ // we can set the alpha value correctly, however, only the corners need this, and since
+ // blending is an expensive operation, we want to turn off blending when it's not necessary.
+ void handleRoundedCorners(const DisplaySettings& display, const LayerSettings& layer,
+ const Mesh& mesh);
+ base::unique_fd flush();
+ bool finish();
+ bool waitFence(base::unique_fd fenceFd);
+ void clearWithColor(float red, float green, float blue, float alpha);
+ void fillRegionWithColor(const Region& region, float red, float green, float blue, float alpha);
+ void handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+ const ShadowSettings& shadowSettings);
+ void setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+ const half4& color, float cornerRadius);
+ void setupLayerTexturing(const Texture& texture);
+ void setupFillWithColor(float r, float g, float b, float a);
+ void setColorTransform(const mat4& colorTransform);
+ void disableTexturing();
+ void disableBlending();
+ void setupCornerRadiusCropSize(float width, float height);
+
+ // HDR and color management related functions and state
+ void setSourceY410BT2020(bool enable);
+ void setSourceDataSpace(ui::Dataspace source);
+ void setOutputDataSpace(ui::Dataspace dataspace);
+ void setDisplayMaxLuminance(const float maxLuminance);
+
+ // drawing
+ void drawMesh(const Mesh& mesh);
+
+ EGLDisplay mEGLDisplay;
+ EGLConfig mEGLConfig;
+ EGLContext mEGLContext;
+ EGLSurface mStubSurface;
+ EGLContext mProtectedEGLContext;
+ EGLSurface mProtectedStubSurface;
+ GLint mMaxViewportDims[2];
+ GLint mMaxTextureSize;
+ GLuint mVpWidth;
+ GLuint mVpHeight;
+ Description mState;
+ GLShadowTexture mShadowTexture;
+
+ mat4 mSrgbToXyz;
+ mat4 mDisplayP3ToXyz;
+ mat4 mBt2020ToXyz;
+ mat4 mXyzToSrgb;
+ mat4 mXyzToDisplayP3;
+ mat4 mXyzToBt2020;
+ mat4 mSrgbToDisplayP3;
+ mat4 mSrgbToBt2020;
+ mat4 mDisplayP3ToSrgb;
+ mat4 mDisplayP3ToBt2020;
+ mat4 mBt2020ToSrgb;
+ mat4 mBt2020ToDisplayP3;
+
+ bool mInProtectedContext = false;
+ // If set to true, then enables tracing flush() and finish() to systrace.
+ bool mTraceGpuCompletion = false;
+ // Maximum size of mFramebufferImageCache. If more images would be cached, then (approximately)
+ // the last recently used buffer should be kicked out.
+ uint32_t mFramebufferImageCacheSize = 0;
+
+ // Cache of output images, keyed by corresponding GraphicBuffer ID.
+ std::deque<std::pair<uint64_t, EGLImageKHR>> mFramebufferImageCache
+ GUARDED_BY(mFramebufferImageCacheMutex);
+ // The only reason why we have this mutex is so that we don't segfault when
+ // dumping info.
+ std::mutex mFramebufferImageCacheMutex;
+
+ // Current dataspace of layer being rendered
+ ui::Dataspace mDataSpace = ui::Dataspace::UNKNOWN;
+
+ // Current output dataspace of the render engine
+ ui::Dataspace mOutputDataSpace = ui::Dataspace::UNKNOWN;
+
+ // Whether device supports color management, currently color management
+ // supports sRGB, DisplayP3 color spaces.
+ const bool mUseColorManagement = false;
+
+ // Cache of GL images that we'll store per GraphicBuffer ID
+ std::unordered_map<uint64_t, std::unique_ptr<Image>> mImageCache GUARDED_BY(mRenderingMutex);
+ std::unordered_map<uint32_t, std::optional<uint64_t>> mTextureView;
+
+ // Mutex guarding rendering operations, so that:
+ // 1. GL operations aren't interleaved, and
+ // 2. Internal state related to rendering that is potentially modified by
+ // multiple threads is guaranteed thread-safe.
+ std::mutex mRenderingMutex;
+
+ std::unique_ptr<Framebuffer> mDrawingBuffer;
+ // this is a 1x1 RGB buffer, but over-allocate in case a driver wants more
+ // memory or if it needs to satisfy alignment requirements. In this case:
+ // assume that each channel requires 4 bytes, and add 3 additional bytes to
+ // ensure that we align on a word. Allocating 16 bytes will provide a
+ // guarantee that we don't clobber memory.
+ uint32_t mPlaceholderDrawBuffer[4];
+ // Placeholder buffer and image, similar to mPlaceholderDrawBuffer, but
+ // instead these are intended for cleaning up texture memory with the
+ // GL_TEXTURE_EXTERNAL_OES target.
+ ANativeWindowBuffer* mPlaceholderBuffer = nullptr;
+ EGLImage mPlaceholderImage = EGL_NO_IMAGE_KHR;
+ sp<Fence> mLastDrawFence;
+ // Store a separate boolean checking if prior resources were cleaned up, as
+ // devices that don't support native sync fences can't rely on a last draw
+ // fence that doesn't exist.
+ bool mPriorResourcesCleaned = true;
+
+ // Blur effect processor, only instantiated when a layer requests it.
+ BlurFilter* mBlurFilter = nullptr;
+
+ class FlushTracer {
+ public:
+ FlushTracer(GLESRenderEngine* engine);
+ ~FlushTracer();
+ void queueSync(EGLSyncKHR sync) EXCLUDES(mMutex);
+
+ struct QueueEntry {
+ EGLSyncKHR mSync = nullptr;
+ uint64_t mFrameNum = 0;
+ };
+
+ private:
+ void loop();
+ GLESRenderEngine* const mEngine;
+ std::thread mThread;
+ std::condition_variable_any mCondition;
+ std::mutex mMutex;
+ std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+ uint64_t mFramesQueued GUARDED_BY(mMutex) = 0;
+ bool mRunning = true;
+ };
+ friend class FlushTracer;
+ friend class ImageManager;
+ friend class GLFramebuffer;
+ friend class BlurFilter;
+ friend class GenericProgram;
+ std::unique_ptr<FlushTracer> mFlushTracer;
+ std::unique_ptr<ImageManager> mImageManager = std::make_unique<ImageManager>(this);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_GLESRENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.cpp b/media/libstagefright/renderfright/gl/GLExtensions.cpp
new file mode 100644
index 0000000..2924b0e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GLExtensions.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::GLExtensions)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+namespace {
+
+class ExtensionSet {
+public:
+ ExtensionSet(const char* extensions) {
+ char const* curr = extensions;
+ char const* head = curr;
+ do {
+ head = strchr(curr, ' ');
+ size_t len = head ? head - curr : strlen(curr);
+ if (len > 0) {
+ mExtensions.emplace(curr, len);
+ }
+ curr = head + 1;
+ } while (head);
+ }
+
+ bool hasExtension(const char* extension) const { return mExtensions.count(extension) > 0; }
+
+private:
+ std::unordered_set<std::string> mExtensions;
+};
+
+} // anonymous namespace
+
+void GLExtensions::initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer,
+ GLubyte const* version, GLubyte const* extensions) {
+ mVendor = (char const*)vendor;
+ mRenderer = (char const*)renderer;
+ mVersion = (char const*)version;
+ mExtensions = (char const*)extensions;
+
+ ExtensionSet extensionSet(mExtensions.c_str());
+ if (extensionSet.hasExtension("GL_EXT_protected_textures")) {
+ mHasProtectedTexture = true;
+ }
+}
+
+char const* GLExtensions::getVendor() const {
+ return mVendor.string();
+}
+
+char const* GLExtensions::getRenderer() const {
+ return mRenderer.string();
+}
+
+char const* GLExtensions::getVersion() const {
+ return mVersion.string();
+}
+
+char const* GLExtensions::getExtensions() const {
+ return mExtensions.string();
+}
+
+void GLExtensions::initWithEGLStrings(char const* eglVersion, char const* eglExtensions) {
+ mEGLVersion = eglVersion;
+ mEGLExtensions = eglExtensions;
+
+ ExtensionSet extensionSet(eglExtensions);
+
+ // EGL_ANDROIDX_no_config_context is an experimental extension with no
+ // written specification. It will be replaced by something more formal.
+ // SurfaceFlinger is using it to allow a single EGLContext to render to
+ // both a 16-bit primary display framebuffer and a 32-bit virtual display
+ // framebuffer.
+ //
+ // EGL_KHR_no_config_context is official extension to allow creating a
+ // context that works with any surface of a display.
+ if (extensionSet.hasExtension("EGL_ANDROIDX_no_config_context") ||
+ extensionSet.hasExtension("EGL_KHR_no_config_context")) {
+ mHasNoConfigContext = true;
+ }
+
+ if (extensionSet.hasExtension("EGL_ANDROID_native_fence_sync")) {
+ mHasNativeFenceSync = true;
+ }
+ if (extensionSet.hasExtension("EGL_KHR_fence_sync")) {
+ mHasFenceSync = true;
+ }
+ if (extensionSet.hasExtension("EGL_KHR_wait_sync")) {
+ mHasWaitSync = true;
+ }
+ if (extensionSet.hasExtension("EGL_EXT_protected_content")) {
+ mHasProtectedContent = true;
+ }
+ if (extensionSet.hasExtension("EGL_IMG_context_priority")) {
+ mHasContextPriority = true;
+ }
+ if (extensionSet.hasExtension("EGL_KHR_surfaceless_context")) {
+ mHasSurfacelessContext = true;
+ }
+}
+
+char const* GLExtensions::getEGLVersion() const {
+ return mEGLVersion.string();
+}
+
+char const* GLExtensions::getEGLExtensions() const {
+ return mEGLExtensions.string();
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.h b/media/libstagefright/renderfright/gl/GLExtensions.h
new file mode 100644
index 0000000..ef00009
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SF_GLEXTENSION_H
+#define ANDROID_SF_GLEXTENSION_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <utils/Singleton.h>
+#include <utils/String8.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLExtensions : public Singleton<GLExtensions> {
+public:
+ bool hasNoConfigContext() const { return mHasNoConfigContext; }
+ bool hasNativeFenceSync() const { return mHasNativeFenceSync; }
+ bool hasFenceSync() const { return mHasFenceSync; }
+ bool hasWaitSync() const { return mHasWaitSync; }
+ bool hasProtectedContent() const { return mHasProtectedContent; }
+ bool hasContextPriority() const { return mHasContextPriority; }
+ bool hasSurfacelessContext() const { return mHasSurfacelessContext; }
+ bool hasProtectedTexture() const { return mHasProtectedTexture; }
+
+ void initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer, GLubyte const* version,
+ GLubyte const* extensions);
+ char const* getVendor() const;
+ char const* getRenderer() const;
+ char const* getVersion() const;
+ char const* getExtensions() const;
+
+ void initWithEGLStrings(char const* eglVersion, char const* eglExtensions);
+ char const* getEGLVersion() const;
+ char const* getEGLExtensions() const;
+
+protected:
+ GLExtensions() = default;
+
+private:
+ friend class Singleton<GLExtensions>;
+
+ bool mHasNoConfigContext = false;
+ bool mHasNativeFenceSync = false;
+ bool mHasFenceSync = false;
+ bool mHasWaitSync = false;
+ bool mHasProtectedContent = false;
+ bool mHasContextPriority = false;
+ bool mHasSurfacelessContext = false;
+ bool mHasProtectedTexture = false;
+
+ String8 mVendor;
+ String8 mRenderer;
+ String8 mVersion;
+ String8 mExtensions;
+ String8 mEGLVersion;
+ String8 mEGLExtensions;
+
+ GLExtensions(const GLExtensions&);
+ GLExtensions& operator=(const GLExtensions&);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif // ANDROID_SF_GLEXTENSION_H
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.cpp b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
new file mode 100644
index 0000000..383486b
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLFramebuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+#include <gui/DebugEGLImageTracker.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLFramebuffer::GLFramebuffer(GLESRenderEngine& engine)
+ : mEngine(engine), mEGLDisplay(engine.getEGLDisplay()), mEGLImage(EGL_NO_IMAGE_KHR) {
+ glGenTextures(1, &mTextureName);
+ glGenFramebuffers(1, &mFramebufferName);
+}
+
+GLFramebuffer::~GLFramebuffer() {
+ glDeleteFramebuffers(1, &mFramebufferName);
+ glDeleteTextures(1, &mTextureName);
+}
+
+bool GLFramebuffer::setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+ const bool useFramebufferCache) {
+ ATRACE_CALL();
+ if (mEGLImage != EGL_NO_IMAGE_KHR) {
+ if (!usingFramebufferCache) {
+ eglDestroyImageKHR(mEGLDisplay, mEGLImage);
+ DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+ }
+ mEGLImage = EGL_NO_IMAGE_KHR;
+ mBufferWidth = 0;
+ mBufferHeight = 0;
+ }
+
+ if (nativeBuffer) {
+ mEGLImage = mEngine.createFramebufferImageIfNeeded(nativeBuffer, isProtected,
+ useFramebufferCache);
+ if (mEGLImage == EGL_NO_IMAGE_KHR) {
+ return false;
+ }
+ usingFramebufferCache = useFramebufferCache;
+ mBufferWidth = nativeBuffer->width;
+ mBufferHeight = nativeBuffer->height;
+ }
+ return true;
+}
+
+void GLFramebuffer::allocateBuffers(uint32_t width, uint32_t height, void* data) {
+ ATRACE_CALL();
+
+ glBindTexture(GL_TEXTURE_2D, mTextureName);
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
+
+ mBufferHeight = height;
+ mBufferWidth = width;
+ mEngine.checkErrors("Allocating Fbo texture");
+
+ bind();
+ glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mTextureName, 0);
+ mStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+ unbind();
+ glBindTexture(GL_TEXTURE_2D, 0);
+
+ if (mStatus != GL_FRAMEBUFFER_COMPLETE) {
+ ALOGE("Frame buffer is not complete. Error %d", mStatus);
+ }
+}
+
+void GLFramebuffer::bind() const {
+ glBindFramebuffer(GL_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsReadBuffer() const {
+ glBindFramebuffer(GL_READ_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsDrawBuffer() const {
+ glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::unbind() const {
+ glBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.h b/media/libstagefright/renderfright/gl/GLFramebuffer.h
new file mode 100644
index 0000000..6757695
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <renderengine/Framebuffer.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLFramebuffer : public renderengine::Framebuffer {
+public:
+ explicit GLFramebuffer(GLESRenderEngine& engine);
+ explicit GLFramebuffer(GLESRenderEngine& engine, bool multiTarget);
+ ~GLFramebuffer() override;
+
+ bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+ const bool useFramebufferCache) override;
+ void allocateBuffers(uint32_t width, uint32_t height, void* data = nullptr);
+ EGLImageKHR getEGLImage() const { return mEGLImage; }
+ uint32_t getTextureName() const { return mTextureName; }
+ uint32_t getFramebufferName() const { return mFramebufferName; }
+ int32_t getBufferHeight() const { return mBufferHeight; }
+ int32_t getBufferWidth() const { return mBufferWidth; }
+ GLenum getStatus() const { return mStatus; }
+ void bind() const;
+ void bindAsReadBuffer() const;
+ void bindAsDrawBuffer() const;
+ void unbind() const;
+
+private:
+ GLESRenderEngine& mEngine;
+ EGLDisplay mEGLDisplay;
+ EGLImageKHR mEGLImage;
+ bool usingFramebufferCache = false;
+ GLenum mStatus = GL_FRAMEBUFFER_UNSUPPORTED;
+ uint32_t mTextureName, mFramebufferName;
+
+ int32_t mBufferHeight = 0;
+ int32_t mBufferWidth = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.cpp b/media/libstagefright/renderfright/gl/GLImage.cpp
new file mode 100644
index 0000000..8497721
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLImage.h"
+
+#include <vector>
+
+#include <gui/DebugEGLImageTracker.h>
+#include <log/log.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Assembles the EGL attribute list for eglCreateImageKHR: the image is always
+// preserved, and protected content is requested only when both asked for and
+// supported by the driver. The list is EGL_NONE-terminated.
+static std::vector<EGLint> buildAttributeList(bool isProtected) {
+    const bool wantProtected = isProtected && GLExtensions::getInstance().hasProtectedContent();
+
+    std::vector<EGLint> attrs;
+    attrs.reserve(16);
+
+    attrs.insert(attrs.end(), {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE});
+    if (wantProtected) {
+        attrs.insert(attrs.end(), {EGL_PROTECTED_CONTENT_EXT, EGL_TRUE});
+    }
+    attrs.push_back(EGL_NONE);
+
+    return attrs;
+}
+
+// Caches the engine's EGLDisplay so images can be created/destroyed later.
+GLImage::GLImage(const GLESRenderEngine& engine) : mEGLDisplay(engine.getEGLDisplay()) {}
+
+GLImage::~GLImage() {
+    // Release any live EGLImage via the null-buffer path of the setter.
+    setNativeWindowBuffer(nullptr, false);
+}
+
+// Points this GLImage at |buffer|, destroying any EGLImage created for a
+// previous buffer first; passing nullptr therefore just releases the current
+// image. Returns false only when creating the new EGLImage fails.
+bool GLImage::setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) {
+    ATRACE_CALL();
+    if (mEGLImage != EGL_NO_IMAGE_KHR) {
+        if (!eglDestroyImageKHR(mEGLDisplay, mEGLImage)) {
+            ALOGE("failed to destroy image: %#x", eglGetError());
+        }
+        DEBUG_EGL_IMAGE_TRACKER_DESTROY();
+        // Clear the handle even if destruction failed so it is never reused.
+        mEGLImage = EGL_NO_IMAGE_KHR;
+    }
+
+    if (buffer) {
+        std::vector<EGLint> attrs = buildAttributeList(isProtected);
+        mEGLImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                      static_cast<EGLClientBuffer>(buffer), attrs.data());
+        if (mEGLImage == EGL_NO_IMAGE_KHR) {
+            ALOGE("failed to create EGLImage: %#x", eglGetError());
+            return false;
+        }
+        DEBUG_EGL_IMAGE_TRACKER_CREATE();
+        mProtected = isProtected;
+    }
+    // NOTE(review): mProtected keeps its previous value when buffer == nullptr;
+    // confirm callers never query isProtected() after a release.
+
+    return true;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.h b/media/libstagefright/renderfright/gl/GLImage.h
new file mode 100644
index 0000000..59d6ce3
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <android-base/macros.h>
+#include <renderengine/Image.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+// RAII wrapper around an EGLImage bound to an ANativeWindowBuffer; the GL
+// implementation of renderengine::Image. Non-copyable.
+class GLImage : public renderengine::Image {
+public:
+    explicit GLImage(const GLESRenderEngine& engine);
+    ~GLImage() override;
+
+    // (Re)binds the image to |buffer|; nullptr releases the current image.
+    // Returns false only on EGLImage creation failure.
+    bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) override;
+
+    EGLImageKHR getEGLImage() const { return mEGLImage; }
+    // True when the currently bound buffer was flagged as protected content.
+    bool isProtected() const { return mProtected; }
+
+private:
+    EGLDisplay mEGLDisplay;
+    EGLImageKHR mEGLImage = EGL_NO_IMAGE_KHR;
+    bool mProtected = false;
+
+    DISALLOW_COPY_AND_ASSIGN(GLImage);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.cpp b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
new file mode 100644
index 0000000..2423a34
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+
+#include "GLShadowTexture.h"
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Builds the 1-D shadow falloff ramp on the CPU and uploads it as a
+// SHADOW_TEXTURE_WIDTH x SHADOW_TEXTURE_HEIGHT GL_ALPHA texture, then wires
+// the resulting GL name into the renderengine Texture wrapper.
+GLShadowTexture::GLShadowTexture() {
+    fillShadowTextureData(mTextureData, SHADOW_TEXTURE_WIDTH);
+
+    glGenTextures(1, &mName);
+    glBindTexture(GL_TEXTURE_2D, mName);
+    glTexImage2D(GL_TEXTURE_2D, 0 /* base image level */, GL_ALPHA, SHADOW_TEXTURE_WIDTH,
+                 SHADOW_TEXTURE_HEIGHT, 0 /* border */, GL_ALPHA, GL_UNSIGNED_BYTE, mTextureData);
+    mTexture.init(Texture::TEXTURE_2D, mName);
+    mTexture.setFiltering(true);
+    // Use the named constant rather than a literal 1 so the wrapper's
+    // dimensions stay in sync with the glTexImage2D upload above
+    // (SHADOW_TEXTURE_HEIGHT == 1, so behavior is unchanged).
+    mTexture.setDimensions(SHADOW_TEXTURE_WIDTH, SHADOW_TEXTURE_HEIGHT);
+}
+
+GLShadowTexture::~GLShadowTexture() {
+    // Frees the GL texture created in the constructor.
+    glDeleteTextures(1, &mName);
+}
+
+// Returns the wrapper describing the shadow ramp texture; the reference is
+// valid for the lifetime of this GLShadowTexture.
+const Texture& GLShadowTexture::getTexture() {
+    return mTexture;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.h b/media/libstagefright/renderfright/gl/GLShadowTexture.h
new file mode 100644
index 0000000..250a9d7
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <renderengine/Texture.h>
+#include <cstdint>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Owns a small SHADOW_TEXTURE_WIDTH x 1 GL_ALPHA texture holding the shadow
+// falloff ramp produced by fillShadowTextureData() (see GLShadowTexture.cpp).
+class GLShadowTexture {
+public:
+    GLShadowTexture();
+    ~GLShadowTexture();
+
+    const Texture& getTexture();
+
+private:
+    static constexpr int SHADOW_TEXTURE_WIDTH = 128;
+    static constexpr int SHADOW_TEXTURE_HEIGHT = 1;
+
+    GLuint mName;     // GL texture object name
+    Texture mTexture; // renderengine wrapper describing the texture
+    uint8_t mTextureData[SHADOW_TEXTURE_WIDTH]; // CPU-side ramp, uploaded once
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
new file mode 100644
index 0000000..3181f9b
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <math/vec4.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include "GLShadowVertexGenerator.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Decides which shadows (ambient/spot) are visible, generates their geometry
+// descriptions, and caches the vertex/index counts needed to size the mesh.
+GLShadowVertexGenerator::GLShadowVertexGenerator(const FloatRect& casterRect,
+                                                 float casterCornerRadius, float casterZ,
+                                                 bool casterIsTranslucent, const vec4& ambientColor,
+                                                 const vec4& spotColor, const vec3& lightPosition,
+                                                 float lightRadius) {
+    // A shadow is only drawn when its color carries some alpha.
+    mDrawAmbientShadow = ambientColor.a > 0.f;
+    mDrawSpotShadow = spotColor.a > 0.f;
+
+    // The vertex/index counts default to 0 via their in-class initializers,
+    // so only enabled shadows need geometry generated and measured here.
+    if (mDrawAmbientShadow) {
+        mAmbientShadowGeometry = getAmbientShadowGeometry(casterRect, casterCornerRadius, casterZ,
+                                                          casterIsTranslucent, ambientColor);
+        mAmbientShadowVertexCount = getVertexCountForGeometry(*mAmbientShadowGeometry);
+        mAmbientShadowIndexCount = getIndexCountForGeometry(*mAmbientShadowGeometry);
+    }
+
+    if (mDrawSpotShadow) {
+        mSpotShadowGeometry = getSpotShadowGeometry(casterRect, casterCornerRadius, casterZ,
+                                                    casterIsTranslucent, spotColor, lightPosition,
+                                                    lightRadius);
+        mSpotShadowVertexCount = getVertexCountForGeometry(*mSpotShadowGeometry);
+        mSpotShadowIndexCount = getIndexCountForGeometry(*mSpotShadowGeometry);
+    }
+}
+
+// Total vertices across the ambient and spot geometries (either may be 0).
+size_t GLShadowVertexGenerator::getVertexCount() const {
+    return static_cast<size_t>(mAmbientShadowVertexCount + mSpotShadowVertexCount);
+}
+
+// Total indices across the ambient and spot geometries (either may be 0).
+size_t GLShadowVertexGenerator::getIndexCount() const {
+    return static_cast<size_t>(mAmbientShadowIndexCount + mSpotShadowIndexCount);
+}
+
+// Writes the per-vertex attributes into the supplied arrays: ambient
+// attributes occupy the front, spot attributes follow at an offset of
+// mAmbientShadowVertexCount.
+void GLShadowVertexGenerator::fillVertices(Mesh::VertexArray<vec2>& position,
+                                           Mesh::VertexArray<vec4>& color,
+                                           Mesh::VertexArray<vec3>& params) const {
+    if (mDrawAmbientShadow) {
+        fillVerticesForGeometry(*mAmbientShadowGeometry, mAmbientShadowVertexCount, position,
+                                color, params);
+    }
+    if (mDrawSpotShadow) {
+        const int offset = mAmbientShadowVertexCount;
+        fillVerticesForGeometry(*mSpotShadowGeometry, mSpotShadowVertexCount,
+                                Mesh::VertexArray<vec2>(position, offset),
+                                Mesh::VertexArray<vec4>(color, offset),
+                                Mesh::VertexArray<vec3>(params, offset));
+    }
+}
+
+// Writes the index buffer: ambient indices first (vertices based at 0), then
+// spot indices rebased past the ambient vertex block.
+void GLShadowVertexGenerator::fillIndices(uint16_t* indices) const {
+    if (mDrawAmbientShadow) {
+        fillIndicesForGeometry(*mAmbientShadowGeometry, mAmbientShadowIndexCount,
+                               0 /* starting vertex offset */, indices);
+    }
+    if (mDrawSpotShadow) {
+        fillIndicesForGeometry(*mSpotShadowGeometry, mSpotShadowIndexCount,
+                               mAmbientShadowVertexCount /* starting vertex offset */,
+                               indices + mAmbientShadowIndexCount);
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
new file mode 100644
index 0000000..112f976
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <math/vec4.h>
+#include <ui/Rect.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+
+class Mesh;
+
+namespace gl {
+
+/**
+ * Generates gl attributes required to draw shadow spot and/or ambient shadows.
+ *
+ * Each shadow can support different colors. This class generates three vertex attributes for
+ * each shadow, its position, color and shadow params(offset and distance). These can be sent
+ * using a single glDrawElements call.
+ */
+class GLShadowVertexGenerator {
+public:
+    GLShadowVertexGenerator(const FloatRect& casterRect, float casterCornerRadius, float casterZ,
+                            bool casterIsTranslucent, const vec4& ambientColor,
+                            const vec4& spotColor, const vec3& lightPosition, float lightRadius);
+    ~GLShadowVertexGenerator() = default;
+
+    // Combined counts across the ambient and spot geometries.
+    size_t getVertexCount() const;
+    size_t getIndexCount() const;
+    // Fills arrays that must hold at least getVertexCount() entries; ambient
+    // attributes are written first, spot attributes follow.
+    void fillVertices(Mesh::VertexArray<vec2>& position, Mesh::VertexArray<vec4>& color,
+                      Mesh::VertexArray<vec3>& params) const;
+    // Fills at least getIndexCount() indices; spot indices are rebased past
+    // the ambient vertex block.
+    void fillIndices(uint16_t* indices) const;
+
+private:
+    bool mDrawAmbientShadow;
+    std::unique_ptr<Geometry> mAmbientShadowGeometry;
+    int mAmbientShadowVertexCount = 0;
+    int mAmbientShadowIndexCount = 0;
+
+    bool mDrawSpotShadow;
+    std::unique_ptr<Geometry> mSpotShadowGeometry;
+    int mSpotShadowVertexCount = 0;
+    int mSpotShadowIndexCount = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
new file mode 100644
index 0000000..da8b435
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
@@ -0,0 +1,656 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <math/vec4.h>
+
+#include <renderengine/Mesh.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include <utils/Log.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code has been ported from skia shadow
+ * fast path OpenGL implementation to draw shadows around rects and rounded rects including
+ * circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - Fall off function is implemented in vertex shader rather than a shadow texture
+ * - Removed transformations applied on the caster rect since the caster will be in local
+ * coordinate space and will be transformed by the vertex shader.
+ */
+
+// Returns numer/denom clamped to [min, max]; a zero denominator pins the
+// result to min instead of dividing.
+static inline float divide_and_pin(float numer, float denom, float min, float max) {
+    return (denom == 0.0f) ? min : std::clamp(numer / denom, min, max);
+}
+
+static constexpr auto SK_ScalarSqrt2 = 1.41421356f;
+static constexpr auto kAmbientHeightFactor = 1.0f / 128.0f;
+static constexpr auto kAmbientGeomFactor = 64.0f;
+// Assuming that we have a light height of 600 for the spot shadow,
+// the spot values will reach their maximum at a height of approximately 292.3077.
+// We'll round up to 300 to keep it simple.
+static constexpr auto kMaxAmbientRadius = 300 * kAmbientHeightFactor * kAmbientGeomFactor;
+
+// Ambient blur radius grows linearly with caster height, capped at
+// kMaxAmbientRadius.
+inline float AmbientBlurRadius(float height) {
+    const float radius = height * kAmbientHeightFactor * kAmbientGeomFactor;
+    return std::min(radius, kMaxAmbientRadius);
+}
+// Reciprocal-alpha factor for the ambient umbra; never less than 1.
+inline float AmbientRecipAlpha(float height) {
+    return 1.0f + std::max(0.0f, height * kAmbientHeightFactor);
+}
+
+//////////////////////////////////////////////////////////////////////////////
+// Circle Data
+//
+// We have two possible cases for geometry for a circle:
+
+// In the case of a normal fill, we draw geometry for the circle as an octagon.
+static const uint16_t gFillCircleIndices[] = {
+ // enter the octagon
+ // clang-format off
+ 0, 1, 8, 1, 2, 8,
+ 2, 3, 8, 3, 4, 8,
+ 4, 5, 8, 5, 6, 8,
+ 6, 7, 8, 7, 0, 8,
+ // clang-format on
+};
+
+// For stroked circles, we use two nested octagons.
+static const uint16_t gStrokeCircleIndices[] = {
+ // enter the octagon
+ // clang-format off
+ 0, 1, 9, 0, 9, 8,
+ 1, 2, 10, 1, 10, 9,
+ 2, 3, 11, 2, 11, 10,
+ 3, 4, 12, 3, 12, 11,
+ 4, 5, 13, 4, 13, 12,
+ 5, 6, 14, 5, 14, 13,
+ 6, 7, 15, 6, 15, 14,
+ 7, 0, 8, 7, 8, 15,
+ // clang-format on
+};
+
+#define SK_ARRAY_COUNT(a) (sizeof(a) / sizeof((a)[0]))
+static const int kIndicesPerFillCircle = SK_ARRAY_COUNT(gFillCircleIndices);
+static const int kIndicesPerStrokeCircle = SK_ARRAY_COUNT(gStrokeCircleIndices);
+static const int kVertsPerStrokeCircle = 16;
+static const int kVertsPerFillCircle = 9;
+
+// 16 verts for the two nested stroke octagons, 9 (octagon + center) for fill.
+static int circle_type_to_vert_count(bool stroked) {
+    if (stroked) {
+        return kVertsPerStrokeCircle;
+    }
+    return kVertsPerFillCircle;
+}
+
+// Index count matching the vertex layout chosen above.
+static int circle_type_to_index_count(bool stroked) {
+    if (stroked) {
+        return kIndicesPerStrokeCircle;
+    }
+    return kIndicesPerFillCircle;
+}
+
+// Selects the static index table for the chosen circle layout.
+static const uint16_t* circle_type_to_indices(bool stroked) {
+    if (stroked) {
+        return gStrokeCircleIndices;
+    }
+    return gFillCircleIndices;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// RoundRect Data
+//
+// The geometry for a shadow roundrect is similar to a 9-patch:
+// ____________
+// |_|________|_|
+// | | | |
+// | | | |
+// | | | |
+// |_|________|_|
+// |_|________|_|
+//
+// However, each corner is rendered as a fan rather than a simple quad, as below. (The diagram
+// shows the upper part of the upper left corner. The bottom triangle would similarly be split
+// into two triangles.)
+// ________
+// |\ \ |
+// | \ \ |
+// | \\ |
+// | \|
+// --------
+//
+// The center of the fan handles the curve of the corner. For roundrects where the stroke width
+// is greater than the corner radius, the outer triangles blend from the curve to the straight
+// sides. Otherwise these triangles will be degenerate.
+//
+// In the case where the stroke width is greater than the corner radius and the
+// blur radius (overstroke), we add additional geometry to mark out the rectangle in the center.
+// This rectangle extends the coverage values of the center edges of the 9-patch.
+// ____________
+// |_|________|_|
+// | |\ ____ /| |
+// | | | | | |
+// | | |____| | |
+// |_|/______\|_|
+// |_|________|_|
+//
+// For filled rrects we reuse the stroke geometry but add an additional quad to the center.
+
+static const uint16_t gRRectIndices[] = {
+ // clang-format off
+ // overstroke quads
+ // we place this at the beginning so that we can skip these indices when rendering as filled
+ 0, 6, 25, 0, 25, 24,
+ 6, 18, 27, 6, 27, 25,
+ 18, 12, 26, 18, 26, 27,
+ 12, 0, 24, 12, 24, 26,
+
+ // corners
+ 0, 1, 2, 0, 2, 3, 0, 3, 4, 0, 4, 5,
+ 6, 11, 10, 6, 10, 9, 6, 9, 8, 6, 8, 7,
+ 12, 17, 16, 12, 16, 15, 12, 15, 14, 12, 14, 13,
+ 18, 19, 20, 18, 20, 21, 18, 21, 22, 18, 22, 23,
+
+ // edges
+ 0, 5, 11, 0, 11, 6,
+ 6, 7, 19, 6, 19, 18,
+ 18, 23, 17, 18, 17, 12,
+ 12, 13, 1, 12, 1, 0,
+
+ // fill quad
+ // we place this at the end so that we can skip these indices when rendering as stroked
+ 0, 6, 18, 0, 18, 12,
+ // clang-format on
+};
+
+// overstroke count
+static const int kIndicesPerOverstrokeRRect = SK_ARRAY_COUNT(gRRectIndices) - 6;
+// simple stroke count skips overstroke indices
+static const int kIndicesPerStrokeRRect = kIndicesPerOverstrokeRRect - 6 * 4;
+// fill count adds final quad to stroke count
+static const int kIndicesPerFillRRect = kIndicesPerStrokeRRect + 6;
+static const int kVertsPerStrokeRRect = 24;
+static const int kVertsPerOverstrokeRRect = 28;
+static const int kVertsPerFillRRect = 24;
+
+// Vertex counts for the rrect 9-patch variants; logs and returns -1 for an
+// out-of-range enum value.
+static int rrect_type_to_vert_count(RRectType type) {
+    if (type == kFill_RRectType) {
+        return kVertsPerFillRRect;
+    }
+    if (type == kStroke_RRectType) {
+        return kVertsPerStrokeRRect;
+    }
+    if (type == kOverstroke_RRectType) {
+        return kVertsPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+// Index counts for the rrect 9-patch variants; logs and returns -1 for an
+// out-of-range enum value.
+static int rrect_type_to_index_count(RRectType type) {
+    if (type == kFill_RRectType) {
+        return kIndicesPerFillRRect;
+    }
+    if (type == kStroke_RRectType) {
+        return kIndicesPerStrokeRRect;
+    }
+    if (type == kOverstroke_RRectType) {
+        return kIndicesPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+// Start of the index table for each rrect variant; logs and returns nullptr
+// for an out-of-range enum value.
+static const uint16_t* rrect_type_to_indices(RRectType type) {
+    if (type == kOverstroke_RRectType) {
+        return gRRectIndices;
+    }
+    if (type == kFill_RRectType || type == kStroke_RRectType) {
+        // Fill/stroke share the table but skip the 4 leading overstroke quads
+        // (6 indices each).
+        return gRRectIndices + 6 * 4;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return nullptr;
+}
+
+// Writes the vertex attributes for a circular shadow. The circle is
+// approximated by an outer octagon; a stroked circle adds a nested inner
+// octagon, while a filled circle adds a single center vertex. Each vertex
+// gets a position, the shadow color, and params = (octagon coords in
+// normalized space, distance correction) consumed by the shadow shader.
+// NOTE(review): divides by args.fBlurRadius -- assumes it is non-zero.
+static void fillInCircleVerts(const Geometry& args, bool isStroked,
+                              Mesh::VertexArray<vec2>& position,
+                              Mesh::VertexArray<vec4>& shadowColor,
+                              Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+    float innerRadius = args.fInnerRadius;
+    float blurRadius = args.fBlurRadius;
+    float distanceCorrection = outerRadius / blurRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    // The inner radius in the vertex data must be specified in normalized space.
+    innerRadius = innerRadius / outerRadius;
+
+    vec2 center = vec2(bounds.getWidth() / 2.0f, bounds.getHeight() / 2.0f);
+    float halfWidth = 0.5f * bounds.getWidth();
+    float octOffset = 0.41421356237f; // sqrt(2) - 1
+    int vertexCount = 0;
+
+    // Outer octagon: 8 vertices on the circle's bounding square.
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    if (isStroked) {
+        // compute the inner ring
+
+        // cosine and sine of pi/8
+        float c = 0.923579533f;
+        float s = 0.382683432f;
+        // Positions use the unnormalized inner radius; the params use the
+        // normalized innerRadius computed above.
+        float r = args.fInnerRadius;
+
+        position[vertexCount] = center + vec2(-s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+    } else {
+        // filled
+        position[vertexCount] = center;
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+// Writes the vertex attributes for a rounded-rect shadow laid out as a
+// 9-patch with fanned corners (see the diagrams above gRRectIndices). For
+// kOverstroke_RRectType an extra inner quad of 4 vertices is appended.
+// NOTE(review): divides by args.fBlurRadius -- assumes it is non-zero.
+static void fillInRRectVerts(const Geometry& args, Mesh::VertexArray<vec2>& position,
+                             Mesh::VertexArray<vec4>& shadowColor,
+                             Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    // The umbra inset cannot exceed half the rect's smaller dimension.
+    float umbraInset = args.fUmbraInset;
+    float minDim = 0.5f * std::min(bounds.getWidth(), bounds.getHeight());
+    if (umbraInset > minDim) {
+        umbraInset = minDim;
+    }
+
+    // Per-corner coordinate tables, ordered TL, TR, BL, BR.
+    float xInner[4] = {bounds.left + umbraInset, bounds.right - umbraInset,
+                       bounds.left + umbraInset, bounds.right - umbraInset};
+    float xMid[4] = {bounds.left + outerRadius, bounds.right - outerRadius,
+                     bounds.left + outerRadius, bounds.right - outerRadius};
+    float xOuter[4] = {bounds.left, bounds.right, bounds.left, bounds.right};
+    float yInner[4] = {bounds.top + umbraInset, bounds.top + umbraInset, bounds.bottom - umbraInset,
+                       bounds.bottom - umbraInset};
+    float yMid[4] = {bounds.top + outerRadius, bounds.top + outerRadius,
+                     bounds.bottom - outerRadius, bounds.bottom - outerRadius};
+    float yOuter[4] = {bounds.top, bounds.top, bounds.bottom, bounds.bottom};
+
+    float blurRadius = args.fBlurRadius;
+
+    // In the case where we have to inset more for the umbra, our two triangles in the
+    // corner get skewed to a diamond rather than a square. To correct for that,
+    // we also skew the vectors we send to the shader that help define the circle.
+    // By doing so, we end up with a quarter circle in the corner rather than the
+    // elliptical curve.
+
+    // This is a bit magical, but it gives us the correct results at extrema:
+    // a) umbraInset == outerRadius produces an orthogonal vector
+    // b) outerRadius == 0 produces a diagonal vector
+    // And visually the corner looks correct.
+    vec2 outerVec = vec2(outerRadius - umbraInset, -outerRadius - umbraInset);
+    outerVec = normalize(outerVec);
+    // We want the circle edge to fall fractionally along the diagonal at
+    // (sqrt(2)*(umbraInset - outerRadius) + outerRadius)/sqrt(2)*umbraInset
+    //
+    // Setting the components of the diagonal offset to the following value will give us that.
+    float diagVal = umbraInset / (SK_ScalarSqrt2 * (outerRadius - umbraInset) - outerRadius);
+    vec2 diagVec = vec2(diagVal, diagVal);
+    float distanceCorrection = umbraInset / blurRadius;
+
+    int vertexCount = 0;
+    // build corner by corner
+    for (int i = 0; i < 4; ++i) {
+        // inner point
+        position[vertexCount] = vec2(xInner[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // outer points
+        position[vertexCount] = vec2(xOuter[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yMid[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(diagVec.x, diagVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xMid[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xInner[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+    }
+
+    // Add the additional vertices for overstroked rrects.
+    // Effectively this is an additional stroked rrect, with its
+    // parameters equal to those in the center of the 9-patch. This will
+    // give constant values across this inner ring.
+    if (kOverstroke_RRectType == args.fType) {
+        float inset = umbraInset + args.fInnerRadius;
+
+        // TL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // TR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+// Number of vertices required to render |shadowGeometry|.
+int getVertexCountForGeometry(const Geometry& shadowGeometry) {
+    if (shadowGeometry.fIsCircle) {
+        // Match getIndexCountForGeometry(): pass an explicit "stroked" flag
+        // instead of implicitly converting the RRectType enum to bool.
+        // Circle geometries are only ever kFill or kStroke (see
+        // getShadowGeometry), so behavior is unchanged.
+        return circle_type_to_vert_count(kStroke_RRectType == shadowGeometry.fType);
+    }
+
+    return rrect_type_to_vert_count(shadowGeometry.fType);
+}
+
+// Number of indices required to render |shadowGeometry|.
+int getIndexCountForGeometry(const Geometry& shadowGeometry) {
+    return shadowGeometry.fIsCircle
+            ? circle_type_to_index_count(kStroke_RRectType == shadowGeometry.fType)
+            : rrect_type_to_index_count(shadowGeometry.fType);
+}
+
+// Dispatches vertex generation to the circle or rounded-rect filler. The
+// vertexCount parameter is unused; the fillers derive counts from the
+// geometry itself.
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int /* vertexCount */,
+                             Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+                             Mesh::VertexArray<vec3> shadowParams) {
+    if (!shadowGeometry.fIsCircle) {
+        fillInRRectVerts(shadowGeometry, position, shadowColor, shadowParams);
+        return;
+    }
+    fillInCircleVerts(shadowGeometry, shadowGeometry.fIsStroked, position, shadowColor,
+                      shadowParams);
+}
+
+// Copies the appropriate static index table into |indices|, rebasing every
+// entry by |startingVertexOffset| so multiple geometries can share one
+// vertex buffer.
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+                            int startingVertexOffset, uint16_t* indices) {
+    const uint16_t* primIndices = shadowGeometry.fIsCircle
+            ? circle_type_to_indices(shadowGeometry.fIsStroked)
+            : rrect_type_to_indices(shadowGeometry.fType);
+    for (int i = 0; i < indexCount; ++i) {
+        indices[i] = primIndices[i] + startingVertexOffset;
+    }
+}
+
+// Derives spot-shadow parameters by projecting the occluder away from the
+// light: zRatio grows with occluder height relative to the light and drives
+// both the blur radius and the XY translation of the shadow.
+// NOTE(review): the 0.95 / 1.95 pins are magic values from the ported Skia
+// fast path -- confirm against skia's drawFastShadow.
+inline void GetSpotParams(float occluderZ, float lightX, float lightY, float lightZ,
+                          float lightRadius, float& blurRadius, float& scale, vec2& translate) {
+    float zRatio = divide_and_pin(occluderZ, lightZ - occluderZ, 0.0f, 0.95f);
+    blurRadius = lightRadius * zRatio;
+    scale = divide_and_pin(lightZ, lightZ - occluderZ, 1.0f, 1.95f);
+    // The shadow shifts opposite the light's XY offset, growing with height.
+    translate.x = -zRatio * lightX;
+    translate.y = -zRatio * lightY;
+}
+
+// Classifies the shadow shape and packages it as a Geometry: a "circle" when
+// the corner radius covers the whole rect, otherwise a rounded rect that is
+// filled, stroked, or overstroked depending on how insetWidth compares to the
+// umbra inset.
+static std::unique_ptr<Geometry> getShadowGeometry(const vec4& color, const FloatRect& devRect,
+                                                   float devRadius, float blurRadius,
+                                                   float insetWidth) {
+    // An insetWidth > 1/2 rect width or height indicates a simple fill.
+    const bool isCircle = ((devRadius >= devRect.getWidth()) && (devRadius >= devRect.getHeight()));
+
+    FloatRect bounds = devRect;
+    float innerRadius = 0.0f;
+    float outerRadius = devRadius;
+    float umbraInset;
+
+    RRectType type = kFill_RRectType;
+    if (isCircle) {
+        umbraInset = 0;
+    } else {
+        // The umbra is inset by at least the corner radius or the blur radius.
+        umbraInset = std::max(outerRadius, blurRadius);
+    }
+
+    // If stroke is greater than width or height, this is still a fill,
+    // otherwise we compute stroke params.
+    if (isCircle) {
+        innerRadius = devRadius - insetWidth;
+        type = innerRadius > 0 ? kStroke_RRectType : kFill_RRectType;
+    } else {
+        if (insetWidth <= 0.5f * std::min(devRect.getWidth(), devRect.getHeight())) {
+            // We don't worry about a real inner radius, we just need to know if we
+            // need to create overstroke vertices.
+            innerRadius = std::max(insetWidth - umbraInset, 0.0f);
+            type = innerRadius > 0 ? kOverstroke_RRectType : kStroke_RRectType;
+        }
+    }
+    const bool isStroked = (kStroke_RRectType == type);
+    return std::make_unique<Geometry>(Geometry{color, outerRadius, umbraInset, innerRadius,
+                                               blurRadius, bounds, type, isCircle, isStroked});
+}
+
+// Builds the geometry for the ambient (overhead-light) shadow of a caster at
+// height |casterZ|. For translucent casters the inset is inflated so the
+// geometry degenerates into a full fill (no hole under the caster).
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+                                                   float casterCornerRadius, float casterZ,
+                                                   bool casterIsTranslucent,
+                                                   const vec4& ambientColor) {
+    float devSpaceInsetWidth = AmbientBlurRadius(casterZ);
+    const float umbraRecipAlpha = AmbientRecipAlpha(casterZ);
+    const float devSpaceAmbientBlur = devSpaceInsetWidth * umbraRecipAlpha;
+
+    // Outset the shadow rrect to the border of the penumbra
+    float ambientPathOutset = devSpaceInsetWidth;
+    FloatRect outsetRect(casterRect);
+    outsetRect.left -= ambientPathOutset;
+    outsetRect.top -= ambientPathOutset;
+    outsetRect.right += ambientPathOutset;
+    outsetRect.bottom += ambientPathOutset;
+
+    float outsetRad = casterCornerRadius + ambientPathOutset;
+    if (casterIsTranslucent) {
+        // set a large inset to force a fill
+        devSpaceInsetWidth = outsetRect.getWidth();
+    }
+
+    return getShadowGeometry(ambientColor, outsetRect, std::abs(outsetRad), devSpaceAmbientBlur,
+                             std::abs(devSpaceInsetWidth));
+}
+
+// Builds the geometry for the spot (point-light) shadow of a caster at height
+// |casterZ|, lit from |lightPosition| by a light of radius |lightRadius|.
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+                                                float casterCornerRadius, float casterZ,
+                                                bool casterIsTranslucent, const vec4& spotColor,
+                                                const vec3& lightPosition, float lightRadius) {
+    float devSpaceSpotBlur;
+    float spotScale;
+    vec2 spotOffset;
+    GetSpotParams(casterZ, lightPosition.x, lightPosition.y, lightPosition.z, lightRadius,
+                  devSpaceSpotBlur, spotScale, spotOffset);
+    // handle scale of radius due to CTM
+    const float srcSpaceSpotBlur = devSpaceSpotBlur;
+
+    // Adjust translate for the effect of the scale.
+    // NOTE(review): adding the (unitless) scale factor to a translation looks
+    // suspicious; confirm against the original Skia drawFastShadow port.
+    spotOffset.x += spotScale;
+    spotOffset.y += spotScale;
+
+    // Compute the transformed shadow rect
+    ui::Transform shadowTransform;
+    shadowTransform.set(spotOffset.x, spotOffset.y);
+    shadowTransform.set(spotScale, 0, 0, spotScale);
+    FloatRect spotShadowRect = shadowTransform.transform(casterRect);
+    float spotShadowRadius = casterCornerRadius * spotScale;
+
+    // Compute the insetWidth
+    float blurOutset = srcSpaceSpotBlur;
+    float insetWidth = blurOutset;
+    if (casterIsTranslucent) {
+        // If transparent, just do a fill
+        insetWidth += spotShadowRect.getWidth();
+    } else {
+        // For shadows, instead of using a stroke we specify an inset from the penumbra
+        // border. We want to extend this inset area so that it meets up with the caster
+        // geometry. The inset geometry will by default already be inset by the blur width.
+        //
+        // We compare the min and max corners inset by the radius between the original
+        // rrect and the shadow rrect. The distance between the two plus the difference
+        // between the scaled radius and the original radius gives the distance from the
+        // transformed shadow shape to the original shape in that corner. The max
+        // of these gives the maximum distance we need to cover.
+        //
+        // Since we are outsetting by 1/2 the blur distance, we just add the maxOffset to
+        // that to get the full insetWidth.
+        float maxOffset;
+        if (casterCornerRadius <= 0.f) {
+            // Manhattan distance works better for rects
+            maxOffset = std::max(std::max(std::abs(spotShadowRect.left - casterRect.left),
+                                          std::abs(spotShadowRect.top - casterRect.top)),
+                                 std::max(std::abs(spotShadowRect.right - casterRect.right),
+                                          std::abs(spotShadowRect.bottom - casterRect.bottom)));
+        } else {
+            float dr = spotShadowRadius - casterCornerRadius;
+            vec2 upperLeftOffset = vec2(spotShadowRect.left - casterRect.left + dr,
+                                        spotShadowRect.top - casterRect.top + dr);
+            vec2 lowerRightOffset = vec2(spotShadowRect.right - casterRect.right - dr,
+                                         spotShadowRect.bottom - casterRect.bottom - dr);
+            // FIX: take the larger of the two corner offsets' *squared lengths*.
+            // The first term previously mixed the two vectors
+            // (dot(upperLeftOffset, lowerRightOffset)), underestimating the inset.
+            maxOffset = sqrt(std::max(dot(upperLeftOffset, upperLeftOffset),
+                                      dot(lowerRightOffset, lowerRightOffset))) +
+                    dr;
+        }
+        insetWidth += std::max(blurOutset, maxOffset);
+    }
+
+    // Outset the shadow rrect to the border of the penumbra
+    spotShadowRadius += blurOutset;
+    spotShadowRect.left -= blurOutset;
+    spotShadowRect.top -= blurOutset;
+    spotShadowRect.right += blurOutset;
+    spotShadowRect.bottom += blurOutset;
+
+    return getShadowGeometry(spotColor, spotShadowRect, std::abs(spotShadowRadius),
+                             2.0f * devSpaceSpotBlur, std::abs(insetWidth));
+}
+
+// Fills |data| (length |shadowTextureWidth|) with the shadow falloff table:
+// index 0 corresponds to d == 1 and the last texel to d == 0, pushed through
+// the exponential falloff function provided by UX.
+void fillShadowTextureData(uint8_t* data, size_t shadowTextureWidth) {
+    if (shadowTextureWidth == 0) {
+        return;
+    }
+    // Use size_t for the index (avoids a signed/unsigned comparison) and
+    // clamp the divisor so a single-texel table does not divide by zero.
+    const float lastIndex = std::max(static_cast<float>(shadowTextureWidth) - 1.0f, 1.0f);
+    for (size_t i = 0; i < shadowTextureWidth; i++) {
+        const float d = 1.0f - static_cast<float>(i) / lastIndex;
+        data[i] = static_cast<uint8_t>((exp(-4.0f * d * d) - 0.018f) * 255);
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
new file mode 100644
index 0000000..912c8bb
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <math/vec4.h>
+#include <renderengine/Mesh.h>
+#include <ui/Rect.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code has been ported from skia shadow
+ * fast path OpenGL implementation to draw shadows around rects and rounded rects including
+ * circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - Fall off function is implemented in vertex shader rather than a shadow texture
+ * - Removed transformations applied on the caster rect since the caster will be in local
+ * coordinate space and will be transformed by the vertex shader.
+ */
+
+// Tessellation variants for the shadow round-rect geometry; selected by
+// getShadowGeometry() from the inset width (fill, stroked ring, or stroke
+// with extra overstroke vertices).
+enum RRectType {
+    kFill_RRectType,
+    kStroke_RRectType,
+    kOverstroke_RRectType,
+};
+
+// Plain data describing one tessellated shadow (ambient or spot) pass.
+struct Geometry {
+    vec4 fColor;          // shadow color
+    float fOuterRadius;   // outer corner radius of the shadow rrect
+    float fUmbraInset;    // inset from the outer edge toward the umbra
+    float fInnerRadius;   // inner radius for stroked/overstroked geometry
+    float fBlurRadius;    // penumbra (blur) width
+    FloatRect fDevBounds; // bounds of the shadow rrect
+    RRectType fType;      // tessellation variant (fill/stroke/overstroke)
+    bool fIsCircle;       // true when the rrect degenerates to a circle
+    bool fIsStroked;      // true exactly when fType is kStroke_RRectType
+};
+
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+ float casterCornerRadius, float casterZ,
+ bool casterIsTranslucent, const vec4& spotColor,
+ const vec3& lightPosition, float lightRadius);
+
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+ float casterCornerRadius, float casterZ,
+ bool casterIsTranslucent,
+ const vec4& ambientColor);
+
+int getVertexCountForGeometry(const Geometry& shadowGeometry);
+
+int getIndexCountForGeometry(const Geometry& shadowGeometry);
+
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int vertexCount,
+ Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+ Mesh::VertexArray<vec3> shadowParams);
+
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+ int startingVertexOffset, uint16_t* indices);
+
+/**
+ * Maps shadow geometry 'alpha' varying (1 for darkest, 0 for transparent) to
+ * darkness at that spot. Values are determined by an exponential falloff
+ * function provided by UX.
+ *
+ * The texture is used for quick lookup in the shadow shader.
+ *
+ * textureData - filled with shadow texture data that needs to be at least of
+ * size textureWidth
+ *
+ * textureWidth - width of the texture, height is always 1
+ */
+void fillShadowTextureData(uint8_t* textureData, size_t textureWidth);
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
new file mode 100644
index 0000000..e50c471
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLVertexBuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES2/gl2.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Generates the GL buffer object backing this vertex buffer.
+GLVertexBuffer::GLVertexBuffer() {
+    glGenBuffers(1, &mBufferName);
+}
+
+// Releases the GL buffer object owned by this instance.
+GLVertexBuffer::~GLVertexBuffer() {
+    glDeleteBuffers(1, &mBufferName);
+}
+
+// Uploads |size| floats from |data| into the buffer with GL_STATIC_DRAW usage,
+// leaving GL_ARRAY_BUFFER unbound afterwards.
+void GLVertexBuffer::allocateBuffers(const GLfloat data[], const GLuint size) {
+    ATRACE_CALL();
+    bind();
+    glBufferData(GL_ARRAY_BUFFER, size * sizeof(GLfloat), data, GL_STATIC_DRAW);
+    unbind();
+}
+
+// Binds this buffer to the GL_ARRAY_BUFFER target.
+void GLVertexBuffer::bind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, mBufferName);
+}
+
+// Unbinds whatever buffer is bound to GL_ARRAY_BUFFER.
+void GLVertexBuffer::unbind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, 0);
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.h b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
new file mode 100644
index 0000000..c0fd0c1
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+// RAII wrapper around a GL array-buffer object: generates the buffer name on
+// construction, deletes it on destruction, and exposes bind/upload helpers.
+class GLVertexBuffer {
+public:
+    explicit GLVertexBuffer();
+    ~GLVertexBuffer();
+
+    // Uploads |size| floats from |data| via glBufferData (GL_STATIC_DRAW).
+    void allocateBuffers(const GLfloat data[], const GLuint size);
+    uint32_t getBufferName() const { return mBufferName; }
+    void bind() const;
+    void unbind() const;
+
+private:
+    uint32_t mBufferName; // GL buffer object name, owned by this instance
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.cpp b/media/libstagefright/renderfright/gl/ImageManager.cpp
new file mode 100644
index 0000000..6256649
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <pthread.h>
+
+#include <processgroup/sched_policy.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "ImageManager.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// |engine| must outlive this ImageManager; the worker thread calls back into it.
+ImageManager::ImageManager(GLESRenderEngine* engine) : mEngine(engine) {}
+
+// Spawns and names the worker thread, then attempts to promote it to
+// SCHED_FIFO so cache/release requests are serviced with minimal jitter.
+// Failure to change the scheduling policy is logged but non-fatal.
+void ImageManager::initThread() {
+    mThread = std::thread([this]() { threadMain(); });
+    pthread_setname_np(mThread.native_handle(), "ImageManager");
+    // Use SCHED_FIFO to minimize jitter
+    struct sched_param param = {0};
+    param.sched_priority = 2;
+    // FIX: the address-of operator was corrupted to the mojibake "¶m";
+    // pthread_setschedparam takes a pointer, so pass &param.
+    if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
+        ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+    }
+}
+
+// Signals the worker thread to exit (clears mRunning under the lock) and
+// joins it before the members it uses are destroyed.
+ImageManager::~ImageManager() {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mRunning = false;
+    }
+    mCondition.notify_all();
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+// Queues caching of |buffer| on the worker thread; |barrier| is opened when
+// the work completes. A null buffer short-circuits: the barrier is opened
+// immediately with BAD_VALUE and nothing is queued.
+void ImageManager::cacheAsync(const sp<GraphicBuffer>& buffer,
+                              const std::shared_ptr<Barrier>& barrier) {
+    if (buffer == nullptr) {
+        {
+            std::lock_guard<std::mutex> lock(barrier->mutex);
+            barrier->isOpen = true;
+            barrier->result = BAD_VALUE;
+        }
+        barrier->condition.notify_one();
+        return;
+    }
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Insert, buffer, buffer->getId(), barrier};
+    queueOperation(std::move(entry));
+}
+
+// Synchronous cache: queues the request and blocks until the worker thread
+// publishes a result through the barrier.
+status_t ImageManager::cache(const sp<GraphicBuffer>& buffer) {
+    ATRACE_CALL();
+    auto barrier = std::make_shared<Barrier>();
+    cacheAsync(buffer, barrier);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    // condition_variable_any::wait unlocks/relocks the mutex internally, so
+    // the lock_guard above remains balanced when this scope exits.
+    barrier->condition.wait(barrier->mutex,
+                            [&]() REQUIRES(barrier->mutex) { return barrier->isOpen; });
+    return barrier->result;
+}
+
+// Queues release of the cached resources keyed by |bufferId|; |barrier| is
+// opened once the worker thread has processed the request.
+void ImageManager::releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) {
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Delete, nullptr, bufferId, barrier};
+    queueOperation(std::move(entry));
+}
+
+// Appends |entry| to the work queue and wakes the worker thread.
+// NOTE(review): a const rvalue reference cannot be moved from, so emplace
+// copies |entry| here despite the && signature.
+void ImageManager::queueOperation(const QueueEntry&& entry) {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mQueue.emplace(entry);
+        ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+    }
+    mCondition.notify_one();
+}
+
+// Worker loop: drains mQueue, dispatching Insert (cache) and Delete (release)
+// operations to mEngine, and opens each entry's barrier with the result.
+// Exits when the destructor clears mRunning.
+void ImageManager::threadMain() {
+    set_sched_policy(0, SP_FOREGROUND);
+    bool run;
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        run = mRunning;
+    }
+    while (run) {
+        QueueEntry entry;
+        {
+            std::lock_guard<std::mutex> lock(mMutex);
+            // Sleep until there is work to do or we are asked to exit.
+            mCondition.wait(mMutex,
+                            [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+            run = mRunning;
+
+            if (!mRunning) {
+                // if mRunning is false, then ImageManager is being destroyed, so
+                // bail out now.
+                break;
+            }
+
+            entry = mQueue.front();
+            mQueue.pop();
+            ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+        }
+
+        // Engine work happens outside the queue lock so producers are not blocked.
+        status_t result = NO_ERROR;
+        switch (entry.op) {
+            case QueueEntry::Operation::Delete:
+                mEngine->unbindExternalTextureBufferInternal(entry.bufferId);
+                break;
+            case QueueEntry::Operation::Insert:
+                result = mEngine->cacheExternalTextureBufferInternal(entry.buffer);
+                break;
+        }
+        if (entry.barrier != nullptr) {
+            {
+                // Publish the result and open the barrier before notifying.
+                std::lock_guard<std::mutex> entryLock(entry.barrier->mutex);
+                entry.barrier->result = result;
+                entry.barrier->isOpen = true;
+            }
+            entry.barrier->condition.notify_one();
+        }
+    }
+
+    ALOGD("Reached end of threadMain, terminating ImageManager thread!");
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.h b/media/libstagefright/renderfright/gl/ImageManager.h
new file mode 100644
index 0000000..be67de8
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+// Serializes texture cache/release work for GLESRenderEngine onto a dedicated
+// worker thread; callers synchronize through Barrier objects.
+class ImageManager {
+public:
+    // Synchronization token shared between a caller and the worker thread:
+    // the worker sets |result|, opens the barrier, then notifies.
+    struct Barrier {
+        std::mutex mutex;
+        std::condition_variable_any condition;
+        bool isOpen GUARDED_BY(mutex) = false;
+        status_t result GUARDED_BY(mutex) = NO_ERROR;
+    };
+    ImageManager(GLESRenderEngine* engine);
+    ~ImageManager();
+    // Starts the background thread for the ImageManager
+    // We need this to guarantee that the class is fully-constructed before the
+    // thread begins running.
+    void initThread();
+    // Queues caching of |buffer|; |barrier| is opened when the work is done.
+    void cacheAsync(const sp<GraphicBuffer>& buffer, const std::shared_ptr<Barrier>& barrier)
+            EXCLUDES(mMutex);
+    // Blocking variant of cacheAsync().
+    status_t cache(const sp<GraphicBuffer>& buffer);
+    // Queues release of the cached entry for |bufferId|.
+    void releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) EXCLUDES(mMutex);
+
+private:
+    // One queued cache (Insert) or release (Delete) request.
+    struct QueueEntry {
+        enum class Operation { Delete, Insert };
+
+        Operation op = Operation::Delete;
+        sp<GraphicBuffer> buffer = nullptr;
+        uint64_t bufferId = 0;
+        std::shared_ptr<Barrier> barrier = nullptr;
+    };
+
+    void queueOperation(const QueueEntry&& entry);
+    void threadMain();
+    GLESRenderEngine* const mEngine;
+    std::thread mThread;
+    std::condition_variable_any mCondition;
+    std::mutex mMutex;
+    std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+
+    // Cleared by the destructor to tell the worker thread to exit.
+    bool mRunning GUARDED_BY(mMutex) = true;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.cpp b/media/libstagefright/renderfright/gl/Program.cpp
new file mode 100644
index 0000000..f4fbf35
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Program.h"
+
+#include <stdint.h>
+
+#include <log/log.h>
+#include <math/mat4.h>
+#include <utils/String8.h>
+#include "ProgramCache.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Compiles |vertex| and |fragment|, links them into a GL program, binds the
+// fixed attribute locations, and on success caches the uniform locations and
+// uploads default uniform values. On failure all GL objects are cleaned up
+// and isValid() will return false.
+// NOTE(review): on link failure mProgram/mVertexShader/mFragmentShader are
+// left uninitialized; callers must check isValid() before calling use().
+Program::Program(const ProgramCache::Key& /*needs*/, const char* vertex, const char* fragment)
+      : mInitialized(false) {
+    GLuint vertexId = buildShader(vertex, GL_VERTEX_SHADER);
+    GLuint fragmentId = buildShader(fragment, GL_FRAGMENT_SHADER);
+    GLuint programId = glCreateProgram();
+    glAttachShader(programId, vertexId);
+    glAttachShader(programId, fragmentId);
+    // Attribute slots must match the enum declared in Program.h.
+    glBindAttribLocation(programId, position, "position");
+    glBindAttribLocation(programId, texCoords, "texCoords");
+    glBindAttribLocation(programId, cropCoords, "cropCoords");
+    glBindAttribLocation(programId, shadowColor, "shadowColor");
+    glBindAttribLocation(programId, shadowParams, "shadowParams");
+    glLinkProgram(programId);
+
+    GLint status;
+    glGetProgramiv(programId, GL_LINK_STATUS, &status);
+    if (status != GL_TRUE) {
+        ALOGE("Error while linking shaders:");
+        GLint infoLen = 0;
+        glGetProgramiv(programId, GL_INFO_LOG_LENGTH, &infoLen);
+        if (infoLen > 1) {
+            // Variable-length array: relies on a GCC/Clang extension.
+            GLchar log[infoLen];
+            glGetProgramInfoLog(programId, infoLen, 0, &log[0]);
+            ALOGE("%s", log);
+        }
+        glDetachShader(programId, vertexId);
+        glDetachShader(programId, fragmentId);
+        glDeleteShader(vertexId);
+        glDeleteShader(fragmentId);
+        glDeleteProgram(programId);
+    } else {
+        mProgram = programId;
+        mVertexShader = vertexId;
+        mFragmentShader = fragmentId;
+        mInitialized = true;
+        mProjectionMatrixLoc = glGetUniformLocation(programId, "projection");
+        mTextureMatrixLoc = glGetUniformLocation(programId, "texture");
+        mSamplerLoc = glGetUniformLocation(programId, "sampler");
+        mColorLoc = glGetUniformLocation(programId, "color");
+        mDisplayMaxLuminanceLoc = glGetUniformLocation(programId, "displayMaxLuminance");
+        mMaxMasteringLuminanceLoc = glGetUniformLocation(programId, "maxMasteringLuminance");
+        mMaxContentLuminanceLoc = glGetUniformLocation(programId, "maxContentLuminance");
+        mInputTransformMatrixLoc = glGetUniformLocation(programId, "inputTransformMatrix");
+        mOutputTransformMatrixLoc = glGetUniformLocation(programId, "outputTransformMatrix");
+        mCornerRadiusLoc = glGetUniformLocation(programId, "cornerRadius");
+        mCropCenterLoc = glGetUniformLocation(programId, "cropCenter");
+
+        // set-up the default values for our uniforms
+        glUseProgram(programId);
+        glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, mat4().asArray());
+        glEnableVertexAttribArray(0);
+    }
+}
+
+/* Returns whether compilation and linking succeeded in the constructor. */
+bool Program::isValid() const {
+    return mInitialized;
+}
+
+/* Makes this program current on the GLES context; only valid after isValid(). */
+void Program::use() {
+    glUseProgram(mProgram);
+}
+
+/* Looks up the location of vertex attribute |name| in this program. */
+GLuint Program::getAttrib(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetAttribLocation(mProgram, name);
+}
+
+/* Looks up the location of uniform |name| in this program (-1 if absent). */
+GLint Program::getUniform(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetUniformLocation(mProgram, name);
+}
+
+// Compiles |source| as a shader of the given |type| and returns its GL name,
+// or 0 on compile failure (the error log and source are logged via ALOGE).
+GLuint Program::buildShader(const char* source, GLenum type) {
+    GLuint shader = glCreateShader(type);
+    glShaderSource(shader, 1, &source, 0);
+    glCompileShader(shader);
+    GLint status;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
+    if (status != GL_TRUE) {
+        // Some drivers return wrong values for GL_INFO_LOG_LENGTH
+        // use a fixed size instead
+        GLchar log[512];
+        glGetShaderInfoLog(shader, sizeof(log), 0, log);
+        ALOGE("Error while compiling shader: \n%s\n%s", source, log);
+        glDeleteShader(shader);
+        return 0;
+    }
+    return shader;
+}
+
+// Pushes the uniform values from |desc| into this program. Each optional
+// uniform is skipped when its location was not found (glGetUniformLocation
+// returned -1); the projection matrix is always uploaded.
+void Program::setUniforms(const Description& desc) {
+    // TODO: we should have a mechanism here to not always reset uniforms that
+    // didn't change for this program.
+
+    if (mSamplerLoc >= 0) {
+        glUniform1i(mSamplerLoc, 0);
+        glUniformMatrix4fv(mTextureMatrixLoc, 1, GL_FALSE, desc.texture.getMatrix().asArray());
+    }
+    if (mColorLoc >= 0) {
+        const float color[4] = {desc.color.r, desc.color.g, desc.color.b, desc.color.a};
+        glUniform4fv(mColorLoc, 1, color);
+    }
+    if (mInputTransformMatrixLoc >= 0) {
+        mat4 inputTransformMatrix = desc.inputTransformMatrix;
+        glUniformMatrix4fv(mInputTransformMatrixLoc, 1, GL_FALSE, inputTransformMatrix.asArray());
+    }
+    if (mOutputTransformMatrixLoc >= 0) {
+        // The output transform matrix and color matrix can be combined as one matrix
+        // that is applied right before applying OETF.
+        mat4 outputTransformMatrix = desc.colorMatrix * desc.outputTransformMatrix;
+        glUniformMatrix4fv(mOutputTransformMatrixLoc, 1, GL_FALSE, outputTransformMatrix.asArray());
+    }
+    if (mDisplayMaxLuminanceLoc >= 0) {
+        glUniform1f(mDisplayMaxLuminanceLoc, desc.displayMaxLuminance);
+    }
+    if (mMaxMasteringLuminanceLoc >= 0) {
+        glUniform1f(mMaxMasteringLuminanceLoc, desc.maxMasteringLuminance);
+    }
+    if (mMaxContentLuminanceLoc >= 0) {
+        glUniform1f(mMaxContentLuminanceLoc, desc.maxContentLuminance);
+    }
+    if (mCornerRadiusLoc >= 0) {
+        glUniform1f(mCornerRadiusLoc, desc.cornerRadius);
+    }
+    if (mCropCenterLoc >= 0) {
+        glUniform2f(mCropCenterLoc, desc.cropSize.x / 2.0f, desc.cropSize.y / 2.0f);
+    }
+    // these uniforms are always present
+    glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, desc.projectionMatrix.asArray());
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.h b/media/libstagefright/renderfright/gl/Program.h
new file mode 100644
index 0000000..fc3755e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAM_H
+#define SF_RENDER_ENGINE_PROGRAM_H
+
+#include <stdint.h>
+
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include "ProgramCache.h"
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+namespace gl {
+
+/*
+ * Abstracts a GLSL program comprising a vertex and fragment shader
+ */
+class Program {
+public:
+    // known locations for position and texture coordinates
+    enum {
+        /* position of each vertex for vertex shader */
+        position = 0,
+
+        /* UV coordinates for texture mapping */
+        texCoords = 1,
+
+        /* Crop coordinates, in pixels */
+        cropCoords = 2,
+
+        /* Shadow color */
+        shadowColor = 3,
+
+        /* Shadow params */
+        shadowParams = 4,
+    };
+
+    /* Compiles and links the shaders; check isValid() before using. */
+    Program(const ProgramCache::Key& needs, const char* vertex, const char* fragment);
+    ~Program() = default;
+
+    /* whether this object is usable */
+    bool isValid() const;
+
+    /* Binds this program to the GLES context */
+    void use();
+
+    /* Returns the location of the specified attribute */
+    GLuint getAttrib(const char* name) const;
+
+    /* Returns the location of the specified uniform */
+    GLint getUniform(const char* name) const;
+
+    /* set-up uniforms from the description */
+    void setUniforms(const Description& desc);
+
+private:
+    GLuint buildShader(const char* source, GLenum type);
+
+    // whether the initialization succeeded
+    bool mInitialized;
+
+    // Name of the OpenGL program and shaders
+    // (only assigned when mInitialized is true)
+    GLuint mProgram;
+    GLuint mVertexShader;
+    GLuint mFragmentShader;
+
+    /* location of the projection matrix uniform */
+    GLint mProjectionMatrixLoc;
+
+    /* location of the texture matrix uniform */
+    GLint mTextureMatrixLoc;
+
+    /* location of the sampler uniform */
+    GLint mSamplerLoc;
+
+    /* location of the color uniform */
+    GLint mColorLoc;
+
+    /* location of display luminance uniform */
+    GLint mDisplayMaxLuminanceLoc;
+    /* location of max mastering luminance uniform */
+    GLint mMaxMasteringLuminanceLoc;
+    /* location of max content luminance uniform */
+    GLint mMaxContentLuminanceLoc;
+
+    /* location of transform matrix */
+    GLint mInputTransformMatrixLoc;
+    GLint mOutputTransformMatrixLoc;
+
+    /* location of corner radius uniform */
+    GLint mCornerRadiusLoc;
+
+    /* location of surface crop origin uniform, for rounded corner clipping */
+    GLint mCropCenterLoc;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAM_H */
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
new file mode 100644
index 0000000..3ae35ec
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -0,0 +1,800 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "ProgramCache.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <log/log.h>
+#include <renderengine/private/Description.h>
+#include <utils/String8.h>
+#include <utils/Trace.h>
+#include "Program.h"
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::ProgramCache)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/*
+ * A simple formatter class to automatically add the endl and
+ * manage the indentation.
+ */
+
+class Formatter;
+static Formatter& indent(Formatter& f);
+static Formatter& dedent(Formatter& f);
+
+class Formatter {
+    String8 mString;  // accumulated shader source text
+    int mIndent;      // current indentation depth, in levels
+    typedef Formatter& (*FormaterManipFunc)(Formatter&);
+    friend Formatter& indent(Formatter& f);
+    friend Formatter& dedent(Formatter& f);
+
+public:
+    Formatter() : mIndent(0) {}
+
+    // Returns a copy of the accumulated text.
+    String8 getString() const { return mString; }
+
+    // Appends one line: current indentation, then the text, then a newline.
+    // NOTE(review): the per-level indent string below appears whitespace-collapsed
+    // in this patch; verify it matches the intended multi-space indent upstream.
+    friend Formatter& operator<<(Formatter& out, const char* in) {
+        for (int i = 0; i < out.mIndent; i++) {
+            out.mString.append(" ");
+        }
+        out.mString.append(in);
+        out.mString.append("\n");
+        return out;
+    }
+    friend inline Formatter& operator<<(Formatter& out, const String8& in) {
+        return operator<<(out, in.string());
+    }
+    // Applies a manipulator (indent/dedent) to the stream.
+    friend inline Formatter& operator<<(Formatter& to, FormaterManipFunc func) {
+        return (*func)(to);
+    }
+};
+// Stream manipulator: increases the Formatter's indentation level by one.
+Formatter& indent(Formatter& f) {
+    f.mIndent++;
+    return f;
+}
+// Stream manipulator: decreases the Formatter's indentation level by one.
+Formatter& dedent(Formatter& f) {
+    f.mIndent--;
+    return f;
+}
+
+/*
+ * Eagerly compiles the shader permutations most likely to be needed for the
+ * given EGL context, so later frames do not stall on shader compilation.
+ * Already-cached keys are skipped.
+ */
+void ProgramCache::primeCache(
+        EGLContext context, bool useColorManagement, bool toneMapperShaderOnly) {
+    auto& cache = mCaches[context];
+    uint32_t shaderCount = 0;
+
+    if (toneMapperShaderOnly) {
+        Key shaderKey;
+        // base settings used by HDR->SDR tonemap only
+        shaderKey.set(Key::BLEND_MASK | Key::INPUT_TRANSFORM_MATRIX_MASK |
+                              Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::OUTPUT_TF_MASK |
+                              Key::OPACITY_MASK | Key::ALPHA_MASK |
+                              Key::ROUNDED_CORNERS_MASK | Key::TEXTURE_MASK,
+                      Key::BLEND_NORMAL | Key::INPUT_TRANSFORM_MATRIX_ON |
+                              Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::OUTPUT_TF_SRGB |
+                              Key::OPACITY_OPAQUE | Key::ALPHA_EQ_ONE |
+                              Key::ROUNDED_CORNERS_OFF | Key::TEXTURE_EXT);
+        // Four variants: {HLG, ST2084} input transfer x {Y410 on, off}.
+        for (int i = 0; i < 4; i++) {
+            // Cache input transfer for HLG & ST2084
+            shaderKey.set(Key::INPUT_TF_MASK, (i & 1) ?
+                    Key::INPUT_TF_HLG : Key::INPUT_TF_ST2084);
+
+            // Cache Y410 input on or off
+            shaderKey.set(Key::Y410_BT2020_MASK, (i & 2) ?
+                    Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+        // NOTE(review): this early return skips the timing/shader-count log at the
+        // bottom of the function, so tone-mapper-only priming is never logged.
+        return;
+    }
+
+    uint32_t keyMask = Key::BLEND_MASK | Key::OPACITY_MASK | Key::ALPHA_MASK | Key::TEXTURE_MASK
+        | Key::ROUNDED_CORNERS_MASK;
+    // Prime the cache for all combinations of the above masks,
+    // leaving off the experimental color matrix mask options.
+
+    nsecs_t timeBefore = systemTime();
+    for (uint32_t keyVal = 0; keyVal <= keyMask; keyVal++) {
+        Key shaderKey;
+        shaderKey.set(keyMask, keyVal);
+        // TEXTURE_MASK is two bits wide; skip the unused fourth encoding.
+        uint32_t tex = shaderKey.getTextureTarget();
+        if (tex != Key::TEXTURE_OFF && tex != Key::TEXTURE_EXT && tex != Key::TEXTURE_2D) {
+            continue;
+        }
+        if (cache.count(shaderKey) == 0) {
+            cache.emplace(shaderKey, generateProgram(shaderKey));
+            shaderCount++;
+        }
+    }
+
+    // Prime for sRGB->P3 conversion
+    if (useColorManagement) {
+        Key shaderKey;
+        shaderKey.set(Key::BLEND_MASK | Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::INPUT_TF_MASK |
+                              Key::OUTPUT_TF_MASK,
+                      Key::BLEND_PREMULT | Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::INPUT_TF_SRGB |
+                              Key::OUTPUT_TF_SRGB);
+        // Sixteen variants: opacity x alpha x rounded corners x texture on/off.
+        for (int i = 0; i < 16; i++) {
+            shaderKey.set(Key::OPACITY_MASK,
+                          (i & 1) ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT);
+            shaderKey.set(Key::ALPHA_MASK, (i & 2) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE);
+
+            // Cache rounded corners
+            shaderKey.set(Key::ROUNDED_CORNERS_MASK,
+                          (i & 4) ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF);
+
+            // Cache texture off option for window transition
+            shaderKey.set(Key::TEXTURE_MASK, (i & 8) ? Key::TEXTURE_EXT : Key::TEXTURE_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+    }
+
+    nsecs_t timeAfter = systemTime();
+    float compileTimeMs = static_cast<float>(timeAfter - timeBefore) / 1.0E6;
+    ALOGD("shader cache generated - %u shaders in %f ms\n", shaderCount, compileTimeMs);
+}
+
+/*
+ * Computes the cache Key for a Description by mapping each relevant field
+ * onto its bitfield in the key. The input/output transfer-function bits are
+ * only set when a color transform is present or the transfer functions
+ * differ, i.e. when the shader actually needs EOTF/OOTF/OETF stages.
+ */
+ProgramCache::Key ProgramCache::computeKey(const Description& description) {
+    Key needs;
+    needs.set(Key::TEXTURE_MASK,
+              !description.textureEnabled
+                      ? Key::TEXTURE_OFF
+                      : description.texture.getTextureTarget() == GL_TEXTURE_EXTERNAL_OES
+                              ? Key::TEXTURE_EXT
+                              : description.texture.getTextureTarget() == GL_TEXTURE_2D
+                                      ? Key::TEXTURE_2D
+                                      : Key::TEXTURE_OFF)
+            .set(Key::ALPHA_MASK, (description.color.a < 1) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE)
+            .set(Key::BLEND_MASK,
+                 description.isPremultipliedAlpha ? Key::BLEND_PREMULT : Key::BLEND_NORMAL)
+            .set(Key::OPACITY_MASK,
+                 description.isOpaque ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT)
+            // was "Key::Key::INPUT_TRANSFORM_MATRIX_MASK": the injected-class-name
+            // qualification was redundant and inconsistent with the rest of the chain.
+            .set(Key::INPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasInputTransformMatrix() ? Key::INPUT_TRANSFORM_MATRIX_ON
+                                                       : Key::INPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::OUTPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasOutputTransformMatrix() || description.hasColorMatrix()
+                         ? Key::OUTPUT_TRANSFORM_MATRIX_ON
+                         : Key::OUTPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::ROUNDED_CORNERS_MASK,
+                 description.cornerRadius > 0 ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF)
+            .set(Key::SHADOW_MASK, description.drawShadows ? Key::SHADOW_ON : Key::SHADOW_OFF)
+            .set(Key::Y410_BT2020_MASK,
+                 description.isY410BT2020 ? Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+
+    if (needs.hasTransformMatrix() ||
+        (description.inputTransferFunction != description.outputTransferFunction)) {
+        switch (description.inputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_HLG);
+                break;
+        }
+
+        switch (description.outputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_HLG);
+                break;
+        }
+    }
+
+    return needs;
+}
+
+// Generate EOTF that converts signal values to relative display light,
+// both normalized to [0, 1]. Emits a GLSL EOTF() function matching the
+// input transfer function in the key (sRGB, ST 2084/PQ, HLG, or linear
+// pass-through as the default).
+void ProgramCache::generateEOTF(Formatter& fs, const Key& needs) {
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float EOTF_sRGB(float srgb) {
+                    return srgb <= 0.04045 ? srgb / 12.92 : pow((srgb + 0.055) / 1.055, 2.4);
+                }
+
+                vec3 EOTF_sRGB(const vec3 srgb) {
+                    return vec3(EOTF_sRGB(srgb.r), EOTF_sRGB(srgb.g), EOTF_sRGB(srgb.b));
+                }
+
+                vec3 EOTF(const vec3 srgb) {
+                    return sign(srgb.rgb) * EOTF_sRGB(abs(srgb.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 EOTF(const highp vec3 color) {
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(clamp(color, 0.0, 1.0), 1.0 / vec3(m2));
+                    tmp = max(tmp - c1, 0.0) / (c2 - c3 * tmp);
+                    return pow(tmp, 1.0 / vec3(m1));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float EOTF_channel(const highp float channel) {
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 0.5 ? channel * channel / 3.0 :
+                            (exp((channel - c) / a) + b) / 12.0;
+                }
+
+                vec3 EOTF(const highp vec3 color) {
+                    return vec3(EOTF_channel(color.r), EOTF_channel(color.g),
+                            EOTF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            // Linear (or unknown) input: identity EOTF.
+            fs << R"__SHADER__(
+                vec3 EOTF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+/*
+ * Emits the three GLSL helpers used by the OOTF when tone mapping is needed:
+ *   ScaleLuminance     - relative -> absolute light (nits),
+ *   ToneMap            - absolute light mapped into the display's range,
+ *   NormalizeLuminance - absolute -> relative light for the output TF.
+ * The variants emitted depend on the input/output transfer functions in the key.
+ */
+void ProgramCache::generateToneMappingProcess(Formatter& fs, const Key& needs) {
+    // Convert relative light to absolute light.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+            // PQ signal peak corresponds to 10000 nits.
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    // The formula is:
+                    // alpha * pow(Y, gamma - 1.0) * color + beta;
+                    // where alpha is 1000.0, gamma is 1.2, beta is 0.0.
+                    return color * 1000.0 * pow(color.y, 0.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // Tone map absolute light to display luminance range.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+        case Key::INPUT_TF_HLG:
+            switch (needs.getOutputTF()) {
+                case Key::OUTPUT_TF_HLG:
+                    // Right now when mixed PQ and HLG contents are presented,
+                    // HLG content will always be converted to PQ. However, for
+                    // completeness, we simply clamp the value to [0.0, 1000.0].
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return clamp(color, 0.0, 1000.0);
+                        }
+                    )__SHADER__";
+                    break;
+                case Key::OUTPUT_TF_ST2084:
+                    // PQ output covers the full range; no tone mapping needed.
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return color;
+                        }
+                    )__SHADER__";
+                    break;
+                default:
+                    // HDR -> SDR: piecewise linear/Hermite curve compressing the
+                    // content range into the display's range.
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            float maxMasteringLumi = maxMasteringLuminance;
+                            float maxContentLumi = maxContentLuminance;
+                            float maxInLumi = min(maxMasteringLumi, maxContentLumi);
+                            float maxOutLumi = displayMaxLuminance;
+
+                            float nits = color.y;
+
+                            // clamp to max input luminance
+                            nits = clamp(nits, 0.0, maxInLumi);
+
+                            // scale [0.0, maxInLumi] to [0.0, maxOutLumi]
+                            if (maxInLumi <= maxOutLumi) {
+                                return color * (maxOutLumi / maxInLumi);
+                            } else {
+                                // three control points
+                                const float x0 = 10.0;
+                                const float y0 = 17.0;
+                                float x1 = maxOutLumi * 0.75;
+                                float y1 = x1;
+                                float x2 = x1 + (maxInLumi - x1) / 2.0;
+                                float y2 = y1 + (maxOutLumi - y1) * 0.75;
+
+                                // horizontal distances between the last three control points
+                                float h12 = x2 - x1;
+                                float h23 = maxInLumi - x2;
+                                // tangents at the last three control points
+                                float m1 = (y2 - y1) / h12;
+                                float m3 = (maxOutLumi - y2) / h23;
+                                float m2 = (m1 + m3) / 2.0;
+
+                                if (nits < x0) {
+                                    // scale [0.0, x0] to [0.0, y0] linearly
+                                    float slope = y0 / x0;
+                                    return color * slope;
+                                } else if (nits < x1) {
+                                    // scale [x0, x1] to [y0, y1] linearly
+                                    float slope = (y1 - y0) / (x1 - x0);
+                                    nits = y0 + (nits - x0) * slope;
+                                } else if (nits < x2) {
+                                    // scale [x1, x2] to [y1, y2] using Hermite interp
+                                    float t = (nits - x1) / h12;
+                                    nits = (y1 * (1.0 + 2.0 * t) + h12 * m1 * t) * (1.0 - t) * (1.0 - t) +
+                                            (y2 * (3.0 - 2.0 * t) + h12 * m2 * (t - 1.0)) * t * t;
+                                } else {
+                                    // scale [x2, maxInLumi] to [y2, maxOutLumi] using Hermite interp
+                                    float t = (nits - x2) / h23;
+                                    nits = (y2 * (1.0 + 2.0 * t) + h23 * m2 * t) * (1.0 - t) * (1.0 - t) +
+                                            (maxOutLumi * (3.0 - 2.0 * t) + h23 * m3 * (t - 1.0)) * t * t;
+                                }
+                            }
+
+                            // color.y is greater than x0 and is thus non-zero
+                            return color * (nits / color.y);
+                        }
+                    )__SHADER__";
+                    break;
+            }
+            break;
+        default:
+            // inverse tone map; the output luminance can be up to maxOutLumi.
+            fs << R"__SHADER__(
+                highp vec3 ToneMap(highp vec3 color) {
+                    const float maxOutLumi = 3000.0;
+
+                    const float x0 = 5.0;
+                    const float y0 = 2.5;
+                    float x1 = displayMaxLuminance * 0.7;
+                    float y1 = maxOutLumi * 0.15;
+                    float x2 = displayMaxLuminance * 0.9;
+                    float y2 = maxOutLumi * 0.45;
+                    float x3 = displayMaxLuminance;
+                    float y3 = maxOutLumi;
+
+                    float c1 = y1 / 3.0;
+                    float c2 = y2 / 2.0;
+                    float c3 = y3 / 1.5;
+
+                    float nits = color.y;
+
+                    float scale;
+                    if (nits <= x0) {
+                        // scale [0.0, x0] to [0.0, y0] linearly
+                        const float slope = y0 / x0;
+                        return color * slope;
+                    } else if (nits <= x1) {
+                        // scale [x0, x1] to [y0, y1] using a curve
+                        float t = (nits - x0) / (x1 - x0);
+                        nits = (1.0 - t) * (1.0 - t) * y0 + 2.0 * (1.0 - t) * t * c1 + t * t * y1;
+                    } else if (nits <= x2) {
+                        // scale [x1, x2] to [y1, y2] using a curve
+                        float t = (nits - x1) / (x2 - x1);
+                        nits = (1.0 - t) * (1.0 - t) * y1 + 2.0 * (1.0 - t) * t * c2 + t * t * y2;
+                    } else {
+                        // scale [x2, x3] to [y2, y3] using a curve
+                        float t = (nits - x2) / (x3 - x2);
+                        nits = (1.0 - t) * (1.0 - t) * y2 + 2.0 * (1.0 - t) * t * c3 + t * t * y3;
+                    }
+
+                    // color.y is greater than x0 and is thus non-zero
+                    return color * (nits / color.y);
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // convert absolute light to relative light.
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 1000.0 * pow(color.y / 1000.0, -0.2 / 1.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+// Generate OOTF that modifies the relative scene light to relative display light.
+// Identity when no tone mapping is required; otherwise composes the helpers
+// emitted by generateToneMappingProcess().
+void ProgramCache::generateOOTF(Formatter& fs, const ProgramCache::Key& needs) {
+    if (!needs.needsToneMapping()) {
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return color;
+            }
+        )__SHADER__";
+    } else {
+        generateToneMappingProcess(fs, needs);
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return NormalizeLuminance(ToneMap(ScaleLuminance(color)));
+            }
+        )__SHADER__";
+    }
+}
+
+// Generate OETF that converts relative display light to signal values,
+// both normalized to [0, 1]. Emits the GLSL OETF() matching the output
+// transfer function in the key (sRGB, ST 2084/PQ, HLG, or linear identity).
+void ProgramCache::generateOETF(Formatter& fs, const Key& needs) {
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float OETF_sRGB(const float linear) {
+                    return linear <= 0.0031308 ?
+                            linear * 12.92 : (pow(linear, 1.0 / 2.4) * 1.055) - 0.055;
+                }
+
+                vec3 OETF_sRGB(const vec3 linear) {
+                    return vec3(OETF_sRGB(linear.r), OETF_sRGB(linear.g), OETF_sRGB(linear.b));
+                }
+
+                vec3 OETF(const vec3 linear) {
+                    return sign(linear.rgb) * OETF_sRGB(abs(linear.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(linear, vec3(m1));
+                    tmp = (c1 + c2 * tmp) / (1.0 + c3 * tmp);
+                    return pow(tmp, vec3(m2));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float OETF_channel(const highp float channel) {
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 1.0 / 12.0 ? sqrt(3.0 * channel) :
+                            a * log(12.0 * channel - b) + c;
+                }
+
+                vec3 OETF(const highp vec3 color) {
+                    return vec3(OETF_channel(color.r), OETF_channel(color.g),
+                            OETF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            // Linear output: identity OETF.
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+/*
+ * Builds the GLSL vertex shader source for the key. Attributes/varyings for
+ * texture coordinates, rounded-corner crop coordinates, and shadow data are
+ * only emitted when the corresponding key bits are set. Emission order
+ * defines the exact shader text, so the structure here is deliberate.
+ */
+String8 ProgramCache::generateVertexShader(const Key& needs) {
+    Formatter vs;
+    if (needs.hasTextureCoords()) {
+        vs << "attribute vec4 texCoords;"
+           << "varying vec2 outTexCoords;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "attribute lowp vec4 cropCoords;";
+        vs << "varying lowp vec2 outCropCoords;";
+    }
+    if (needs.drawShadows()) {
+        vs << "attribute lowp vec4 shadowColor;";
+        vs << "varying lowp vec4 outShadowColor;";
+        vs << "attribute lowp vec4 shadowParams;";
+        vs << "varying lowp vec3 outShadowParams;";
+    }
+    vs << "attribute vec4 position;"
+       << "uniform mat4 projection;"
+       << "uniform mat4 texture;"
+       << "void main(void) {" << indent << "gl_Position = projection * position;";
+    if (needs.hasTextureCoords()) {
+        vs << "outTexCoords = (texture * texCoords).st;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "outCropCoords = cropCoords.st;";
+    }
+    if (needs.drawShadows()) {
+        vs << "outShadowColor = shadowColor;";
+        // only xyz are used by the fragment shader's shadow computation
+        vs << "outShadowParams = shadowParams.xyz;";
+    }
+    vs << dedent << "}";
+    return vs.getString();
+}
+
+/*
+ * Builds the GLSL fragment shader source for the key. Declarations and the
+ * body of main() are emitted conditionally on the key bits; the color
+ * pipeline (when active) is EOTF -> InputTransform -> OOTF -> OutputTransform
+ * -> OETF, with un/re-premultiply around it for translucent premultiplied
+ * content. Emission order defines the exact shader text.
+ */
+String8 ProgramCache::generateFragmentShader(const Key& needs) {
+    Formatter fs;
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "#extension GL_OES_EGL_image_external : require";
+    }
+
+    // default precision is required-ish in fragment shaders
+    fs << "precision mediump float;";
+
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "uniform samplerExternalOES sampler;";
+    } else if (needs.getTextureTarget() == Key::TEXTURE_2D) {
+        fs << "uniform sampler2D sampler;";
+    }
+
+    if (needs.hasTextureCoords()) {
+        fs << "varying vec2 outTexCoords;";
+    }
+
+    if (needs.hasRoundedCorners()) {
+        // Rounded corners implementation using a signed distance function.
+        fs << R"__SHADER__(
+            uniform float cornerRadius;
+            uniform vec2 cropCenter;
+            varying vec2 outCropCoords;
+
+            /**
+             * This function takes the current crop coordinates and calculates an alpha value based
+             * on the corner radius and distance from the crop center.
+             */
+            float applyCornerRadius(vec2 cropCoords)
+            {
+                vec2 position = cropCoords - cropCenter;
+                // Scale down the dist vector here, as otherwise large corner
+                // radii can cause floating point issues when computing the norm
+                vec2 dist = (abs(position) - cropCenter + vec2(cornerRadius)) / 16.0;
+                // Once we've found the norm, then scale back up.
+                float plane = length(max(dist, vec2(0.0))) * 16.0;
+                return 1.0 - clamp(plane - cornerRadius, 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.drawShadows()) {
+        fs << R"__SHADER__(
+            varying lowp vec4 outShadowColor;
+            varying lowp vec3 outShadowParams;
+
+            /**
+             * Returns the shadow color.
+             */
+            vec4 getShadowColor()
+            {
+                lowp float d = length(outShadowParams.xy);
+                vec2 uv = vec2(outShadowParams.z * (1.0 - d), 0.5);
+                lowp float factor = texture2D(sampler, uv).a;
+                return outShadowColor * factor;
+            }
+            )__SHADER__";
+    }
+
+    if (needs.getTextureTarget() == Key::TEXTURE_OFF || needs.hasAlpha()) {
+        fs << "uniform vec4 color;";
+    }
+
+    if (needs.isY410BT2020()) {
+        fs << R"__SHADER__(
+            vec3 convertY410BT2020(const vec3 color) {
+                const vec3 offset = vec3(0.0625, 0.5, 0.5);
+                const mat3 transform = mat3(
+                    vec3(1.1678,  1.1678, 1.1678),
+                    vec3(   0.0, -0.1878, 2.1481),
+                    vec3(1.6836, -0.6523,    0.0));
+                // Y is in G, U is in R, and V is in B
+                return clamp(transform * (color.grb - offset), 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        // Tone-mapping uniforms are only needed when luminance ranges differ.
+        if (needs.needsToneMapping()) {
+            fs << "uniform float displayMaxLuminance;";
+            fs << "uniform float maxMasteringLuminance;";
+            fs << "uniform float maxContentLuminance;";
+        }
+
+        if (needs.hasInputTransformMatrix()) {
+            fs << "uniform mat4 inputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return color;
+                }
+            )__SHADER__";
+        }
+
+        // the transformation from a wider colorspace to a narrower one can
+        // result in >1.0 or <0.0 pixel values
+        if (needs.hasOutputTransformMatrix()) {
+            fs << "uniform mat4 outputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(vec3(outputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(color, 0.0, 1.0);
+                }
+            )__SHADER__";
+        }
+
+        generateEOTF(fs, needs);
+        generateOOTF(fs, needs);
+        generateOETF(fs, needs);
+    }
+
+    fs << "void main(void) {" << indent;
+    if (needs.drawShadows()) {
+        fs << "gl_FragColor = getShadowColor();";
+    } else {
+        if (needs.isTexturing()) {
+            fs << "gl_FragColor = texture2D(sampler, outTexCoords);";
+            if (needs.isY410BT2020()) {
+                fs << "gl_FragColor.rgb = convertY410BT2020(gl_FragColor.rgb);";
+            }
+        } else {
+            fs << "gl_FragColor.rgb = color.rgb;";
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.isOpaque()) {
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.hasAlpha()) {
+            // modulate the current alpha value with alpha set
+            if (needs.isPremultiplied()) {
+                // ... and the color too if we're premultiplied
+                fs << "gl_FragColor *= color.a;";
+            } else {
+                fs << "gl_FragColor.a *= color.a;";
+            }
+        }
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // un-premultiply if needed before linearization
+            // avoid divide by 0 by adding 0.5/256 to the alpha channel
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb / (gl_FragColor.a + 0.0019);";
+        }
+        fs << "gl_FragColor.rgb = "
+              "OETF(OutputTransform(OOTF(InputTransform(EOTF(gl_FragColor.rgb)))));";
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // and re-premultiply if needed after gamma correction
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb * (gl_FragColor.a + 0.0019);";
+        }
+    }
+
+    if (needs.hasRoundedCorners()) {
+        if (needs.isPremultiplied()) {
+            fs << "gl_FragColor *= vec4(applyCornerRadius(outCropCoords));";
+        } else {
+            fs << "gl_FragColor.a *= applyCornerRadius(outCropCoords);";
+        }
+    }
+
+    fs << dedent << "}";
+    return fs.getString();
+}
+
+/*
+ * Compiles a new Program for the given key: generates both shader stages
+ * from the key, then links them together.
+ */
+std::unique_ptr<Program> ProgramCache::generateProgram(const Key& needs) {
+    ATRACE_CALL();
+
+    const String8 vertexSource = generateVertexShader(needs);
+    const String8 fragmentSource = generateFragmentShader(needs);
+
+    return std::make_unique<Program>(needs, vertexSource.string(), fragmentSource.string());
+}
+
+/*
+ * Binds a program suitable for the given description, compiling and caching
+ * a new one on a cache miss. Invalid (failed-to-link) programs are cached
+ * but never bound.
+ */
+void ProgramCache::useProgram(EGLContext context, const Description& description) {
+    // Map the description onto its cache key.
+    const Key needs = computeKey(description);
+
+    auto& cache = mCaches[context];
+    auto it = cache.find(needs);
+    if (it == cache.end()) {
+        // Cache miss: compile a new program, timing how long it takes.
+        const nsecs_t start = systemTime();
+        it = cache.emplace(needs, generateProgram(needs)).first;
+        const nsecs_t elapsed = systemTime() - start;
+
+        ALOGV(">>> generated new program for context %p: needs=%08X, time=%u ms (%zu programs)",
+              context, needs.mKey, uint32_t(ns2ms(elapsed)), cache.size());
+    }
+
+    // here we have a suitable program for this description
+    std::unique_ptr<Program>& program = it->second;
+    if (program->isValid()) {
+        program->use();
+        program->setUniforms(description);
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.h b/media/libstagefright/renderfright/gl/ProgramCache.h
new file mode 100644
index 0000000..901e631
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAMCACHE_H
+#define SF_RENDER_ENGINE_PROGRAMCACHE_H
+
+#include <memory>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include <utils/Singleton.h>
+#include <utils/TypeHelpers.h>
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+
+struct Description;
+
+namespace gl {
+
+class Formatter;
+class Program;
+
+/*
+ * This class generates GLSL programs suitable to handle a given
+ * Description. It's responsible for figuring out what to
+ * generate from a Description.
+ * It also maintains a cache of these Programs.
+ */
+class ProgramCache : public Singleton<ProgramCache> {
+public:
+    /*
+     * Key is used to retrieve a Program in the cache.
+     * A Key is generated from a Description.
+     *
+     * The key is a 32-bit bitfield; each enum group below defines the shift,
+     * mask, and values for one independent shader option.
+     */
+    class Key {
+        friend class ProgramCache;
+        typedef uint32_t key_t;
+        key_t mKey;
+
+    public:
+        enum {
+            BLEND_SHIFT = 0,
+            BLEND_MASK = 1 << BLEND_SHIFT,
+            BLEND_PREMULT = 1 << BLEND_SHIFT,
+            BLEND_NORMAL = 0 << BLEND_SHIFT,
+
+            OPACITY_SHIFT = 1,
+            OPACITY_MASK = 1 << OPACITY_SHIFT,
+            OPACITY_OPAQUE = 1 << OPACITY_SHIFT,
+            OPACITY_TRANSLUCENT = 0 << OPACITY_SHIFT,
+
+            ALPHA_SHIFT = 2,
+            ALPHA_MASK = 1 << ALPHA_SHIFT,
+            ALPHA_LT_ONE = 1 << ALPHA_SHIFT,
+            ALPHA_EQ_ONE = 0 << ALPHA_SHIFT,
+
+            // two bits: off / external / 2D (value 3 is unused)
+            TEXTURE_SHIFT = 3,
+            TEXTURE_MASK = 3 << TEXTURE_SHIFT,
+            TEXTURE_OFF = 0 << TEXTURE_SHIFT,
+            TEXTURE_EXT = 1 << TEXTURE_SHIFT,
+            TEXTURE_2D = 2 << TEXTURE_SHIFT,
+
+            ROUNDED_CORNERS_SHIFT = 5,
+            ROUNDED_CORNERS_MASK = 1 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_OFF = 0 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_ON = 1 << ROUNDED_CORNERS_SHIFT,
+
+            INPUT_TRANSFORM_MATRIX_SHIFT = 6,
+            INPUT_TRANSFORM_MATRIX_MASK = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_OFF = 0 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_ON = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+
+            OUTPUT_TRANSFORM_MATRIX_SHIFT = 7,
+            OUTPUT_TRANSFORM_MATRIX_MASK = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_OFF = 0 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_ON = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+
+            // two bits: linear / sRGB / ST 2084 (PQ) / HLG
+            INPUT_TF_SHIFT = 8,
+            INPUT_TF_MASK = 3 << INPUT_TF_SHIFT,
+            INPUT_TF_LINEAR = 0 << INPUT_TF_SHIFT,
+            INPUT_TF_SRGB = 1 << INPUT_TF_SHIFT,
+            INPUT_TF_ST2084 = 2 << INPUT_TF_SHIFT,
+            INPUT_TF_HLG = 3 << INPUT_TF_SHIFT,
+
+            OUTPUT_TF_SHIFT = 10,
+            OUTPUT_TF_MASK = 3 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_LINEAR = 0 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_SRGB = 1 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_ST2084 = 2 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_HLG = 3 << OUTPUT_TF_SHIFT,
+
+            Y410_BT2020_SHIFT = 12,
+            Y410_BT2020_MASK = 1 << Y410_BT2020_SHIFT,
+            Y410_BT2020_OFF = 0 << Y410_BT2020_SHIFT,
+            Y410_BT2020_ON = 1 << Y410_BT2020_SHIFT,
+
+            SHADOW_SHIFT = 13,
+            SHADOW_MASK = 1 << SHADOW_SHIFT,
+            SHADOW_OFF = 0 << SHADOW_SHIFT,
+            SHADOW_ON = 1 << SHADOW_SHIFT,
+        };
+
+        inline Key() : mKey(0) {}
+        inline Key(const Key& rhs) : mKey(rhs.mKey) {}
+
+        // Replaces the bits selected by mask with value; returns *this for chaining.
+        inline Key& set(key_t mask, key_t value) {
+            mKey = (mKey & ~mask) | value;
+            return *this;
+        }
+
+        inline bool isTexturing() const { return (mKey & TEXTURE_MASK) != TEXTURE_OFF; }
+        // Shadows sample a shadow texture directly and use no per-vertex texCoords.
+        inline bool hasTextureCoords() const { return isTexturing() && !drawShadows(); }
+        inline int getTextureTarget() const { return (mKey & TEXTURE_MASK); }
+        inline bool isPremultiplied() const { return (mKey & BLEND_MASK) == BLEND_PREMULT; }
+        inline bool isOpaque() const { return (mKey & OPACITY_MASK) == OPACITY_OPAQUE; }
+        inline bool hasAlpha() const { return (mKey & ALPHA_MASK) == ALPHA_LT_ONE; }
+        inline bool hasRoundedCorners() const {
+            return (mKey & ROUNDED_CORNERS_MASK) == ROUNDED_CORNERS_ON;
+        }
+        inline bool drawShadows() const { return (mKey & SHADOW_MASK) == SHADOW_ON; }
+        inline bool hasInputTransformMatrix() const {
+            return (mKey & INPUT_TRANSFORM_MATRIX_MASK) == INPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasOutputTransformMatrix() const {
+            return (mKey & OUTPUT_TRANSFORM_MATRIX_MASK) == OUTPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasTransformMatrix() const {
+            return hasInputTransformMatrix() || hasOutputTransformMatrix();
+        }
+        // Both return the masked (not shifted) field value.
+        inline int getInputTF() const { return (mKey & INPUT_TF_MASK); }
+        inline int getOutputTF() const { return (mKey & OUTPUT_TF_MASK); }
+
+        // When HDR and non-HDR contents are mixed, or different types of HDR contents are
+        // mixed, we will do a tone mapping process to tone map the input content to output
+        // content. Currently the following conversions are handled:
+        // * SDR -> HLG
+        // * SDR -> PQ
+        // * HLG -> PQ
+        inline bool needsToneMapping() const {
+            int inputTF = getInputTF();
+            int outputTF = getOutputTF();
+
+            // Return false when converting from SDR to SDR.
+            if (inputTF == Key::INPUT_TF_SRGB && outputTF == Key::OUTPUT_TF_LINEAR) {
+                return false;
+            }
+            if (inputTF == Key::INPUT_TF_LINEAR && outputTF == Key::OUTPUT_TF_SRGB) {
+                return false;
+            }
+
+            // Otherwise, tone map whenever the transfer functions differ.
+            inputTF >>= Key::INPUT_TF_SHIFT;
+            outputTF >>= Key::OUTPUT_TF_SHIFT;
+            return inputTF != outputTF;
+        }
+        inline bool isY410BT2020() const { return (mKey & Y410_BT2020_MASK) == Y410_BT2020_ON; }
+
+        // for use by std::unordered_map
+
+        bool operator==(const Key& other) const { return mKey == other.mKey; }
+
+        struct Hash {
+            size_t operator()(const Key& key) const { return static_cast<size_t>(key.mKey); }
+        };
+    };
+
+    ProgramCache() = default;
+    ~ProgramCache() = default;
+
+    // Generate shaders to populate the cache
+    void primeCache(const EGLContext context, bool useColorManagement, bool toneMapperShaderOnly);
+
+    size_t getSize(const EGLContext context) { return mCaches[context].size(); }
+
+    // useProgram lookup a suitable program in the cache or generates one
+    // if none can be found.
+    void useProgram(const EGLContext context, const Description& description);
+
+private:
+    // compute a cache Key from a Description
+    static Key computeKey(const Description& description);
+    // Generate EOTF based on the Key.
+    static void generateEOTF(Formatter& fs, const Key& needs);
+    // Generate necessary tone mapping methods for OOTF.
+    static void generateToneMappingProcess(Formatter& fs, const Key& needs);
+    // Generate OOTF based on the Key.
+    static void generateOOTF(Formatter& fs, const Key& needs);
+    // Generate OETF based on the Key.
+    static void generateOETF(Formatter& fs, const Key& needs);
+    // generates a program from the Key
+    static std::unique_ptr<Program> generateProgram(const Key& needs);
+    // generates the vertex shader from the Key
+    static String8 generateVertexShader(const Key& needs);
+    // generates the fragment shader from the Key
+    static String8 generateFragmentShader(const Key& needs);
+
+    // Key/Value map used for caching Programs. Currently the cache
+    // is never shrunk (and the GL program objects are never deleted).
+    std::unordered_map<EGLContext, std::unordered_map<Key, std::unique_ptr<Program>, Key::Hash>>
+            mCaches;
+};
+
+} // namespace gl
+} // namespace renderengine
+
+ANDROID_BASIC_TYPES_TRAITS(renderengine::gl::ProgramCache::Key)
+
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAMCACHE_H */
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
new file mode 100644
index 0000000..19f18c0
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "BlurFilter.h"
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES3/gl3.h>
+#include <GLES3/gl3ext.h>
+#include <ui/GraphicTypes.h>
+#include <cstdint>
+
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Compiles both shader programs (the Kawase blur pass and the final mix
+// pass), caches their attribute/uniform locations so draw calls don't have
+// to look them up per frame, and uploads the oversized fullscreen triangle
+// (3 vertices followed by 3 UV pairs) shared by every pass.
+BlurFilter::BlurFilter(GLESRenderEngine& engine)
+ : mEngine(engine),
+ mCompositionFbo(engine),
+ mPingFbo(engine),
+ mPongFbo(engine),
+ mMixProgram(engine),
+ mBlurProgram(engine) {
+ mMixProgram.compile(getVertexShader(), getMixFragShader());
+ mMPosLoc = mMixProgram.getAttributeLocation("aPosition");
+ mMUvLoc = mMixProgram.getAttributeLocation("aUV");
+ mMTextureLoc = mMixProgram.getUniformLocation("uTexture");
+ mMCompositionTextureLoc = mMixProgram.getUniformLocation("uCompositionTexture");
+ mMMixLoc = mMixProgram.getUniformLocation("uMix");
+
+ mBlurProgram.compile(getVertexShader(), getFragmentShader());
+ mBPosLoc = mBlurProgram.getAttributeLocation("aPosition");
+ mBUvLoc = mBlurProgram.getAttributeLocation("aUV");
+ mBTextureLoc = mBlurProgram.getUniformLocation("uTexture");
+ mBOffsetLoc = mBlurProgram.getUniformLocation("uOffset");
+
+ // A single triangle big enough to cover the viewport; avoids the diagonal
+ // seam of a two-triangle quad.
+ static constexpr auto size = 2.0f;
+ static constexpr auto translation = 1.0f;
+ const GLfloat vboData[] = {
+ // Vertex data
+ translation - size, -translation - size,
+ translation - size, -translation + size,
+ translation + size, -translation + size,
+ // UV data
+ 0.0f, 0.0f - translation,
+ 0.0f, size - translation,
+ size, size - translation
+ };
+ mMeshBuffer.allocateBuffers(vboData, 12 /* size */);
+}
+
+// Binds the composition FBO as the current draw target, (re)allocating the
+// offscreen textures first if the display grew beyond what was previously
+// allocated. Buffers are only ever grown, never shrunk.
+// Returns NO_ERROR on success, or the failing FBO status / GL error.
+status_t BlurFilter::setAsDrawTarget(const DisplaySettings& display, uint32_t radius) {
+ ATRACE_NAME("BlurFilter::setAsDrawTarget");
+ mRadius = radius;
+ mDisplayX = display.physicalDisplay.left;
+ mDisplayY = display.physicalDisplay.top;
+
+ if (mDisplayWidth < display.physicalDisplay.width() ||
+ mDisplayHeight < display.physicalDisplay.height()) {
+ ATRACE_NAME("BlurFilter::allocatingTextures");
+
+ mDisplayWidth = display.physicalDisplay.width();
+ mDisplayHeight = display.physicalDisplay.height();
+ mCompositionFbo.allocateBuffers(mDisplayWidth, mDisplayHeight);
+
+ // Ping/pong buffers are downscaled by kFboScale to reduce fill cost.
+ const uint32_t fboWidth = floorf(mDisplayWidth * kFboScale);
+ const uint32_t fboHeight = floorf(mDisplayHeight * kFboScale);
+ mPingFbo.allocateBuffers(fboWidth, fboHeight);
+ mPongFbo.allocateBuffers(fboWidth, fboHeight);
+
+ if (mPingFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+ ALOGE("Invalid ping buffer");
+ return mPingFbo.getStatus();
+ }
+ if (mPongFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+ ALOGE("Invalid pong buffer");
+ return mPongFbo.getStatus();
+ }
+ if (mCompositionFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+ ALOGE("Invalid composition buffer");
+ return mCompositionFbo.getStatus();
+ }
+ if (!mBlurProgram.isValid()) {
+ ALOGE("Invalid shader");
+ return GL_INVALID_OPERATION;
+ }
+ }
+
+ mCompositionFbo.bind();
+ glViewport(0, 0, mCompositionFbo.getBufferWidth(), mCompositionFbo.getBufferHeight());
+ return NO_ERROR;
+}
+
+// Issues one draw of the fullscreen triangle. `uv` and `position` are the
+// attribute locations of the currently bound program; both attributes are
+// tightly packed vec2s inside mMeshBuffer (3 vertices, then 3 UV pairs).
+void BlurFilter::drawMesh(GLuint uv, GLuint position) {
+
+ glEnableVertexAttribArray(uv);
+ glEnableVertexAttribArray(position);
+ mMeshBuffer.bind();
+ glVertexAttribPointer(position, 2 /* size */, GL_FLOAT, GL_FALSE,
+ 2 * sizeof(GLfloat) /* stride */, 0 /* offset */);
+ // stride 0 means "tightly packed", equivalent to 2 * sizeof(GLfloat) here.
+ glVertexAttribPointer(uv, 2 /* size */, GL_FLOAT, GL_FALSE, 0 /* stride */,
+ (GLvoid*)(6 * sizeof(GLfloat)) /* offset */);
+ mMeshBuffer.unbind();
+
+ // draw mesh
+ glDrawArrays(GL_TRIANGLES, 0 /* first */, 3 /* count */);
+}
+
+// Runs the Kawase blur passes over the composited frame, ping-ponging
+// between the two downscaled FBOs. The result ends up in the FBO pointed to
+// by mLastDrawTarget; render() is responsible for putting it on screen.
+status_t BlurFilter::prepare() {
+ ATRACE_NAME("BlurFilter::prepare");
+
+ // Kawase is an approximation of Gaussian, but it behaves differently from it.
+ // A radius transformation is required for approximating them, and also to introduce
+ // non-integer steps, necessary to smoothly interpolate large radii.
+ const auto radius = mRadius / 6.0f;
+
+ // Calculate how many passes we'll do, based on the radius.
+ // Too many passes will make the operation expensive.
+ const auto passes = min(kMaxPasses, (uint32_t)ceil(radius));
+
+ const float radiusByPasses = radius / (float)passes;
+ const float stepX = radiusByPasses / (float)mCompositionFbo.getBufferWidth();
+ const float stepY = radiusByPasses / (float)mCompositionFbo.getBufferHeight();
+
+ // Let's start by downsampling and blurring the composited frame simultaneously.
+ mBlurProgram.useProgram();
+ glActiveTexture(GL_TEXTURE0);
+ glUniform1i(mBTextureLoc, 0);
+ glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+ glUniform2f(mBOffsetLoc, stepX, stepY);
+ glViewport(0, 0, mPingFbo.getBufferWidth(), mPingFbo.getBufferHeight());
+ mPingFbo.bind();
+ drawMesh(mBUvLoc, mBPosLoc);
+
+ // And now we'll ping pong between our textures, to accumulate the result of various offsets.
+ GLFramebuffer* read = &mPingFbo;
+ GLFramebuffer* draw = &mPongFbo;
+ glViewport(0, 0, draw->getBufferWidth(), draw->getBufferHeight());
+ // NOTE(review): `i` deduces to int while `passes` is uint32_t — harmless
+ // for these small values, but a sign-compare warning magnet.
+ for (auto i = 1; i < passes; i++) {
+ ATRACE_NAME("BlurFilter::renderPass");
+ draw->bind();
+
+ glBindTexture(GL_TEXTURE_2D, read->getTextureName());
+ // The sample offset grows each pass, widening the effective kernel.
+ glUniform2f(mBOffsetLoc, stepX * i, stepY * i);
+
+ drawMesh(mBUvLoc, mBPosLoc);
+
+ // Swap buffers for next iteration
+ auto tmp = draw;
+ draw = read;
+ read = tmp;
+ }
+ // After the swap, `read` holds the buffer that was drawn to last.
+ mLastDrawTarget = read;
+
+ return NO_ERROR;
+}
+
+// Draws the blurred texture into the currently bound framebuffer. For small
+// radii the blur is crossfaded with the sharp composition texture to hide
+// upscaling artifacts; at full blur (or when multi-pass) it is blitted
+// directly, skipping the mix program entirely.
+status_t BlurFilter::render(bool multiPass) {
+ ATRACE_NAME("BlurFilter::render");
+
+ // Now let's scale our blur up. It will be interpolated with the larger composited
+ // texture for the first frames, to hide downscaling artifacts.
+ GLfloat mix = fmin(1.0, mRadius / kMaxCrossFadeRadius);
+
+ // When doing multiple passes, we cannot try to read mCompositionFbo, given that we'll
+ // be writing onto it. Let's disable the crossfade, otherwise we'd need 1 extra frame buffer,
+ // as large as the screen size.
+ if (mix >= 1 || multiPass) {
+ mLastDrawTarget->bindAsReadBuffer();
+ // NOTE(review): glBlitFramebuffer takes absolute dst coordinates; if
+ // mDisplayX/mDisplayY are ever non-zero, the dst corner should
+ // presumably be (mDisplayX + mDisplayWidth, mDisplayY + mDisplayHeight)
+ // — confirm against callers.
+ glBlitFramebuffer(0, 0, mLastDrawTarget->getBufferWidth(),
+ mLastDrawTarget->getBufferHeight(), mDisplayX, mDisplayY, mDisplayWidth,
+ mDisplayHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
+ return NO_ERROR;
+ }
+
+ mMixProgram.useProgram();
+ glUniform1f(mMMixLoc, mix);
+ glActiveTexture(GL_TEXTURE0);
+ glBindTexture(GL_TEXTURE_2D, mLastDrawTarget->getTextureName());
+ glUniform1i(mMTextureLoc, 0);
+ glActiveTexture(GL_TEXTURE1);
+ glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+ glUniform1i(mMCompositionTextureLoc, 1);
+
+ drawMesh(mMUvLoc, mMPosLoc);
+
+ // Leave a clean state behind: no program bound, texture unit 0 active.
+ glUseProgram(0);
+ glActiveTexture(GL_TEXTURE0);
+ mEngine.checkErrors("Drawing blur mesh");
+ return NO_ERROR;
+}
+
+// Vertex shader shared by both programs: passes the position through
+// unchanged and forwards UVs to the fragment stage.
+string BlurFilter::getVertexShader() const {
+ return R"SHADER(#version 310 es
+ precision mediump float;
+
+ in vec2 aPosition;
+ in highp vec2 aUV;
+ out highp vec2 vUV;
+
+ void main() {
+ vUV = aUV;
+ gl_Position = vec4(aPosition, 0.0, 1.0);
+ }
+ )SHADER";
+}
+
+// Kawase blur fragment shader: 5 taps (center plus the four diagonal
+// offsets given by uOffset), averaged with a 0.2 weight and forced opaque.
+string BlurFilter::getFragmentShader() const {
+ return R"SHADER(#version 310 es
+ precision mediump float;
+
+ uniform sampler2D uTexture;
+ uniform vec2 uOffset;
+
+ in highp vec2 vUV;
+ out vec4 fragColor;
+
+ void main() {
+ fragColor = texture(uTexture, vUV, 0.0);
+ fragColor += texture(uTexture, vUV + vec2( uOffset.x, uOffset.y), 0.0);
+ fragColor += texture(uTexture, vUV + vec2( uOffset.x, -uOffset.y), 0.0);
+ fragColor += texture(uTexture, vUV + vec2(-uOffset.x, uOffset.y), 0.0);
+ fragColor += texture(uTexture, vUV + vec2(-uOffset.x, -uOffset.y), 0.0);
+
+ fragColor = vec4(fragColor.rgb * 0.2, 1.0);
+ }
+ )SHADER";
+}
+
+// Crossfade fragment shader used by render(): linearly interpolates between
+// the sharp composition texture and the blurred texture by uMix.
+string BlurFilter::getMixFragShader() const {
+ string shader = R"SHADER(#version 310 es
+ precision mediump float;
+
+ in highp vec2 vUV;
+ out vec4 fragColor;
+
+ uniform sampler2D uCompositionTexture;
+ uniform sampler2D uTexture;
+ uniform float uMix;
+
+ void main() {
+ vec4 blurred = texture(uTexture, vUV);
+ vec4 composition = texture(uCompositionTexture, vUV);
+ fragColor = mix(composition, blurred, uMix);
+ }
+ )SHADER";
+ return shader;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.h b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
new file mode 100644
index 0000000..593a8fd
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+#include "../GLVertexBuffer.h"
+#include "GenericProgram.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * This is an implementation of a Kawase blur, as described in here:
+ * https://community.arm.com/cfs-file/__key/communityserver-blogs-components-weblogfiles/
+ * 00-00-00-20-66/siggraph2015_2D00_mmg_2D00_marius_2D00_notes.pdf
+ *
+ * Usage: setAsDrawTarget() redirects rendering into mCompositionFbo, then
+ * prepare() runs the blur passes, and render() puts the result on screen.
+ */
+class BlurFilter {
+public:
+ // Downsample FBO to improve performance
+ static constexpr float kFboScale = 0.25f;
+ // Maximum number of render passes
+ static constexpr uint32_t kMaxPasses = 4;
+ // To avoid downscaling artifacts, we interpolate the blurred fbo with the full composited
+ // image, up to this radius.
+ static constexpr float kMaxCrossFadeRadius = 30.0f;
+
+ explicit BlurFilter(GLESRenderEngine& engine);
+ virtual ~BlurFilter(){};
+
+ // Set up render targets, redirecting output to offscreen texture.
+ status_t setAsDrawTarget(const DisplaySettings&, uint32_t radius);
+ // Execute blur passes, rendering to offscreen texture.
+ status_t prepare();
+ // Render blur to the bound framebuffer (screen).
+ status_t render(bool multiPass);
+
+private:
+ // Blur radius (pixels) captured by the latest setAsDrawTarget() call.
+ uint32_t mRadius;
+ // Draws the fullscreen triangle with the given attribute locations.
+ void drawMesh(GLuint uv, GLuint position);
+ string getVertexShader() const;
+ string getFragmentShader() const;
+ string getMixFragShader() const;
+
+ GLESRenderEngine& mEngine;
+ // Frame buffer holding the composited background.
+ GLFramebuffer mCompositionFbo;
+ // Frame buffers holding the blur passes.
+ GLFramebuffer mPingFbo;
+ GLFramebuffer mPongFbo;
+ // Current display geometry; width/height only ever grow (see
+ // setAsDrawTarget()).
+ uint32_t mDisplayWidth = 0;
+ uint32_t mDisplayHeight = 0;
+ uint32_t mDisplayX = 0;
+ uint32_t mDisplayY = 0;
+ // Buffer holding the final blur pass.
+ GLFramebuffer* mLastDrawTarget;
+
+ // VBO containing vertex and uv data of a fullscreen triangle.
+ GLVertexBuffer mMeshBuffer;
+
+ // Crossfade program and its cached locations (M prefix).
+ GenericProgram mMixProgram;
+ GLuint mMPosLoc;
+ GLuint mMUvLoc;
+ GLuint mMMixLoc;
+ GLuint mMTextureLoc;
+ GLuint mMCompositionTextureLoc;
+
+ // Blur-pass program and its cached locations (B prefix).
+ GenericProgram mBlurProgram;
+ GLuint mBPosLoc;
+ GLuint mBUvLoc;
+ GLuint mBTextureLoc;
+ GLuint mBOffsetLoc;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
new file mode 100644
index 0000000..bb35889
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GenericProgram.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GenericProgram::GenericProgram(GLESRenderEngine& engine) : mEngine(engine) {}
+
+// Detaches (when a program exists) and deletes both shader objects, then the
+// program itself. Safe on a partially-initialized instance: every handle is
+// checked against 0 before any GL call is made.
+GenericProgram::~GenericProgram() {
+ if (mVertexShaderHandle != 0) {
+ if (mProgramHandle != 0) {
+ glDetachShader(mProgramHandle, mVertexShaderHandle);
+ }
+ glDeleteShader(mVertexShaderHandle);
+ }
+
+ if (mFragmentShaderHandle != 0) {
+ if (mProgramHandle != 0) {
+ glDetachShader(mProgramHandle, mFragmentShaderHandle);
+ }
+ glDeleteShader(mFragmentShaderHandle);
+ }
+
+ if (mProgramHandle != 0) {
+ glDeleteProgram(mProgramHandle);
+ }
+}
+
+// Compiles both stages and links them into mProgramHandle. If either stage
+// fails to compile we log and return early, leaving mProgramHandle at 0 so
+// isValid() reports failure.
+void GenericProgram::compile(string vertexShader, string fragmentShader) {
+ mVertexShaderHandle = compileShader(GL_VERTEX_SHADER, vertexShader);
+ mFragmentShaderHandle = compileShader(GL_FRAGMENT_SHADER, fragmentShader);
+ if (mVertexShaderHandle == 0 || mFragmentShaderHandle == 0) {
+ ALOGE("Aborting program creation.");
+ return;
+ }
+ mProgramHandle = createAndLink(mVertexShaderHandle, mFragmentShaderHandle);
+ mEngine.checkErrors("Linking program");
+}
+
+// Makes this program current. If compilation failed the handle is 0, which
+// unbinds any program instead.
+void GenericProgram::useProgram() const {
+ glUseProgram(mProgramHandle);
+}
+
+// Compiles a single shader stage and returns its GL handle, or 0 on failure
+// (in which case the error log is printed and the shader object deleted).
+//
+// type: GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
+// src:  GLSL source for the stage.
+GLuint GenericProgram::compileShader(GLuint type, string src) const {
+ const GLuint shader = glCreateShader(type);
+ if (shader == 0) {
+ mEngine.checkErrors("Creating shader");
+ return 0;
+ }
+ const GLchar* charSrc = (const GLchar*)src.c_str();
+ glShaderSource(shader, 1, &charSrc, nullptr);
+ glCompileShader(shader);
+
+ GLint isCompiled = 0;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &isCompiled);
+ if (isCompiled == GL_FALSE) {
+ GLint maxLength = 0;
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &maxLength);
+ // Must be resize(), not reserve(): reserve() only changes capacity,
+ // so writing through data() afterwards is undefined behavior and
+ // c_str() would still return an empty string for the log below.
+ string errorLog;
+ errorLog.resize(maxLength);
+ glGetShaderInfoLog(shader, maxLength, &maxLength, errorLog.data());
+ glDeleteShader(shader);
+ ALOGE("Error compiling shader: %s", errorLog.c_str());
+ return 0;
+ }
+ return shader;
+}
+// Creates a program object, attaches the two compiled shaders and links.
+// Returns the program handle on success. On link failure the info log is
+// printed, the program is deleted, and 0 is returned so that isValid()
+// correctly reports the program as unusable (the original code never
+// checked GL_LINK_STATUS, so link failures went unnoticed until draw time).
+GLuint GenericProgram::createAndLink(GLuint vertexShader, GLuint fragmentShader) const {
+ const GLuint program = glCreateProgram();
+ mEngine.checkErrors("Creating program");
+
+ glAttachShader(program, vertexShader);
+ glAttachShader(program, fragmentShader);
+ glLinkProgram(program);
+
+ GLint isLinked = 0;
+ glGetProgramiv(program, GL_LINK_STATUS, &isLinked);
+ if (isLinked == GL_FALSE) {
+ GLint maxLength = 0;
+ glGetProgramiv(program, GL_INFO_LOG_LENGTH, &maxLength);
+ string errorLog;
+ errorLog.resize(maxLength);
+ glGetProgramInfoLog(program, maxLength, &maxLength, errorLog.data());
+ ALOGE("Error linking program: %s", errorLog.c_str());
+ glDeleteProgram(program);
+ return 0;
+ }
+
+ mEngine.checkErrors("Linking program");
+ return program;
+}
+
+// Looks up a uniform location by name on the linked program.
+// NOTE(review): glGetUniformLocation returns GLint and -1 on failure; the
+// GLuint return type here wraps that to 0xFFFFFFFF and callers never check
+// it — consider changing the return type to GLint.
+GLuint GenericProgram::getUniformLocation(const string name) const {
+ if (mProgramHandle == 0) {
+ ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+ return -1;
+ }
+ return glGetUniformLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+// Looks up a vertex attribute location by name on the linked program.
+// NOTE(review): same GLint/-1-wrapped-into-GLuint caveat as
+// getUniformLocation() above.
+GLuint GenericProgram::getAttributeLocation(const string name) const {
+ if (mProgramHandle == 0) {
+ ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+ return -1;
+ }
+ return glGetAttribLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+// A handle of 0 means compile() never produced a usable program.
+bool GenericProgram::isValid() const {
+ return mProgramHandle != 0;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.h b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
new file mode 100644
index 0000000..6da2a5a
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+// Thin RAII wrapper around a GL shader program: compiles, links, exposes
+// attribute/uniform lookups, and releases all GL objects on destruction.
+class GenericProgram {
+public:
+ explicit GenericProgram(GLESRenderEngine& renderEngine);
+ ~GenericProgram();
+ // Compiles and links both stages; check isValid() afterwards.
+ void compile(string vertexShader, string fragmentShader);
+ // True once compile() has produced a linked program.
+ bool isValid() const;
+ // Binds the program for subsequent draw calls.
+ void useProgram() const;
+ GLuint getAttributeLocation(const string name) const;
+ GLuint getUniformLocation(const string name) const;
+
+private:
+ // Compiles one stage; returns the shader handle or 0 on error.
+ GLuint compileShader(GLuint type, const string src) const;
+ // Links the two stages into a program object.
+ GLuint createAndLink(GLuint vertexShader, GLuint fragmentShader) const;
+
+ GLESRenderEngine& mEngine;
+ // All handles are 0 until compile() succeeds.
+ GLuint mVertexShaderHandle = 0;
+ GLuint mFragmentShaderHandle = 0;
+ GLuint mProgramHandle = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
new file mode 100644
index 0000000..ca16d2c
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// DisplaySettings contains the settings that are applicable when drawing all
+// layers for a given display.
+// NOTE: keep operator==/PrintTo below in sync when adding fields.
+struct DisplaySettings {
+ // Rectangle describing the physical display. We will project from the
+ // logical clip onto this rectangle.
+ Rect physicalDisplay = Rect::INVALID_RECT;
+
+ // Rectangle bounded by the x,y- clipping planes in the logical display, so
+ // that the orthographic projection matrix can be computed. When
+ // constructing this matrix, z-coordinate bound are assumed to be at z=0 and
+ // z=1.
+ Rect clip = Rect::INVALID_RECT;
+
+ // Maximum luminance pulled from the display's HDR capabilities.
+ float maxLuminance = 1.0f;
+
+ // Output dataspace that will be populated if wide color gamut is used, or
+ // DataSpace::UNKNOWN otherwise.
+ ui::Dataspace outputDataspace = ui::Dataspace::UNKNOWN;
+
+ // Additional color transform to apply in linear space after transforming
+ // to the output dataspace.
+ mat4 colorTransform = mat4();
+
+ // Region that will be cleared to (0, 0, 0, 1) prior to rendering.
+ // This is specified in layer-stack space.
+ Region clearRegion = Region::INVALID_REGION;
+
+ // An additional orientation flag to be applied after clipping the output.
+ // By way of example, this may be used for supporting fullscreen screenshot
+ // capture of a device in landscape while the buffer is in portrait
+ // orientation. Holds ui::Transform rotation flags (ROT_0, ROT_90, ...).
+ uint32_t orientation = ui::Transform::ROT_0;
+};
+
+// Field-wise equality. clearRegion is compared via hasSameRects(), which
+// checks rectangle geometry rather than object identity.
+static inline bool operator==(const DisplaySettings& lhs, const DisplaySettings& rhs) {
+ return lhs.physicalDisplay == rhs.physicalDisplay && lhs.clip == rhs.clip &&
+ lhs.maxLuminance == rhs.maxLuminance && lhs.outputDataspace == rhs.outputDataspace &&
+ lhs.colorTransform == rhs.colorTransform &&
+ lhs.clearRegion.hasSameRects(rhs.clearRegion) && lhs.orientation == rhs.orientation;
+}
+
+// Defining PrintTo helps with Google Tests.
+static inline void PrintTo(const DisplaySettings& settings, ::std::ostream* os) {
+ *os << "DisplaySettings {";
+ *os << "\n    .physicalDisplay = ";
+ PrintTo(settings.physicalDisplay, os);
+ *os << "\n    .clip = ";
+ PrintTo(settings.clip, os);
+ *os << "\n    .maxLuminance = " << settings.maxLuminance;
+ *os << "\n    .outputDataspace = ";
+ PrintTo(settings.outputDataspace, os);
+ *os << "\n    .colorTransform = " << settings.colorTransform;
+ *os << "\n    .clearRegion = ";
+ PrintTo(settings.clearRegion, os);
+ *os << "\n    .orientation = " << settings.orientation;
+ *os << "\n}";
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
new file mode 100644
index 0000000..6511127
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+// Abstract render target: backends implement this to wrap a native window
+// buffer as something RenderEngine can draw into.
+class Framebuffer {
+public:
+ virtual ~Framebuffer() = default;
+
+ // Points this framebuffer at the given native buffer; returns false if the
+ // buffer could not be bound.
+ virtual bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+ const bool useFramebufferCache) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Image.h b/media/libstagefright/renderfright/include/renderengine/Image.h
new file mode 100644
index 0000000..3bb4731
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Image.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+// Abstract source image: backends implement this to wrap a native window
+// buffer as a texture RenderEngine can sample from.
+class Image {
+public:
+ virtual ~Image() = default;
+ // Points this image at the given native buffer; returns false on failure.
+ virtual bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/LayerSettings.h b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
new file mode 100644
index 0000000..95e9367
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
@@ -0,0 +1,258 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <math/vec3.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/FloatRect.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// Metadata describing the input buffer to render from.
+// NOTE: keep the operator== definitions at the bottom of this file in sync
+// when adding fields to any of these structs.
+struct Buffer {
+ // Buffer containing the image that we will render.
+ // If buffer == nullptr, then the rest of the fields in this struct will be
+ // ignored.
+ sp<GraphicBuffer> buffer = nullptr;
+
+ // Fence that will fire when the buffer is ready to be bound.
+ sp<Fence> fence = nullptr;
+
+ // Texture identifier to bind the external texture to.
+ // TODO(alecmouri): This is GL-specific...make the type backend-agnostic.
+ uint32_t textureName = 0;
+
+ // Whether to use filtering when rendering the texture.
+ bool useTextureFiltering = false;
+
+ // Transform matrix to apply to texture coordinates.
+ mat4 textureTransform = mat4();
+
+ // Whether to use pre-multiplied alpha.
+ bool usePremultipliedAlpha = true;
+
+ // Override flag that alpha for each pixel in the buffer *must* be 1.0.
+ // LayerSettings::alpha is still used if isOpaque==true - this flag only
+ // overrides the alpha channel of the buffer.
+ bool isOpaque = false;
+
+ // HDR color-space setting for Y410.
+ bool isY410BT2020 = false;
+ // Luminance metadata; 0.0 means "not provided".
+ float maxMasteringLuminance = 0.0;
+ float maxContentLuminance = 0.0;
+};
+
+// Metadata describing the layer geometry.
+struct Geometry {
+ // Boundaries of the layer.
+ FloatRect boundaries = FloatRect();
+
+ // Transform matrix to apply to mesh coordinates.
+ mat4 positionTransform = mat4();
+
+ // Radius of rounded corners, if greater than 0. Otherwise, this layer's
+ // corners are not rounded.
+ // Having corner radius will force GPU composition on the layer and its children, drawing it
+ // with a special shader. The shader will receive the radius and the crop rectangle as input,
+ // modifying the opacity of the destination texture, multiplying it by a number between 0 and 1.
+ // We query Layer#getRoundedCornerState() to retrieve the radius as well as the rounded crop
+ // rectangle to figure out how to apply the radius for this layer. The crop rectangle will be
+ // in local layer coordinate space, so we have to take the layer transform into account when
+ // walking up the tree.
+ float roundedCornersRadius = 0.0;
+
+ // Rectangle within which corners will be rounded.
+ FloatRect roundedCornersCrop = FloatRect();
+};
+
+// Descriptor of the source pixels for this layer.
+struct PixelSource {
+ // Source buffer
+ Buffer buffer = Buffer();
+
+ // The solid color with which to fill the layer.
+ // This should only be populated if we don't render from an application
+ // buffer.
+ half3 solidColor = half3(0.0f, 0.0f, 0.0f);
+};
+
+/*
+ * Contains the configuration for the shadows drawn by single layer. Shadow follows
+ * material design guidelines.
+ */
+struct ShadowSettings {
+ // Color to the ambient shadow. The alpha is premultiplied.
+ vec4 ambientColor = vec4();
+
+ // Color to the spot shadow. The alpha is premultiplied. The position of the spot shadow
+ // depends on the light position.
+ vec4 spotColor = vec4();
+
+ // Position of the light source used to cast the spot shadow.
+ vec3 lightPos = vec3();
+
+ // Radius of the spot light source. Smaller radius will have sharper edges,
+ // larger radius will have softer shadows
+ float lightRadius = 0.f;
+
+ // Length of the cast shadow. If length is <= 0.f no shadows will be drawn.
+ float length = 0.f;
+
+ // If true fill in the casting layer is translucent and the shadow needs to fill the bounds.
+ // Otherwise the shadow will only be drawn around the edges of the casting layer.
+ bool casterIsTranslucent = false;
+};
+
+// The settings that RenderEngine requires for correctly rendering a Layer.
+struct LayerSettings {
+ // Geometry information
+ Geometry geometry = Geometry();
+
+ // Source pixels for this layer.
+ PixelSource source = PixelSource();
+
+ // Alpha option to blend with the source pixels
+ half alpha = half(0.0);
+
+ // Color space describing how the source pixels should be interpreted.
+ ui::Dataspace sourceDataspace = ui::Dataspace::UNKNOWN;
+
+ // Additional layer-specific color transform to be applied before the global
+ // transform.
+ mat4 colorTransform = mat4();
+
+ // True if blending will be forced to be disabled.
+ bool disableBlending = false;
+
+ // Shadow configuration for this layer; see ShadowSettings above.
+ ShadowSettings shadow;
+
+ // Blur radius for the content behind this layer; 0 means no background
+ // blur. (Presumably in pixels — confirm against the blur filter callers.)
+ int backgroundBlurRadius = 0;
+};
+
+// Keep in sync with custom comparison function in
+// compositionengine/impl/ClientCompositionRequestCache.cpp
+// Note: buffer and fence are compared by sp<> pointer identity, not by
+// content.
+static inline bool operator==(const Buffer& lhs, const Buffer& rhs) {
+ return lhs.buffer == rhs.buffer && lhs.fence == rhs.fence &&
+ lhs.textureName == rhs.textureName &&
+ lhs.useTextureFiltering == rhs.useTextureFiltering &&
+ lhs.textureTransform == rhs.textureTransform &&
+ lhs.usePremultipliedAlpha == rhs.usePremultipliedAlpha &&
+ lhs.isOpaque == rhs.isOpaque && lhs.isY410BT2020 == rhs.isY410BT2020 &&
+ lhs.maxMasteringLuminance == rhs.maxMasteringLuminance &&
+ lhs.maxContentLuminance == rhs.maxContentLuminance;
+}
+
+// Field-wise equality for Geometry.
+static inline bool operator==(const Geometry& lhs, const Geometry& rhs) {
+ return lhs.boundaries == rhs.boundaries && lhs.positionTransform == rhs.positionTransform &&
+ lhs.roundedCornersRadius == rhs.roundedCornersRadius &&
+ lhs.roundedCornersCrop == rhs.roundedCornersCrop;
+}
+
+// Field-wise equality for PixelSource.
+static inline bool operator==(const PixelSource& lhs, const PixelSource& rhs) {
+ return lhs.buffer == rhs.buffer && lhs.solidColor == rhs.solidColor;
+}
+
+// Field-wise equality for ShadowSettings.
+static inline bool operator==(const ShadowSettings& lhs, const ShadowSettings& rhs) {
+ return lhs.ambientColor == rhs.ambientColor && lhs.spotColor == rhs.spotColor &&
+ lhs.lightPos == rhs.lightPos && lhs.lightRadius == rhs.lightRadius &&
+ lhs.length == rhs.length && lhs.casterIsTranslucent == rhs.casterIsTranslucent;
+}
+
+// Field-wise equality for LayerSettings, delegating to the operators above.
+static inline bool operator==(const LayerSettings& lhs, const LayerSettings& rhs) {
+ return lhs.geometry == rhs.geometry && lhs.source == rhs.source && lhs.alpha == rhs.alpha &&
+ lhs.sourceDataspace == rhs.sourceDataspace &&
+ lhs.colorTransform == rhs.colorTransform &&
+ lhs.disableBlending == rhs.disableBlending && lhs.shadow == rhs.shadow &&
+ lhs.backgroundBlurRadius == rhs.backgroundBlurRadius;
+}
+
+// Defining PrintTo helps with Google Tests.
+// These pretty-printers mirror the struct fields one-to-one; keep them in
+// sync with the struct definitions above.
+
+static inline void PrintTo(const Buffer& settings, ::std::ostream* os) {
+ *os << "Buffer {";
+ *os << "\n    .buffer = " << settings.buffer.get();
+ *os << "\n    .fence = " << settings.fence.get();
+ *os << "\n    .textureName = " << settings.textureName;
+ *os << "\n    .useTextureFiltering = " << settings.useTextureFiltering;
+ *os << "\n    .textureTransform = " << settings.textureTransform;
+ *os << "\n    .usePremultipliedAlpha = " << settings.usePremultipliedAlpha;
+ *os << "\n    .isOpaque = " << settings.isOpaque;
+ *os << "\n    .isY410BT2020 = " << settings.isY410BT2020;
+ *os << "\n    .maxMasteringLuminance = " << settings.maxMasteringLuminance;
+ *os << "\n    .maxContentLuminance = " << settings.maxContentLuminance;
+ *os << "\n}";
+}
+
+static inline void PrintTo(const Geometry& settings, ::std::ostream* os) {
+ *os << "Geometry {";
+ *os << "\n    .boundaries = ";
+ PrintTo(settings.boundaries, os);
+ *os << "\n    .positionTransform = " << settings.positionTransform;
+ *os << "\n    .roundedCornersRadius = " << settings.roundedCornersRadius;
+ *os << "\n    .roundedCornersCrop = ";
+ PrintTo(settings.roundedCornersCrop, os);
+ *os << "\n}";
+}
+
+static inline void PrintTo(const PixelSource& settings, ::std::ostream* os) {
+ *os << "PixelSource {";
+ *os << "\n    .buffer = ";
+ PrintTo(settings.buffer, os);
+ *os << "\n    .solidColor = " << settings.solidColor;
+ *os << "\n}";
+}
+
+static inline void PrintTo(const ShadowSettings& settings, ::std::ostream* os) {
+ *os << "ShadowSettings {";
+ *os << "\n    .ambientColor = " << settings.ambientColor;
+ *os << "\n    .spotColor = " << settings.spotColor;
+ *os << "\n    .lightPos = " << settings.lightPos;
+ *os << "\n    .lightRadius = " << settings.lightRadius;
+ *os << "\n    .length = " << settings.length;
+ *os << "\n    .casterIsTranslucent = " << settings.casterIsTranslucent;
+ *os << "\n}";
+}
+
+static inline void PrintTo(const LayerSettings& settings, ::std::ostream* os) {
+ *os << "LayerSettings {";
+ *os << "\n    .geometry = ";
+ PrintTo(settings.geometry, os);
+ *os << "\n    .source = ";
+ PrintTo(settings.source, os);
+ *os << "\n    .alpha = " << settings.alpha;
+ *os << "\n    .sourceDataspace = ";
+ PrintTo(settings.sourceDataspace, os);
+ *os << "\n    .colorTransform = " << settings.colorTransform;
+ *os << "\n    .disableBlending = " << settings.disableBlending;
+ *os << "\n    .backgroundBlurRadius = " << settings.backgroundBlurRadius;
+ *os << "\n    .shadow = ";
+ PrintTo(settings.shadow, os);
+ *os << "\n}";
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Mesh.h b/media/libstagefright/renderfright/include/renderengine/Mesh.h
new file mode 100644
index 0000000..167f13f
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Mesh.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_MESH_H
+#define SF_RENDER_ENGINE_MESH_H
+
+#include <vector>
+
+#include <stdint.h>
+
+namespace android {
+namespace renderengine {
+
+class Mesh {
+public:
+ class Builder;
+
+ enum Primitive {
+ TRIANGLES = 0x0004, // GL_TRIANGLES
+ TRIANGLE_STRIP = 0x0005, // GL_TRIANGLE_STRIP
+ TRIANGLE_FAN = 0x0006 // GL_TRIANGLE_FAN
+ };
+
+ ~Mesh() = default;
+
+ /*
+ * VertexArray handles the stride automatically.
+ */
+ template <typename TYPE>
+ class VertexArray {
+ friend class Mesh;
+ float* mData;
+ size_t mStride;
+ size_t mOffset = 0;
+ VertexArray(float* data, size_t stride) : mData(data), mStride(stride) {}
+
+ public:
+ // Returns a vertex array at an offset so it's easier to append attributes from
+ // multiple sources.
+ VertexArray(VertexArray<TYPE>& other, size_t offset)
+ : mData(other.mData), mStride(other.mStride), mOffset(offset) {}
+
+ TYPE& operator[](size_t index) {
+ return *reinterpret_cast<TYPE*>(&mData[(index + mOffset) * mStride]);
+ }
+ TYPE const& operator[](size_t index) const {
+ return *reinterpret_cast<TYPE const*>(&mData[(index + mOffset) * mStride]);
+ }
+ };
+
+ template <typename TYPE>
+ VertexArray<TYPE> getPositionArray() {
+ return VertexArray<TYPE>(getPositions(), mStride);
+ }
+
+ template <typename TYPE>
+ VertexArray<TYPE> getTexCoordArray() {
+ return VertexArray<TYPE>(getTexCoords(), mStride);
+ }
+
+ template <typename TYPE>
+ VertexArray<TYPE> getCropCoordArray() {
+ return VertexArray<TYPE>(getCropCoords(), mStride);
+ }
+
+ template <typename TYPE>
+ VertexArray<TYPE> getShadowColorArray() {
+ return VertexArray<TYPE>(getShadowColor(), mStride);
+ }
+
+ template <typename TYPE>
+ VertexArray<TYPE> getShadowParamsArray() {
+ return VertexArray<TYPE>(getShadowParams(), mStride);
+ }
+
+ uint16_t* getIndicesArray() { return getIndices(); }
+
+ Primitive getPrimitive() const;
+
+ // returns a pointer to the vertices positions
+ float const* getPositions() const;
+
+ // returns a pointer to the vertices texture coordinates
+ float const* getTexCoords() const;
+
+ // returns a pointer to the vertices crop coordinates
+ float const* getCropCoords() const;
+
+ // returns a pointer to colors
+ float const* getShadowColor() const;
+
+ // returns a pointer to the shadow params
+ float const* getShadowParams() const;
+
+ // returns a pointer to indices
+ uint16_t const* getIndices() const;
+
+ // number of vertices in this mesh
+ size_t getVertexCount() const;
+
+ // dimension of vertices
+ size_t getVertexSize() const;
+
+ // dimension of texture coordinates
+ size_t getTexCoordsSize() const;
+
+ size_t getShadowParamsSize() const;
+
+ size_t getShadowColorSize() const;
+
+ size_t getIndexCount() const;
+
+ // return stride in bytes
+ size_t getByteStride() const;
+
+ // return stride in floats
+ size_t getStride() const;
+
+private:
+ Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+ size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize, size_t indexCount);
+ Mesh(const Mesh&);
+ Mesh& operator=(const Mesh&);
+ Mesh const& operator=(const Mesh&) const;
+
+ float* getPositions();
+ float* getTexCoords();
+ float* getCropCoords();
+ float* getShadowColor();
+ float* getShadowParams();
+ uint16_t* getIndices();
+
+ std::vector<float> mVertices;
+ size_t mVertexCount;
+ size_t mVertexSize;
+ size_t mTexCoordsSize;
+ size_t mCropCoordsSize;
+ size_t mShadowColorSize;
+ size_t mShadowParamsSize;
+ size_t mStride;
+ Primitive mPrimitive;
+ std::vector<uint16_t> mIndices;
+ size_t mIndexCount;
+};
+
+class Mesh::Builder {
+public:
+ Builder& setPrimitive(Primitive primitive) {
+ mPrimitive = primitive;
+ return *this;
+ };
+ Builder& setVertices(size_t vertexCount, size_t vertexSize) {
+ mVertexCount = vertexCount;
+ mVertexSize = vertexSize;
+ return *this;
+ };
+ Builder& setTexCoords(size_t texCoordsSize) {
+ mTexCoordsSize = texCoordsSize;
+ return *this;
+ };
+ Builder& setCropCoords(size_t cropCoordsSize) {
+ mCropCoordsSize = cropCoordsSize;
+ return *this;
+ };
+ Builder& setShadowAttrs() {
+ mShadowParamsSize = 3;
+ mShadowColorSize = 4;
+ return *this;
+ };
+ Builder& setIndices(size_t indexCount) {
+ mIndexCount = indexCount;
+ return *this;
+ };
+ Mesh build() const {
+ return Mesh{mPrimitive, mVertexCount, mVertexSize, mTexCoordsSize,
+ mCropCoordsSize, mShadowColorSize, mShadowParamsSize, mIndexCount};
+ }
+
+private:
+ size_t mVertexCount = 0;
+ size_t mVertexSize = 0;
+ size_t mTexCoordsSize = 0;
+ size_t mCropCoordsSize = 0;
+ size_t mShadowColorSize = 0;
+ size_t mShadowParamsSize = 0;
+ size_t mIndexCount = 0;
+ Primitive mPrimitive;
+};
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_MESH_H */
diff --git a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
new file mode 100644
index 0000000..40fdff4
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDERENGINE_H_
+#define SF_RENDERENGINE_H_
+
+#include <stdint.h>
+#include <sys/types.h>
+#include <memory>
+
+#include <android-base/unique_fd.h>
+#include <math/mat4.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/Framebuffer.h>
+#include <renderengine/Image.h>
+#include <renderengine/LayerSettings.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Transform.h>
+
+/**
+ * Allows setting the RenderEngine backend to GLES (default) or Vulkan (NOT yet supported).
+ */
+#define PROPERTY_DEBUG_RENDERENGINE_BACKEND "debug.stagefright.renderengine.backend"
+
+struct ANativeWindowBuffer;
+
+namespace android {
+
+class Rect;
+class Region;
+
+namespace renderengine {
+
+class BindNativeBufferAsFramebuffer;
+class Image;
+class Mesh;
+class Texture;
+struct RenderEngineCreationArgs;
+
+namespace threaded {
+class RenderEngineThreaded;
+}
+
+namespace impl {
+class RenderEngine;
+}
+
+enum class Protection {
+ UNPROTECTED = 1,
+ PROTECTED = 2,
+};
+
+class RenderEngine {
+public:
+ enum class ContextPriority {
+ LOW = 1,
+ MEDIUM = 2,
+ HIGH = 3,
+ };
+
+ enum class RenderEngineType {
+ GLES = 1,
+ THREADED = 2,
+ };
+
+ static std::unique_ptr<RenderEngine> create(const RenderEngineCreationArgs& args);
+
+ virtual ~RenderEngine() = 0;
+
+ // ----- BEGIN DEPRECATED INTERFACE -----
+ // This interface, while still in use until a suitable replacement is built,
+ // should be considered deprecated, minus some methods which still may be
+ // used to support legacy behavior.
+ virtual void primeCache() const = 0;
+
+ // dump the extension strings. always call the base class.
+ virtual void dump(std::string& result) = 0;
+
+ virtual bool useNativeFenceSync() const = 0;
+ virtual bool useWaitSync() const = 0;
+ virtual void genTextures(size_t count, uint32_t* names) = 0;
+ virtual void deleteTextures(size_t count, uint32_t const* names) = 0;
+ virtual void bindExternalTextureImage(uint32_t texName, const Image& image) = 0;
+ // Legacy public method used by devices that don't support native fence
+ // synchronization in their GPU driver, as this method provides implicit
+ // synchronization for latching buffers.
+ virtual status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+ const sp<Fence>& fence) = 0;
+ // Caches Image resources for this buffer, but does not bind the buffer to
+ // a particular texture.
+ // Note that work is deferred to an additional thread, i.e. this call
+ // is made asynchronously, but the caller can expect that cache/unbind calls
+ // are performed in a manner that's conflict serializable, i.e. unbinding
+ // a buffer should never occur before binding the buffer if the caller
+ // called {bind, cache}ExternalTextureBuffer before calling unbind.
+ virtual void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) = 0;
+ // Removes internal resources referenced by the bufferId. This method should be
+ // invoked when the caller will no longer hold a reference to a GraphicBuffer
+ // and needs to clean up its resources.
+ // Note that work is deferred to an additional thread, i.e. this call
+ // is made asynchronously, but the caller can expect that cache/unbind calls
+ // are performed in a manner that's conflict serializable, i.e. unbinding
+ // a buffer should never occur before binding the buffer if the caller
+ // called {bind, cache}ExternalTextureBuffer before calling unbind.
+ virtual void unbindExternalTextureBuffer(uint64_t bufferId) = 0;
+ // When binding a native buffer, it must be done before setViewportAndProjection
+ // Returns NO_ERROR when binds successfully, NO_MEMORY when there's no memory for allocation.
+ virtual status_t bindFrameBuffer(Framebuffer* framebuffer) = 0;
+ virtual void unbindFrameBuffer(Framebuffer* framebuffer) = 0;
+
+ enum class CleanupMode {
+ CLEAN_OUTPUT_RESOURCES,
+ CLEAN_ALL,
+ };
+ // Clean-up method that should be called on the main thread after the
+ // drawFence returned by drawLayers fires. This method will free up
+ // resources used by the most recently drawn frame. If the frame is still
+ // being drawn, then this call is silently ignored.
+ //
+ // If mode is CLEAN_OUTPUT_RESOURCES, then only resources related to the
+ // output framebuffer are cleaned up, including the sibling texture.
+ //
+ // If mode is CLEAN_ALL, then we also cleanup resources related to any input
+ // buffers.
+ //
+ // Returns true if resources were cleaned up, and false if we didn't need to
+ // do any work.
+ virtual bool cleanupPostRender(CleanupMode mode = CleanupMode::CLEAN_OUTPUT_RESOURCES) = 0;
+
+ // queries
+ virtual size_t getMaxTextureSize() const = 0;
+ virtual size_t getMaxViewportDims() const = 0;
+
+ // ----- END DEPRECATED INTERFACE -----
+
+ // ----- BEGIN NEW INTERFACE -----
+
+ virtual bool isProtected() const = 0;
+ virtual bool supportsProtectedContent() const = 0;
+ virtual bool useProtectedContext(bool useProtectedContext) = 0;
+
+ // Renders layers for a particular display via GPU composition. This method
+ // should be called for every display that needs to be rendered via the GPU.
+ // @param display The display-wide settings that should be applied prior to
+ // drawing any layers.
+ //
+ // Assumptions when calling this method:
+ // 1. There is exactly one caller - i.e. multi-threading is not supported.
+ // 2. Additional threads may be calling the {bind,cache}ExternalTexture
+ // methods above. But the main thread is responsible for holding resources
+ // such that Image destruction does not occur while this method is called.
+ //
+ // TODO(b/136806342): This behavior should ideally be fixed since
+ // the above two assumptions are brittle, as conditional thread safety
+ // may be insufficient when maximizing rendering performance in the future.
+ //
+ // @param layers The layers to draw onto the display, in Z-order.
+ // @param buffer The buffer which will be drawn to. This buffer will be
+ // ready once drawFence fires.
+ // @param useFramebufferCache True if the framebuffer cache should be used.
+ // If an implementation does not cache output framebuffers, then this
+ // parameter does nothing.
+ // @param bufferFence Fence signalling that the buffer is ready to be drawn
+ // to.
+ // @param drawFence A pointer to a fence, which will fire when the buffer
+ // has been drawn to and is ready to be examined. The fence will be
+ // initialized by this method. The caller will be responsible for owning the
+ // fence.
+ // @return An error code indicating whether drawing was successful. For
+ // now, this always returns NO_ERROR.
+ virtual status_t drawLayers(const DisplaySettings& display,
+ const std::vector<const LayerSettings*>& layers,
+ const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+ base::unique_fd&& bufferFence, base::unique_fd* drawFence) = 0;
+
+protected:
+ // Gets a framebuffer to render to. This framebuffer may or may not be
+ // cached depending on the implementation.
+ //
+ // Note that this method does not transfer ownership, so the caller must not
+ // live longer than RenderEngine.
+ virtual Framebuffer* getFramebufferForDrawing() = 0;
+ friend class BindNativeBufferAsFramebuffer;
+ friend class threaded::RenderEngineThreaded;
+};
+
+struct RenderEngineCreationArgs {
+ int pixelFormat;
+ uint32_t imageCacheSize;
+ bool useColorManagement;
+ bool enableProtectedContext;
+ bool precacheToneMapperShaderOnly;
+ bool supportsBackgroundBlur;
+ RenderEngine::ContextPriority contextPriority;
+ RenderEngine::RenderEngineType renderEngineType;
+
+ struct Builder;
+
+private:
+ // must be created by Builder via constructor with full argument list
+ RenderEngineCreationArgs(int _pixelFormat, uint32_t _imageCacheSize, bool _useColorManagement,
+ bool _enableProtectedContext, bool _precacheToneMapperShaderOnly,
+ bool _supportsBackgroundBlur,
+ RenderEngine::ContextPriority _contextPriority,
+ RenderEngine::RenderEngineType _renderEngineType)
+ : pixelFormat(_pixelFormat),
+ imageCacheSize(_imageCacheSize),
+ useColorManagement(_useColorManagement),
+ enableProtectedContext(_enableProtectedContext),
+ precacheToneMapperShaderOnly(_precacheToneMapperShaderOnly),
+ supportsBackgroundBlur(_supportsBackgroundBlur),
+ contextPriority(_contextPriority),
+ renderEngineType(_renderEngineType) {}
+ RenderEngineCreationArgs() = delete;
+};
+
+struct RenderEngineCreationArgs::Builder {
+ Builder() {}
+
+ Builder& setPixelFormat(int pixelFormat) {
+ this->pixelFormat = pixelFormat;
+ return *this;
+ }
+ Builder& setImageCacheSize(uint32_t imageCacheSize) {
+ this->imageCacheSize = imageCacheSize;
+ return *this;
+ }
+ Builder& setUseColorManagerment(bool useColorManagement) {
+ this->useColorManagement = useColorManagement;
+ return *this;
+ }
+ Builder& setEnableProtectedContext(bool enableProtectedContext) {
+ this->enableProtectedContext = enableProtectedContext;
+ return *this;
+ }
+ Builder& setPrecacheToneMapperShaderOnly(bool precacheToneMapperShaderOnly) {
+ this->precacheToneMapperShaderOnly = precacheToneMapperShaderOnly;
+ return *this;
+ }
+ Builder& setSupportsBackgroundBlur(bool supportsBackgroundBlur) {
+ this->supportsBackgroundBlur = supportsBackgroundBlur;
+ return *this;
+ }
+ Builder& setContextPriority(RenderEngine::ContextPriority contextPriority) {
+ this->contextPriority = contextPriority;
+ return *this;
+ }
+ Builder& setRenderEngineType(RenderEngine::RenderEngineType renderEngineType) {
+ this->renderEngineType = renderEngineType;
+ return *this;
+ }
+ RenderEngineCreationArgs build() const {
+ return RenderEngineCreationArgs(pixelFormat, imageCacheSize, useColorManagement,
+ enableProtectedContext, precacheToneMapperShaderOnly,
+ supportsBackgroundBlur, contextPriority, renderEngineType);
+ }
+
+private:
+ // 1 means RGBA_8888
+ int pixelFormat = 1;
+ uint32_t imageCacheSize = 0;
+ bool useColorManagement = true;
+ bool enableProtectedContext = false;
+ bool precacheToneMapperShaderOnly = false;
+ bool supportsBackgroundBlur = false;
+ RenderEngine::ContextPriority contextPriority = RenderEngine::ContextPriority::MEDIUM;
+ RenderEngine::RenderEngineType renderEngineType = RenderEngine::RenderEngineType::GLES;
+};
+
+class BindNativeBufferAsFramebuffer {
+public:
+ BindNativeBufferAsFramebuffer(RenderEngine& engine, ANativeWindowBuffer* buffer,
+ const bool useFramebufferCache)
+ : mEngine(engine), mFramebuffer(mEngine.getFramebufferForDrawing()), mStatus(NO_ERROR) {
+ mStatus = mFramebuffer->setNativeWindowBuffer(buffer, mEngine.isProtected(),
+ useFramebufferCache)
+ ? mEngine.bindFrameBuffer(mFramebuffer)
+ : NO_MEMORY;
+ }
+ ~BindNativeBufferAsFramebuffer() {
+ mFramebuffer->setNativeWindowBuffer(nullptr, false, /*arbitrary*/ true);
+ mEngine.unbindFrameBuffer(mFramebuffer);
+ }
+ status_t getStatus() const { return mStatus; }
+
+private:
+ RenderEngine& mEngine;
+ Framebuffer* mFramebuffer;
+ status_t mStatus;
+};
+
+namespace impl {
+
+// impl::RenderEngine contains common implementation that is graphics back-end agnostic.
+class RenderEngine : public renderengine::RenderEngine {
+public:
+ virtual ~RenderEngine() = 0;
+
+ bool useNativeFenceSync() const override;
+ bool useWaitSync() const override;
+
+protected:
+ RenderEngine(const RenderEngineCreationArgs& args);
+ const RenderEngineCreationArgs mArgs;
+};
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/include/renderengine/Texture.h b/media/libstagefright/renderfright/include/renderengine/Texture.h
new file mode 100644
index 0000000..c69ace0
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Texture.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_TEXTURE_H
+#define SF_RENDER_ENGINE_TEXTURE_H
+
+#include <stdint.h>
+
+#include <math/mat4.h>
+
+namespace android {
+namespace renderengine {
+
+class Texture {
+public:
+ enum Target { TEXTURE_2D = 0x0DE1, TEXTURE_EXTERNAL = 0x8D65 };
+
+ Texture();
+ Texture(Target textureTarget, uint32_t textureName);
+ ~Texture();
+
+ void init(Target textureTarget, uint32_t textureName);
+
+ void setMatrix(float const* matrix);
+ void setFiltering(bool enabled);
+ void setDimensions(size_t width, size_t height);
+
+ uint32_t getTextureName() const;
+ uint32_t getTextureTarget() const;
+
+ const mat4& getMatrix() const;
+ bool getFiltering() const;
+ size_t getWidth() const;
+ size_t getHeight() const;
+
+private:
+ uint32_t mTextureName;
+ uint32_t mTextureTarget;
+ size_t mWidth;
+ size_t mHeight;
+ bool mFiltering;
+ mat4 mTextureMatrix;
+};
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_TEXTURE_H */
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
new file mode 100644
index 0000000..dfb6a4e
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Framebuffer : public renderengine::Framebuffer {
+public:
+ Framebuffer();
+ ~Framebuffer() override;
+
+ MOCK_METHOD3(setNativeWindowBuffer, bool(ANativeWindowBuffer*, bool, const bool));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Image.h b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
new file mode 100644
index 0000000..2b0eed1
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Image : public renderengine::Image {
+public:
+ Image();
+ ~Image() override;
+
+ MOCK_METHOD2(setNativeWindowBuffer, bool(ANativeWindowBuffer* buffer, bool isProtected));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
new file mode 100644
index 0000000..e03dd58
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/LayerSettings.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Region.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class RenderEngine : public renderengine::RenderEngine {
+public:
+ RenderEngine();
+ ~RenderEngine() override;
+
+ MOCK_METHOD0(getFramebufferForDrawing, Framebuffer*());
+ MOCK_CONST_METHOD0(primeCache, void());
+ MOCK_METHOD1(dump, void(std::string&));
+ MOCK_CONST_METHOD0(useNativeFenceSync, bool());
+ MOCK_CONST_METHOD0(useWaitSync, bool());
+ MOCK_CONST_METHOD0(isCurrent, bool());
+ MOCK_METHOD2(genTextures, void(size_t, uint32_t*));
+ MOCK_METHOD2(deleteTextures, void(size_t, uint32_t const*));
+ MOCK_METHOD2(bindExternalTextureImage, void(uint32_t, const renderengine::Image&));
+ MOCK_METHOD1(cacheExternalTextureBuffer, void(const sp<GraphicBuffer>&));
+ MOCK_METHOD3(bindExternalTextureBuffer,
+ status_t(uint32_t, const sp<GraphicBuffer>&, const sp<Fence>&));
+ MOCK_METHOD1(unbindExternalTextureBuffer, void(uint64_t));
+ MOCK_METHOD1(bindFrameBuffer, status_t(renderengine::Framebuffer*));
+ MOCK_METHOD1(unbindFrameBuffer, void(renderengine::Framebuffer*));
+ MOCK_METHOD1(drawMesh, void(const renderengine::Mesh&));
+ MOCK_CONST_METHOD0(getMaxTextureSize, size_t());
+ MOCK_CONST_METHOD0(getMaxViewportDims, size_t());
+ MOCK_CONST_METHOD0(isProtected, bool());
+ MOCK_CONST_METHOD0(supportsProtectedContent, bool());
+ MOCK_METHOD1(useProtectedContext, bool(bool));
+ MOCK_METHOD1(cleanupPostRender, bool(CleanupMode mode));
+ MOCK_METHOD6(drawLayers,
+ status_t(const DisplaySettings&, const std::vector<const LayerSettings*>&,
+ const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+ base::unique_fd*));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/private/Description.h b/media/libstagefright/renderfright/include/renderengine/private/Description.h
new file mode 100644
index 0000000..a62161a
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/private/Description.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_DESCRIPTION_H_
+#define SF_RENDER_ENGINE_DESCRIPTION_H_
+
+#include <renderengine/Texture.h>
+#include <ui/GraphicTypes.h>
+
+namespace android {
+namespace renderengine {
+
+/*
+ * This is the structure that holds the state of the rendering engine.
+ * This class is used to generate a corresponding GLSL program and set the
+ * appropriate uniform.
+ */
+struct Description {
+ enum class TransferFunction : int {
+ LINEAR,
+ SRGB,
+ ST2084,
+ HLG, // Hybrid Log-Gamma for HDR.
+ };
+
+ static TransferFunction dataSpaceToTransferFunction(ui::Dataspace dataSpace);
+
+ Description() = default;
+ ~Description() = default;
+
+ bool hasInputTransformMatrix() const;
+ bool hasOutputTransformMatrix() const;
+ bool hasColorMatrix() const;
+
+ // whether textures are premultiplied
+ bool isPremultipliedAlpha = false;
+ // whether this layer is marked as opaque
+ bool isOpaque = true;
+
+ // corner radius of the layer
+ float cornerRadius = 0;
+
+ // Size of the rounded rectangle we are cropping to
+ half2 cropSize;
+
+ // Texture this layer uses
+ Texture texture;
+ bool textureEnabled = false;
+
+ // color used when texturing is disabled or when setting alpha.
+ half4 color;
+
+ // true if the sampled pixel values are in Y410/BT2020 rather than RGBA
+ bool isY410BT2020 = false;
+
+ // transfer functions for the input/output
+ TransferFunction inputTransferFunction = TransferFunction::LINEAR;
+ TransferFunction outputTransferFunction = TransferFunction::LINEAR;
+
+ float displayMaxLuminance;
+ float maxMasteringLuminance;
+ float maxContentLuminance;
+
+ // projection matrix
+ mat4 projectionMatrix;
+
+ // The color matrix will be applied in linear space right before OETF.
+ mat4 colorMatrix;
+ mat4 inputTransformMatrix;
+ mat4 outputTransformMatrix;
+
+ // True if this layer will draw a shadow.
+ bool drawShadows = false;
+};
+
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_DESCRIPTION_H_ */
diff --git a/media/libstagefright/renderfright/mock/Framebuffer.cpp b/media/libstagefright/renderfright/mock/Framebuffer.cpp
new file mode 100644
index 0000000..fbdcaab
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Framebuffer.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Framebuffer::Framebuffer() = default;
+Framebuffer::~Framebuffer() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/Image.cpp b/media/libstagefright/renderfright/mock/Image.cpp
new file mode 100644
index 0000000..57f4346
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Image.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Image::Image() = default;
+Image::~Image() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/RenderEngine.cpp b/media/libstagefright/renderfright/mock/RenderEngine.cpp
new file mode 100644
index 0000000..261636d
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/RenderEngine.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/RenderEngine.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+RenderEngine::RenderEngine() = default;
+RenderEngine::~RenderEngine() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/tests/Android.bp b/media/libstagefright/renderfright/tests/Android.bp
new file mode 100644
index 0000000..9fee646
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/Android.bp
@@ -0,0 +1,41 @@
+// Copyright 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_test {
+ name: "librenderfright_test",
+ defaults: ["surfaceflinger_defaults"],
+ test_suites: ["device-tests"],
+ srcs: [
+ "RenderEngineTest.cpp",
+ "RenderEngineThreadedTest.cpp",
+ ],
+ static_libs: [
+ "libgmock",
+ "librenderfright",
+ "librenderfright_mocks",
+ ],
+ shared_libs: [
+ "libbase",
+ "libcutils",
+ "libEGL",
+ "libGLESv2",
+ "libgui",
+ "liblog",
+ "libnativewindow",
+ "libprocessgroup",
+ "libsync",
+ "libui",
+ "libutils",
+ ],
+}
diff --git a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
new file mode 100644
index 0000000..730f606
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
@@ -0,0 +1,1469 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wconversion"
+
+#include <chrono>
+#include <condition_variable>
+#include <fstream>
+
+#include <cutils/properties.h>
+#include <gtest/gtest.h>
+#include <renderengine/RenderEngine.h>
+#include <sync/sync.h>
+#include <ui/PixelFormat.h>
+#include "../gl/GLESRenderEngine.h"
+#include "../threaded/RenderEngineThreaded.h"
+
+constexpr int DEFAULT_DISPLAY_WIDTH = 128;
+constexpr int DEFAULT_DISPLAY_HEIGHT = 256;
+constexpr int DEFAULT_DISPLAY_OFFSET = 64;
+constexpr bool WRITE_BUFFER_TO_FILE_ON_FAILURE = false;
+
+namespace android {
+
+struct RenderEngineTest : public ::testing::Test {
+ static void SetUpTestSuite() {
+ sRE = renderengine::gl::GLESRenderEngine::create(
+ renderengine::RenderEngineCreationArgs::Builder()
+ .setPixelFormat(static_cast<int>(ui::PixelFormat::RGBA_8888))
+ .setImageCacheSize(1)
+ .setUseColorManagerment(false)
+ .setEnableProtectedContext(false)
+ .setPrecacheToneMapperShaderOnly(false)
+ .setSupportsBackgroundBlur(true)
+ .setContextPriority(renderengine::RenderEngine::ContextPriority::MEDIUM)
+ .setRenderEngineType(renderengine::RenderEngine::RenderEngineType::GLES)
+ .build());
+ }
+
+ static void TearDownTestSuite() {
+ // The ordering here is important - sCurrentBuffer must live longer
+ // than RenderEngine to avoid a null reference on tear-down.
+ sRE = nullptr;
+ sCurrentBuffer = nullptr;
+ }
+
+ static sp<GraphicBuffer> allocateDefaultBuffer() {
+ return new GraphicBuffer(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
+ HAL_PIXEL_FORMAT_RGBA_8888, 1,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+ GRALLOC_USAGE_HW_RENDER,
+ "output");
+ }
+
+    // Allocates a width x height buffer to fill with a solid color
+ static sp<GraphicBuffer> allocateSourceBuffer(uint32_t width, uint32_t height) {
+ return new GraphicBuffer(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+ GRALLOC_USAGE_HW_TEXTURE,
+ "input");
+ }
+
+ RenderEngineTest() { mBuffer = allocateDefaultBuffer(); }
+
+ ~RenderEngineTest() {
+ if (WRITE_BUFFER_TO_FILE_ON_FAILURE && ::testing::Test::HasFailure()) {
+ writeBufferToFile("/data/texture_out_");
+ }
+ for (uint32_t texName : mTexNames) {
+ sRE->deleteTextures(1, &texName);
+ EXPECT_FALSE(sRE->isTextureNameKnownForTesting(texName));
+ }
+ }
+
+ void writeBufferToFile(const char* basename) {
+ std::string filename(basename);
+ filename.append(::testing::UnitTest::GetInstance()->current_test_info()->name());
+ filename.append(".ppm");
+ std::ofstream file(filename.c_str(), std::ios::binary);
+ if (!file.is_open()) {
+ ALOGE("Unable to open file: %s", filename.c_str());
+ ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+ "surfaceflinger to write debug images");
+ return;
+ }
+
+ uint8_t* pixels;
+ mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+
+ file << "P6\n";
+ file << mBuffer->getWidth() << "\n";
+ file << mBuffer->getHeight() << "\n";
+ file << 255 << "\n";
+
+ std::vector<uint8_t> outBuffer(mBuffer->getWidth() * mBuffer->getHeight() * 3);
+ auto outPtr = reinterpret_cast<uint8_t*>(outBuffer.data());
+
+ for (int32_t j = 0; j < mBuffer->getHeight(); j++) {
+ const uint8_t* src = pixels + (mBuffer->getStride() * j) * 4;
+ for (int32_t i = 0; i < mBuffer->getWidth(); i++) {
+ // Only copy R, G and B components
+ outPtr[0] = src[0];
+ outPtr[1] = src[1];
+ outPtr[2] = src[2];
+ outPtr += 3;
+
+ src += 4;
+ }
+ }
+ file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+ mBuffer->unlock();
+ }
+
+ void expectBufferColor(const Region& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
+ size_t c;
+ Rect const* rect = region.getArray(&c);
+ for (size_t i = 0; i < c; i++, rect++) {
+ expectBufferColor(*rect, r, g, b, a);
+ }
+ }
+
+ void expectBufferColor(const Rect& rect, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+ uint8_t tolerance = 0) {
+ auto colorCompare = [tolerance](const uint8_t* colorA, const uint8_t* colorB) {
+ auto colorBitCompare = [tolerance](uint8_t a, uint8_t b) {
+ uint8_t tmp = a >= b ? a - b : b - a;
+ return tmp <= tolerance;
+ };
+ return std::equal(colorA, colorA + 4, colorB, colorBitCompare);
+ };
+
+ expectBufferColor(rect, r, g, b, a, colorCompare);
+ }
+
+ void expectBufferColor(const Rect& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+ std::function<bool(const uint8_t* a, const uint8_t* b)> colorCompare) {
+ uint8_t* pixels;
+ mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+ int32_t maxFails = 10;
+ int32_t fails = 0;
+ for (int32_t j = 0; j < region.getHeight(); j++) {
+ const uint8_t* src =
+ pixels + (mBuffer->getStride() * (region.top + j) + region.left) * 4;
+ for (int32_t i = 0; i < region.getWidth(); i++) {
+ const uint8_t expected[4] = {r, g, b, a};
+ bool equal = colorCompare(src, expected);
+ EXPECT_TRUE(equal)
+ << "pixel @ (" << region.left + i << ", " << region.top + j << "): "
+ << "expected (" << static_cast<uint32_t>(r) << ", "
+ << static_cast<uint32_t>(g) << ", " << static_cast<uint32_t>(b) << ", "
+ << static_cast<uint32_t>(a) << "), "
+ << "got (" << static_cast<uint32_t>(src[0]) << ", "
+ << static_cast<uint32_t>(src[1]) << ", " << static_cast<uint32_t>(src[2])
+ << ", " << static_cast<uint32_t>(src[3]) << ")";
+ src += 4;
+ if (!equal && ++fails >= maxFails) {
+ break;
+ }
+ }
+ if (fails >= maxFails) {
+ break;
+ }
+ }
+ mBuffer->unlock();
+ }
+
+ void expectAlpha(const Rect& rect, uint8_t a) {
+ auto colorCompare = [](const uint8_t* colorA, const uint8_t* colorB) {
+ return colorA[3] == colorB[3];
+ };
+ expectBufferColor(rect, 0.0f /* r */, 0.0f /*g */, 0.0f /* b */, a, colorCompare);
+ }
+
+ void expectShadowColor(const renderengine::LayerSettings& castingLayer,
+ const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+ const ubyte4& backgroundColor) {
+ const Rect casterRect(castingLayer.geometry.boundaries);
+ Region casterRegion = Region(casterRect);
+ const float casterCornerRadius = castingLayer.geometry.roundedCornersRadius;
+ if (casterCornerRadius > 0.0f) {
+ // ignore the corners if a corner radius is set
+ Rect cornerRect(casterCornerRadius, casterCornerRadius);
+ casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.left, casterRect.top));
+ casterRegion.subtractSelf(
+ cornerRect.offsetTo(casterRect.right - casterCornerRadius, casterRect.top));
+ casterRegion.subtractSelf(
+ cornerRect.offsetTo(casterRect.left, casterRect.bottom - casterCornerRadius));
+ casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.right - casterCornerRadius,
+ casterRect.bottom - casterCornerRadius));
+ }
+
+ const float shadowInset = shadow.length * -1.0f;
+ const Rect casterWithShadow =
+ Rect(casterRect).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+ const Region shadowRegion = Region(casterWithShadow).subtractSelf(casterRect);
+ const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+
+ // verify casting layer
+ expectBufferColor(casterRegion, casterColor.r, casterColor.g, casterColor.b, casterColor.a);
+
+        // verify shadows by testing just the alpha since it's difficult to validate the shadow color
+ size_t c;
+ Rect const* r = shadowRegion.getArray(&c);
+ for (size_t i = 0; i < c; i++, r++) {
+ expectAlpha(*r, 255);
+ }
+
+ // verify background
+ expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+ backgroundColor.a);
+ }
+
+ static renderengine::ShadowSettings getShadowSettings(const vec2& casterPos, float shadowLength,
+ bool casterIsTranslucent) {
+ renderengine::ShadowSettings shadow;
+ shadow.ambientColor = {0.0f, 0.0f, 0.0f, 0.039f};
+ shadow.spotColor = {0.0f, 0.0f, 0.0f, 0.19f};
+ shadow.lightPos = vec3(casterPos.x, casterPos.y, 0);
+ shadow.lightRadius = 0.0f;
+ shadow.length = shadowLength;
+ shadow.casterIsTranslucent = casterIsTranslucent;
+ return shadow;
+ }
+
+ static Rect fullscreenRect() { return Rect(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT); }
+
+ static Rect offsetRect() {
+ return Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT);
+ }
+
+ static Rect offsetRectAtZero() {
+ return Rect(DEFAULT_DISPLAY_WIDTH - DEFAULT_DISPLAY_OFFSET,
+ DEFAULT_DISPLAY_HEIGHT - DEFAULT_DISPLAY_OFFSET);
+ }
+
+ void invokeDraw(renderengine::DisplaySettings settings,
+ std::vector<const renderengine::LayerSettings*> layers,
+ sp<GraphicBuffer> buffer) {
+ base::unique_fd fence;
+ status_t status =
+ sRE->drawLayers(settings, layers, buffer, true, base::unique_fd(), &fence);
+ sCurrentBuffer = buffer;
+
+ int fd = fence.release();
+ if (fd >= 0) {
+ sync_wait(fd, -1);
+ close(fd);
+ }
+
+ ASSERT_EQ(NO_ERROR, status);
+ if (layers.size() > 0) {
+ ASSERT_TRUE(sRE->isFramebufferImageCachedForTesting(buffer->getId()));
+ }
+ }
+
+ void drawEmptyLayers() {
+ renderengine::DisplaySettings settings;
+ std::vector<const renderengine::LayerSettings*> layers;
+ // Meaningless buffer since we don't do any drawing
+ sp<GraphicBuffer> buffer = new GraphicBuffer();
+ invokeDraw(settings, layers, buffer);
+ }
+
+ template <typename SourceVariant>
+ void fillBuffer(half r, half g, half b, half a);
+
+ template <typename SourceVariant>
+ void fillRedBuffer();
+
+ template <typename SourceVariant>
+ void fillGreenBuffer();
+
+ template <typename SourceVariant>
+ void fillBlueBuffer();
+
+ template <typename SourceVariant>
+ void fillRedTransparentBuffer();
+
+ template <typename SourceVariant>
+ void fillRedOffsetBuffer();
+
+ template <typename SourceVariant>
+ void fillBufferPhysicalOffset();
+
+ template <typename SourceVariant>
+ void fillBufferCheckers(uint32_t rotation);
+
+ template <typename SourceVariant>
+ void fillBufferCheckersRotate0();
+
+ template <typename SourceVariant>
+ void fillBufferCheckersRotate90();
+
+ template <typename SourceVariant>
+ void fillBufferCheckersRotate180();
+
+ template <typename SourceVariant>
+ void fillBufferCheckersRotate270();
+
+ template <typename SourceVariant>
+ void fillBufferWithLayerTransform();
+
+ template <typename SourceVariant>
+ void fillBufferLayerTransform();
+
+ template <typename SourceVariant>
+ void fillBufferWithColorTransform();
+
+ template <typename SourceVariant>
+ void fillBufferColorTransform();
+
+ template <typename SourceVariant>
+ void fillRedBufferWithRoundedCorners();
+
+ template <typename SourceVariant>
+ void fillBufferWithRoundedCorners();
+
+ template <typename SourceVariant>
+ void fillBufferAndBlurBackground();
+
+ template <typename SourceVariant>
+ void overlayCorners();
+
+ void fillRedBufferTextureTransform();
+
+ void fillBufferTextureTransform();
+
+ void fillRedBufferWithPremultiplyAlpha();
+
+ void fillBufferWithPremultiplyAlpha();
+
+ void fillRedBufferWithoutPremultiplyAlpha();
+
+ void fillBufferWithoutPremultiplyAlpha();
+
+ void fillGreenColorBufferThenClearRegion();
+
+ void clearLeftRegion();
+
+ void clearRegion();
+
+ template <typename SourceVariant>
+ void drawShadow(const renderengine::LayerSettings& castingLayer,
+ const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+ const ubyte4& backgroundColor);
+
+ // Keep around the same renderengine object to save on initialization time.
+ // For now, exercise the GL backend directly so that some caching specifics
+ // can be tested without changing the interface.
+ static std::unique_ptr<renderengine::gl::GLESRenderEngine> sRE;
+ // Hack to avoid NPE in the EGL driver: the GraphicBuffer needs to
+ // be freed *after* RenderEngine is destroyed, so that the EGL image is
+ // destroyed first.
+ static sp<GraphicBuffer> sCurrentBuffer;
+
+ sp<GraphicBuffer> mBuffer;
+
+ std::vector<uint32_t> mTexNames;
+};
+
+std::unique_ptr<renderengine::gl::GLESRenderEngine> RenderEngineTest::sRE = nullptr;
+sp<GraphicBuffer> RenderEngineTest::sCurrentBuffer = nullptr;
+
+struct ColorSourceVariant {
+ static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+ RenderEngineTest* /*fixture*/) {
+ layer.source.solidColor = half3(r, g, b);
+ }
+};
+
+struct RelaxOpaqueBufferVariant {
+ static void setOpaqueBit(renderengine::LayerSettings& layer) {
+ layer.source.buffer.isOpaque = false;
+ }
+
+ static uint8_t getAlphaChannel() { return 255; }
+};
+
+struct ForceOpaqueBufferVariant {
+ static void setOpaqueBit(renderengine::LayerSettings& layer) {
+ layer.source.buffer.isOpaque = true;
+ }
+
+ static uint8_t getAlphaChannel() {
+ // The isOpaque bit will override the alpha channel, so this should be
+ // arbitrary.
+ return 10;
+ }
+};
+
+template <typename OpaquenessVariant>
+struct BufferSourceVariant {
+ static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+ RenderEngineTest* fixture) {
+ sp<GraphicBuffer> buf = RenderEngineTest::allocateSourceBuffer(1, 1);
+ uint32_t texName;
+ fixture->sRE->genTextures(1, &texName);
+ fixture->mTexNames.push_back(texName);
+
+ uint8_t* pixels;
+ buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+
+ for (int32_t j = 0; j < buf->getHeight(); j++) {
+ uint8_t* iter = pixels + (buf->getStride() * j) * 4;
+ for (int32_t i = 0; i < buf->getWidth(); i++) {
+ iter[0] = uint8_t(r * 255);
+ iter[1] = uint8_t(g * 255);
+ iter[2] = uint8_t(b * 255);
+ iter[3] = OpaquenessVariant::getAlphaChannel();
+ iter += 4;
+ }
+ }
+
+ buf->unlock();
+
+ layer.source.buffer.buffer = buf;
+ layer.source.buffer.textureName = texName;
+ OpaquenessVariant::setOpaqueBit(layer);
+ }
+};
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBuffer(half r, half g, half b, half a) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ SourceVariant::fillColor(layer, r, g, b, this);
+ layer.alpha = a;
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBuffer() {
+ fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, 1.0f);
+ expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillGreenBuffer() {
+ fillBuffer<SourceVariant>(0.0f, 1.0f, 0.0f, 1.0f);
+ expectBufferColor(fullscreenRect(), 0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBlueBuffer() {
+ fillBuffer<SourceVariant>(0.0f, 0.0f, 1.0f, 1.0f);
+ expectBufferColor(fullscreenRect(), 0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedTransparentBuffer() {
+ fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, .2f);
+ expectBufferColor(fullscreenRect(), 51, 0, 0, 51);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedOffsetBuffer() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = offsetRect();
+ settings.clip = offsetRectAtZero();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = offsetRectAtZero().toFloatRect();
+ SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0f;
+
+ layers.push_back(&layer);
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferPhysicalOffset() {
+ fillRedOffsetBuffer<SourceVariant>();
+
+ expectBufferColor(Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT),
+ 255, 0, 0, 255);
+ Rect offsetRegionLeft(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_HEIGHT);
+ Rect offsetRegionTop(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_OFFSET);
+
+ expectBufferColor(offsetRegionLeft, 0, 0, 0, 0);
+ expectBufferColor(offsetRegionTop, 0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckers(uint32_t orientationFlag) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ // Here logical space is 2x2
+ settings.clip = Rect(2, 2);
+ settings.orientation = orientationFlag;
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layerOne;
+ Rect rectOne(0, 0, 1, 1);
+ layerOne.geometry.boundaries = rectOne.toFloatRect();
+ SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+ layerOne.alpha = 1.0f;
+
+ renderengine::LayerSettings layerTwo;
+ Rect rectTwo(0, 1, 1, 2);
+ layerTwo.geometry.boundaries = rectTwo.toFloatRect();
+ SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+ layerTwo.alpha = 1.0f;
+
+ renderengine::LayerSettings layerThree;
+ Rect rectThree(1, 0, 2, 1);
+ layerThree.geometry.boundaries = rectThree.toFloatRect();
+ SourceVariant::fillColor(layerThree, 0.0f, 0.0f, 1.0f, this);
+ layerThree.alpha = 1.0f;
+
+ layers.push_back(&layerOne);
+ layers.push_back(&layerTwo);
+ layers.push_back(&layerThree);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate0() {
+ fillBufferCheckers<SourceVariant>(ui::Transform::ROT_0);
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 255, 0, 0,
+ 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT / 2),
+ 0, 0, 255, 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 0, 0);
+ expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+ DEFAULT_DISPLAY_HEIGHT),
+ 0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate90() {
+ fillBufferCheckers<SourceVariant>(ui::Transform::ROT_90);
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 255, 0,
+ 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT / 2),
+ 255, 0, 0, 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 255, 255);
+ expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+ DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate180() {
+ fillBufferCheckers<SourceVariant>(ui::Transform::ROT_180);
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0,
+ 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT / 2),
+ 0, 255, 0, 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 255, 0, 0, 255);
+ expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+ DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate270() {
+ fillBufferCheckers<SourceVariant>(ui::Transform::ROT_270);
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 255,
+ 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT / 2),
+ 0, 0, 0, 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 255, 0, 255);
+ expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+ DEFAULT_DISPLAY_HEIGHT),
+ 255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithLayerTransform() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ // Here logical space is 2x2
+ settings.clip = Rect(2, 2);
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+ // Translate one pixel diagonally
+ layer.geometry.positionTransform = mat4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1);
+ SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.source.solidColor = half3(1.0f, 0.0f, 0.0f);
+ layer.alpha = 1.0f;
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferLayerTransform() {
+ fillBufferWithLayerTransform<SourceVariant>();
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0, 0);
+ expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithColorTransform() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = Rect(1, 1);
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+ SourceVariant::fillColor(layer, 0.5f, 0.25f, 0.125f, this);
+ layer.alpha = 1.0f;
+
+ // construct a fake color matrix
+ // annihilate green and blue channels
+ settings.colorTransform = mat4::scale(vec4(1, 0, 0, 1));
+ // set red channel to red + green
+ layer.colorTransform = mat4(1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
+
+ layer.alpha = 1.0f;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferColorTransform() {
+ fillBufferWithColorTransform<SourceVariant>();
+ expectBufferColor(fullscreenRect(), 191, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBufferWithRoundedCorners() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ layer.geometry.roundedCornersRadius = 5.0f;
+ layer.geometry.roundedCornersCrop = fullscreenRect().toFloatRect();
+ SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0f;
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithRoundedCorners() {
+ fillRedBufferWithRoundedCorners<SourceVariant>();
+ // Corners should be ignored...
+ expectBufferColor(Rect(0, 0, 1, 1), 0, 0, 0, 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, 0, DEFAULT_DISPLAY_WIDTH, 1), 0, 0, 0, 0);
+ expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT - 1, 1, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, DEFAULT_DISPLAY_HEIGHT - 1,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 0, 0);
+ // ...And the non-rounded portion should be red.
+ // Other pixels may be anti-aliased, so let's not check those.
+ expectBufferColor(Rect(5, 5, DEFAULT_DISPLAY_WIDTH - 5, DEFAULT_DISPLAY_HEIGHT - 5), 255, 0, 0,
+ 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferAndBlurBackground() {
+ char value[PROPERTY_VALUE_MAX];
+ property_get("ro.surface_flinger.supports_background_blur", value, "0");
+ if (!atoi(value)) {
+ // This device doesn't support blurs, no-op.
+ return;
+ }
+
+ auto blurRadius = 50;
+ auto center = DEFAULT_DISPLAY_WIDTH / 2;
+
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings backgroundLayer;
+ backgroundLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+ SourceVariant::fillColor(backgroundLayer, 0.0f, 1.0f, 0.0f, this);
+ backgroundLayer.alpha = 1.0f;
+ layers.push_back(&backgroundLayer);
+
+ renderengine::LayerSettings leftLayer;
+ leftLayer.geometry.boundaries =
+ Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT).toFloatRect();
+ SourceVariant::fillColor(leftLayer, 1.0f, 0.0f, 0.0f, this);
+ leftLayer.alpha = 1.0f;
+ layers.push_back(&leftLayer);
+
+ renderengine::LayerSettings blurLayer;
+ blurLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+ blurLayer.backgroundBlurRadius = blurRadius;
+ blurLayer.alpha = 0;
+ layers.push_back(&blurLayer);
+
+ invokeDraw(settings, layers, mBuffer);
+
+ expectBufferColor(Rect(center - 1, center - 5, center, center + 5), 150, 150, 0, 255,
+ 50 /* tolerance */);
+ expectBufferColor(Rect(center, center - 5, center + 1, center + 5), 150, 150, 0, 255,
+ 50 /* tolerance */);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::overlayCorners() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layersFirst;
+
+ renderengine::LayerSettings layerOne;
+ layerOne.geometry.boundaries =
+ FloatRect(0, 0, DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0);
+ SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+ layerOne.alpha = 0.2;
+
+ layersFirst.push_back(&layerOne);
+ invokeDraw(settings, layersFirst, mBuffer);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 51, 0, 0, 51);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 0, 0);
+
+ std::vector<const renderengine::LayerSettings*> layersSecond;
+ renderengine::LayerSettings layerTwo;
+ layerTwo.geometry.boundaries =
+ FloatRect(DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT);
+ SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+ layerTwo.alpha = 1.0f;
+
+ layersSecond.push_back(&layerTwo);
+ invokeDraw(settings, layersSecond, mBuffer);
+
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 0, 0, 0, 0);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+ DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+ 0, 255, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferTextureTransform() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = Rect(1, 1);
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+    // Here we will allocate a checkerboard texture, but transform the texture
+    // coordinates so that only the upper-left (red) quadrant is sampled.
+ sp<GraphicBuffer> buf = allocateSourceBuffer(2, 2);
+ uint32_t texName;
+ RenderEngineTest::sRE->genTextures(1, &texName);
+ this->mTexNames.push_back(texName);
+
+ uint8_t* pixels;
+ buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+    // Red top left, Green top right, Blue bottom left; bottom right is left unwritten
+ pixels[0] = 255;
+ pixels[1] = 0;
+ pixels[2] = 0;
+ pixels[3] = 255;
+ pixels[4] = 0;
+ pixels[5] = 255;
+ pixels[6] = 0;
+ pixels[7] = 255;
+ pixels[8] = 0;
+ pixels[9] = 0;
+ pixels[10] = 255;
+ pixels[11] = 255;
+ buf->unlock();
+
+ layer.source.buffer.buffer = buf;
+ layer.source.buffer.textureName = texName;
+ // Transform coordinates to only be inside the red quadrant.
+ layer.source.buffer.textureTransform = mat4::scale(vec4(0.2, 0.2, 1, 1));
+ layer.alpha = 1.0f;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferTextureTransform() {
+ fillRedBufferTextureTransform();
+ expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferWithPremultiplyAlpha() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ // Here logical space is 1x1
+ settings.clip = Rect(1, 1);
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+ uint32_t texName;
+ RenderEngineTest::sRE->genTextures(1, &texName);
+ this->mTexNames.push_back(texName);
+
+ uint8_t* pixels;
+ buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+ pixels[0] = 255;
+ pixels[1] = 0;
+ pixels[2] = 0;
+ pixels[3] = 255;
+ buf->unlock();
+
+ layer.source.buffer.buffer = buf;
+ layer.source.buffer.textureName = texName;
+ layer.source.buffer.usePremultipliedAlpha = true;
+ layer.alpha = 0.5f;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithPremultiplyAlpha() {
+ fillRedBufferWithPremultiplyAlpha();
+ expectBufferColor(fullscreenRect(), 128, 0, 0, 128);
+}
+
+void RenderEngineTest::fillRedBufferWithoutPremultiplyAlpha() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ // Here logical space is 1x1
+ settings.clip = Rect(1, 1);
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+ uint32_t texName;
+ RenderEngineTest::sRE->genTextures(1, &texName);
+ this->mTexNames.push_back(texName);
+
+ uint8_t* pixels;
+ buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ reinterpret_cast<void**>(&pixels));
+ pixels[0] = 255;
+ pixels[1] = 0;
+ pixels[2] = 0;
+ pixels[3] = 255;
+ buf->unlock();
+
+ layer.source.buffer.buffer = buf;
+ layer.source.buffer.textureName = texName;
+ layer.source.buffer.usePremultipliedAlpha = false;
+ layer.alpha = 0.5f;
+ layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithoutPremultiplyAlpha() {
+ fillRedBufferWithoutPremultiplyAlpha();
+ expectBufferColor(fullscreenRect(), 128, 0, 0, 64, 1);
+}
+
+void RenderEngineTest::clearLeftRegion() {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ // Here logical space is 4x4
+ settings.clip = Rect(4, 4);
+ settings.clearRegion = Region(Rect(2, 4));
+ std::vector<const renderengine::LayerSettings*> layers;
+ // fake layer, without bounds should not render anything
+ renderengine::LayerSettings layer;
+ layers.push_back(&layer);
+ invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::clearRegion() {
+ // Reuse mBuffer
+ clearLeftRegion();
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 255);
+ expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+ DEFAULT_DISPLAY_HEIGHT),
+ 0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::drawShadow(const renderengine::LayerSettings& castingLayer,
+ const renderengine::ShadowSettings& shadow,
+ const ubyte4& casterColor, const ubyte4& backgroundColor) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ // add background layer
+ renderengine::LayerSettings bgLayer;
+ bgLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+ ColorSourceVariant::fillColor(bgLayer, backgroundColor.r / 255.0f, backgroundColor.g / 255.0f,
+ backgroundColor.b / 255.0f, this);
+ bgLayer.alpha = backgroundColor.a / 255.0f;
+ layers.push_back(&bgLayer);
+
+ // add shadow layer
+ renderengine::LayerSettings shadowLayer;
+ shadowLayer.geometry.boundaries = castingLayer.geometry.boundaries;
+ shadowLayer.alpha = castingLayer.alpha;
+ shadowLayer.shadow = shadow;
+ layers.push_back(&shadowLayer);
+
+ // add layer casting the shadow
+ renderengine::LayerSettings layer = castingLayer;
+ SourceVariant::fillColor(layer, casterColor.r / 255.0f, casterColor.g / 255.0f,
+ casterColor.b / 255.0f, this);
+ layers.push_back(&layer);
+
+ invokeDraw(settings, layers, mBuffer);
+}
+
+TEST_F(RenderEngineTest, drawLayers_noLayersToDraw) {
+ drawEmptyLayers();
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputBuffer) {
+ renderengine::DisplaySettings settings;
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layers.push_back(&layer);
+ base::unique_fd fence;
+ status_t status = sRE->drawLayers(settings, layers, nullptr, true, base::unique_fd(), &fence);
+
+ ASSERT_EQ(BAD_VALUE, status);
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputFence) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0;
+ layers.push_back(&layer);
+
+ status_t status = sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), nullptr);
+ sCurrentBuffer = mBuffer;
+ ASSERT_EQ(NO_ERROR, status);
+ expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_doesNotCacheFramebuffer) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0;
+ layers.push_back(&layer);
+
+ status_t status = sRE->drawLayers(settings, layers, mBuffer, false, base::unique_fd(), nullptr);
+ sCurrentBuffer = mBuffer;
+ ASSERT_EQ(NO_ERROR, status);
+ ASSERT_FALSE(sRE->isFramebufferImageCachedForTesting(mBuffer->getId()));
+ expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_colorSource) {
+ fillRedBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_colorSource) {
+ fillGreenBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_colorSource) {
+ fillBlueBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_colorSource) {
+ fillRedTransparentBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_colorSource) {
+ fillBufferPhysicalOffset<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_colorSource) {
+ fillBufferCheckersRotate0<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_colorSource) {
+ fillBufferCheckersRotate90<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_colorSource) {
+ fillBufferCheckersRotate180<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_colorSource) {
+ fillBufferCheckersRotate270<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_colorSource) {
+ fillBufferLayerTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_colorSource) {
+ fillBufferColorTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_colorSource) {
+ fillBufferWithRoundedCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_colorSource) {
+ fillBufferAndBlurBackground<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_colorSource) {
+ overlayCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_opaqueBufferSource) {
+ fillRedBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_opaqueBufferSource) {
+ fillGreenBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_opaqueBufferSource) {
+ fillBlueBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_opaqueBufferSource) {
+ fillRedTransparentBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_opaqueBufferSource) {
+ fillBufferPhysicalOffset<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_opaqueBufferSource) {
+ fillBufferCheckersRotate0<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_opaqueBufferSource) {
+ fillBufferCheckersRotate90<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_opaqueBufferSource) {
+ fillBufferCheckersRotate180<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_opaqueBufferSource) {
+ fillBufferCheckersRotate270<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_opaqueBufferSource) {
+ fillBufferLayerTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_opaqueBufferSource) {
+ fillBufferColorTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_opaqueBufferSource) {
+ fillBufferWithRoundedCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_opaqueBufferSource) {
+ fillBufferAndBlurBackground<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_opaqueBufferSource) {
+ overlayCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_bufferSource) {
+ fillRedBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_bufferSource) {
+ fillGreenBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_bufferSource) {
+ fillBlueBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_bufferSource) {
+ fillRedTransparentBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_bufferSource) {
+ fillBufferPhysicalOffset<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_bufferSource) {
+ fillBufferCheckersRotate0<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_bufferSource) {
+ fillBufferCheckersRotate90<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_bufferSource) {
+ fillBufferCheckersRotate180<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_bufferSource) {
+ fillBufferCheckersRotate270<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_bufferSource) {
+ fillBufferLayerTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_bufferSource) {
+ fillBufferColorTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_bufferSource) {
+ fillBufferWithRoundedCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_bufferSource) {
+ fillBufferAndBlurBackground<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_bufferSource) {
+ overlayCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferTextureTransform) {
+ fillBufferTextureTransform();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_premultipliesAlpha) {
+ fillBufferWithPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_withoutPremultiplyingAlpha) {
+ fillBufferWithoutPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_clearRegion) {
+ clearRegion();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillsBufferAndCachesImages) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+
+ layers.push_back(&layer);
+ invokeDraw(settings, layers, mBuffer);
+ uint64_t bufferId = layer.source.buffer.buffer->getId();
+ EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+ std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+ sRE->unbindExternalTextureBufferForTesting(bufferId);
+ std::lock_guard<std::mutex> lock(barrier->mutex);
+ ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+ [&]() REQUIRES(barrier->mutex) {
+ return barrier->isOpen;
+ }));
+ EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+ EXPECT_EQ(NO_ERROR, barrier->result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_withNullBuffer) {
+ status_t result = sRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+ ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_cachesImages) {
+ sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+ uint32_t texName;
+ sRE->genTextures(1, &texName);
+ mTexNames.push_back(texName);
+
+ sRE->bindExternalTextureBuffer(texName, buf, nullptr);
+ uint64_t bufferId = buf->getId();
+ EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+ std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+ sRE->unbindExternalTextureBufferForTesting(bufferId);
+ std::lock_guard<std::mutex> lock(barrier->mutex);
+ ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+ [&]() REQUIRES(barrier->mutex) {
+ return barrier->isOpen;
+ }));
+ EXPECT_EQ(NO_ERROR, barrier->result);
+ EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_withNullBuffer) {
+ std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+ sRE->cacheExternalTextureBufferForTesting(nullptr);
+ std::lock_guard<std::mutex> lock(barrier->mutex);
+ ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+ [&]() REQUIRES(barrier->mutex) {
+ return barrier->isOpen;
+ }));
+ EXPECT_TRUE(barrier->isOpen);
+ EXPECT_EQ(BAD_VALUE, barrier->result);
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_cachesImages) {
+ sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+ uint64_t bufferId = buf->getId();
+ std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+ sRE->cacheExternalTextureBufferForTesting(buf);
+ {
+ std::lock_guard<std::mutex> lock(barrier->mutex);
+ ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+ [&]() REQUIRES(barrier->mutex) {
+ return barrier->isOpen;
+ }));
+ EXPECT_EQ(NO_ERROR, barrier->result);
+ }
+ EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+ barrier = sRE->unbindExternalTextureBufferForTesting(bufferId);
+ {
+ std::lock_guard<std::mutex> lock(barrier->mutex);
+ ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+ [&]() REQUIRES(barrier->mutex) {
+ return barrier->isOpen;
+ }));
+ EXPECT_EQ(NO_ERROR, barrier->result);
+ }
+ EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterLayerMinSize) {
+ const ubyte4 casterColor(255, 0, 0, 255);
+ const ubyte4 backgroundColor(255, 255, 255, 255);
+ const float shadowLength = 5.0f;
+ Rect casterBounds(1, 1);
+ casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+ renderengine::LayerSettings castingLayer;
+ castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+ castingLayer.alpha = 1.0f;
+ renderengine::ShadowSettings settings =
+ getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+ false /* casterIsTranslucent */);
+
+ drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+ expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterColorLayer) {
+ const ubyte4 casterColor(255, 0, 0, 255);
+ const ubyte4 backgroundColor(255, 255, 255, 255);
+ const float shadowLength = 5.0f;
+ Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+ casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+ renderengine::LayerSettings castingLayer;
+ castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+ castingLayer.alpha = 1.0f;
+ renderengine::ShadowSettings settings =
+ getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+ false /* casterIsTranslucent */);
+
+ drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+ expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterOpaqueBufferLayer) {
+ const ubyte4 casterColor(255, 0, 0, 255);
+ const ubyte4 backgroundColor(255, 255, 255, 255);
+ const float shadowLength = 5.0f;
+ Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+ casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+ renderengine::LayerSettings castingLayer;
+ castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+ castingLayer.alpha = 1.0f;
+ renderengine::ShadowSettings settings =
+ getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+ false /* casterIsTranslucent */);
+
+ drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+ backgroundColor);
+ expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterWithRoundedCorner) {
+ const ubyte4 casterColor(255, 0, 0, 255);
+ const ubyte4 backgroundColor(255, 255, 255, 255);
+ const float shadowLength = 5.0f;
+ Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+ casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+ renderengine::LayerSettings castingLayer;
+ castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+ castingLayer.geometry.roundedCornersRadius = 3.0f;
+ castingLayer.geometry.roundedCornersCrop = casterBounds.toFloatRect();
+ castingLayer.alpha = 1.0f;
+ renderengine::ShadowSettings settings =
+ getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+ false /* casterIsTranslucent */);
+
+ drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+ backgroundColor);
+ expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_translucentCasterWithAlpha) {
+ const ubyte4 casterColor(255, 0, 0, 255);
+ const ubyte4 backgroundColor(255, 255, 255, 255);
+ const float shadowLength = 5.0f;
+ Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+ casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+ renderengine::LayerSettings castingLayer;
+ castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+ castingLayer.alpha = 0.5f;
+ renderengine::ShadowSettings settings =
+ getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+ true /* casterIsTranslucent */);
+
+ drawShadow<BufferSourceVariant<RelaxOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+ backgroundColor);
+
+ // verify only the background since the shadow will draw behind the caster
+ const float shadowInset = settings.length * -1.0f;
+ const Rect casterWithShadow =
+ Rect(casterBounds).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+ const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+ expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+ backgroundColor.a);
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_cleansUpOnce) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0;
+ layers.push_back(&layer);
+
+ base::unique_fd fenceOne;
+ sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fenceOne);
+ base::unique_fd fenceTwo;
+ sRE->drawLayers(settings, layers, mBuffer, true, std::move(fenceOne), &fenceTwo);
+
+ const int fd = fenceTwo.get();
+ if (fd >= 0) {
+ sync_wait(fd, -1);
+ }
+ // Only cleanup the first time.
+ EXPECT_TRUE(sRE->cleanupPostRender(
+ renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+ EXPECT_FALSE(sRE->cleanupPostRender(
+ renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_whenCleaningAll_replacesTextureMemory) {
+ renderengine::DisplaySettings settings;
+ settings.physicalDisplay = fullscreenRect();
+ settings.clip = fullscreenRect();
+
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layer;
+ layer.geometry.boundaries = fullscreenRect().toFloatRect();
+ BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+ layer.alpha = 1.0;
+ layers.push_back(&layer);
+
+ base::unique_fd fence;
+ sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fence);
+
+ const int fd = fence.get();
+ if (fd >= 0) {
+ sync_wait(fd, -1);
+ }
+
+ uint64_t bufferId = layer.source.buffer.buffer->getId();
+ uint32_t texName = layer.source.buffer.textureName;
+ EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+ EXPECT_EQ(bufferId, sRE->getBufferIdForTextureNameForTesting(texName));
+
+ EXPECT_TRUE(sRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL));
+
+ // Now check that our view of memory is good.
+ EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+ EXPECT_EQ(std::nullopt, sRE->getBufferIdForTextureNameForTesting(texName));
+ EXPECT_TRUE(sRE->isTextureNameKnownForTesting(texName));
+}
+
+} // namespace android
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic pop // ignored "-Wconversion"
diff --git a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
new file mode 100644
index 0000000..97c7442
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/properties.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <renderengine/mock/RenderEngine.h>
+#include "../threaded/RenderEngineThreaded.h"
+
+namespace android {
+
+using testing::_;
+using testing::Eq;
+using testing::Mock;
+using testing::Return;
+
+struct RenderEngineThreadedTest : public ::testing::Test {
+ ~RenderEngineThreadedTest() {}
+
+ void SetUp() override {
+ mThreadedRE = renderengine::threaded::RenderEngineThreaded::create(
+ [this]() { return std::unique_ptr<renderengine::RenderEngine>(mRenderEngine); });
+ }
+
+ std::unique_ptr<renderengine::threaded::RenderEngineThreaded> mThreadedRE;
+ renderengine::mock::RenderEngine* mRenderEngine = new renderengine::mock::RenderEngine();
+};
+
+TEST_F(RenderEngineThreadedTest, dump) {
+ std::string testString = "XYZ";
+ EXPECT_CALL(*mRenderEngine, dump(_));
+ mThreadedRE->dump(testString);
+}
+
+TEST_F(RenderEngineThreadedTest, primeCache) {
+ EXPECT_CALL(*mRenderEngine, primeCache());
+ mThreadedRE->primeCache();
+}
+
+TEST_F(RenderEngineThreadedTest, genTextures) {
+ uint32_t texName;
+ EXPECT_CALL(*mRenderEngine, genTextures(1, &texName));
+ mThreadedRE->genTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, deleteTextures) {
+ uint32_t texName;
+ EXPECT_CALL(*mRenderEngine, deleteTextures(1, &texName));
+ mThreadedRE->deleteTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_nullptrBuffer) {
+ EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, Eq(nullptr), Eq(nullptr)))
+ .WillOnce(Return(BAD_VALUE));
+ status_t result = mThreadedRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+ ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_withBuffer) {
+ sp<GraphicBuffer> buf = new GraphicBuffer();
+ EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, buf, Eq(nullptr)))
+ .WillOnce(Return(NO_ERROR));
+ status_t result = mThreadedRE->bindExternalTextureBuffer(0, buf, nullptr);
+ ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_nullptr) {
+ EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(Eq(nullptr)));
+ mThreadedRE->cacheExternalTextureBuffer(nullptr);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_withBuffer) {
+ sp<GraphicBuffer> buf = new GraphicBuffer();
+ EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(buf));
+ mThreadedRE->cacheExternalTextureBuffer(buf);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindExternalTextureBuffer) {
+ EXPECT_CALL(*mRenderEngine, unbindExternalTextureBuffer(0x0));
+ mThreadedRE->unbindExternalTextureBuffer(0x0);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsBadValue) {
+ std::unique_ptr<renderengine::Framebuffer> framebuffer;
+ EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(BAD_VALUE));
+ status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+ ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsNoError) {
+ std::unique_ptr<renderengine::Framebuffer> framebuffer;
+ EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(NO_ERROR));
+ status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+ ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindFrameBuffer) {
+ std::unique_ptr<renderengine::Framebuffer> framebuffer;
+ EXPECT_CALL(*mRenderEngine, unbindFrameBuffer(framebuffer.get()));
+ mThreadedRE->unbindFrameBuffer(framebuffer.get());
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns20) {
+ size_t size = 20;
+ EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+ size_t result = mThreadedRE->getMaxTextureSize();
+ ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns0) {
+ size_t size = 0;
+ EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+ size_t result = mThreadedRE->getMaxTextureSize();
+ ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns20) {
+ size_t dims = 20;
+ EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+ size_t result = mThreadedRE->getMaxViewportDims();
+ ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns0) {
+ size_t dims = 0;
+ EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+ size_t result = mThreadedRE->getMaxViewportDims();
+ ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsFalse) {
+ EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(false));
+ status_t result = mThreadedRE->isProtected();
+ ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsTrue) {
+ EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(true));
+ size_t result = mThreadedRE->isProtected();
+ ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsFalse) {
+ EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(false));
+ status_t result = mThreadedRE->supportsProtectedContent();
+ ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsTrue) {
+ EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(true));
+ status_t result = mThreadedRE->supportsProtectedContent();
+ ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsFalse) {
+ EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(false));
+ status_t result = mThreadedRE->useProtectedContext(false);
+ ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsTrue) {
+ EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(true));
+ status_t result = mThreadedRE->useProtectedContext(false);
+ ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsFalse) {
+ EXPECT_CALL(*mRenderEngine,
+ cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+ .WillOnce(Return(false));
+ status_t result =
+ mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+ ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsTrue) {
+ EXPECT_CALL(*mRenderEngine,
+ cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+ .WillOnce(Return(true));
+ status_t result =
+ mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+ ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, drawLayers) {
+ renderengine::DisplaySettings settings;
+ std::vector<const renderengine::LayerSettings*> layers;
+ sp<GraphicBuffer> buffer = new GraphicBuffer();
+ base::unique_fd bufferFence;
+ base::unique_fd drawFence;
+
+ EXPECT_CALL(*mRenderEngine, drawLayers)
+ .WillOnce([](const renderengine::DisplaySettings&,
+ const std::vector<const renderengine::LayerSettings*>&,
+ const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+ base::unique_fd*) -> status_t { return NO_ERROR; });
+
+ status_t result = mThreadedRE->drawLayers(settings, layers, buffer, false,
+ std::move(bufferFence), &drawFence);
+ ASSERT_EQ(NO_ERROR, result);
+}
+
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
new file mode 100644
index 0000000..d4184fd
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
@@ -0,0 +1,403 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "RenderEngineThreaded.h"
+
+#include <sched.h>
+#include <chrono>
+#include <future>
+
+#include <android-base/stringprintf.h>
+#include <private/gui/SyncFeatures.h>
+#include <utils/Trace.h>
+
+#include "gl/GLESRenderEngine.h"
+
+using namespace std::chrono_literals;
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+std::unique_ptr<RenderEngineThreaded> RenderEngineThreaded::create(CreateInstanceFactory factory) {
+ return std::make_unique<RenderEngineThreaded>(std::move(factory));
+}
+
+RenderEngineThreaded::RenderEngineThreaded(CreateInstanceFactory factory) {
+ ATRACE_CALL();
+
+ std::lock_guard lockThread(mThreadMutex);
+ mThread = std::thread(&RenderEngineThreaded::threadMain, this, factory);
+}
+
+RenderEngineThreaded::~RenderEngineThreaded() {
+ {
+ std::lock_guard lock(mThreadMutex);
+ mRunning = false;
+ mCondition.notify_one();
+ }
+
+ if (mThread.joinable()) {
+ mThread.join();
+ }
+}
+
+// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
+void RenderEngineThreaded::threadMain(CreateInstanceFactory factory) NO_THREAD_SAFETY_ANALYSIS {
+ ATRACE_CALL();
+
+ struct sched_param param = {0};
+ param.sched_priority = 2;
+ if (sched_setscheduler(0, SCHED_FIFO, &param) != 0) {
+ ALOGE("Couldn't set SCHED_FIFO");
+ }
+
+ mRenderEngine = factory();
+
+ std::unique_lock<std::mutex> lock(mThreadMutex);
+ pthread_setname_np(pthread_self(), mThreadName);
+
+ while (mRunning) {
+ if (!mFunctionCalls.empty()) {
+ auto task = mFunctionCalls.front();
+ mFunctionCalls.pop();
+ task(*mRenderEngine);
+ }
+ mCondition.wait(lock, [this]() REQUIRES(mThreadMutex) {
+ return !mRunning || !mFunctionCalls.empty();
+ });
+ }
+}
+
+void RenderEngineThreaded::primeCache() const {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::primeCache");
+ instance.primeCache();
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+void RenderEngineThreaded::dump(std::string& result) {
+ std::promise<std::string> resultPromise;
+ std::future<std::string> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &result](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::dump");
+ std::string localResult = result;
+ instance.dump(localResult);
+ resultPromise.set_value(std::move(localResult));
+ });
+ }
+ mCondition.notify_one();
+ // Note: This is an rvalue.
+ result.assign(resultFuture.get());
+}
+
+bool RenderEngineThreaded::useNativeFenceSync() const {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+ ATRACE_NAME("REThreaded::useNativeFenceSync");
+ bool returnValue = SyncFeatures::getInstance().useNativeFenceSync();
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useWaitSync() const {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+ ATRACE_NAME("REThreaded::useWaitSync");
+ bool returnValue = SyncFeatures::getInstance().useWaitSync();
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+void RenderEngineThreaded::genTextures(size_t count, uint32_t* names) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, count, names](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::genTextures");
+ instance.genTextures(count, names);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+void RenderEngineThreaded::deleteTextures(size_t count, uint32_t const* names) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, count, &names](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::deleteTextures");
+ instance.deleteTextures(count, names);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+void RenderEngineThreaded::bindExternalTextureImage(uint32_t texName, const Image& image) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push(
+ [&resultPromise, texName, &image](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::bindExternalTextureImage");
+ instance.bindExternalTextureImage(texName, image);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindExternalTextureBuffer(uint32_t texName,
+ const sp<GraphicBuffer>& buffer,
+ const sp<Fence>& fence) {
+ std::promise<status_t> resultPromise;
+ std::future<status_t> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push(
+ [&resultPromise, texName, &buffer, &fence](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::bindExternalTextureBuffer");
+ status_t status = instance.bindExternalTextureBuffer(texName, buffer, fence);
+ resultPromise.set_value(status);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+void RenderEngineThreaded::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &buffer](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::cacheExternalTextureBuffer");
+ instance.cacheExternalTextureBuffer(buffer);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+void RenderEngineThreaded::unbindExternalTextureBuffer(uint64_t bufferId) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &bufferId](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::unbindExternalTextureBuffer");
+ instance.unbindExternalTextureBuffer(bufferId);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindFrameBuffer(Framebuffer* framebuffer) {
+ std::promise<status_t> resultPromise;
+ std::future<status_t> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::bindFrameBuffer");
+ status_t status = instance.bindFrameBuffer(framebuffer);
+ resultPromise.set_value(status);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+void RenderEngineThreaded::unbindFrameBuffer(Framebuffer* framebuffer) {
+ std::promise<void> resultPromise;
+ std::future<void> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::unbindFrameBuffer");
+ instance.unbindFrameBuffer(framebuffer);
+ resultPromise.set_value();
+ });
+ }
+ mCondition.notify_one();
+ resultFuture.wait();
+}
+
+size_t RenderEngineThreaded::getMaxTextureSize() const {
+ std::promise<size_t> resultPromise;
+ std::future<size_t> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::getMaxTextureSize");
+ size_t size = instance.getMaxTextureSize();
+ resultPromise.set_value(size);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+size_t RenderEngineThreaded::getMaxViewportDims() const {
+ std::promise<size_t> resultPromise;
+ std::future<size_t> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::getMaxViewportDims");
+ size_t size = instance.getMaxViewportDims();
+ resultPromise.set_value(size);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+bool RenderEngineThreaded::isProtected() const {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::isProtected");
+ bool returnValue = instance.isProtected();
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+bool RenderEngineThreaded::supportsProtectedContent() const {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::supportsProtectedContent");
+ bool returnValue = instance.supportsProtectedContent();
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useProtectedContext(bool useProtectedContext) {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push(
+ [&resultPromise, useProtectedContext](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::useProtectedContext");
+ bool returnValue = instance.useProtectedContext(useProtectedContext);
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+Framebuffer* RenderEngineThreaded::getFramebufferForDrawing() {
+ std::promise<Framebuffer*> resultPromise;
+ std::future<Framebuffer*> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::getFramebufferForDrawing");
+ Framebuffer* framebuffer = instance.getFramebufferForDrawing();
+ resultPromise.set_value(framebuffer);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+bool RenderEngineThreaded::cleanupPostRender(CleanupMode mode) {
+ std::promise<bool> resultPromise;
+ std::future<bool> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, mode](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::cleanupPostRender");
+ bool returnValue = instance.cleanupPostRender(mode);
+ resultPromise.set_value(returnValue);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+status_t RenderEngineThreaded::drawLayers(const DisplaySettings& display,
+ const std::vector<const LayerSettings*>& layers,
+ const sp<GraphicBuffer>& buffer,
+ const bool useFramebufferCache,
+ base::unique_fd&& bufferFence,
+ base::unique_fd* drawFence) {
+ std::promise<status_t> resultPromise;
+ std::future<status_t> resultFuture = resultPromise.get_future();
+ {
+ std::lock_guard lock(mThreadMutex);
+ mFunctionCalls.push([&resultPromise, &display, &layers, &buffer, useFramebufferCache,
+ &bufferFence, &drawFence](renderengine::RenderEngine& instance) {
+ ATRACE_NAME("REThreaded::drawLayers");
+ status_t status = instance.drawLayers(display, layers, buffer, useFramebufferCache,
+ std::move(bufferFence), drawFence);
+ resultPromise.set_value(status);
+ });
+ }
+ mCondition.notify_one();
+ return resultFuture.get();
+}
+
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
new file mode 100644
index 0000000..86a49e9
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include "renderengine/RenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+using CreateInstanceFactory = std::function<std::unique_ptr<renderengine::RenderEngine>()>;
+
+/**
+ * This class extends a basic RenderEngine class. It contains a thread. Each time a function of
+ * this class is called, we create a lambda function that is put on a queue. The main thread then
+ * executes the functions in order.
+ */
+class RenderEngineThreaded : public RenderEngine {
+public:
+ static std::unique_ptr<RenderEngineThreaded> create(CreateInstanceFactory factory);
+
+ RenderEngineThreaded(CreateInstanceFactory factory);
+ ~RenderEngineThreaded() override;
+ void primeCache() const override;
+
+ void dump(std::string& result) override;
+
+ bool useNativeFenceSync() const override;
+ bool useWaitSync() const override;
+ void genTextures(size_t count, uint32_t* names) override;
+ void deleteTextures(size_t count, uint32_t const* names) override;
+ void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+ status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+ const sp<Fence>& fence) override;
+ void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) override;
+ void unbindExternalTextureBuffer(uint64_t bufferId) override;
+ status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+ void unbindFrameBuffer(Framebuffer* framebuffer) override;
+ size_t getMaxTextureSize() const override;
+ size_t getMaxViewportDims() const override;
+
+ bool isProtected() const override;
+ bool supportsProtectedContent() const override;
+ bool useProtectedContext(bool useProtectedContext) override;
+ bool cleanupPostRender(CleanupMode mode) override;
+
+ status_t drawLayers(const DisplaySettings& display,
+ const std::vector<const LayerSettings*>& layers,
+ const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+ base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+
+protected:
+ Framebuffer* getFramebufferForDrawing() override;
+
+private:
+ void threadMain(CreateInstanceFactory factory);
+
+ /* ------------------------------------------------------------------------
+ * Threading
+ */
+ const char* const mThreadName = "RenderEngineThread";
+ // Protects the creation and destruction of mThread.
+ mutable std::mutex mThreadMutex;
+ std::thread mThread GUARDED_BY(mThreadMutex);
+ bool mRunning GUARDED_BY(mThreadMutex) = true;
+ mutable std::queue<std::function<void(renderengine::RenderEngine& instance)>> mFunctionCalls
+ GUARDED_BY(mThreadMutex);
+ mutable std::condition_variable mCondition;
+
+ /* ------------------------------------------------------------------------
+ * Render Engine
+ */
+ std::unique_ptr<renderengine::RenderEngine> mRenderEngine;
+};
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 0164040..a0b66a7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -37,12 +37,73 @@
mAccessUnitRTPTime(0),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0),
- mAccessUnitDamaged(false) {
+ mAccessUnitDamaged(false),
+ mFirstIFrameProvided(false),
+ mLastIFrameProvidedAtMs(0) {
}
AAVCAssembler::~AAVCAssembler() {
}
+int32_t AAVCAssembler::addNack(
+ const sp<ARTPSource> &source) {
+ List<sp<ABuffer>> *queue = source->queue();
+ int32_t nackCount = 0;
+
+ List<sp<ABuffer> >::iterator it = queue->begin();
+
+ if (it == queue->end()) {
+ return nackCount /* 0 */;
+ }
+
+ uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+ // move to the packet after which RTCP:NACK was sent.
+ for (; it != queue->end(); ++it) {
+ int32_t seqNum = (*it)->int32Data();
+ if (seqNum >= source->mHighestNackNumber) {
+ break;
+ }
+ }
+
+ int32_t nackStartAt = -1;
+
+ while (it != queue->end()) {
+ int32_t seqBeforeLast = (*it)->int32Data();
+ // increase iterator.
+ if ((++it) == queue->end()) {
+ break;
+ }
+ int32_t seqLast = (*it)->int32Data();
+
+ if ((seqLast - seqBeforeLast) < 0) {
+ ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+ source->mHighestNackNumber = 0;
+ }
+
+ // missed packet found
+ if (seqLast > (seqBeforeLast + 1) &&
+ // we didn't send RTCP:NACK for this packet yet.
+ (seqLast - 1) > source->mHighestNackNumber) {
+ source->mHighestNackNumber = seqLast - 1;
+ nackStartAt = seqBeforeLast + 1;
+ break;
+ }
+
+ }
+
+ if (nackStartAt != -1) {
+ nackCount = source->mHighestNackNumber - nackStartAt + 1;
+ ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+ nackStartAt, source->mHighestNackNumber);
+
+ uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+ source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+ }
+
+ return nackCount;
+}
+
ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
const sp<ARTPSource> &source) {
List<sp<ABuffer> > *queue = source->queue();
@@ -52,78 +113,54 @@
}
sp<ABuffer> buffer = *queue->begin();
- int32_t rtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
int64_t startTime = source->mFirstSysTime / 1000;
int64_t nowTime = ALooper::GetNowUs() / 1000;
int64_t playedTime = nowTime - startTime;
- int32_t playedTimeRtp = source->mFirstRtpTime +
- (((uint32_t)playedTime) * (source->mClockRate / 1000));
- const int32_t jitterTime = source->mClockRate / 5; // 200ms
- int32_t expiredTimeInJb = rtpTime + jitterTime;
+ int64_t playedTimeRtp =
+ source->mFirstRtpTime + (((uint32_t)playedTime) * (source->mClockRate / 1000));
+ const uint32_t jitterTime =
+ (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
+ uint32_t expiredTimeInJb = rtpTime + jitterTime;
bool isExpired = expiredTimeInJb <= (playedTimeRtp);
bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
if (mShowQueue && mShowQueueCnt < 20) {
showCurrentQueue(queue);
- ALOGD("start=%lld, now=%lld, played=%lld", (long long)startTime,
- (long long)nowTime, (long long)playedTime);
- ALOGD("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
- rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ printNowTimeUs(startTime, nowTime, playedTime);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
mShowQueueCnt++;
}
- ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
- (long long)nowTime, (long long)playedTime);
- ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
- rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ AAVCAssembler::addNack(source);
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
return NOT_ENOUGH_DATA;
}
- if (isTooLate200)
+ if (isTooLate200) {
ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
+ }
if (isTooLate300) {
- ALOGW("buffer arrived too late. 300ms..");
- ALOGW("start=%lld, now=%lld, played=%lld", (long long)startTime,
- (long long)nowTime, (long long)playedTime);
- ALOGW("rtp-time(JB)=%d, plyed-rtp-time(JB)=%d, exp-rtp-time(JB)=%d diff=%lld isExpired=%d",
- rtpTime, playedTimeRtp, expiredTimeInJb,
- ((long long)playedTimeRtp) - expiredTimeInJb, isExpired);
- ALOGW("expected Seq. NO =%d", buffer->int32Data());
+ ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
+ ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+ printNowTimeUs(startTime, nowTime, playedTime);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
- List<sp<ABuffer> >::iterator it = queue->begin();
- while (it != queue->end()) {
- CHECK((*it)->meta()->findInt32("rtp-time", &rtpTime));
- if (rtpTime + jitterTime >= playedTimeRtp) {
- mNextExpectedSeqNo = (*it)->int32Data();
- break;
- }
- it++;
- }
- source->noticeAbandonBuffer();
+ mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
}
if (mNextExpectedSeqNoValid) {
int32_t size = queue->size();
- int32_t cnt = 0;
- List<sp<ABuffer> >::iterator it = queue->begin();
- while (it != queue->end()) {
- if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
- break;
- }
+ int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
- it = queue->erase(it);
- cnt++;
- }
-
- if (cnt > 0) {
- source->noticeAbandonBuffer(cnt);
- ALOGW("delete %d of %d buffers", cnt, size);
+ if (cntRemove > 0) {
+ source->noticeAbandonBuffer(cntRemove);
+ ALOGW("delete %d of %d buffers", cntRemove, size);
}
if (queue->empty()) {
return NOT_ENOUGH_DATA;
@@ -187,12 +224,30 @@
}
}
+void AAVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+ if (buffer->size() == 0) {
+ return;
+ }
+ const uint8_t *data = buffer->data();
+ unsigned nalType = data[0] & 0x1f;
+ if (nalType == 0x5) {
+ mFirstIFrameProvided = true;
+ mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ ALOGD("got First I-frame to be decoded. rtpTime=%u, size=%zu", rtpTime, buffer->size());
+ }
+}
+
void AAVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
ALOGV("addSingleNALUnit of size %zu", buffer->size());
#if !LOG_NDEBUG
hexdump(buffer->data(), buffer->size());
#endif
+ checkIFrameProvided(buffer);
+
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
@@ -280,6 +335,11 @@
size_t totalCount = 1;
bool complete = false;
+ uint32_t rtpTimeStartAt;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+ uint32_t startSeqNo = buffer->int32Data();
+ bool pFrame = nalType == 0x1;
+
if (data[1] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -288,6 +348,8 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
+ int32_t connected = 1;
+ bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -297,26 +359,32 @@
size_t size = buffer->size();
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
- ALOGV("sequence not complete, expected seqNo %d, got %d",
- expectedSeqNo, (uint32_t)buffer->int32Data());
+ ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
+ expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
+ snapped = true;
- return WRONG_SEQUENCE_NUMBER;
+ if (!pFrame) {
+ return WRONG_SEQUENCE_NUMBER;
+ }
}
+ if (!snapped) {
+ connected++;
+ }
+
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
if (size < 2
|| data[0] != indicator
|| (data[1] & 0x1f) != nalType
- || (data[1] & 0x80)) {
+ || (data[1] & 0x80)
+ || rtpTime != rtpTimeStartAt) {
ALOGV("Ignoring malformed FU buffer.");
// Delete the whole start of the FU.
- it = queue->begin();
- for (size_t i = 0; i <= totalCount; ++i) {
- it = queue->erase(it);
- }
-
mNextExpectedSeqNo = expectedSeqNo + 1;
+ deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
return MALFORMED_PACKET;
}
@@ -324,9 +392,17 @@
totalSize += size - 2;
++totalCount;
- expectedSeqNo = expectedSeqNo + 1;
+ expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[1] & 0x40) {
+ if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+ connected, totalCount, 0.5f)) {
+ mNextExpectedSeqNo = expectedSeqNo;
+ deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+ return MALFORMED_PACKET;
+ }
+
// This is the last fragment.
complete = true;
break;
@@ -433,22 +509,78 @@
msg->post();
}
+int32_t AAVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+ sp<ABuffer> buffer = *(queue->begin());
+ uint32_t rtpTime;
+ int32_t nextSeqNo = buffer->int32Data();
+
+ Queue::const_iterator it = queue->begin();
+ while (it != queue->end()) {
+ CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ // if pkt in time exists, that should be the next pivot
+ if (rtpTime + jit >= play) {
+ nextSeqNo = (*it)->int32Data();
+ break;
+ }
+ it++;
+ }
+ return nextSeqNo;
+}
+
+bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+ size_t avail, float goodRatio) {
+ float total = end - start;
+ float valid = connected;
+ float exist = avail;
+ bool isRecycle = (valid / total) >= goodRatio;
+
+ ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+ exist, valid, total, isRecycle);
+
+ return isRecycle;
+}
+
+int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+ int32_t initSize = queue->size();
+ Queue::iterator it = queue->begin();
+ while (it != queue->end()) {
+ if ((uint32_t)(*it)->int32Data() >= seq) {
+ break;
+ }
+ it++;
+ }
+ queue->erase(queue->begin(), it);
+ return initSize - queue->size();
+}
+
+inline void AAVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
+ ALOGD("start=%lld, now=%lld, played=%lld",
+ (long long)start, (long long)now, (long long)play);
+}
+
+inline void AAVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
+ ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
+ rtp, (long long)play, exp, isExp);
+}
+
ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addNALUnit(source);
if (status == MALFORMED_PACKET) {
- mAccessUnitDamaged = true;
+ uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+ if (msecsSinceLastIFrame > 1000) {
+ ALOGV("request FIR to get a new I-Frame, time since "
+ "last I-Frame %llu ms", (unsigned long long)msecsSinceLastIFrame);
+ source->onIssueFIRByAssembler();
+ }
}
return status;
}
void AAVCAssembler::packetLost() {
CHECK(mNextExpectedSeqNoValid);
- ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
-
+ ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
++mNextExpectedSeqNo;
-
- mAccessUnitDamaged = true;
}
void AAVCAssembler::onByeReceived() {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index e19480c..913a868 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -31,6 +31,7 @@
struct AAVCAssembler : public ARTPAssembler {
+ explicit AAVCAssembler(const sp<AMessage> &notify);
+ typedef List<sp<ABuffer> > Queue;
protected:
virtual ~AAVCAssembler();
@@ -45,8 +46,12 @@
bool mNextExpectedSeqNoValid;
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
+ bool mFirstIFrameProvided;
+ uint64_t mLastIFrameProvidedAtMs;
List<sp<ABuffer> > mNALUnits;
+ int32_t addNack(const sp<ARTPSource> &source);
+ void checkIFrameProvided(const sp<ABuffer> &buffer);
AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
void addSingleNALUnit(const sp<ABuffer> &buffer);
AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
@@ -54,6 +59,13 @@
void submitAccessUnit();
+ int32_t pickProperSeq(const Queue *q, uint32_t jit, int64_t play);
+ bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+ size_t avail, float goodRatio);
+ int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
+ void printNowTimeUs(int64_t start, int64_t now, int64_t play);
+ void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+
DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
};
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 93869fb..148a0ba 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#define LOG_TAG "AHEVCAssembler"
#include <utils/Log.h>
@@ -25,6 +25,7 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <include/HevcUtils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <stdint.h>
@@ -46,7 +47,11 @@
mAccessUnitRTPTime(0),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0),
- mAccessUnitDamaged(false) {
+ mAccessUnitDamaged(false),
+ mFirstIFrameProvided(false),
+ mLastIFrameProvidedAtMs(0),
+ mWidth(0),
+ mHeight(0) {
ALOGV("Constructor");
}
@@ -54,6 +59,66 @@
AHEVCAssembler::~AHEVCAssembler() {
}
+int32_t AHEVCAssembler::addNack(
+ const sp<ARTPSource> &source) {
+ List<sp<ABuffer>> *queue = source->queue();
+ int32_t nackCount = 0;
+
+ List<sp<ABuffer> >::iterator it = queue->begin();
+
+ if (it == queue->end()) {
+ return nackCount /* 0 */;
+ }
+
+ uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+ // move to the packet after which RTCP:NACK was sent.
+ for (; it != queue->end(); ++it) {
+ int32_t seqNum = (*it)->int32Data();
+ if (seqNum >= source->mHighestNackNumber) {
+ break;
+ }
+ }
+
+ int32_t nackStartAt = -1;
+
+ while (it != queue->end()) {
+ int32_t seqBeforeLast = (*it)->int32Data();
+ // increase iterator.
+ if ((++it) == queue->end()) {
+ break;
+ }
+
+ int32_t seqLast = (*it)->int32Data();
+
+ if ((seqLast - seqBeforeLast) < 0) {
+ ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+ source->mHighestNackNumber = 0;
+ }
+
+ // missed packet found
+ if (seqLast > (seqBeforeLast + 1) &&
+ // we didn't send RTCP:NACK for this packet yet.
+ (seqLast - 1) > source->mHighestNackNumber) {
+ source->mHighestNackNumber = seqLast -1;
+ nackStartAt = seqBeforeLast + 1;
+ break;
+ }
+
+ }
+
+ if (nackStartAt != -1) {
+ nackCount = source->mHighestNackNumber - nackStartAt + 1;
+ ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+ nackStartAt, source->mHighestNackNumber);
+
+ uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+ source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+ }
+
+ return nackCount;
+}
+
ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
const sp<ARTPSource> &source) {
List<sp<ABuffer> > *queue = source->queue();
@@ -63,33 +128,54 @@
}
sp<ABuffer> buffer = *queue->begin();
- int32_t rtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
+ buffer->meta()->setObject("source", source);
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
int64_t startTime = source->mFirstSysTime / 1000;
int64_t nowTime = ALooper::GetNowUs() / 1000;
int64_t playedTime = nowTime - startTime;
- int32_t playedTimeRtp = source->mFirstRtpTime +
+ int64_t playedTimeRtp = source->mFirstRtpTime +
(((uint32_t)playedTime) * (source->mClockRate / 1000));
- int32_t expiredTimeInJb = rtpTime + (source->mClockRate / 5);
+ const uint32_t jitterTime = (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
+ uint32_t expiredTimeInJb = rtpTime + jitterTime;
bool isExpired = expiredTimeInJb <= (playedTimeRtp);
- ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
- (long long)nowTime, (long long)playedTime);
- ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
- rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
+ bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+
+ if (mShowQueueCnt < 20) {
+ showCurrentQueue(queue);
+ printNowTimeUs(startTime, nowTime, playedTime);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ mShowQueueCnt++;
+ }
+
+ AHEVCAssembler::addNack(source);
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
return NOT_ENOUGH_DATA;
}
- if (mNextExpectedSeqNoValid) {
- List<sp<ABuffer> >::iterator it = queue->begin();
- while (it != queue->end()) {
- if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
- break;
- }
+ if (isTooLate200) {
+ ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
+ }
- it = queue->erase(it);
+ if (isTooLate300) {
+ ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
+ ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+ printNowTimeUs(startTime, nowTime, playedTime);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+ mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+ }
+
+ if (mNextExpectedSeqNoValid) {
+ int32_t size = queue->size();
+ int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+ if (cntRemove > 0) {
+ source->noticeAbandonBuffer(cntRemove);
+ ALOGW("delete %d of %d buffers", cntRemove, size);
}
if (queue->empty()) {
@@ -154,15 +240,74 @@
}
}
+// Detects an in-band resolution change: if this NAL unit is an SPS whose
+// coded dimensions differ from the last known ones, remember the new
+// resolution and require a fresh I-frame before decoding resumes.
+void AHEVCAssembler::checkSpsUpdated(const sp<ABuffer> &buffer) {
+ if (buffer->size() == 0) {
+ return;
+ }
+ const uint8_t *data = buffer->data();
+ HevcParameterSets paramSets;
+ // H.265 NAL unit type is carried in bits 1..6 of the first payload byte.
+ unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+ if (nalType == H265_NALU_SPS) {
+ int32_t width = 0, height = 0;
+ paramSets.FindHEVCDimensions(buffer, &width, &height);
+ ALOGV("existing resolution (%u x %u)", mWidth, mHeight);
+ if (width != mWidth || height != mHeight) {
+ // Resolution changed: any I-frame seen so far no longer matches.
+ mFirstIFrameProvided = false;
+ mWidth = width;
+ mHeight = height;
+ ALOGD("found a new resolution (%u x %u)", mWidth, mHeight);
+ }
+ }
+}
+
+// Records the arrival time of IRAP (random-access point) pictures and
+// logs the very first one, which allows decoding to start.
+void AHEVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *payload = buffer->data();
+    // NAL unit type lives in bits 1..6 of the first payload byte.
+    unsigned naluType = (payload[0] >> 1) & H265_NALU_MASK;
+    // IRAP NAL unit types occupy the range [0x10, 0x17].
+    if (naluType <= 0x0F || naluType >= 0x18) {
+        return;
+    }
+    mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+    if (mFirstIFrameProvided) {
+        return;
+    }
+    mFirstIFrameProvided = true;
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    ALOGD("got First I-frame to be decoded. rtpTime=%d, size=%zu", rtpTime, buffer->size());
+}
+
+// Returns true when this NAL unit is a non-IRAP picture (type < 0x10)
+// arriving before any I-frame has been provided; such frames cannot be
+// decoded and should be discarded.
+bool AHEVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return false;
+    }
+    if (mFirstIFrameProvided) {
+        return false;
+    }
+    const uint8_t *payload = buffer->data();
+    unsigned naluType = (payload[0] >> 1) & H265_NALU_MASK;
+    return naluType < 0x10;
+}
+
void AHEVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
ALOGV("addSingleNALUnit of size %zu", buffer->size());
#if !LOG_NDEBUG
hexdump(buffer->data(), buffer->size());
#endif
+ checkSpsUpdated(buffer);
+ checkIFrameProvided(buffer);
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ if (dropFramesUntilIframe(buffer)) {
+ sp<ARTPSource> source = nullptr;
+ buffer->meta()->findObject("source", (sp<android::RefBase>*)&source);
+ if (source != nullptr) {
+ ALOGD("Issued FIR to get the I-frame");
+ source->onIssueFIRByAssembler();
+ }
+ ALOGD("drop P-frames till an I-frame provided. rtpTime %u", rtpTime);
+ return;
+ }
+
if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
submitAccessUnit();
}
@@ -260,6 +405,11 @@
size_t totalCount = 1;
bool complete = false;
+ uint32_t rtpTimeStartAt;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+ uint32_t startSeqNo = buffer->int32Data();
+ bool pFrame = (nalType < 0x10);
+
if (data[2] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -268,6 +418,8 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
+ int32_t connected = 1;
+ bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -277,26 +429,32 @@
size_t size = buffer->size();
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
- ALOGV("sequence not complete, expected seqNo %d, got %d",
- expectedSeqNo, (uint32_t)buffer->int32Data());
+ ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
+ expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
+ snapped = true;
- return WRONG_SEQUENCE_NUMBER;
+ if (!pFrame) {
+ return WRONG_SEQUENCE_NUMBER;
+ }
}
+ if (!snapped) {
+ connected++;
+ }
+
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
if (size < 3
|| ((data[0] >> 1) & H265_NALU_MASK) != indicator
|| (data[2] & H265_NALU_MASK) != nalType
- || (data[2] & 0x80)) {
+ || (data[2] & 0x80)
+ || rtpTime != rtpTimeStartAt) {
ALOGV("Ignoring malformed FU buffer.");
// Delete the whole start of the FU.
- it = queue->begin();
- for (size_t i = 0; i <= totalCount; ++i) {
- it = queue->erase(it);
- }
-
mNextExpectedSeqNo = expectedSeqNo + 1;
+ deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
return MALFORMED_PACKET;
}
@@ -304,9 +462,16 @@
totalSize += size - 3;
++totalCount;
- expectedSeqNo = expectedSeqNo + 1;
+ expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[2] & 0x40) {
+ if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+ connected, totalCount, 0.5f)) {
+ mNextExpectedSeqNo = expectedSeqNo;
+ deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+ return MALFORMED_PACKET;
+ }
// This is the last fragment.
complete = true;
break;
@@ -335,6 +500,7 @@
unit->data()[1] = tid;
size_t offset = 2;
+ int32_t cvo = -1;
List<sp<ABuffer> >::iterator it = queue->begin();
for (size_t i = 0; i < totalCount; ++i) {
const sp<ABuffer> &buffer = *it;
@@ -345,6 +511,7 @@
#endif
memcpy(unit->data() + offset, buffer->data() + 3, buffer->size() - 3);
+ buffer->meta()->findInt32("cvo", &cvo);
offset += buffer->size() - 3;
it = queue->erase(it);
@@ -352,6 +519,10 @@
unit->setRange(0, totalSize);
+ if (cvo >= 0) {
+ unit->meta()->setInt32("cvo", cvo);
+ }
+
addSingleNALUnit(unit);
ALOGV("successfully assembled a NAL unit from fragments.");
@@ -372,6 +543,7 @@
sp<ABuffer> accessUnit = new ABuffer(totalSize);
size_t offset = 0;
+ int32_t cvo = -1;
for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
it != mNALUnits.end(); ++it) {
memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
@@ -380,6 +552,7 @@
sp<ABuffer> nal = *it;
memcpy(accessUnit->data() + offset, nal->data(), nal->size());
offset += nal->size();
+ nal->meta()->findInt32("cvo", &cvo);
}
CopyTimes(accessUnit, *mNALUnits.begin());
@@ -388,6 +561,9 @@
printf(mAccessUnitDamaged ? "X" : ".");
fflush(stdout);
#endif
+ if (cvo >= 0) {
+ accessUnit->meta()->setInt32("cvo", cvo);
+ }
if (mAccessUnitDamaged) {
accessUnit->meta()->setInt32("damaged", true);
@@ -401,22 +577,80 @@
msg->post();
}
+// Chooses the sequence number to resume assembly from: the first queued
+// packet whose RTP time is still inside the jitter window. Falls back to
+// the head of the queue when every queued packet has already expired.
+int32_t AHEVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+    int32_t candidate = (*queue->begin())->int32Data();
+
+    for (Queue::const_iterator i = queue->begin(); i != queue->end(); ++i) {
+        uint32_t rtpTime;
+        CHECK((*i)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+        // if pkt in time exists, that should be the next pivot
+        if (rtpTime + jit >= play) {
+            candidate = (*i)->int32Data();
+            break;
+        }
+    }
+    return candidate;
+}
+
+// Decides whether a partially received P-frame fragmentation unit is
+// still worth assembling: true when the ratio of consecutively received
+// fragments ("connected") to the expected fragment span reaches goodRatio.
+// NOTE(review): when end == start, total is 0 and valid/total is not a
+// finite value — callers appear to always pass end > start; confirm.
+bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+ size_t avail, float goodRatio) {
+ float total = end - start; // fragments expected in the span
+ float valid = connected; // fragments received consecutively
+ float exist = avail; // fragments on hand (used for logging only)
+ bool isRecycle = (valid / total) >= goodRatio;
+
+ ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+ exist, valid, total, isRecycle);
+
+ return isRecycle;
+}
+
+// Drops every queued packet whose sequence number is below |seq| and
+// returns the number of packets removed.
+int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+    int32_t initSize = queue->size();
+    Queue::iterator firstKept = queue->begin();
+    for (; firstKept != queue->end(); ++firstKept) {
+        if ((uint32_t)(*firstKept)->int32Data() >= seq) {
+            break;
+        }
+    }
+    queue->erase(queue->begin(), firstKept);
+    return initSize - queue->size();
+}
+
+// Debug helper: logs wall-clock bookkeeping (all values in milliseconds).
+inline void AHEVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
+ ALOGD("start=%lld, now=%lld, played=%lld",
+ (long long)start, (long long)now, (long long)play);
+}
+
+// Debug helper: logs jitter-buffer state in RTP clock units.
+inline void AHEVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
+ ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
+ rtp, (long long)play, exp, isExp);
+}
+
+
ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addNALUnit(source);
if (status == MALFORMED_PACKET) {
- mAccessUnitDamaged = true;
+ uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+ if (msecsSinceLastIFrame > 1000) {
+ ALOGV("request FIR to get a new I-Frame, time after "
+ "last I-Frame in %llu ms", (unsigned long long)msecsSinceLastIFrame);
+ source->onIssueFIRByAssembler();
+ }
}
return status;
}
void AHEVCAssembler::packetLost() {
CHECK(mNextExpectedSeqNoValid);
- ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
+ ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
++mNextExpectedSeqNo;
-
- mAccessUnitDamaged = true;
}
void AHEVCAssembler::onByeReceived() {
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index cc20622..16fc1c8 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -31,6 +31,8 @@
struct AHEVCAssembler : public ARTPAssembler {
AHEVCAssembler(const sp<AMessage> ¬ify);
+ typedef List<sp<ABuffer> > Queue;
+
protected:
virtual ~AHEVCAssembler();
@@ -45,8 +47,16 @@
bool mNextExpectedSeqNoValid;
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
+ bool mFirstIFrameProvided;
+ uint64_t mLastIFrameProvidedAtMs;
+ int32_t mWidth;
+ int32_t mHeight;
List<sp<ABuffer> > mNALUnits;
+ int32_t addNack(const sp<ARTPSource> &source);
+ void checkSpsUpdated(const sp<ABuffer> &buffer);
+ void checkIFrameProvided(const sp<ABuffer> &buffer);
+ bool dropFramesUntilIframe(const sp<ABuffer> &buffer);
AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
void addSingleNALUnit(const sp<ABuffer> &buffer);
AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
@@ -54,6 +64,13 @@
void submitAccessUnit();
+ int32_t pickProperSeq(const Queue *queue, uint32_t jit, int64_t play);
+ bool recycleUnit(uint32_t start, uint32_t end, uint32_t conneceted,
+ size_t avail, float goodRatio);
+ int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
+ void printNowTimeUs(int64_t start, int64_t now, int64_t play);
+ void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+
DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 1346c9a..07f9dd3 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -78,7 +78,9 @@
: mFlags(flags),
mPollEventPending(false),
mLastReceiverReportTimeUs(-1),
- mLastBitrateReportTimeUs(-1) {
+ mLastBitrateReportTimeUs(-1),
+ mTargetBitrate(-1),
+ mJbTimeMs(300) {
}
ARTPConnection::~ARTPConnection() {
@@ -129,7 +131,7 @@
unsigned start = (unsigned)((rand()* 1000LL)/RAND_MAX) + 15550;
start &= ~1;
- for (unsigned port = start; port < 65536; port += 2) {
+ for (unsigned port = start; port < 65535; port += 2) {
struct sockaddr_in addr;
memset(addr.sin_zero, 0, sizeof(addr.sin_zero));
addr.sin_family = AF_INET;
@@ -147,6 +149,13 @@
(const struct sockaddr *)&addr, sizeof(addr)) == 0) {
*rtpPort = port;
return;
+ } else {
+ // Recreate the RTP socket after a failed bind, so we do not end up binding a different port on the same socket.
+ close(*rtpSocket);
+
+ *rtpSocket = socket(AF_INET, SOCK_DGRAM, 0);
+ CHECK_GE(*rtpSocket, 0);
+ bumpSocketBufferSize(*rtpSocket);
}
}
@@ -439,6 +448,24 @@
continue;
}
+ // add NACK and FIR that needs to be sent immediately.
+ sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+ for (size_t i = 0; i < it->mSources.size(); ++i) {
+ buffer->setRange(0, 0);
+ int cnt = it->mSources.valueAt(i)->addNACK(buffer);
+ if (cnt > 0) {
+ ALOGV("Send NACK for lost %d Packets", cnt);
+ send(&*it, buffer);
+ }
+
+ buffer->setRange(0, 0);
+ it->mSources.valueAt(i)->addFIR(buffer);
+ if (buffer->size() > 0) {
+ ALOGD("Send FIR immediately for lost Packets");
+ send(&*it, buffer);
+ }
+ }
+
++it;
}
}
@@ -524,8 +551,9 @@
(!receiveRTP && s->mNumRTCPPacketsReceived == 0)
? sizeSockSt : 0;
- if (mFlags & kViLTEConnection)
+ if (mFlags & kViLTEConnection) {
remoteAddrLen = 0;
+ }
ssize_t nbytes;
do {
@@ -1012,8 +1040,12 @@
source = new ARTPSource(
srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
+ if (mFlags & kViLTEConnection) {
+ source->setPeriodicFIR(false);
+ }
+
source->setSelfID(mSelfID);
- source->setMinMaxBitrate(mMinBitrate, mMaxBitrate);
+ source->setJbTime(mJbTimeMs > 0 ? mJbTimeMs : 300);
info->mSources.add(srcId, source);
} else {
source = info->mSources.valueAt(index);
@@ -1033,9 +1065,12 @@
mSelfID = selfID;
}
-void ARTPConnection::setMinMaxBitrate(int32_t min, int32_t max) {
- mMinBitrate = min;
- mMaxBitrate = max;
+void ARTPConnection::setJbTime(const uint32_t jbTimeMs) {
+ mJbTimeMs = jbTimeMs;
+}
+
+void ARTPConnection::setTargetBitrate(int32_t targetBitrate) {
+ mTargetBitrate = targetBitrate;
}
void ARTPConnection::checkRxBitrate(int64_t nowUs) {
@@ -1068,17 +1103,8 @@
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
- source->setBitrateData(bitrate, nowUs);
- source->setTargetBitrate();
- source->addTMMBR(buffer);
- if (source->isNeedToDowngrade()) {
- sp<AMessage> notify = s->mNotifyMsg->dup();
- notify->setInt32("rtcp-event", 1);
- notify->setInt32("payload-type", 400);
- notify->setInt32("feedback-type", 1);
- notify->setInt32("sender", source->getSelfID());
- notify->post();
- }
+ source->notifyPktInfo(bitrate, nowUs);
+ source->addTMMBR(buffer, mTargetBitrate);
}
if (buffer->size() > 0) {
ALOGV("Sending TMMBR...");
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index 712eec5..7c8218f 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -46,7 +46,8 @@
void injectPacket(int index, const sp<ABuffer> &buffer);
void setSelfID(const uint32_t selfID);
- void setMinMaxBitrate(int32_t min, int32_t max);
+ void setJbTime(const uint32_t jbTimeMs);
+ void setTargetBitrate(int32_t targetBitrate);
// Creates a pair of UDP datagram sockets bound to adjacent ports
// (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -85,9 +86,10 @@
int64_t mLastBitrateReportTimeUs;
int32_t mSelfID;
+ int32_t mTargetBitrate;
- int32_t mMinBitrate;
- int32_t mMaxBitrate;
+ uint32_t mJbTimeMs;
+
int32_t mCumulativeBytes;
void onAddStream(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index bbe9d94..6303fc4 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -46,15 +46,21 @@
mFirstRtpTime(0),
mFirstSysTime(0),
mClockRate(0),
+ mJbTimeMs(300), // default jitter buffer time is 300ms.
+ mFirstSsrc(0),
+ mHighestNackNumber(0),
mID(id),
mHighestSeqNumber(0),
mPrevExpected(0),
mBaseSeqNumber(0),
mNumBuffersReceived(0),
mPrevNumBuffersReceived(0),
+ mPrevExpectedForRR(0),
+ mPrevNumBuffersReceivedForRR(0),
mLastNTPTime(0),
mLastNTPTimeUpdateUs(0),
mIssueFIRRequests(false),
+ mIssueFIRByAssembler(false),
mLastFIRRequestUs(-1),
mNextFIRSeqNo((rand() * 256.0) / RAND_MAX),
mNotify(notify) {
@@ -120,20 +126,29 @@
bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
uint32_t seqNum = (uint32_t)buffer->int32Data();
+ int32_t ssrc = 0;
+ buffer->meta()->findInt32("ssrc", &ssrc);
+
if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
- int32_t firstRtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", &firstRtpTime));
+ uint32_t firstRtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
mFirstSysTime = ALooper::GetNowUs();
mHighestSeqNumber = seqNum;
mBaseSeqNumber = seqNum;
mFirstRtpTime = firstRtpTime;
- ALOGV("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u",
- mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber);
+ mFirstSsrc = ssrc;
+ ALOGD("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u, ssrc=%d",
+ mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
mClockRate = 90000;
mQueue.push_back(buffer);
return true;
}
+ if (mFirstSsrc != ssrc) {
+ ALOGW("Discarding a buffer due to unexpected ssrc");
+ return false;
+ }
+
// Only the lower 16-bit of the sequence numbers are transmitted,
// derive the high-order bits by choosing the candidate closest
// to the highest sequence number (extended to 32 bits) received so far.
@@ -196,20 +211,34 @@
}
void ARTPSource::addFIR(const sp<ABuffer> &buffer) {
- if (!mIssueFIRRequests) {
+ if (!mIssueFIRRequests && !mIssueFIRByAssembler) {
return;
}
+ bool send = false;
int64_t nowUs = ALooper::GetNowUs();
- if (mLastFIRRequestUs >= 0 && mLastFIRRequestUs + 5000000LL > nowUs) {
- // Send FIR requests at most every 5 secs.
+ int64_t usecsSinceLastFIR = nowUs - mLastFIRRequestUs;
+ if (mLastFIRRequestUs < 0) {
+ // A first FIR, just send it.
+ send = true;
+ } else if (mIssueFIRByAssembler && (usecsSinceLastFIR > 1000000)) {
+ // A FIR issued by Assembler.
+ // Send it if last FIR is not sent within a sec.
+ send = true;
+ } else if (mIssueFIRRequests && (usecsSinceLastFIR > 5000000)) {
+ // A FIR issued periodically, regardless of packet loss.
+ // Send it if last FIR is not sent within 5 secs.
+ send = true;
+ }
+
+ if (!send) {
return;
}
mLastFIRRequestUs = nowUs;
if (buffer->size() + 20 > buffer->capacity()) {
- ALOGW("RTCP buffer too small to accomodate FIR.");
+ ALOGW("RTCP buffer too small to accommodate FIR.");
return;
}
@@ -218,7 +247,7 @@
data[0] = 0x80 | 4;
data[1] = 206; // PSFB
data[2] = 0;
- data[3] = 4;
+ data[3] = 4; // total (4+1) * sizeof(int32_t) = 20 bytes
data[4] = kSourceID >> 24;
data[5] = (kSourceID >> 16) & 0xff;
data[6] = (kSourceID >> 8) & 0xff;
@@ -240,14 +269,16 @@
data[18] = 0x00;
data[19] = 0x00;
- buffer->setRange(buffer->offset(), buffer->size() + 20);
+ buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+ mIssueFIRByAssembler = false;
ALOGV("Added FIR request.");
}
void ARTPSource::addReceiverReport(const sp<ABuffer> &buffer) {
if (buffer->size() + 32 > buffer->capacity()) {
- ALOGW("RTCP buffer too small to accomodate RR.");
+ ALOGW("RTCP buffer too small to accommodate RR.");
return;
}
@@ -255,16 +286,16 @@
// According to appendix A.3 in RFC 3550
uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
- int64_t intervalExpected = expected - mPrevExpected;
- int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
+ int64_t intervalExpected = expected - mPrevExpectedForRR;
+ int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceivedForRR;
int64_t intervalPacketLost = intervalExpected - intervalReceived;
if (intervalExpected > 0 && intervalPacketLost > 0) {
fraction = (intervalPacketLost << 8) / intervalExpected;
}
- mPrevExpected = expected;
- mPrevNumBuffersReceived = mNumBuffersReceived;
+ mPrevExpectedForRR = expected;
+ mPrevNumBuffersReceivedForRR = mNumBuffersReceived;
int32_t cumulativePacketLost = (int32_t)expected - mNumBuffersReceived;
uint8_t *data = buffer->data() + buffer->size();
@@ -272,7 +303,7 @@
data[0] = 0x80 | 1;
data[1] = 201; // RR
data[2] = 0;
- data[3] = 7;
+ data[3] = 7; // total (7+1) * sizeof(int32_t) = 32 bytes
data[4] = kSourceID >> 24;
data[5] = (kSourceID >> 16) & 0xff;
data[6] = (kSourceID >> 8) & 0xff;
@@ -318,18 +349,18 @@
data[30] = (DLSR >> 8) & 0xff;
data[31] = DLSR & 0xff;
- buffer->setRange(buffer->offset(), buffer->size() + 32);
+ buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
}
-void ARTPSource::addTMMBR(const sp<ABuffer> &buffer) {
+void ARTPSource::addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate) {
if (buffer->size() + 20 > buffer->capacity()) {
ALOGW("RTCP buffer too small to accommodate RR.");
return;
}
- int32_t targetBitrate = mQualManager.getTargetBitrate();
- if (targetBitrate <= 0)
+ if (targetBitrate <= 0) {
return;
+ }
uint8_t *data = buffer->data() + buffer->size();
@@ -363,52 +394,145 @@
data[18] = (mantissa & 0x0007f) << 1;
data[19] = 40; // 40 bytes overhead;
- buffer->setRange(buffer->offset(), buffer->size() + 20);
+ buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+}
+
+// Appends an RTCP Generic NACK packet (RFC 4585 section 6.2.1) carrying up
+// to kMaxFCIs pending loss reports to |buffer|. Returns the number of FCIs
+// written, or -1 when the buffer has no room for a full-sized packet.
+int ARTPSource::addNACK(const sp<ABuffer> &buffer) {
+    constexpr size_t kMaxFCIs = 10;    // max number of FCIs
+    if (buffer->size() + (3 + kMaxFCIs) * sizeof(int32_t) > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate NACK.");
+        return -1;
+    }
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 1;    // Generic NACK
+    data[1] = 205;         // TSFB
+    data[2] = 0;
+    data[3] = 0;           // will be decided later
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    data[8] = mID >> 24;
+    data[9] = (mID >> 16) & 0xff;
+    data[10] = (mID >> 8) & 0xff;
+    data[11] = mID & 0xff;
+
+    List<int> list;
+    List<int>::iterator it;
+    getSeqNumToNACK(list, kMaxFCIs);
+    size_t cnt = 0;
+
+    // Copy the (already network-byte-ordered) FCIs with memcpy: data + 12
+    // is a byte pointer at an arbitrary offset, so a direct int store
+    // through a cast pointer would be an unaligned access — undefined
+    // behavior on strict-alignment architectures.
+    for (it = list.begin(); it != list.end() && cnt < kMaxFCIs; it++) {
+        const int fci = *it;
+        memcpy(data + 12 + cnt * sizeof(int32_t), &fci, sizeof(int32_t));
+        cnt++;
+    }
+
+    data[3] = (3 + cnt) - 1;    // total (3 + #ofFCI) * sizeof(int32_t) byte
+
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    return cnt;
+}
+
+// Collects up to |size| pending NACK entries into |list| as ready-to-send
+// FCI words (PID + BLP already in network byte order) and marks each
+// collected entry as sent. Returns the number of FCIs appended.
+int ARTPSource::getSeqNumToNACK(List<int>& list, int size) {
+ AutoMutex _l(mMapLock);
+ int cnt = 0;
+
+ std::map<uint16_t, infoNACK>::iterator it;
+ for(it = mNACKMap.begin(); it != mNACKMap.end() && cnt < size; it++) {
+ infoNACK &info_it = it->second;
+ if (info_it.needToNACK) {
+ // Mark as sent so the same loss is not re-reported until renewed.
+ info_it.needToNACK = false;
+ // switch LSB to MSB for sending N/W
+ uint32_t FCI;
+ uint8_t *temp = (uint8_t *)&FCI;
+ temp[0] = (info_it.seqNum >> 8) & 0xff;
+ temp[1] = (info_it.seqNum) & 0xff;
+ temp[2] = (info_it.mask >> 8) & 0xff;
+ temp[3] = (info_it.mask) & 0xff;
+
+ list.push_back(FCI);
+ cnt++;
+ }
+ }
+
+ return cnt;
+}
+
+// Registers (or renews) a NACK entry for |seqNum| with its BLP |mask|,
+// tagging it with the current head of the jitter buffer, then prunes
+// entries whose recorded head is more than 100 packets behind the
+// current one (they are too old to be worth retransmitting).
+void ARTPSource::setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum) {
+ AutoMutex _l(mMapLock);
+ infoNACK info = {seqNum, mask, nowJitterHeadSeqNum, true};
+ std::map<uint16_t, infoNACK>::iterator it;
+
+ it = mNACKMap.find(seqNum);
+ if (it != mNACKMap.end()) {
+ infoNACK &info_it = it->second;
+ // renew if (mask or head seq) is changed
+ if ((info_it.mask != mask) || (info_it.nowJitterHeadSeqNum != nowJitterHeadSeqNum)) {
+ info_it = info;
+ }
+ } else {
+ mNACKMap[seqNum] = info;
+ }
+
+ // delete all NACK far from current Jitter's first sequence number
+ it = mNACKMap.begin();
+ while (it != mNACKMap.end()) {
+ infoNACK &info_it = it->second;
+
+ // uint16_t subtraction handles sequence-number wraparound.
+ int diff = nowJitterHeadSeqNum - info_it.nowJitterHeadSeqNum;
+ if (diff > 100) {
+ ALOGV("Delete %d pkt from NACK map ", info_it.seqNum);
+ it = mNACKMap.erase(it);
+ } else {
+ it++;
+ }
+ }
+
+}
uint32_t ARTPSource::getSelfID() {
return kSourceID;
}
+
void ARTPSource::setSelfID(const uint32_t selfID) {
kSourceID = selfID;
}
-void ARTPSource::setMinMaxBitrate(int32_t min, int32_t max) {
- mQualManager.setMinMaxBitrate(min, max);
+void ARTPSource::setJbTime(const uint32_t jbTimeMs) {
+ mJbTimeMs = jbTimeMs;
}
-void ARTPSource::setBitrateData(int32_t bitrate, int64_t time) {
- mQualManager.setBitrateData(bitrate, time);
+void ARTPSource::setPeriodicFIR(bool enable) {
+ ALOGD("setPeriodicFIR %d", enable);
+ mIssueFIRRequests = enable;
}
-void ARTPSource::setTargetBitrate() {
- uint8_t fraction = 0;
+void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t /*time*/) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", 102);
+ notify->setInt32("feedback-type", 0);
+ // sending target bitrate up to application to share rtp quality.
+ notify->setInt32("bit-rate", bitrate);
+ notify->setInt32("highest-seq-num", mHighestSeqNumber);
+ notify->setInt32("base-seq-num", mBaseSeqNumber);
+ notify->setInt32("prev-expected", mPrevExpected);
+ notify->setInt32("num-buf-recv", mNumBuffersReceived);
+ notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+ notify->post();
- // According to appendix A.3 in RFC 3550
uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
- int64_t intervalExpected = expected - mPrevExpected;
- int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
- int64_t intervalPacketLost = intervalExpected - intervalReceived;
-
- ALOGI("UID %p expectedPkts %lld lostPkts %lld", this, (long long)intervalExpected, (long long)intervalPacketLost);
-
- if (intervalPacketLost < 0 || intervalExpected == 0)
- fraction = 0;
- else if (intervalExpected <= intervalPacketLost)
- fraction = 255;
- else
- fraction = (intervalPacketLost << 8) / intervalExpected;
-
- mQualManager.setTargetBitrate(fraction, ALooper::GetNowUs(), intervalExpected < 5);
+ mPrevExpected = expected;
+ mPrevNumBuffersReceived = mNumBuffersReceived;
}
-bool ARTPSource::isNeedToReport() {
- int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
- return (intervalReceived > 0) ? true : false;
-}
-
-bool ARTPSource::isNeedToDowngrade() {
- return mQualManager.isNeedToDowngrade();
+void ARTPSource::onIssueFIRByAssembler() {
+ mIssueFIRByAssembler = true;
}
void ARTPSource::noticeAbandonBuffer(int cnt) {
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index 652e753..ea683a0 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -23,7 +23,9 @@
#include <media/stagefright/foundation/ABase.h>
#include <utils/List.h>
#include <utils/RefBase.h>
-#include <QualManager.h>
+#include <utils/Thread.h>
+
+#include <map>
namespace android {
@@ -46,23 +48,28 @@
void addReceiverReport(const sp<ABuffer> &buffer);
void addFIR(const sp<ABuffer> &buffer);
- void addTMMBR(const sp<ABuffer> &buffer);
+ void addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate);
+ int addNACK(const sp<ABuffer> &buffer);
+ void setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum);
uint32_t getSelfID();
void setSelfID(const uint32_t selfID);
- void setMinMaxBitrate(int32_t min, int32_t max);
- void setBitrateData(int32_t bitrate, int64_t time);
- void setTargetBitrate();
-
- bool isNeedToReport();
- bool isNeedToDowngrade();
+ void setJbTime(const uint32_t jbTimeMs);
+ void setPeriodicFIR(bool enable);
+ void notifyPktInfo(int32_t bitrate, int64_t time);
+ // FIR needs to be sent by missing packet or broken video image.
+ void onIssueFIRByAssembler();
void noticeAbandonBuffer(int cnt=1);
int32_t mFirstSeqNumber;
- int32_t mFirstRtpTime;
+ uint32_t mFirstRtpTime;
int64_t mFirstSysTime;
int32_t mClockRate;
+ uint32_t mJbTimeMs;
+ int32_t mFirstSsrc;
+ int32_t mHighestNackNumber;
+
private:
uint32_t mID;
@@ -71,21 +78,33 @@
uint32_t mBaseSeqNumber;
int32_t mNumBuffersReceived;
int32_t mPrevNumBuffersReceived;
+ uint32_t mPrevExpectedForRR;
+ int32_t mPrevNumBuffersReceivedForRR;
List<sp<ABuffer> > mQueue;
sp<ARTPAssembler> mAssembler;
+ typedef struct infoNACK {
+ uint16_t seqNum;
+ uint16_t mask;
+ uint16_t nowJitterHeadSeqNum;
+ bool needToNACK;
+ } infoNACK;
+
+ Mutex mMapLock;
+ std::map<uint16_t, infoNACK> mNACKMap;
+ int getSeqNumToNACK(List<int>& list, int size);
+
uint64_t mLastNTPTime;
int64_t mLastNTPTimeUpdateUs;
bool mIssueFIRRequests;
+ bool mIssueFIRByAssembler;
int64_t mLastFIRRequestUs;
uint8_t mNextFIRSeqNo;
sp<AMessage> mNotify;
- QualManager mQualManager;
-
bool queuePacket(const sp<ABuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(ARTPSource);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 70d34de..76afb04 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -42,21 +42,24 @@
#define H264_NALU_PFRAME 0x1
#define H265_NALU_MASK 0x3F
-#define H265_NALU_VPS 0x40
-#define H265_NALU_SPS 0x42
-#define H265_NALU_PPS 0x44
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+#define LINK_HEADER_SIZE 14
+#define IP_HEADER_SIZE 20
#define UDP_HEADER_SIZE 8
+#define TCPIP_HEADER_SIZE (LINK_HEADER_SIZE + IP_HEADER_SIZE + UDP_HEADER_SIZE)
#define RTP_HEADER_SIZE 12
-#define RTP_HEADER_EXT_SIZE 1
+#define RTP_HEADER_EXT_SIZE 8
#define RTP_FU_HEADER_SIZE 2
-#define RTP_PAYLOAD_ROOM_SIZE 140
+#define RTP_PAYLOAD_ROOM_SIZE 100 // ROOM size for IPv6 header, ESP and etc.
namespace android {
// static const size_t kMaxPacketSize = 65507; // maximum payload in UDP over IP
-static const size_t kMaxPacketSize = 1500;
+static const size_t kMaxPacketSize = 1280;
static char kCNAME[255] = "someone@somewhere";
static int UniformRand(int limit) {
@@ -67,7 +70,8 @@
: mFlags(0),
mFd(dup(fd)),
mLooper(new ALooper),
- mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+ mReflector(new AHandlerReflector<ARTPWriter>(this)),
+ mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
CHECK_GE(fd, 0);
mIsIPv6 = false;
@@ -117,7 +121,8 @@
: mFlags(0),
mFd(dup(fd)),
mLooper(new ALooper),
- mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+ mReflector(new AHandlerReflector<ARTPWriter>(this)),
+ mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
CHECK_GE(fd, 0);
mIsIPv6 = false;
@@ -126,6 +131,7 @@
mLooper->start();
makeSocketPairAndBind(localIp, localPort, remoteIp , remotePort);
+ mVPSBuf = NULL;
mSPSBuf = NULL;
mPPSBuf = NULL;
@@ -147,6 +153,11 @@
}
ARTPWriter::~ARTPWriter() {
+ if (mVPSBuf != NULL) {
+ mVPSBuf->release();
+ mVPSBuf = NULL;
+ }
+
if (mSPSBuf != NULL) {
mSPSBuf->release();
mSPSBuf = NULL;
@@ -277,12 +288,9 @@
return OK;
}
-// return size of SPS if there is more NAL unit found following to SPS.
-static uint32_t StripStartcode(MediaBufferBase *buffer) {
- uint32_t nalSize = 0;
-
+static void StripStartcode(MediaBufferBase *buffer) {
if (buffer->range_length() < 4) {
- return 0;
+ return;
}
const uint8_t *ptr =
@@ -292,55 +300,129 @@
buffer->set_range(
buffer->range_offset() + 4, buffer->range_length() - 4);
}
-
- ptr = (const uint8_t *)buffer->data() + buffer->range_offset();
-
- if (buffer->range_length() > 0 && (*ptr & H264_NALU_MASK) == H264_NALU_SPS) {
- for (uint32_t i = 1; i + 4 <= buffer->range_length(); i++) {
-
- if (!memcmp(ptr + i, "\x00\x00\x00\x01", 4)) {
- // Now, we found one more NAL unit in the media buffer.
- // Mostly, it will be a PPS.
- nalSize = i;
- ALOGV("SPS found. size=%d", nalSize);
- }
- }
- }
-
- return nalSize;
}
-static void SpsPpsParser(MediaBufferBase *mediaBuffer,
- MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer, uint32_t spsSize) {
+static const uint8_t SPCSize = 4; // Start Prefix Code Size
+static const uint8_t startPrefixCode[SPCSize] = {0, 0, 0, 1};
+static const uint8_t spcKMPidx[SPCSize] = {0, 0, 2, 0};
+static void SpsPpsParser(MediaBufferBase *buffer,
+ MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
- if (mediaBuffer == NULL || mediaBuffer->range_length() < 4)
- return;
+ while (buffer->range_length() > 0) {
+ const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
- if ((*spsBuffer) != NULL) {
- (*spsBuffer)->release();
- (*spsBuffer) = NULL;
+ MediaBufferBase **targetPtr = NULL;
+ if ((*NALPtr & H264_NALU_MASK) == H264_NALU_SPS) {
+ targetPtr = spsBuffer;
+ } else if ((*NALPtr & H264_NALU_MASK) == H264_NALU_PPS) {
+ targetPtr = ppsBuffer;
+ } else {
+ return;
+ }
+ ALOGV("SPS(7) or PPS(8) found. Type %d", *NALPtr & H264_NALU_MASK);
+
+ uint32_t bufferSize = buffer->range_length();
+ MediaBufferBase *&target = *targetPtr;
+ uint32_t i = 0, j = 0;
+ bool isBoundFound = false;
+ for (i = 0; i < bufferSize; i++) {
+ while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+ j = spcKMPidx[j - 1];
+ }
+ if (NALPtr[i] == startPrefixCode[j]) {
+ j++;
+ if (j == SPCSize) {
+ isBoundFound = true;
+ break;
+ }
+ }
+ }
+
+ uint32_t targetSize;
+ if (target != NULL) {
+ target->release();
+ }
+ // note that targetSize is never 0 as the first byte is never part
+ // of a start prefix
+ if (isBoundFound) {
+ targetSize = i - SPCSize + 1;
+ target = MediaBufferBase::Create(targetSize);
+ memcpy(target->data(),
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ targetSize);
+ buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+ buffer->range_length() - targetSize - SPCSize);
+ } else {
+ targetSize = bufferSize;
+ target = MediaBufferBase::Create(targetSize);
+ memcpy(target->data(),
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ targetSize);
+ buffer->set_range(buffer->range_offset() + bufferSize, 0);
+ return;
+ }
}
+}
- if ((*ppsBuffer) != NULL) {
- (*ppsBuffer)->release();
- (*ppsBuffer) = NULL;
- }
+static void VpsSpsPpsParser(MediaBufferBase *buffer,
+ MediaBufferBase **vpsBuffer, MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
- // we got sps/pps but startcode of sps is striped.
- (*spsBuffer) = MediaBufferBase::Create(spsSize);
- memcpy((*spsBuffer)->data(),
- (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
- spsSize);
+ while (buffer->range_length() > 0) {
+ const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+ uint8_t nalType = ((*NALPtr) >> 1) & H265_NALU_MASK;
- int32_t ppsSize = mediaBuffer->range_length() - spsSize - 4 /*startcode*/;
- if (ppsSize > 0) {
- (*ppsBuffer) = MediaBufferBase::Create(ppsSize);
- ALOGV("PPS found. size=%d", (int)ppsSize);
- mediaBuffer->set_range(mediaBuffer->range_offset() + spsSize + 4 /*startcode*/,
- mediaBuffer->range_length() - spsSize - 4 /*startcode*/);
- memcpy((*ppsBuffer)->data(),
- (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
- ppsSize);
+ MediaBufferBase **targetPtr = NULL;
+ if (nalType == H265_NALU_VPS) {
+ targetPtr = vpsBuffer;
+ } else if (nalType == H265_NALU_SPS) {
+ targetPtr = spsBuffer;
+ } else if (nalType == H265_NALU_PPS) {
+ targetPtr = ppsBuffer;
+ } else {
+ return;
+ }
+ ALOGV("VPS(32) SPS(33) or PPS(34) found. Type %d", nalType);
+
+ uint32_t bufferSize = buffer->range_length();
+ MediaBufferBase *&target = *targetPtr;
+ uint32_t i = 0, j = 0;
+ bool isBoundFound = false;
+ for (i = 0; i < bufferSize; i++) {
+ while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+ j = spcKMPidx[j - 1];
+ }
+ if (NALPtr[i] == startPrefixCode[j]) {
+ j++;
+ if (j == SPCSize) {
+ isBoundFound = true;
+ break;
+ }
+ }
+ }
+
+ if (target != NULL) {
+ target->release();
+ }
+ uint32_t targetSize;
+ // note that targetSize is never 0 as the first byte is never part
+ // of a start prefix
+ if (isBoundFound) {
+ targetSize = i - SPCSize + 1;
+ target = MediaBufferBase::Create(targetSize);
+ memcpy(target->data(),
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ targetSize);
+ buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+ buffer->range_length() - targetSize - SPCSize);
+ } else {
+ targetSize = bufferSize;
+ target = MediaBufferBase::Create(targetSize);
+ memcpy(target->data(),
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ targetSize);
+ buffer->set_range(buffer->range_offset() + bufferSize, 0);
+ return;
+ }
}
}
@@ -451,15 +533,17 @@
ALOGV("read buffer of size %zu", mediaBuf->range_length());
if (mMode == H264) {
- uint32_t spsSize = 0;
- if ((spsSize = StripStartcode(mediaBuf)) > 0) {
- SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf, spsSize);
- } else {
+ StripStartcode(mediaBuf);
+ SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf);
+ if (mediaBuf->range_length() > 0) {
sendAVCData(mediaBuf);
}
} else if (mMode == H265) {
StripStartcode(mediaBuf);
- sendHEVCData(mediaBuf);
+ VpsSpsPpsParser(mediaBuf, &mVPSBuf, &mSPSBuf, &mPPSBuf);
+ if (mediaBuf->range_length() > 0) {
+ sendHEVCData(mediaBuf);
+ }
} else if (mMode == H263) {
sendH263Data(mediaBuf);
} else if (mMode == AMR_NB || mMode == AMR_WB) {
@@ -504,11 +588,20 @@
remAddr = (struct sockaddr *)&mRTPAddr;
}
+ // Unseal code if moderator is needed (prevent overflow of instant bandwidth)
+ // Set limit bits per period through the moderator.
+ // ex) 6KByte/10ms = 48KBit/10ms = 4.8MBit/s instant limit
+ // ModerateInstantTraffic(10, 6 * 1024);
+
ssize_t n = sendto(isRTCP ? mRTCPSocket : mRTPSocket,
buffer->data(), buffer->size(), 0, remAddr, sizeSockSt);
if (n != (ssize_t)buffer->size()) {
ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
+ } else {
+ // Record current traffic & Print bits while last 1sec (1000ms)
+ mTrafficRec->writeBytes(buffer->size());
+ mTrafficRec->printAccuBitsForLastPeriod(1000, 1000);
}
#if LOG_TO_FILES
@@ -807,12 +900,13 @@
}
void ARTPWriter::sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+ CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
(const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
- if (mediaBuf->range_length() == 0
- || (mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME)
+ if ((mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME) {
return;
+ }
if (mSPSBuf != NULL) {
mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
@@ -827,6 +921,35 @@
}
}
+void ARTPWriter::sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+ CHECK(mediaBuf->range_length() > 0);
+ const uint8_t *mediaData =
+ (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+ int nalType = ((mediaData[0] >> 1) & H265_NALU_MASK);
+ if (!(nalType >= 16 && nalType <= 21) /*H265_NALU_IFRAME*/) {
+ return;
+ }
+
+ if (mVPSBuf != NULL) {
+ mVPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+ mVPSBuf->meta_data().setInt32(kKeyVps, 1);
+ sendHEVCData(mVPSBuf);
+ }
+
+ if (mSPSBuf != NULL) {
+ mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+ mSPSBuf->meta_data().setInt32(kKeySps, 1);
+ sendHEVCData(mSPSBuf);
+ }
+
+ if (mPPSBuf != NULL) {
+ mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+ mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+ sendHEVCData(mPPSBuf);
+ }
+}
+
void ARTPWriter::sendHEVCData(MediaBufferBase *mediaBuf) {
// 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
CHECK_GE(kMaxPacketSize, 12u + 2u);
@@ -834,21 +957,33 @@
int64_t timeUs;
CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
- sendSPSPPSIfIFrame(mediaBuf, timeUs);
+ sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+ CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
(const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+ int32_t isNonVCL = 0;
+ if (mediaBuf->meta_data().findInt32(kKeyVps, &isNonVCL) ||
+ mediaBuf->meta_data().findInt32(kKeySps, &isNonVCL) ||
+ mediaBuf->meta_data().findInt32(kKeyPps, &isNonVCL)) {
+ isNonVCL = 1;
+ }
+
sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
- if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
- <= buffer->capacity()) {
+ if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+ + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
// The data fits into a single packet
uint8_t *data = buffer->data();
data[0] = 0x80;
- data[1] = (1 << 7) | mPayloadType; // M-bit
+ if (isNonVCL) {
+ data[1] = mPayloadType; // Marker bit should not be set in case of Non-VCL
+ } else {
+ data[1] = (1 << 7) | mPayloadType; // M-bit
+ }
data[2] = (mSeqNo >> 8) & 0xff;
data[3] = mSeqNo & 0xff;
data[4] = rtpTime >> 24;
@@ -881,11 +1016,11 @@
while (offset < mediaBuf->range_length()) {
size_t size = mediaBuf->range_length() - offset;
bool lastPacket = true;
- if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
- RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+ if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+ RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
lastPacket = false;
- size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
- RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+ size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+ RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
}
uint8_t *data = buffer->data();
@@ -963,6 +1098,7 @@
uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+ CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
(const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
@@ -973,9 +1109,10 @@
isSpsPps = true;
}
+ mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
- if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
- <= buffer->capacity()) {
+ if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+ + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
// The data fits into a single packet
uint8_t *data = buffer->data();
data[0] = 0x80;
@@ -1051,11 +1188,11 @@
while (offset < mediaBuf->range_length()) {
size_t size = mediaBuf->range_length() - offset;
bool lastPacket = true;
- if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
- RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+ if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+ RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
lastPacket = false;
- size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
- RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+ size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+ RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
}
uint8_t *data = buffer->data();
@@ -1408,4 +1545,15 @@
}
}
+// TODO : Develop more advanced moderator based on AS & TMMBR value
+void ARTPWriter::ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes) {
+ unsigned int bytes = mTrafficRec->readBytesForLastPeriod(samplePeriod);
+ if (bytes > limitBytes) {
+ ALOGI("Nuclear moderator. #seq = %d \t\t %d bits / 10ms",
+ mSeqNo, bytes * 8);
+ usleep(4000);
+ mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+ }
+}
+
} // namespace android
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index f7e2204..6f25a66 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -28,6 +28,7 @@
#include <sys/socket.h>
#include <android/multinetwork.h>
+#include "TrafficRecorder.h"
#define LOG_TO_FILES 0
@@ -102,6 +103,7 @@
AString mSeqParamSet;
AString mPicParamSet;
+ MediaBufferBase *mVPSBuf;
MediaBufferBase *mSPSBuf;
MediaBufferBase *mPPSBuf;
@@ -116,6 +118,7 @@
uint32_t mOpponentID;
uint32_t mBitrate;
+ sp<TrafficRecorder<uint32_t, size_t> > mTrafficRec;
int32_t mNumSRsSent;
int32_t mRTPCVOExtMap;
@@ -143,6 +146,7 @@
void dumpSessionDesc();
void sendBye();
+ void sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
void sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
void sendHEVCData(MediaBufferBase *mediaBuf);
void sendAVCData(MediaBufferBase *mediaBuf);
@@ -152,6 +156,7 @@
void send(const sp<ABuffer> &buffer, bool isRTCP);
void makeSocketPairAndBind(String8& localIp, int localPort, String8& remoteIp, int remotePort);
+ void ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes);
DISALLOW_EVIL_CONSTRUCTORS(ARTPWriter);
};
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index bb66f4c..c33bf3f 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -329,6 +329,7 @@
mPass.clear();
mAuthType = NONE;
mNonce.clear();
+ mRealm.clear();
mState = DISCONNECTED;
}
@@ -911,6 +912,14 @@
CHECK_GE(j, 0);
mNonce.setTo(value, i + 7, j - i - 7);
+
+ i = value.find("realm=");
+ CHECK_GE(i, 0);
+ CHECK_EQ(value.c_str()[i + 6], '\"');
+ j = value.find("\"", i + 7);
+ CHECK_GE(j, 0);
+
+ mRealm.setTo(value, i + 7, j - i - 7);
}
return true;
@@ -993,7 +1002,7 @@
AString A1;
A1.append(mUser);
A1.append(":");
- A1.append("Streaming Server");
+ A1.append(mRealm);
A1.append(":");
A1.append(mPass);
@@ -1029,6 +1038,9 @@
fragment.append("\", ");
fragment.append("response=\"");
fragment.append(digest);
+ fragment.append("\", ");
+ fragment.append("realm=\"");
+ fragment.append(mRealm);
fragment.append("\"");
fragment.append("\r\n");
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 56b604d..7cdd4c0 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -84,6 +84,7 @@
AString mUser, mPass;
AuthType mAuthType;
AString mNonce;
+ AString mRealm;
int mSocket;
int32_t mConnectionID;
int32_t mNextCSeq;
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index 6179142..f990ecf 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -18,7 +18,6 @@
"ARTSPConnection.cpp",
"ASessionDescription.cpp",
"SDPLoader.cpp",
- "QualManager.cpp",
],
shared_libs: [
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/TrafficRecorder.h
new file mode 100644
index 0000000..f8e7c03
--- /dev/null
+++ b/media/libstagefright/rtsp/TrafficRecorder.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_TRAFFIC_RECORDER_H_
+
+#define A_TRAFFIC_RECORDER_H_
+
+#include <android-base/logging.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+// Circular array to save recent amount of bytes
+template <class Time, class Bytes>
+class TrafficRecorder : public RefBase {
+private:
+ size_t mSize;
+ size_t mSizeMask;
+ Time *mTimeArray = NULL;
+ Bytes *mBytesArray = NULL;
+ size_t mHeadIdx = 0;
+ size_t mTailIdx = 0;
+
+ Time mClock = 0;
+ Time mLastTimeOfPrint = 0;
+ Bytes mAccuBytesOfPrint = 0;
+public:
+ TrafficRecorder();
+ TrafficRecorder(size_t size);
+ virtual ~TrafficRecorder();
+
+ void init();
+
+ void updateClock(Time now);
+
+ Bytes readBytesForLastPeriod(Time period);
+ void writeBytes(Bytes bytes);
+
+ void printAccuBitsForLastPeriod(Time period, Time unit);
+};
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::TrafficRecorder()
+ : TrafficRecorder(128) { // delegate so mSize/mSizeMask and arrays are initialized
+}
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size) {
+ size_t exp;
+ for (exp = 0; exp < 32; exp++) {
+ if (size <= (1ul << exp)) {
+ break;
+ }
+ }
+ mSize = (1ul << exp); // size = 2^exp
+ mSizeMask = mSize - 1;
+
+ LOG(VERBOSE) << "TrafficRecorder Init size " << mSize;
+ mTimeArray = new Time[mSize];
+ mBytesArray = new Bytes[mSize];
+
+ init();
+}
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::~TrafficRecorder() {
+ delete[] mTimeArray;
+ delete[] mBytesArray;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::init() {
+ mHeadIdx = 0;
+ mTailIdx = 0;
+ mTimeArray[0] = 0;
+ mBytesArray[0] = 0;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::updateClock(Time now) {
+ mClock = now;
+}
+
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
+ Bytes bytes = 0;
+
+ size_t i = mTailIdx;
+ while (i != mHeadIdx) {
+ LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i] << " \t EndOfPeriod " << mClock - period;
+ if (mTimeArray[i] < mClock - period) {
+ break;
+ }
+ bytes += mBytesArray[i];
+ i = (i + mSize - 1) & mSizeMask;
+ }
+ mHeadIdx = i;
+ return bytes;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::writeBytes(Bytes bytes) {
+ size_t writeIdx;
+ if (mClock == mTimeArray[mTailIdx]) {
+ writeIdx = mTailIdx;
+ mBytesArray[writeIdx] += bytes;
+ } else {
+ writeIdx = (mTailIdx + 1) % mSize;
+ mTimeArray[writeIdx] = mClock;
+ mBytesArray[writeIdx] = bytes;
+ }
+
+ LOG(VERBOSE) << "WRITE " << writeIdx << " time " << mClock;
+ if (writeIdx == mHeadIdx) {
+ LOG(WARNING) << "Traffic recorder size exceeded at " << mHeadIdx;
+ mHeadIdx = (mHeadIdx + 1) & mSizeMask;
+ }
+
+ mTailIdx = writeIdx;
+ mAccuBytesOfPrint += bytes;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::printAccuBitsForLastPeriod(Time period, Time unit) {
+ Time duration = mClock - mLastTimeOfPrint;
+ float numOfUnit = (float)duration / unit;
+ if (duration > period) {
+ ALOGD("Actual Tx period %.0f ms \t %.0f Bits/Unit",
+ numOfUnit * 1000.f, mAccuBytesOfPrint * 8.f / numOfUnit);
+ mLastTimeOfPrint = mClock;
+ mAccuBytesOfPrint = 0;
+ init();
+ }
+}
+
+} // namespace android
+
+#endif // A_TRAFFIC_RECORDER_H_
diff --git a/media/libstagefright/tests/ESDS/Android.bp b/media/libstagefright/tests/ESDS/Android.bp
new file mode 100644
index 0000000..1ad1a64
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "ESDSTest",
+ gtest: true,
+
+ srcs: [
+ "ESDSTest.cpp",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "libdatasource",
+ "liblog",
+ "libmedia",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ static_libs: [
+ "libstagefright_esds",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libstagefright/tests/ESDS/AndroidTest.xml b/media/libstagefright/tests/ESDS/AndroidTest.xml
new file mode 100644
index 0000000..a4fbc7f
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Test module config for ESDS unit test">
+ <option name="test-suite-tag" value="ESDSTest" />
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="ESDSTest->/data/local/tmp/ESDSTest" />
+ <option name="push-file"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip?unzip=true"
+ value="/data/local/tmp/ESDSTestRes/" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="ESDSTest" />
+ <option name="native-test-flag" value="-P /data/local/tmp/ESDSTestRes/" />
+ </test>
+</configuration>
diff --git a/media/libstagefright/tests/ESDS/ESDSTest.cpp b/media/libstagefright/tests/ESDS/ESDSTest.cpp
new file mode 100644
index 0000000..101e00c
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTest.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ESDSTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <fstream>
+
+#include <ESDS.h>
+#include <binder/ProcessState.h>
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+
+#include "ESDSTestEnvironment.h"
+
+using namespace android;
+
+static ESDSTestEnvironment *gEnv = nullptr;
+
+struct ESDSParams {
+ const char *inputFile;
+ int32_t objectTypeIndication;
+ const char *codecSpecificInfoData;
+ int32_t codecSpecificInfoDataSize;
+ int32_t bitrateMax;
+ int32_t bitrateAvg;
+};
+
+class ESDSUnitTest : public ::testing::TestWithParam<tuple<
+ /* InputFile */ const char *,
+ /* ObjectTypeIndication */ int32_t,
+ /* CodecSpecificInfoData */ const char *,
+ /* CodecSpecificInfoDataSize */ int32_t,
+ /* BitrateMax */ int32_t,
+ /* BitrateAvg */ int32_t>> {
+ public:
+ ESDSUnitTest() : mESDSData(nullptr) {
+ mESDSParams.inputFile = get<0>(GetParam());
+ mESDSParams.objectTypeIndication = get<1>(GetParam());
+ mESDSParams.codecSpecificInfoData = get<2>(GetParam());
+ mESDSParams.codecSpecificInfoDataSize = get<3>(GetParam());
+ mESDSParams.bitrateMax = get<4>(GetParam());
+ mESDSParams.bitrateAvg = get<5>(GetParam());
+ };
+
+ virtual void TearDown() override {
+ if (mDataSource) mDataSource.clear();
+ if (mInputFp) {
+ fclose(mInputFp);
+ mInputFp = nullptr;
+ }
+ }
+
+ virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
+ const void *mESDSData;
+ size_t mESDSSize;
+ ESDSParams mESDSParams;
+
+ private:
+ void readESDSData() {
+ string inputFile = gEnv->getRes() + mESDSParams.inputFile;
+ mInputFp = fopen(inputFile.c_str(), "rb");
+ ASSERT_NE(mInputFp, nullptr) << "File open failed for file: " << inputFile;
+ int32_t fd = fileno(mInputFp);
+ ASSERT_GE(fd, 0) << "File descriptor invalid for file: " << inputFile;
+
+ struct stat buf;
+ status_t status = stat(inputFile.c_str(), &buf);
+ ASSERT_EQ(status, 0) << "Failed to get properties of input file: " << mESDSParams.inputFile;
+ size_t fileSize = buf.st_size;
+
+ mDataSource = new FileSource(dup(fd), 0, fileSize);
+ ASSERT_NE(mDataSource, nullptr) << "Unable to create data source for file: " << inputFile;
+
+ sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(mDataSource);
+ if (extractor == nullptr) {
+ mDataSource.clear();
+ ASSERT_TRUE(false) << "Unable to create extractor for file: " << inputFile;
+ }
+
+ size_t numTracks = extractor->countTracks();
+ ASSERT_GT(numTracks, 0) << "No tracks in file: " << inputFile;
+ ASSERT_TRUE(esdsDataPresent(numTracks, extractor))
+ << "Unable to find esds in any track in file: " << inputFile;
+ }
+
+ bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
+ bool foundESDS = false;
+ uint32_t type;
+ for (size_t i = 0; i < numTracks; ++i) {
+ sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+ if (trackMeta != nullptr &&
+ trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+ trackMeta->clear();
+ foundESDS = true;
+ break;
+ }
+ }
+ return foundESDS;
+ }
+
+ FILE *mInputFp;
+ sp<DataSource> mDataSource;
+};
+
+TEST_P(ESDSUnitTest, InvalidDataTest) {
+ void *invalidData = calloc(mESDSSize, 1);
+ ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+ ESDS esds(invalidData, mESDSSize);
+ free(invalidData);
+ ASSERT_NE(esds.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST(ESDSSanityUnitTest, ConstructorSanityTest) {
+ void *invalidData = malloc(1);
+ ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+ ESDS esds_zero(invalidData, 0);
+ free(invalidData);
+ ASSERT_NE(esds_zero.InitCheck(), OK) << "invalid ESDS data accepted";
+
+ ESDS esds_null(NULL, 0);
+ ASSERT_NE(esds_null.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST_P(ESDSUnitTest, CreateAndDestroyTest) {
+ ESDS esds(mESDSData, mESDSSize);
+ ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+}
+
+TEST_P(ESDSUnitTest, ObjectTypeIndicationTest) {
+ ESDS esds(mESDSData, mESDSSize);
+ ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+ uint8_t objectTypeIndication;
+ status_t status = esds.getObjectTypeIndication(&objectTypeIndication);
+ ASSERT_EQ(status, OK) << "ESDS objectTypeIndication data invalid";
+ ASSERT_EQ(objectTypeIndication, mESDSParams.objectTypeIndication)
+ << "ESDS objectTypeIndication data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, CodecSpecificInfoTest) {
+ ESDS esds(mESDSData, mESDSSize);
+ ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+ status_t status;
+ const void *codecSpecificInfo;
+ size_t codecSpecificInfoSize;
+ status = esds.getCodecSpecificInfo(&codecSpecificInfo, &codecSpecificInfoSize);
+ ASSERT_EQ(status, OK) << "ESDS getCodecSpecificInfo data invalid";
+ ASSERT_EQ(mESDSParams.codecSpecificInfoDataSize, codecSpecificInfoSize)
+ << "CodecSpecificInfo data doesn't match";
+ status = memcmp(codecSpecificInfo, mESDSParams.codecSpecificInfoData, codecSpecificInfoSize);
+ ASSERT_EQ(status, 0) << "CodecSpecificInfo data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, GetBitrateTest) {
+ ESDS esds(mESDSData, mESDSSize);
+ ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+ uint32_t bitrateMax;
+ uint32_t bitrateAvg;
+ status_t status = esds.getBitRate(&bitrateMax, &bitrateAvg);
+ ASSERT_EQ(status, OK) << "ESDS bitRate data invalid";
+ ASSERT_EQ(bitrateMax, mESDSParams.bitrateMax) << "ESDS bitrateMax doesn't match";
+ ASSERT_EQ(bitrateAvg, mESDSParams.bitrateAvg) << "ESDS bitrateAvg doesn't match";
+ ASSERT_LE(bitrateAvg, bitrateMax) << "ESDS bitrateMax is less than bitrateAvg";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ ESDSUnitTestAll, ESDSUnitTest,
+ ::testing::Values(
+ // InputFile, ObjectTypeIndication, CodecSpecificInfoData,
+ // CodecSpecificInfoDataSize, BitrateMax, BitrateAvg
+ make_tuple("video_176x144_3gp_h263_56kbps_12fps_aac_stereo_128kbps_22050hz.3gp", 64,
+ "\x13\x90", 2, 131072, 0),
+ make_tuple("video_1280x720_mp4_mpeg2_3000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+ 97,
+ "\x00\x00\x01\xB3\x50\x02\xD0\x35\xFF\xFF\xE1\xA0\x00\x00\x01\xB5\x15"
+ "\x6A\x00\x01\x00\x00",
+ 22, 3415452, 3415452),
+ make_tuple("video_176x144_3gp_h263_56kbps_25fps_aac_mono_24kbps_11025hz.3gp", 64,
+ "\x15\x08", 2, 24576, 0)));
+
+int main(int argc, char **argv) {
+ // MediaExtractor needs binder thread pool
+ ProcessState::self()->startThreadPool();
+ gEnv = new ESDSTestEnvironment();
+ ::testing::AddGlobalTestEnvironment(gEnv);
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = gEnv->initFromOptions(argc, argv);
+ if (status == 0) {
+ status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ }
+ return status;
+}
diff --git a/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
new file mode 100644
index 0000000..4ca2303
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __ESDS_TEST_ENVIRONMENT_H__
+#define __ESDS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ESDSTestEnvironment : public ::testing::Environment {
+ public:
+ ESDSTestEnvironment() : res("/data/local/tmp/") {}
+
+ // Parses the command line arguments
+ int initFromOptions(int argc, char **argv);
+
+ void setRes(const char *_res) { res = _res; }
+
+ const string getRes() const { return res; }
+
+ private:
+ string res;
+};
+
+int ESDSTestEnvironment::initFromOptions(int argc, char **argv) {
+ static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+ while (true) {
+ int index = 0;
+ int c = getopt_long(argc, argv, "P:", options, &index);
+ if (c == -1) {
+ break;
+ }
+
+ switch (c) {
+ case 'P':
+ setRes(optarg);
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (optind < argc) {
+ fprintf(stderr,
+ "unrecognized option: %s\n\n"
+ "usage: %s <gtest options> <test options>\n\n"
+ "test options are:\n\n"
+ "-P, --path: Resource files directory location\n",
+ argv[optind ?: 1], argv[0]);
+ return 2;
+ }
+ return 0;
+}
+
+#endif // __ESDS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/ESDS/README.md b/media/libstagefright/tests/ESDS/README.md
new file mode 100644
index 0000000..100fb86
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### ESDS Unit Test :
+The ESDS Unit Test Suite validates the ESDS class available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m ESDSTest
+```
+
+The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
+
+To test 64-bit binary push binaries from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+To test 32-bit binary push binaries from nativetest.
+```
+adb push ${OUT}/data/nativetest/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip).
+Download, unzip and push these files to the device for testing.
+
+```
+adb push ESDSTestRes /data/local/tmp/
+```
+
+usage: ESDSTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ESDSTest -P /data/local/tmp/ESDSTestRes/
+```
+Alternatively, the test can also be run using atest command.
+
+```
+atest ESDSTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
index 7a6b959..3762553 100644
--- a/media/libstagefright/tests/HEVC/Android.bp
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -16,6 +16,7 @@
cc_test {
name: "HEVCUtilsUnitTest",
+ test_suites: ["device-tests"],
gtest: true,
srcs: [
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
new file mode 100644
index 0000000..49ff69a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -0,0 +1,53 @@
+// Compiler flags and shared libraries common to every libstagefright fuzz
+// target defined below.
+cc_defaults {
+ name: "libstagefright_fuzzer_defaults",
+ cflags: [
+ "-Wno-multichar",
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ "-Wall",
+ ],
+ shared_libs: [
+ "libstagefright",
+ "libstagefright_codecbase",
+ "libutils",
+ "libstagefright_foundation",
+ "libmedia",
+ "libaudioclient",
+ "libmedia_omx",
+ "libgui",
+ "libbinder",
+ "libcutils",
+ ],
+}
+
+// Fuzzer for MediaClock (MediaClockFuzzer.cpp).
+cc_fuzz {
+ name: "libstagefright_mediaclock_fuzzer",
+ srcs: [
+ "MediaClockFuzzer.cpp",
+ ],
+ defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+// Fuzzer for StagefrightMediaScanner (StagefrightMediaScannerFuzzer.cpp).
+cc_fuzz {
+ name: "libstagefright_mediascanner_fuzzer",
+ srcs: [
+ "StagefrightMediaScannerFuzzer.cpp",
+ ],
+ defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+// Fuzzer for SkipCutBuffer (SkipCutBufferFuzzer.cpp).
+cc_fuzz {
+ name: "libstagefright_skipcutbuffer_fuzzer",
+ srcs: [
+ "SkipCutBufferFuzzer.cpp",
+ ],
+ defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+// Fuzzer for MediaMuxer (MediaMuxerFuzzer.cpp).
+cc_fuzz {
+ name: "libstagefright_mediamuxer_fuzzer",
+ srcs: [
+ "MediaMuxerFuzzer.cpp",
+ ],
+ defaults: ["libstagefright_fuzzer_defaults"],
+}
diff --git a/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
new file mode 100644
index 0000000..e473541
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+// dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+// Fuzz entry point: drives MediaClock's public API with fuzzer-chosen
+// operations and arguments until the input bytes are exhausted.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ sp<MediaClock> mClock(new MediaClock);
+
+ // Guard so the fuzzer calls init() at most once per run.
+ bool registered = false;
+ while (fdp.remaining_bytes() > 0) {
+ switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 5)) {
+ case 0: {
+ if (registered == false) {
+ mClock->init();
+ registered = true;
+ }
+ break;
+ }
+ case 1: {
+ int64_t startingTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+ mClock->setStartingTimeMedia(startingTimeMediaUs);
+ break;
+ }
+ case 2: {
+ mClock->clearAnchor();
+ break;
+ }
+ case 3: {
+ int64_t anchorTimeRealUs = fdp.ConsumeIntegral<int64_t>();
+ int64_t anchorTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+ int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+ mClock->updateAnchor(anchorTimeMediaUs, anchorTimeRealUs, maxTimeMediaUs);
+ break;
+ }
+ case 4: {
+ int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+ mClock->updateMaxTimeMedia(maxTimeMediaUs);
+ break;
+ }
+ case 5: {
+ wp<AMessage> msg(new AMessage);
+ mClock->setNotificationMessage(msg.promote());
+ // Last case: no break needed, falls out of the switch.
+ }
+ }
+ }
+
+ return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
new file mode 100644
index 0000000..5df3267
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+// dylan.katz@leviathansecurity.com
+
+#include <MediaMuxerFuzzer.h>
+#include <cutils/ashmem.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaMuxer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+// Can't seem to get setBuffer or setString working. It always segfaults on a
+// null pointer read or memleaks. So that functionality is missing.
+//
+// Populates |msg| by invoking up to 32 randomly chosen setters from
+// amessage_setvals, stopping early if the fuzzer input runs dry.
+void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
+    const size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
+    for (size_t i = 0; i < count && fdp->remaining_bytes() > 0; ++i) {
+        const uint8_t function_id =
+            fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
+        amessage_setvals[function_id](msg, fdp);
+    }
+}
+
+// Fuzz entry point: creates a MediaMuxer over an ashmem-backed fd and drives
+// its public API (addTrack/start/setOrientationHint/setLocation/
+// writeSampleData) with fuzzer-chosen arguments.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
+    int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
+    if (fd < 0)
+        return 0;
+
+    uint8_t *sh_data = static_cast<uint8_t *>(
+        mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
+    if (sh_data == MAP_FAILED) {
+        close(fd);  // Fix: the ashmem fd was leaked on this early-exit path.
+        return 0;
+    }
+
+    MediaMuxer::OutputFormat format =
+        (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+    sp<MediaMuxer> mMuxer(new MediaMuxer(fd, format));
+
+    while (fdp.remaining_bytes() > 1) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+            case 0: {
+                // For some reason it only likes mp4s here...
+                if (format == 1 || format == 4)
+                    break;
+
+                sp<AMessage> a_format(new AMessage);
+                createMessage(a_format.get(), &fdp);
+                mMuxer->addTrack(a_format);
+                break;
+            }
+            case 1: {
+                mMuxer->start();
+                break;
+            }
+            case 2: {
+                int degrees = fdp.ConsumeIntegral<int>();
+                mMuxer->setOrientationHint(degrees);
+                break;
+            }
+            case 3: {
+                int latitude = fdp.ConsumeIntegral<int>();
+                int longitude = fdp.ConsumeIntegral<int>();
+                mMuxer->setLocation(latitude, longitude);
+                break;
+            }
+            case 4: {
+                size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
+                sp<ABuffer> a_buffer(new ABuffer(buf_size));
+
+                size_t trackIndex = fdp.ConsumeIntegral<size_t>();
+                int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+                uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
+                mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
+                break;  // Added for consistency with the other cases.
+            }
+        }
+    }
+
+    if (fdp.ConsumeBool())
+        mMuxer->stop();
+
+    // NOTE(review): sh_data is mapped but never written to by this fuzzer;
+    // the muxer only uses the fd — confirm whether the mapping is needed.
+    munmap(sh_data, data_size);
+    close(fd);
+    return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
new file mode 100644
index 0000000..7d4421d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+// dylan.katz@leviathansecurity.com
+
+#pragma once
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+// Mappings vectors are the list of attributes that the MediaMuxer
+// class looks for in the message.
+// Keys MediaMuxer reads as float values.
+static std::vector<const char *> floatMappings{
+ "capture-rate",
+ "time-lapse-fps",
+ "frame-rate",
+};
+
+// Keys MediaMuxer reads as int64 values.
+static std::vector<const char *> int64Mappings{
+ "exif-offset", "exif-size", "target-time",
+ "thumbnail-time", "timeUs", "durationUs",
+};
+
+// Keys MediaMuxer reads as int32 values.
+// NOTE(review): "max-bitrate" appears twice in this list, which slightly
+// biases the fuzzer toward that key — presumably unintentional; confirm.
+static std::vector<const char *> int32Mappings{"loop",
+ "time-scale",
+ "crypto-mode",
+ "crypto-default-iv-size",
+ "crypto-encrypted-byte-block",
+ "crypto-skip-byte-block",
+ "frame-count",
+ "max-bitrate",
+ "pcm-big-endian",
+ "temporal-layer-count",
+ "temporal-layer-id",
+ "thumbnail-width",
+ "thumbnail-height",
+ "track-id",
+ "valid-samples",
+ "color-format",
+ "ca-system-id",
+ "is-sync-frame",
+ "bitrate",
+ "max-bitrate",
+ "width",
+ "height",
+ "sar-width",
+ "sar-height",
+ "display-width",
+ "display-height",
+ "is-default",
+ "tile-width",
+ "tile-height",
+ "grid-rows",
+ "grid-cols",
+ "rotation-degrees",
+ "channel-count",
+ "sample-rate",
+ "bits-per-sample",
+ "channel-mask",
+ "encoder-delay",
+ "encoder-padding",
+ "is-adts",
+ "frame-rate",
+ "max-height",
+ "max-width",
+ "max-input-size",
+ "haptic-channel-count",
+ "pcm-encoding",
+ "aac-profile"};
+
+// Table of AMessage setter wrappers; createMessage() picks entries at random.
+// Each wrapper fills one attribute (rect/float/int64/int32) with fuzzed data.
+static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
+ amessage_setvals = {
+ [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+ msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
+ fdp->ConsumeIntegral<int32_t>(),
+ fdp->ConsumeIntegral<int32_t>(),
+ fdp->ConsumeIntegral<int32_t>());
+ },
+ [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+ msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
+ 0, floatMappings.size() - 1)],
+ fdp->ConsumeFloatingPoint<float>());
+ },
+ [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+ msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
+ 0, int64Mappings.size() - 1)],
+ fdp->ConsumeIntegral<int64_t>());
+ },
+ [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+ msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
+ 0, int32Mappings.size() - 1)],
+ fdp->ConsumeIntegral<int32_t>());
+ }};
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
new file mode 100644
index 0000000..1f78e6d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+// dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+// Fuzz entry point: constructs a SkipCutBuffer with arbitrary skip/cut/
+// channel parameters and submits buffers of each supported type
+// (MediaCodecBuffer, MediaBuffer, ABuffer) with fuzzer-chosen ranges.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ size_t skip = fdp.ConsumeIntegral<size_t>();
+ size_t cut = fdp.ConsumeIntegral<size_t>();
+ size_t num16Channels = fdp.ConsumeIntegral<size_t>();
+ sp<SkipCutBuffer> sBuffer(new SkipCutBuffer(skip, cut, num16Channels));
+
+ while (fdp.remaining_bytes() > 0) {
+ // Cap size to 1024 to limit max amount allocated.
+ size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, 1024);
+ // (range, length) is always a valid window: range <= buf_size and
+ // length <= buf_size - range.
+ size_t range = fdp.ConsumeIntegralInRange<size_t>(0, buf_size);
+ size_t length = fdp.ConsumeIntegralInRange<size_t>(0, buf_size - range);
+
+ switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+ case 0: {
+ sp<ABuffer> a_buffer(new ABuffer(buf_size));
+ sp<AMessage> format(new AMessage);
+ sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+ s_buffer->setRange(range, length);
+ sBuffer->submit(s_buffer);
+ break;
+ }
+ case 1: {
+ std::unique_ptr<MediaBufferBase> m_buffer(new MediaBuffer(buf_size));
+ m_buffer->set_range(range, length);
+ sBuffer->submit(reinterpret_cast<MediaBuffer *>(m_buffer.get()));
+ break;
+ }
+ case 2: {
+ sp<ABuffer> a_buffer(new ABuffer(buf_size));
+ sp<AMessage> format(new AMessage);
+ sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+ a_buffer->setRange(range, length);
+ sBuffer->submit(a_buffer);
+ break;
+ }
+ case 3: {
+ sBuffer->clear();
+ break;
+ }
+ case 4: {
+ sBuffer->size();
+ // Last case: no break needed, falls out of the switch.
+ }
+ }
+ }
+ return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
new file mode 100644
index 0000000..a072b7c
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+// dylan.katz@leviathansecurity.com
+
+#include <cutils/ashmem.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/StagefrightMediaScanner.h>
+
+#include <cstdio>
+
+namespace android {
+// No-op MediaScannerClient: accepts every callback and reports success (0)
+// so the fuzzer can drive StagefrightMediaScanner::processFile() without
+// any real client-side handling.
+class FuzzMediaScannerClient : public MediaScannerClient {
+public:
+ virtual status_t scanFile(const char *, long long, long long, bool, bool) {
+ return 0;
+ }
+
+ virtual status_t handleStringTag(const char *, const char *) { return 0; }
+
+ virtual status_t setMimeType(const char *) { return 0; }
+};
+
+// Fuzz entry point: exercises StagefrightMediaScanner::processFile() with
+// random paths/MIME types, and extractAlbumArt() against an ashmem-backed fd
+// filled with fuzzed bytes.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    StagefrightMediaScanner mScanner = StagefrightMediaScanner();
+    // Without this, the fuzzer crashes for some reason.
+    mScanner.setLocale("");
+
+    size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
+    int fd =
+        ashmem_create_region("stagefrightmediascanner_fuzz_region", data_size);
+    if (fd < 0)
+        return 0;
+
+    uint8_t *sh_data = static_cast<uint8_t *>(
+        mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
+    if (sh_data == MAP_FAILED) {
+        close(fd);  // Fix: the ashmem fd was leaked on this early-exit path.
+        return 0;
+    }
+
+    while (fdp.remaining_bytes() > 8) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 1)) {
+            case 0: {
+                std::string path = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::string mimeType =
+                    fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::shared_ptr<MediaScannerClient> client(new FuzzMediaScannerClient());
+                mScanner.processFile(path.c_str(), mimeType.c_str(), *client);
+                break;
+            }
+            case 1: {
+                size_t to_copy = fdp.ConsumeIntegralInRange<size_t>(1, data_size);
+                std::vector<uint8_t> rand_buf = fdp.ConsumeBytes<uint8_t>(to_copy);
+
+                // If fdp doesn't have enough bytes left it will just make a
+                // shorter vector.
+                to_copy = std::min(rand_buf.size(), data_size);
+
+                // Fix: copy the fuzzed bytes INTO the shared region so that
+                // extractAlbumArt(fd) actually sees them. The original copied
+                // from sh_data into rand_buf, leaving the region untouched.
+                std::copy(rand_buf.begin(), rand_buf.begin() + to_copy, sh_data);
+                mScanner.extractAlbumArt(fd);
+            }
+        }
+    }
+
+    munmap(sh_data, data_size);
+    close(fd);
+    return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
new file mode 100644
index 0000000..0bd0639
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit tests for MediaCodec (MediaCodecTest.cpp, helpers in
+// MediaTestHelper.cpp); runs as part of general-tests.
+cc_test {
+ name: "mediacodecTest",
+ gtest: true,
+
+ srcs: [
+ "MediaCodecTest.cpp",
+ "MediaTestHelper.cpp",
+ ],
+
+ header_libs: [
+ "libmediadrm_headers",
+ ],
+
+ shared_libs: [
+ "libgui",
+ "libmedia",
+ "libmedia_codeclist",
+ "libmediametrics",
+ "libmediandk",
+ "libstagefright",
+ "libstagefright_codecbase",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ static_libs: [
+ "libgmock",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+
+ test_suites: [
+ "general-tests",
+ ],
+}
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
new file mode 100644
index 0000000..d00a50f
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <future>
+#include <thread>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <gui/Surface.h>
+#include <mediadrm/ICrypto.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+#include <media/MediaCodecInfo.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+// gmock stub of BufferChannelBase so MediaCodec can be exercised without a
+// real buffer channel; individual tests configure expectations as needed.
+class MockBufferChannel : public BufferChannelBase {
+public:
+ ~MockBufferChannel() override = default;
+
+ MOCK_METHOD(void, setCrypto, (const sp<ICrypto> &crypto), (override));
+ MOCK_METHOD(void, setDescrambler, (const sp<IDescrambler> &descrambler), (override));
+ MOCK_METHOD(status_t, queueInputBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+ MOCK_METHOD(status_t, queueSecureInputBuffer,
+ (const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ AString *errorDetailMsg),
+ (override));
+ MOCK_METHOD(status_t, attachBuffer,
+ (const std::shared_ptr<C2Buffer> &c2Buffer, const sp<MediaCodecBuffer> &buffer),
+ (override));
+ MOCK_METHOD(status_t, attachEncryptedBuffer,
+ (const sp<hardware::HidlMemory> &memory,
+ bool secure,
+ const uint8_t *key,
+ const uint8_t *iv,
+ CryptoPlugin::Mode mode,
+ CryptoPlugin::Pattern pattern,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const sp<MediaCodecBuffer> &buffer),
+ (override));
+ MOCK_METHOD(status_t, renderOutputBuffer,
+ (const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
+ (override));
+ MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+ MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+ MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+};
+
+// gmock stub of CodecBase. The constructor takes a setup callback that lets
+// each test configure expectations on the owned MockBufferChannel before the
+// codec is used.
+class MockCodec : public CodecBase {
+public:
+ MockCodec(std::function<void(const std::shared_ptr<MockBufferChannel> &)> mock) {
+ mMockBufferChannel = std::make_shared<MockBufferChannel>();
+ mock(mMockBufferChannel);
+ }
+ ~MockCodec() override = default;
+
+ MOCK_METHOD(void, initiateAllocateComponent, (const sp<AMessage> &msg), (override));
+ MOCK_METHOD(void, initiateConfigureComponent, (const sp<AMessage> &msg), (override));
+ MOCK_METHOD(void, initiateCreateInputSurface, (), (override));
+ MOCK_METHOD(void, initiateSetInputSurface, (const sp<PersistentSurface> &surface), (override));
+ MOCK_METHOD(void, initiateStart, (), (override));
+ MOCK_METHOD(void, initiateShutdown, (bool keepComponentAllocated), (override));
+ MOCK_METHOD(void, onMessageReceived, (const sp<AMessage> &msg), (override));
+ MOCK_METHOD(status_t, setSurface, (const sp<Surface> &surface), (override));
+ MOCK_METHOD(void, signalFlush, (), (override));
+ MOCK_METHOD(void, signalResume, (), (override));
+ MOCK_METHOD(void, signalRequestIDRFrame, (), (override));
+ MOCK_METHOD(void, signalSetParameters, (const sp<AMessage> &msg), (override));
+ MOCK_METHOD(void, signalEndOfInputStream, (), (override));
+
+ std::shared_ptr<BufferChannelBase> getBufferChannel() override {
+ return mMockBufferChannel;
+ }
+
+ // Exposes mCallback (presumably inherited from CodecBase — confirm) so
+ // tests can drive codec events such as onReleaseCompleted().
+ const std::unique_ptr<CodecCallback> &callback() {
+ return mCallback;
+ }
+
+ std::shared_ptr<MockBufferChannel> mMockBufferChannel;
+};
+
+// Monotonic, thread-safe stage counter used to sequence the test threads.
+// advance() bumps the value and wakes all waiters; wait()/waitFor() block
+// until the value matches any of the given stages.
+class Counter {
+public:
+    Counter() = default;
+    explicit Counter(int32_t initCount) : mCount(initCount) {}
+    ~Counter() = default;
+
+    // Increments the counter, notifies every waiter, and returns the new value.
+    int32_t advance() {
+        std::lock_guard<std::mutex> lock(mMutex);
+        ++mCount;
+        mCondition.notify_all();
+        return mCount;
+    }
+
+    // Waits (up to |duration|) until the counter equals one of |values|;
+    // returns the counter value observed when the wait ends.
+    template <typename Rep, typename Period, typename ...Args>
+    int32_t waitFor(const std::chrono::duration<Rep, Period> &duration, Args... values) {
+        const std::initializer_list<int32_t> list = {values...};
+        auto reached = [&list, this] {
+            return std::find(list.begin(), list.end(), mCount) != list.end();
+        };
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait_for(lock, duration, reached);
+        return mCount;
+    }
+
+    // Waits indefinitely until the counter equals one of |values|.
+    template <typename ...Args>
+    int32_t wait(Args... values) {
+        const std::initializer_list<int32_t> list = {values...};
+        auto reached = [&list, this] {
+            return std::find(list.begin(), list.end(), mCount) != list.end();
+        };
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait(lock, reached);
+        return mCount;
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    int32_t mCount = 0;
+};
+
+} // namespace android
+
+using namespace android;
+using ::testing::_;
+
+// Builds a MediaCodec whose codec list contains exactly one entry
+// (codecName / owner / mediaType) and whose CodecBase is produced by
+// |getCodecBase|. Starts |looper| before creating the codec.
+static sp<MediaCodec> SetupMediaCodec(
+ const AString &owner,
+ const AString &codecName,
+ const AString &mediaType,
+ const sp<ALooper> &looper,
+ std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase) {
+ std::shared_ptr<MediaCodecListWriter> listWriter =
+ MediaTestHelper::CreateCodecListWriter();
+ std::unique_ptr<MediaCodecInfoWriter> infoWriter = listWriter->addMediaCodecInfo();
+ infoWriter->setName(codecName.c_str());
+ infoWriter->setOwner(owner.c_str());
+ infoWriter->addMediaType(mediaType.c_str());
+ std::vector<sp<MediaCodecInfo>> codecInfos;
+ MediaTestHelper::WriteCodecInfos(listWriter, &codecInfos);
+ // Lookup lambda: case-insensitive name match over the one-entry list.
+ std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo =
+ [codecInfos](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+ auto it = std::find_if(
+ codecInfos.begin(), codecInfos.end(),
+ [&name](const sp<MediaCodecInfo> &info) {
+ return name.equalsIgnoreCase(info->getCodecName());
+ });
+
+ *info = (it == codecInfos.end()) ? nullptr : *it;
+ return (*info) ? OK : NAME_NOT_FOUND;
+ };
+
+ looper->start();
+ return MediaTestHelper::CreateCodec(
+ codecName, looper, getCodecBase, getCodecInfo);
+}
+
+// Regression test for a race between reclaim() and release(); the numbered
+// steps below are coordinated across three detached threads via |counter|.
+TEST(MediaCodecTest, ReclaimReleaseRace) {
+ // Test scenario:
+ //
+ // 1) ResourceManager thread calls reclaim(), message posted to
+ // MediaCodec looper thread.
+ // 2) MediaCodec looper thread calls initiateShutdown(), shutdown being
+ // handled at the component thread.
+ // 3) Client thread calls release(), message posted to & handle at
+ // MediaCodec looper thread.
+ // 4) MediaCodec looper thread may call initiateShutdown().
+ // 5) initiateShutdown() from 2) is handled at onReleaseComplete() event
+ // posted to MediaCodec looper thread.
+ // 6) If called, initiateShutdown() from 4) is handled and
+ // onReleaseComplete() event posted to MediaCodec looper thread.
+
+ static const AString kCodecName{"test.codec"};
+ static const AString kCodecOwner{"nobody"};
+ static const AString kMediaType{"video/x-test"};
+
+ // Stages tracked by |counter|; threads advance/wait on these values.
+ enum {
+ kInit,
+ kShutdownFromReclaimReceived,
+ kReleaseCalled,
+ };
+ Counter counter{kInit};
+ sp<MockCodec> mockCodec;
+ std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+ [&mockCodec, &counter](const AString &, const char *) {
+ mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+ // No mock setup, as we don't expect any buffer operations
+ // in this scenario.
+ });
+ ON_CALL(*mockCodec, initiateAllocateComponent(_))
+ .WillByDefault([mockCodec](const sp<AMessage> &) {
+ mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+ });
+ ON_CALL(*mockCodec, initiateShutdown(_))
+ .WillByDefault([mockCodec, &counter](bool) {
+ int32_t stage = counter.wait(kInit, kReleaseCalled);
+ if (stage == kInit) {
+ // Mark that 2) happened, so test can proceed to 3)
+ counter.advance();
+ } else if (stage == kReleaseCalled) {
+ // Handle 6)
+ mockCodec->callback()->onReleaseCompleted();
+ }
+ });
+ return mockCodec;
+ };
+
+ sp<ALooper> looper{new ALooper};
+ sp<MediaCodec> codec = SetupMediaCodec(
+ kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+ ASSERT_NE(nullptr, codec) << "Codec must not be null";
+ ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+ std::promise<void> reclaimCompleted;
+ std::promise<void> releaseCompleted;
+ Counter threadExitCounter;
+ std::thread([codec, &reclaimCompleted]{
+ // Simulate ResourceManager thread. Proceed with 1)
+ MediaTestHelper::Reclaim(codec, true /* force */);
+ reclaimCompleted.set_value();
+ }).detach();
+ std::thread([codec, &counter, &releaseCompleted]{
+ // Simulate client thread. Wait until 2) is complete
+ (void)counter.wait(kShutdownFromReclaimReceived);
+ // Proceed to 3), and mark that 5) is ready to happen.
+ // NOTE: it's difficult to pinpoint when 4) happens, so we will sleep
+ // to meet the timing.
+ counter.advance();
+ codec->release();
+ releaseCompleted.set_value();
+ }).detach();
+ std::thread([mockCodec, &counter]{
+ // Simulate component thread. Wait until 3) is complete
+ (void)counter.wait(kReleaseCalled);
+ // We want 4) to complete before moving forward, but it is hard to
+ // wait for this exact event. Just sleep so that the other thread can
+ // proceed and complete 4).
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ // Proceed to 5).
+ mockCodec->callback()->onReleaseCompleted();
+ }).detach();
+ // Both promises must resolve within 5s or the race handling regressed.
+ EXPECT_EQ(
+ std::future_status::ready,
+ reclaimCompleted.get_future().wait_for(std::chrono::seconds(5)))
+ << "reclaim timed out";
+ EXPECT_EQ(
+ std::future_status::ready,
+ releaseCompleted.get_future().wait_for(std::chrono::seconds(5)))
+ << "release timed out";
+ looper->stop();
+}
+
+// Verifies MediaCodec survives an error that arrives while a stop() initiated
+// shutdown is still in flight on the component thread.
+TEST(MediaCodecTest, ErrorWhileStopping) {
+ // Test scenario:
+ //
+ // 1) Client thread calls stop(); MediaCodec looper thread calls
+ // initiateShutdown(); shutdown is being handled at the component thread.
+ // 2) Error occurred, but the shutdown operation is still being done.
+ // 3) MediaCodec looper thread handles the error.
+ // 4) Component thread completes shutdown and posts onStopCompleted()
+
+ static const AString kCodecName{"test.codec"};
+ static const AString kCodecOwner{"nobody"};
+ static const AString kMediaType{"video/x-test"};
+
+ std::promise<void> errorOccurred;
+ sp<MockCodec> mockCodec;
+ std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+ [&mockCodec, &errorOccurred](const AString &, const char *) {
+ mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+ // No mock setup, as we don't expect any buffer operations
+ // in this scenario.
+ });
+ ON_CALL(*mockCodec, initiateAllocateComponent(_))
+ .WillByDefault([mockCodec](const sp<AMessage> &) {
+ mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+ });
+ ON_CALL(*mockCodec, initiateConfigureComponent(_))
+ .WillByDefault([mockCodec](const sp<AMessage> &msg) {
+ mockCodec->callback()->onComponentConfigured(
+ msg->dup(), msg->dup());
+ });
+ ON_CALL(*mockCodec, initiateStart())
+ .WillByDefault([mockCodec]() {
+ mockCodec->callback()->onStartCompleted();
+ });
+ // initiateShutdown(true) corresponds to stop(): raise a fatal error
+ // instead of completing normally.
+ ON_CALL(*mockCodec, initiateShutdown(true))
+ .WillByDefault([mockCodec, &errorOccurred](bool) {
+ mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ // Mark that 1) and 2) are complete.
+ errorOccurred.set_value();
+ });
+ ON_CALL(*mockCodec, initiateShutdown(false))
+ .WillByDefault([mockCodec](bool) {
+ mockCodec->callback()->onReleaseCompleted();
+ });
+ return mockCodec;
+ };
+
+ sp<ALooper> looper{new ALooper};
+ sp<MediaCodec> codec = SetupMediaCodec(
+ kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+ ASSERT_NE(nullptr, codec) << "Codec must not be null";
+ ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+ std::thread([mockCodec, &errorOccurred]{
+ // Simulate component thread that handles stop()
+ errorOccurred.get_future().wait();
+ // Error occurred but shutdown request still got processed.
+ mockCodec->callback()->onStopCompleted();
+ }).detach();
+
+ codec->configure(new AMessage, nullptr, nullptr, 0);
+ codec->start();
+ codec->stop();
+ // Sleep here to give time for the MediaCodec looper thread
+ // to process the messages.
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ codec->release();
+ looper->stop();
+}
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
new file mode 100644
index 0000000..bbe3c05
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+// static
+// Constructs and initializes a MediaCodec for unit tests. The injected
+// |getCodecBase| / |getCodecInfo| hooks replace the normal factory lookup so
+// the test can supply a mock CodecBase and codec info.
+// Returns nullptr if MediaCodec::init() does not return OK.
+sp<MediaCodec> MediaTestHelper::CreateCodec(
+        const AString &name,
+        const sp<ALooper> &looper,
+        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo) {
+    sp<MediaCodec> codec = new MediaCodec(
+            looper, MediaCodec::kNoPid, MediaCodec::kNoUid, getCodecBase, getCodecInfo);
+    if (codec->init(name) != OK) {
+        return nullptr;
+    }
+    return codec;
+}
+
+// static
+// Forwards to MediaCodec::reclaim(force) so tests can trigger codec
+// reclamation without going through ResourceManager.
+void MediaTestHelper::Reclaim(const sp<MediaCodec> &codec, bool force) {
+    codec->reclaim(force);
+}
+
+// static
+// Creates a MediaCodecListWriter for tests.
+// NOTE(review): std::make_shared would save an allocation, but only if the
+// writer's constructor is accessible to it — confirm before changing.
+std::shared_ptr<MediaCodecListWriter> MediaTestHelper::CreateCodecListWriter() {
+    return std::shared_ptr<MediaCodecListWriter>(new MediaCodecListWriter);
+}
+
+// static
+// Forwards to MediaCodecListWriter::writeCodecInfos(), appending the
+// writer's codec infos to |codecInfos|.
+void MediaTestHelper::WriteCodecInfos(
+        const std::shared_ptr<MediaCodecListWriter> &writer,
+        std::vector<sp<MediaCodecInfo>> *codecInfos) {
+    writer->writeCodecInfos(codecInfos);
+}
+
+} // namespace android
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.h b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
new file mode 100644
index 0000000..f3d6110
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_TEST_HELPER_H_
+
+#define MEDIA_TEST_HELPER_H_
+
+// The declarations below use std::function, std::shared_ptr, std::vector and
+// status_t; include their headers directly instead of relying on transitive
+// includes (the original header omitted them).
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include <media/stagefright/foundation/AString.h>
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct ALooper;
+struct CodecBase;
+struct MediaCodec;
+struct MediaCodecInfo;
+struct MediaCodecListWriter;
+
+// Test-only helpers wrapping MediaCodec / MediaCodecListWriter operations
+// used by the stagefright unit tests.
+class MediaTestHelper {
+public:
+    // MediaCodec
+    // Builds and init()s a MediaCodec using the supplied factory hooks;
+    // returns nullptr on init failure.
+    static sp<MediaCodec> CreateCodec(
+            const AString &name,
+            const sp<ALooper> &looper,
+            std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+            std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo);
+    // Triggers MediaCodec::reclaim(force).
+    static void Reclaim(const sp<MediaCodec> &codec, bool force);
+
+    // MediaCodecListWriter
+    static std::shared_ptr<MediaCodecListWriter> CreateCodecListWriter();
+    // Appends the writer's codec infos to |codecInfos|.
+    static void WriteCodecInfos(
+            const std::shared_ptr<MediaCodecListWriter> &writer,
+            std::vector<sp<MediaCodecInfo>> *codecInfos);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_TEST_HELPER_H_
diff --git a/media/libstagefright/tests/metadatautils/Android.bp b/media/libstagefright/tests/metadatautils/Android.bp
new file mode 100644
index 0000000..69830fc
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/Android.bp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_test {
+ name: "MetaDataUtilsTest",
+ gtest: true,
+
+ srcs: [
+ "MetaDataUtilsTest.cpp",
+ ],
+
+ static_libs: [
+ "libstagefright_metadatautils",
+ "libstagefright_esds",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libutils",
+ "libmediandk",
+ "libstagefright",
+ "libstagefright_foundation",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libstagefright/tests/metadatautils/AndroidTest.xml b/media/libstagefright/tests/metadatautils/AndroidTest.xml
new file mode 100644
index 0000000..d6497f3
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/AndroidTest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Test module config for MetaDataUtils unit test">
+ <option name="test-suite-tag" value="MetaDataUtilsTest" />
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="false" />
+ <option name="push" value="MetaDataUtilsTest->/data/local/tmp/MetaDataUtilsTest" />
+ <option name="push-file"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip?unzip=true"
+ value="/data/local/tmp/MetaDataUtilsTestRes/" />
+ </target_preparer>
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="MetaDataUtilsTest" />
+ <option name="native-test-flag" value="-P /data/local/tmp/MetaDataUtilsTestRes/" />
+ </test>
+</configuration>
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
new file mode 100644
index 0000000..9fd5fdb
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
@@ -0,0 +1,490 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MetaDataUtilsTest"
+#include <utils/Log.h>
+
+#include <fstream>
+#include <string>
+
+#include <ESDS.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/ABitReader.h>
+
+#include "MetaDataUtilsTestEnvironment.h"
+
+// Number of bytes read from each AAC test clip (the fixed ADTS header).
+constexpr uint8_t kAdtsCsdSize = 7;
+// from AAC specs: https://www.iso.org/standard/43345.html
+constexpr int32_t kSamplingFreq[] = {96000, 88200, 64000, 48000, 44100, 32000,
+                                     24000, 22050, 16000, 12000, 11025, 8000};
+// Entry count of kSamplingFreq; valid indices are [0, kMaxSamplingFreqIndex).
+constexpr uint8_t kMaxSamplingFreqIndex = sizeof(kSamplingFreq) / sizeof(kSamplingFreq[0]);
+
+// Process-wide test environment; set in main(), provides the resource path.
+static MetaDataUtilsTestEnvironment *gEnv = nullptr;
+
+using namespace android;
+
+// Mixin used by the negative ("validate") tests: loads an entire resource
+// file into a heap buffer (mInputBuffer / mInputBufferSize) so tests can feed
+// known-bad data to the CSD builders. Test classes inherit this alongside
+// ::testing::TestWithParam.
+class MetaDataValidate {
+  public:
+    MetaDataValidate() : mInputBuffer(nullptr) {}
+
+    ~MetaDataValidate() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+
+    // Reads all of |fileName| into mInputBuffer. Uses gtest ASSERTs, so it
+    // must be called via ASSERT_NO_FATAL_FAILURE from the fixture's SetUp().
+    void SetUpMetaDataValidate(string fileName) {
+        struct stat buf;
+        int8_t err = stat(fileName.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get file information for file: " << fileName;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(fileName.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to allocate memory of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    // Size in bytes of mInputBuffer.
+    size_t mInputBufferSize;
+    // Owned file contents; released in the destructor.
+    const uint8_t *mInputBuffer;
+};
+
+// Fixture for positive AVC CSD tests. Params: (input bitstream file,
+// expected width, expected height). SetUp() loads the whole clip into
+// mInputBuffer; the destructor releases it.
+class AvcCSDTest : public ::testing::TestWithParam<
+                           tuple<string /*inputFile*/, size_t /*avcWidth*/, size_t /*avcHeight*/>> {
+  public:
+    AvcCSDTest() : mInputBuffer(nullptr) {}
+
+    ~AvcCSDTest() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+    // Loads the parameterized clip from the resource directory into
+    // mInputBuffer and records the expected frame dimensions.
+    virtual void SetUp() override {
+        tuple<string, size_t, size_t> params = GetParam();
+        string inputFile = gEnv->getRes() + get<0>(params);
+        mFrameWidth = get<1>(params);
+        mFrameHeight = get<2>(params);
+
+        struct stat buf;
+        int8_t err = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(inputFile.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << inputFile;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to create a buffer of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    // Expected dimensions from the test parameters.
+    size_t mFrameWidth;
+    size_t mFrameHeight;
+    // Whole-clip buffer owned by the fixture.
+    size_t mInputBufferSize;
+    const uint8_t *mInputBuffer;
+};
+
+// Negative AVC fixture: param is a file containing an invalid/incomplete
+// AVC stream, loaded whole via MetaDataValidate.
+class AvcCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+// Fixture for synthesized AAC CSD tests. Params: (AAC profile, index into
+// kSamplingFreq, channel configuration) — no input file is required.
+class AacCSDTest
+    : public ::testing::TestWithParam<tuple<uint32_t /*profile*/, uint32_t /*samplingFreqIndex*/,
+                                            uint32_t /*channelConfig*/>> {
+  public:
+    virtual void SetUp() override {
+        tuple<uint32_t, uint32_t, uint32_t> params = GetParam();
+        mAacProfile = get<0>(params);
+        mAacSamplingFreqIndex = get<1>(params);
+        mAacChannelConfig = get<2>(params);
+    }
+
+    uint32_t mAacProfile;
+    uint32_t mAacSamplingFreqIndex;
+    uint32_t mAacChannelConfig;
+};
+
+// Fixture for ADTS-header tests. Params: (ADTS file, expected channel count,
+// expected sample rate). SetUp() reads only the first kAdtsCsdSize bytes
+// (the fixed ADTS header) into mInputBuffer.
+class AacADTSTest
+    : public ::testing::TestWithParam<
+              tuple<string /*adtsFile*/, uint32_t /*channelCount*/, uint32_t /*sampleRate*/>> {
+  public:
+    AacADTSTest() : mInputBuffer(nullptr) {}
+
+    // Bug fix: the original fixture had no destructor and leaked
+    // mInputBuffer on every test iteration (sibling fixtures free theirs).
+    ~AacADTSTest() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+
+    virtual void SetUp() override {
+        tuple<string, uint32_t, uint32_t> params = GetParam();
+        string fileName = gEnv->getRes() + get<0>(params);
+        mAacChannelCount = get<1>(params);
+        mAacSampleRate = get<2>(params);
+
+        FILE *filePtr = fopen(fileName.c_str(), "r");
+        ASSERT_NE(filePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[kAdtsCsdSize];
+        ASSERT_NE(mInputBuffer, nullptr) << "Failed to allocate a memory of size: " << kAdtsCsdSize;
+
+        int32_t numBytes = fread((void *)mInputBuffer, sizeof(uint8_t), kAdtsCsdSize, filePtr);
+        ASSERT_EQ(numBytes, kAdtsCsdSize)
+                << "Failed to read complete file, bytes read: " << numBytes;
+
+        fclose(filePtr);
+    }
+    int32_t mAacChannelCount;
+    int32_t mAacSampleRate;
+    const uint8_t *mInputBuffer;
+};
+
+// Negative AAC fixture: param is a file whose ADTS header is invalid,
+// loaded whole via MetaDataValidate.
+class AacCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+// Fixture for vorbis-comment parsing. Params: (comment data file, info
+// file); each info line lists the TAG, key, value, and comment length that
+// correspond to one data line.
+class VorbisTest : public ::testing::TestWithParam<pair<string /*fileName*/, string /*infoFile*/>> {
+  public:
+    virtual void SetUp() override {
+        pair<string, string> params = GetParam();
+        string inputMediaFile = gEnv->getRes() + params.first;
+        mInputFileStream.open(inputMediaFile, ifstream::in);
+        ASSERT_TRUE(mInputFileStream.is_open()) << "Failed to open data file: " << inputMediaFile;
+
+        string inputInfoFile = gEnv->getRes() + params.second;
+        mInfoFileStream.open(inputInfoFile, ifstream::in);
+        // Bug fix: the original re-checked mInputFileStream here, so a
+        // missing info file was never detected.
+        ASSERT_TRUE(mInfoFileStream.is_open()) << "Failed to open info file: " << inputInfoFile;
+        ASSERT_FALSE(inputInfoFile.empty()) << "Empty info file: " << inputInfoFile;
+    }
+
+    ~VorbisTest() {
+        if (mInputFileStream.is_open()) mInputFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    ifstream mInputFileStream;
+    ifstream mInfoFileStream;
+};
+
+// Builds AVC CSD twice — via the AMediaFormat overload and the MetaDataBase
+// overload — from the same bitstream, checks both report the expected
+// width/height/mime, and verifies the two avcC blobs are byte-identical.
+TEST_P(AvcCSDTest, AvcCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from AMediaFormat";
+
+    int32_t avcWidth = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_WIDTH, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to get avc width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    int32_t avcHeight = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_HEIGHT, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to get avc height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_VIDEO_AVC);
+
+    // Second path: the MetaDataBase overload must agree with AMediaFormat.
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from MetaDataBase";
+
+    avcWidth = -1;
+    status = metaData->findInt32(kKeyWidth, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to find the width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    avcHeight = -1;
+    status = metaData->findInt32(kKeyHeight, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to find the height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    // Compare the raw avcC payloads produced by the two paths.
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_AVC, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from AMediaFormat";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD from AMediaFormat";
+
+    const void *csdMetaDataBaseBuffer = nullptr;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyAVCC, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from MetaDataBase";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+    ASSERT_EQ(csdMetaDataBaseSize, csdAMediaFormatSize)
+            << "CSD size of MetaData type and AMediaFormat type must be same";
+
+    int32_t result = memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(result, 0) << "CSD from AMediaFormat and MetaDataBase do not match";
+
+    // NOTE(review): csdData/metaData leak if an ASSERT above fails early;
+    // smart pointers / scope guards would be tidier.
+    delete metaData;
+    AMediaFormat_delete(csdData);
+}
+
+// Feeds a known-invalid AVC stream to both MakeAVCCodecSpecificData
+// overloads and expects both to reject it.
+TEST_P(AvcCSDValidateTest, AvcValidateTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with AMediaFormat succeeds with invalid data";
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with MetaDataBase succeeds with invalid data";
+
+    // Bug fix: the original leaked both objects on every iteration
+    // (sibling tests release them).
+    delete metaData;
+    AMediaFormat_delete(csdData);
+}
+
+// Synthesizes AAC CSD from (profile, sampling-frequency index, channel
+// config) via both overloads, checks the derived sample rate / channel
+// count / mime, and verifies both paths emit the same CSD payload (the
+// MetaDataBase path is unwrapped from its ESDS container first).
+TEST_P(AacCSDTest, AacCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    // NOTE(review): mAacSamplingFreqIndex is uint32_t, so the >= 0 check is
+    // vacuous; only the upper-bound check is effective.
+    ASSERT_GE(mAacSamplingFreqIndex, 0);
+    ASSERT_LT(mAacSamplingFreqIndex, kMaxSamplingFreqIndex);
+    bool status = MakeAACCodecSpecificData(csdData, mAacProfile, mAacSamplingFreqIndex,
+                                           mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from AMediaFormat";
+
+    int32_t sampleRate = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sample rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    int32_t channelCount = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    // Second path: the MetaDataBase overload must agree with AMediaFormat.
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAACCodecSpecificData(*metaData, mAacProfile, mAacSamplingFreqIndex,
+                                      mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize = 0;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_0, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the AMediaFormat CSD";
+    ASSERT_GT(csdAMediaFormatSize, 0) << "CSD size must be greater than 0";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD found";
+
+    const void *csdMetaDataBaseBuffer;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyESDS, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the ESDS data from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+
+    // Unwrap the CSD from the ESDS container before comparing.
+    ESDS esds(csdMetaDataBaseBuffer, csdMetaDataBaseSize);
+    status_t result = esds.getCodecSpecificInfo(&csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_EQ(result, (status_t)OK) << "Failed to get CSD from ESDS data";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD found";
+    ASSERT_EQ(csdAMediaFormatSize, csdMetaDataBaseSize)
+            << "CSD size do not match between AMediaFormat type and MetaDataBase type";
+
+    int32_t memcmpResult =
+            memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(memcmpResult, 0) << "AMediaFormat and MetaDataBase CSDs do not match";
+
+    AMediaFormat_delete(csdData);
+    delete metaData;
+}
+
+// Parses a raw ADTS header through the MetaDataBase overload and verifies
+// the recovered sample rate, channel count, and mime type.
+TEST_P(AacADTSTest, AacADTSValidationTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    int32_t sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(sampleRate, mAacSampleRate);
+
+    int32_t channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelCount);
+
+    const char *mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    delete metaData;
+}
+
+// Feeds an invalid ADTS header to MakeAACCodecSpecificData and expects it
+// to be rejected.
+TEST_P(AacCSDValidateTest, AacInvalidInputTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_FALSE(status) << "MakeAACCodecSpecificData succeeds with invalid data";
+
+    // Bug fix: the original leaked metaData on every iteration.
+    delete metaData;
+}
+
+// For each info line (TAG key value commentLength), parses the matching raw
+// vorbis comment from the data file and verifies the value that
+// parseVorbisComment() stored under |key|.
+TEST_P(VorbisTest, VorbisCommentTest) {
+    string line;
+    string tag;
+    string key;
+    string value;
+    size_t commentLength;
+    bool status;
+
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> tag >> key >> value >> commentLength;
+        ASSERT_GT(commentLength, 0) << "Vorbis comment size must be greater than 0";
+
+        string comment;
+        string dataLine;
+
+        getline(mInputFileStream, dataLine);
+        istringstream dataStringLine(dataLine);
+        dataStringLine >> comment;
+
+        char *buffer = strndup(comment.c_str(), commentLength);
+        ASSERT_NE(buffer, nullptr) << "Failed to allocate buffer of size: " << commentLength;
+
+        AMediaFormat *fileMeta = AMediaFormat_new();
+        ASSERT_NE(fileMeta, nullptr) << "Failed to create AMedia format";
+
+        parseVorbisComment(fileMeta, buffer, commentLength);
+        free(buffer);
+
+        // Bug fix: the original used strncasecmp(..., sizeof(tag)), where
+        // sizeof(tag) is the size of the std::string object, not the tag's
+        // length. Compare the full strings case-insensitively instead.
+        if (!strcasecmp(tag.c_str(), "ANDROID_HAPTIC")) {
+            int32_t numChannelExpected = stoi(value);
+            int32_t numChannelFound = -1;
+            status = AMediaFormat_getInt32(fileMeta, key.c_str(), &numChannelFound);
+            ASSERT_TRUE(status) << "Failed to get the channel count";
+            ASSERT_EQ(numChannelExpected, numChannelFound);
+        } else if (!strcasecmp(tag.c_str(), "ANDROID_LOOP")) {
+            int32_t loopExpected = !value.compare("true");
+            int32_t loopFound = -1;
+
+            status = AMediaFormat_getInt32(fileMeta, "loop", &loopFound);
+            ASSERT_TRUE(status) << "Failed to get the loop count";
+            ASSERT_EQ(loopExpected, loopFound);
+        } else {
+            const char *tagValue = "";
+            status = AMediaFormat_getString(fileMeta, key.c_str(), &tagValue);
+            ASSERT_TRUE(status) << "Failed to get the tag value";
+            ASSERT_STREQ(value.c_str(), tagValue);
+        }
+        AMediaFormat_delete(fileMeta);
+    }
+}
+
+// Positive AVC clips: 8x8 streams with SPS/PPS in different arrangements.
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDTest,
+                         ::testing::Values(make_tuple("sps_pps_userdata.h264", 8, 8),
+                                           make_tuple("sps_userdata_pps.h264", 8, 8),
+                                           make_tuple("sps_pps_sps_pps.h264", 8, 8)));
+
+// TODO(b/158067691): Add invalid test vectors with incomplete PPS or no PPS
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDValidateTest,
+                         ::testing::Values("sps_pps_only_startcode.h264",
+                                           "sps_incomplete_pps.h264",
+                                           // TODO(b/158067691) "sps_pps_incomplete.h264",
+                                           "randomdata.h264",
+                                           // TODO(b/158067691) "sps.h264",
+                                           "pps.h264"));
+
+// (profile, sampling-frequency index, channel config) for synthesized CSD.
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDTest,
+                         ::testing::Values(make_tuple(AACObjectMain, 1, 1)));
+
+// (ADTS clip, expected channel count, expected sample rate).
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacADTSTest,
+                         ::testing::Values(make_tuple("loudsoftaacadts", 1, 44100)));
+
+// Clips with deliberately corrupted ADTS headers.
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDValidateTest,
+                         ::testing::Values("loudsoftaacadts_invalidheader",
+                                           "loudsoftaacadts_invalidprofile",
+                                           "loudsoftaacadts_invalidchannelconfig"));
+
+// TODO(b/157974508) Add test vector for vorbis thumbnail tag
+// Info file contains TAG, Key, Value and size of the vorbis comment
+INSTANTIATE_TEST_SUITE_P(
+        MetaDataUtilsTestAll, VorbisTest,
+        ::testing::Values(make_pair("vorbiscomment_sintel.dat", "vorbiscomment_sintel.info"),
+                          make_pair("vorbiscomment_album.dat", "vorbiscomment_album.info"),
+                          make_pair("vorbiscomment_loop.dat", "vorbiscomment_loop.info")));
+
+// Registers the global test environment, parses -P/--path for the resource
+// directory, and runs the suite. A non-zero initFromOptions() result (usage
+// error) is returned without running any tests.
+int main(int argc, char **argv) {
+    gEnv = new MetaDataUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
new file mode 100644
index 0000000..4d642bc
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __METADATA_UTILS_TEST_ENVIRONMENT_H__
+#define __METADATA_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+// Global gtest environment holding the resource directory (default
+// /data/local/tmp/) that all fixtures use as a path prefix for test clips.
+class MetaDataUtilsTestEnvironment : public::testing::Environment {
+  public:
+    MetaDataUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    // Resource directory; used as a prefix, so it is expected to end in '/'.
+    string res;
+};
+
+// Parses "-P/--path <dir>" from the command line and stores it as the
+// resource directory. Returns 0 on success, 2 if an unrecognized positional
+// argument remains after option parsing.
+// NOTE(review): defined in a header without `inline`; fine while only one
+// translation unit includes it — confirm before reusing elsewhere.
+int MetaDataUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                // "?:": GNU extension — falls back to argv[1] if optind is 0.
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif // __METADATA_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/metadatautils/README.md b/media/libstagefright/tests/metadatautils/README.md
new file mode 100644
index 0000000..0862a07
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### MetaDataUtils Test
+The MetaDataUtils Unit Test Suite validates the libstagefright_metadatautils library available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m MetaDataUtilsTest
+```
+
+The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
+
+To test 64-bit binary push binaries from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+To test 32-bit binary push binaries from nativetest.
+```
+adb push ${OUT}/data/nativetest/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip). Download, unzip and push these files into device for testing.
+
+```
+adb push MetaDataUtilsTestRes-1.0 /data/local/tmp/
+```
+
+usage: MetaDataUtilsTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/MetaDataUtilsTest -P /data/local/tmp/MetaDataUtilsTestRes-1.0/
+```
+Alternatively, the test can also be run using atest command.
+
+```
+atest MetaDataUtilsTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/writer/Android.bp b/media/libstagefright/tests/writer/Android.bp
index d058ed3..b5d453e 100644
--- a/media/libstagefright/tests/writer/Android.bp
+++ b/media/libstagefright/tests/writer/Android.bp
@@ -29,13 +29,14 @@
"liblog",
"libutils",
"libmedia",
+ "libmediandk",
+ "libstagefright",
],
static_libs: [
"libstagefright_webm",
- "libdatasource",
- "libstagefright",
"libstagefright_foundation",
+ "libdatasource",
"libstagefright_esds",
"libogg",
],
diff --git a/media/libstagefright/tests/writer/AndroidTest.xml b/media/libstagefright/tests/writer/AndroidTest.xml
index a21be8a..cc890fe 100644
--- a/media/libstagefright/tests/writer/AndroidTest.xml
+++ b/media/libstagefright/tests/writer/AndroidTest.xml
@@ -19,7 +19,7 @@
<option name="cleanup" value="true" />
<option name="push" value="writerTest->/data/local/tmp/writerTest" />
<option name="push-file"
- key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes.zip?unzip=true"
+ key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip?unzip=true"
value="/data/local/tmp/WriterTestRes/" />
</target_preparer>
<test class="com.android.tradefed.testtype.GTest" >
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index e103613..0e54ca7 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,10 +19,10 @@
adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes.zip).
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip).
Download and extract the folder. Push all the files in this folder to /data/local/tmp/ on the device.
```
-adb push WriterTestRes /data/local/tmp/
+adb push WriterTestRes-1.1/. /data/local/tmp/WriterTestRes/
```
usage: writerTest -P \<path_to_res_folder\> -C <remove_output_file>
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index 4d8df2d..d170e7c 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -18,9 +18,13 @@
#define LOG_TAG "WriterTest"
#include <utils/Log.h>
+#include <binder/ProcessState.h>
+
+#include <inttypes.h>
#include <fstream>
#include <iostream>
+#include <media/NdkMediaExtractor.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
@@ -39,18 +43,40 @@
#define OUTPUT_FILE_NAME "/data/local/tmp/writer.out"
+// Stts values within 0.1ms(100us) difference are fudged to save too
+// many stts entries in MPEG4Writer.
+constexpr int32_t kMpeg4MuxToleranceTimeUs = 100;
+// Tolerance value for other writers
+constexpr int32_t kMuxToleranceTimeUs = 1;
+
static WriterTestEnvironment *gEnv = nullptr;
-struct configFormat {
- char mime[128];
- int32_t width;
- int32_t height;
- int32_t sampleRate;
- int32_t channelCount;
+enum inputId {
+ // audio streams
+ AAC_1,
+ AAC_ADTS_1,
+ AMR_NB_1,
+ AMR_WB_1,
+ FLAC_1,
+ OPUS_1,
+ VORBIS_1,
+ // video streams
+ AV1_1,
+ AVC_1,
+ H263_1,
+ HEVC_1,
+ MPEG4_1,
+ VP8_1,
+ VP9_1,
+ // heif stream
+ HEIC_1,
+ UNUSED_ID,
+ UNKNOWN_ID,
};
// LookUpTable of clips and metadata for component testing
static const struct InputData {
+ inputId inpId;
const char *mime;
string inputFile;
string info;
@@ -58,61 +84,67 @@
int32_t secondParam;
bool isAudio;
} kInputData[] = {
- {MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
- "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
- {MEDIA_MIMETYPE_AUDIO_AAC, "bbb_aac_stereo_128kbps_48000hz.aac",
- "bbb_aac_stereo_128kbps_48000hz.info", 48000, 2, true},
- {MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
+ {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "audio_aac_stereo_8kbps_11025hz.aac",
+ "audio_aac_stereo_8kbps_11025hz.info", 11025, 2, true},
+ {AAC_ADTS_1, MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
"Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.info", 48000, 2, true},
- {MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+ {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
"sine_amrnb_1ch_12kbps_8000hz.info", 8000, 1, true},
- {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+ {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
"bbb_amrwb_1ch_14kbps_16000hz.info", 16000, 1, true},
- {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
- "bbb_vorbis_stereo_128kbps_48000hz.info", 48000, 2, true},
- {MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
+ {FLAC_1, MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
"bbb_flac_stereo_680kbps_48000hz.info", 48000, 2, true},
- {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
- "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
- {MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
- "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
- {MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_176x144_300kbps_60fps.h264",
- "bbb_avc_176x144_300kbps_60fps.info", 176, 144, false},
- {MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_176x144_176kbps_60fps.hevc",
- "bbb_hevc_176x144_176kbps_60fps.info", 176, 144, false},
- {MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144, false},
- {MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
+ {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
+ "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
+ {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_1ch_64kbps_16kHz.vorbis",
+ "bbb_vorbis_1ch_64kbps_16kHz.info", 16000, 1, true},
+
+ {AV1_1, MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144,
+ false},
+ {AVC_1, MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_352x288_768kbps_30fps.avc",
+ "bbb_avc_352x288_768kbps_30fps.info", 352, 288, false},
+ {H263_1, MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
"bbb_h263_352x288_300kbps_12fps.info", 352, 288, false},
- {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
+ {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_340x280_768kbps_30fps.hevc",
+ "bbb_hevc_340x280_768kbps_30fps.info", 340, 280, false},
+ {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
"bbb_mpeg4_352x288_512kbps_30fps.info", 352, 288, false},
+ {VP8_1, MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
+ "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
+ {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
+ "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
+
+ {HEIC_1, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "bbb_hevc_176x144_176kbps_60fps.hevc",
+ "bbb_heic_176x144_176kbps_60fps.info", 176, 144, false},
};
class WriterTest {
public:
- WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+ WriterTest() : mWriter(nullptr), mFileMeta(nullptr) {}
~WriterTest() {
if (mFileMeta) {
mFileMeta.clear();
mFileMeta = nullptr;
}
- if (mCurrentTrack) {
- mCurrentTrack->stop();
- mCurrentTrack.clear();
- mCurrentTrack = nullptr;
- }
if (mWriter) {
mWriter.clear();
mWriter = nullptr;
}
- mBufferInfo.clear();
- if (mInputStream.is_open()) mInputStream.close();
if (gEnv->cleanUp()) remove(OUTPUT_FILE_NAME);
+
+ for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+ mBufferInfo[idx].clear();
+ if (mCurrentTrack[idx]) {
+ mCurrentTrack[idx]->stop();
+ mCurrentTrack[idx].clear();
+ mCurrentTrack[idx] = nullptr;
+ }
+ if (mInputStream[idx].is_open()) mInputStream[idx].close();
+ }
}
void setupWriterType(string writerFormat) {
- mNumCsds = 0;
- mInputFrameId = 0;
mWriterName = unknown_comp;
mDisableTest = false;
static const std::map<std::string, standardWriters> mapWriter = {
@@ -128,11 +160,19 @@
}
}
- void getInputBufferInfo(string inputFileName, string inputInfo);
+ void getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx = 0);
int32_t createWriter(int32_t fd);
- int32_t addWriterSource(bool isAudio, configFormat params);
+ int32_t addWriterSource(bool isAudio, configFormat params, int32_t idx = 0);
+
+ void setupExtractor(AMediaExtractor *extractor, string inputFileName, int32_t &trackCount);
+
+ void extract(AMediaExtractor *extractor, configFormat ¶ms, vector<BufferInfo> &bufferInfo,
+ uint8_t *buffer, size_t bufSize, size_t *bytesExtracted, int32_t idx);
+
+ void compareParams(configFormat srcParam, configFormat dstParam, vector<BufferInfo> dstBufInfo,
+ int32_t index);
enum standardWriters {
OGG,
@@ -149,38 +189,42 @@
standardWriters mWriterName;
sp<MediaWriter> mWriter;
sp<MetaData> mFileMeta;
- sp<MediaAdapter> mCurrentTrack;
+ sp<MediaAdapter> mCurrentTrack[kMaxTrackCount]{};
bool mDisableTest;
- int32_t mNumCsds;
- int32_t mInputFrameId;
- ifstream mInputStream;
- vector<BufferInfo> mBufferInfo;
+ int32_t mNumCsds[kMaxTrackCount]{};
+ int32_t mInputFrameId[kMaxTrackCount]{};
+ ifstream mInputStream[kMaxTrackCount]{};
+ vector<BufferInfo> mBufferInfo[kMaxTrackCount];
};
-class WriteFunctionalityTest : public WriterTest,
- public ::testing::TestWithParam<pair<string, int32_t>> {
+class WriteFunctionalityTest
+ : public WriterTest,
+ public ::testing::TestWithParam<tuple<string /* writerFormat*/, inputId /* inputId0*/,
+ inputId /* inputId1*/, float /* BufferInterval*/>> {
public:
- virtual void SetUp() override { setupWriterType(GetParam().first); }
+ virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
};
-void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
+void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx) {
std::ifstream eleInfo;
eleInfo.open(inputInfo.c_str());
ASSERT_EQ(eleInfo.is_open(), true);
int32_t bytesCount = 0;
uint32_t flags = 0;
int64_t timestamp = 0;
+ int32_t numCsds = 0;
while (1) {
if (!(eleInfo >> bytesCount)) break;
eleInfo >> flags;
eleInfo >> timestamp;
- mBufferInfo.push_back({bytesCount, flags, timestamp});
- if (flags == CODEC_CONFIG_FLAG) mNumCsds++;
+ mBufferInfo[idx].push_back({bytesCount, flags, timestamp});
+ if (flags == CODEC_CONFIG_FLAG) numCsds++;
}
eleInfo.close();
- mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
- ASSERT_EQ(mInputStream.is_open(), true);
+ mNumCsds[idx] = numCsds;
+ mInputStream[idx].open(inputFileName.c_str(), std::ifstream::binary);
+ ASSERT_EQ(mInputStream[idx].is_open(), true);
}
int32_t WriterTest::createWriter(int32_t fd) {
@@ -226,10 +270,10 @@
return 0;
}
-int32_t WriterTest::addWriterSource(bool isAudio, configFormat params) {
- if (mInputFrameId) return -1;
+int32_t WriterTest::addWriterSource(bool isAudio, configFormat params, int32_t idx) {
+ if (mInputFrameId[idx]) return -1;
sp<AMessage> format = new AMessage;
- if (mInputStream.is_open()) {
+ if (mInputStream[idx].is_open()) {
format->setString("mime", params.mime);
if (isAudio) {
format->setInt32("channel-count", params.channelCount);
@@ -238,25 +282,34 @@
format->setInt32("width", params.width);
format->setInt32("height", params.height);
}
-
- int32_t status =
- writeHeaderBuffers(mInputStream, mBufferInfo, mInputFrameId, format, mNumCsds);
- if (status != 0) return -1;
+ if (mNumCsds[idx]) {
+ int32_t status = writeHeaderBuffers(mInputStream[idx], mBufferInfo[idx],
+ mInputFrameId[idx], format, mNumCsds[idx]);
+ if (status != 0) return -1;
+ }
}
+
sp<MetaData> trackMeta = new MetaData;
convertMessageToMetaData(format, trackMeta);
- mCurrentTrack = new MediaAdapter(trackMeta);
- if (mCurrentTrack == nullptr) {
+ mCurrentTrack[idx] = new MediaAdapter(trackMeta);
+ if (mCurrentTrack[idx] == nullptr) {
ALOGE("MediaAdapter returned nullptr");
return -1;
}
- status_t result = mWriter->addSource(mCurrentTrack);
+ status_t result = mWriter->addSource(mCurrentTrack[idx]);
return result;
}
void getFileDetails(string &inputFilePath, string &info, configFormat ¶ms, bool &isAudio,
- int32_t streamIndex = 0) {
- if (streamIndex >= sizeof(kInputData) / sizeof(kInputData[0])) {
+ inputId inpId) {
+ int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+ int32_t streamIndex = 0;
+ for (; streamIndex < inputDataSize; streamIndex++) {
+ if (inpId == kInputData[streamIndex].inpId) {
+ break;
+ }
+ }
+ if (streamIndex == inputDataSize) {
return;
}
inputFilePath += kInputData[streamIndex].inputFile;
@@ -273,6 +326,146 @@
return;
}
+void WriterTest::setupExtractor(AMediaExtractor *extractor, string inputFileName,
+ int32_t &trackCount) {
+ ALOGV("Input file for extractor: %s", inputFileName.c_str());
+
+ int32_t fd = open(inputFileName.c_str(), O_RDONLY);
+ ASSERT_GE(fd, 0) << "Failed to open writer's output file to validate";
+
+ struct stat buf;
+ int32_t status = fstat(fd, &buf);
+ ASSERT_EQ(status, 0) << "Failed to get properties of input file for extractor";
+
+ size_t fileSize = buf.st_size;
+ ALOGV("Size of input file to extractor: %zu", fileSize);
+
+ status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+ ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+ trackCount = AMediaExtractor_getTrackCount(extractor);
+ ASSERT_GT(trackCount, 0) << "No tracks reported by extractor";
+ ALOGV("Number of tracks reported by extractor : %d", trackCount);
+ return;
+}
+
+void WriterTest::extract(AMediaExtractor *extractor, configFormat ¶ms,
+ vector<BufferInfo> &bufferInfo, uint8_t *buffer, size_t bufSize,
+ size_t *bytesExtracted, int32_t idx) {
+ AMediaExtractor_selectTrack(extractor, idx);
+ AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+ ASSERT_NE(format, nullptr) << "Track format is NULL";
+ ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+ const char *mime = nullptr;
+ AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+ ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+ ALOGI("Track mime = %s", mime);
+ strlcpy(params.mime, mime, kMimeSize);
+
+ if (!strncmp(mime, "audio/", 6)) {
+ ASSERT_TRUE(
+ AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, ¶ms.channelCount))
+ << "Extractor did not report channel count";
+ ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, ¶ms.sampleRate))
+ << "Extractor did not report sample rate";
+ } else if (!strncmp(mime, "video/", 6) || !strncmp(mime, "image/", 6)) {
+ ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, ¶ms.width))
+ << "Extractor did not report width";
+ ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, ¶ms.height))
+ << "Extractor did not report height";
+ } else {
+ ASSERT_TRUE(false) << "Invalid mime " << mime;
+ }
+
+ int32_t bufferOffset = 0;
+ // Get CSD data
+ int index = 0;
+ void *csdBuf;
+ while (1) {
+ csdBuf = nullptr;
+ char csdName[16];
+ snprintf(csdName, 16, "csd-%d", index);
+ size_t csdSize = 0;
+ bool csdFound = AMediaFormat_getBuffer(format, csdName, &csdBuf, &csdSize);
+ if (!csdFound || !csdBuf || !csdSize) break;
+
+ bufferInfo.push_back({static_cast<int32_t>(csdSize), CODEC_CONFIG_FLAG, 0});
+ memcpy(buffer + bufferOffset, csdBuf, csdSize);
+ bufferOffset += csdSize;
+ index++;
+ }
+
+ // Get frame data
+ while (1) {
+ ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+ if (sampleSize < 0) break;
+
+ uint8_t *sampleBuffer = (uint8_t *)malloc(sampleSize);
+ ASSERT_NE(sampleBuffer, nullptr) << "Failed to allocate the buffer of size " << sampleSize;
+
+ int bytesRead = AMediaExtractor_readSampleData(extractor, sampleBuffer, sampleSize);
+ ASSERT_EQ(bytesRead, sampleSize)
+ << "Number of bytes extracted does not match with sample size";
+ int64_t pts = AMediaExtractor_getSampleTime(extractor);
+ uint32_t flag = AMediaExtractor_getSampleFlags(extractor);
+
+ if (mime == MEDIA_MIMETYPE_AUDIO_VORBIS) {
+ // Removing 4 bytes of AMEDIAFORMAT_KEY_VALID_SAMPLES from sample size
+ bytesRead = bytesRead - 4;
+ }
+
+ ASSERT_LE(bufferOffset + bytesRead, bufSize)
+ << "Size of the buffer is insufficient to store the extracted data";
+ bufferInfo.push_back({bytesRead, flag, pts});
+ memcpy(buffer + bufferOffset, sampleBuffer, bytesRead);
+ bufferOffset += bytesRead;
+
+ AMediaExtractor_advance(extractor);
+ free(sampleBuffer);
+ }
+ *bytesExtracted = bufferOffset;
+ return;
+}
+
+void WriterTest::compareParams(configFormat srcParam, configFormat dstParam,
+ vector<BufferInfo> dstBufInfo, int32_t index) {
+ ASSERT_STREQ(srcParam.mime, dstParam.mime)
+ << "Extracted mime type does not match with input mime type";
+
+ if (!strncmp(srcParam.mime, "audio/", 6)) {
+ ASSERT_EQ(srcParam.channelCount, dstParam.channelCount)
+ << "Extracted channel count does not match with input channel count";
+ ASSERT_EQ(srcParam.sampleRate, dstParam.sampleRate)
+ << "Extracted sample rate does not match with input sample rate";
+ } else if (!strncmp(srcParam.mime, "video/", 6) || !strncmp(srcParam.mime, "image/", 6)) {
+ ASSERT_EQ(srcParam.width, dstParam.width)
+ << "Extracted width does not match with input width";
+ ASSERT_EQ(srcParam.height, dstParam.height)
+ << "Extracted height does not match with input height";
+ } else {
+ ASSERT_TRUE(false) << "Invalid mime type" << srcParam.mime;
+ }
+
+ int32_t toleranceValueUs = kMuxToleranceTimeUs;
+ if (mWriterName == MPEG4) {
+ toleranceValueUs = kMpeg4MuxToleranceTimeUs;
+ }
+ for (int32_t i = 0; i < dstBufInfo.size(); i++) {
+ ASSERT_EQ(mBufferInfo[index][i].size, dstBufInfo[i].size)
+ << "Input size " << mBufferInfo[index][i].size << " mismatched with extracted size "
+ << dstBufInfo[i].size;
+ ASSERT_EQ(mBufferInfo[index][i].flags, dstBufInfo[i].flags)
+ << "Input flag " << mBufferInfo[index][i].flags
+ << " mismatched with extracted size " << dstBufInfo[i].flags;
+ ASSERT_LE(abs(mBufferInfo[index][i].timeUs - dstBufInfo[i].timeUs), toleranceValueUs)
+ << "Difference between original timestamp " << mBufferInfo[index][i].timeUs
+ << " and extracted timestamp " << dstBufInfo[i].timeUs
+ << "is greater than tolerance value = " << toleranceValueUs << " micro seconds";
+ }
+ return;
+}
+
TEST_P(WriteFunctionalityTest, CreateWriterTest) {
if (mDisableTest) return;
ALOGV("Tests the creation of writers");
@@ -284,14 +477,14 @@
// Creating writer within a test scope. Destructor should be called when the test ends
ASSERT_EQ((status_t)OK, createWriter(fd))
- << "Failed to create writer for output format:" << GetParam().first;
+ << "Failed to create writer for output format:" << get<0>(GetParam());
}
TEST_P(WriteFunctionalityTest, WriterTest) {
if (mDisableTest) return;
ALOGV("Checks if for a given input, a valid muxed file has been created or not");
- string writerFormat = GetParam().first;
+ string writerFormat = get<0>(GetParam());
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -300,35 +493,110 @@
int32_t status = createWriter(fd);
ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
- string inputFile = gEnv->getRes();
- string inputInfo = gEnv->getRes();
- configFormat param;
- bool isAudio;
- int32_t inputFileIdx = GetParam().second;
- getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+ inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+ ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
- ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
- status = addWriterSource(isAudio, param);
- ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+ int32_t numTracks = 1;
+ if (inpId[1] != UNUSED_ID) {
+ numTracks++;
+ }
+
+ size_t fileSize[numTracks];
+ configFormat param[numTracks];
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ bool isAudio;
+ getFileDetails(inputFile, inputInfo, param[idx], isAudio, inpId[idx]);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ struct stat buf;
+ status = stat(inputFile.c_str(), &buf);
+ ASSERT_EQ(status, 0) << "Failed to get properties of input file:" << inputFile;
+ fileSize[idx] = buf.st_size;
+
+ ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+ status = addWriterSource(isAudio, param[idx], idx);
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+ }
status = mWriter->start(mFileMeta.get());
ASSERT_EQ((status_t)OK, status);
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
- mBufferInfo.size());
- ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
- mCurrentTrack->stop();
+ float interval = get<3>(GetParam());
+ ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
+ size_t range = 0;
+ int32_t loopCount = 0;
+ int32_t offset[kMaxTrackCount]{};
+ while (loopCount < ceil(1.0 / interval)) {
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ range = mBufferInfo[idx].size() * interval;
+ status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+ mCurrentTrack[idx], offset[idx], range);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+ offset[idx] += range;
+ }
+ loopCount++;
+ }
+ for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+ if (mCurrentTrack[idx]) {
+ mCurrentTrack[idx]->stop();
+ }
+ }
status = mWriter->stop();
ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
close(fd);
+
+ // Validate the output muxed file created by writer
+ // TODO(b/146423022): Skip validating output for webm writer
+ // TODO(b/146421018): Skip validating output for ogg writer
+ if (mWriterName != OGG && mWriterName != WEBM) {
+ configFormat extractorParams[numTracks];
+ vector<BufferInfo> extractorBufferInfo[numTracks];
+ int32_t trackCount = -1;
+
+ AMediaExtractor *extractor = AMediaExtractor_new();
+ ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+ ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, outputFile, trackCount));
+ ASSERT_EQ(trackCount, numTracks)
+ << "Tracks reported by extractor does not match with input number of tracks";
+
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ char *inputBuffer = (char *)malloc(fileSize[idx]);
+ ASSERT_NE(inputBuffer, nullptr)
+ << "Failed to allocate the buffer of size " << fileSize[idx];
+ mInputStream[idx].seekg(0, mInputStream[idx].beg);
+ mInputStream[idx].read(inputBuffer, fileSize[idx]);
+ ASSERT_EQ(mInputStream[idx].gcount(), fileSize[idx]);
+
+ uint8_t *extractedBuffer = (uint8_t *)malloc(fileSize[idx]);
+ ASSERT_NE(extractedBuffer, nullptr)
+ << "Failed to allocate the buffer of size " << fileSize[idx];
+ size_t bytesExtracted = 0;
+
+ ASSERT_NO_FATAL_FAILURE(extract(extractor, extractorParams[idx],
+ extractorBufferInfo[idx], extractedBuffer,
+ fileSize[idx], &bytesExtracted, idx));
+ ASSERT_GT(bytesExtracted, 0) << "Total bytes extracted by extractor cannot be zero";
+
+ ASSERT_NO_FATAL_FAILURE(
+ compareParams(param[idx], extractorParams[idx], extractorBufferInfo[idx], idx));
+
+ ASSERT_EQ(memcmp(extractedBuffer, (uint8_t *)inputBuffer, bytesExtracted), 0)
+ << "Extracted bit stream does not match with input bit stream";
+
+ free(inputBuffer);
+ free(extractedBuffer);
+ }
+ AMediaExtractor_delete(extractor);
+ }
}
TEST_P(WriteFunctionalityTest, PauseWriterTest) {
if (mDisableTest) return;
ALOGV("Validates the pause() api of writers");
- string writerFormat = GetParam().first;
+ string writerFormat = get<0>(GetParam());
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -341,8 +609,10 @@
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
- int32_t inputFileIdx = GetParam().second;
- getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+ inputId inpId = get<1>(GetParam());
+ ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -351,8 +621,8 @@
status = mWriter->start(mFileMeta.get());
ASSERT_EQ((status_t)OK, status);
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
- mBufferInfo.size() / 4);
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
bool isPaused = false;
@@ -362,19 +632,19 @@
isPaused = true;
}
// In the pause state, writers shouldn't write anything. Testing the writers for the same
- int32_t numFramesPaused = mBufferInfo.size() / 4;
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
- mInputFrameId, numFramesPaused, isPaused);
+ int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], mInputFrameId[0], numFramesPaused, isPaused);
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
if (isPaused) {
status = mWriter->start(mFileMeta.get());
ASSERT_EQ((status_t)OK, status);
}
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
- mInputFrameId, mBufferInfo.size());
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
- mCurrentTrack->stop();
+ mCurrentTrack[0]->stop();
status = mWriter->stop();
ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
@@ -392,7 +662,7 @@
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
- string writerFormat = GetParam().first;
+ string writerFormat = get<0>(GetParam());
int32_t status = createWriter(fd);
ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
@@ -400,8 +670,10 @@
string inputInfo = gEnv->getRes();
configFormat param;
bool isAudio;
- int32_t inputFileIdx = GetParam().second;
- getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+ inputId inpId = get<1>(GetParam());
+ ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -418,8 +690,8 @@
mWriter->start(mFileMeta.get());
}
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
- mBufferInfo.size() / 4);
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
for (int32_t count = 0; count < kMaxCount; count++) {
@@ -428,20 +700,20 @@
}
mWriter->pause();
- int32_t numFramesPaused = mBufferInfo.size() / 4;
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
- mInputFrameId, numFramesPaused, true);
+ int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], mInputFrameId[0], numFramesPaused, true);
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
for (int32_t count = 0; count < kMaxCount; count++) {
mWriter->start(mFileMeta.get());
}
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
- mInputFrameId, mBufferInfo.size());
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
- mCurrentTrack->stop();
+ mCurrentTrack[0]->stop();
// first stop should succeed.
status = mWriter->stop();
@@ -454,23 +726,191 @@
close(fd);
}
-class ListenerTest : public WriterTest,
- public ::testing::TestWithParam<
- tuple<string /* writerFormat*/, int32_t /* inputFileIdx*/,
- float /* FileSizeLimit*/, float /* FileDurationLimit*/>> {
+class WriterValidityTest
+ : public WriterTest,
+ public ::testing::TestWithParam<
+ tuple<string /* writerFormat*/, inputId /* inputId0*/, bool /* addSourceFail*/>> {
public:
- virtual void SetUp() override {
- tuple<string, int32_t, float, float> params = GetParam();
- setupWriterType(get<0>(params));
+ virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+TEST_P(WriterValidityTest, InvalidInputTest) {
+ if (mDisableTest) return;
+ ALOGV("Validates writer's behavior for invalid inputs");
+
+ string writerFormat = get<0>(GetParam());
+ inputId inpId = get<1>(GetParam());
+ bool addSourceFailExpected = get<2>(GetParam());
+
+ // Test writers for invalid FD value
+ int32_t fd = -1;
+ int32_t status = createWriter(fd);
+ if (status != OK) {
+ ALOGV("createWriter failed for invalid FD, this is expected behavior");
+ return;
}
+
+ // If writer was created for invalid fd, test it further.
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+ status = addWriterSource(isAudio, param);
+ if (status != OK) {
+ ASSERT_TRUE(addSourceFailExpected)
+ << "Failed to add source for " << writerFormat << " writer";
+ ALOGV("addWriterSource failed for invalid FD, this is expected behavior");
+ return;
+ }
+
+ // start the writer with valid argument but invalid FD
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_NE((status_t)OK, status) << "Writer did not fail for invalid FD";
+
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], 0, mBufferInfo[0].size());
+ ASSERT_NE((status_t)OK, status) << "Writer did not report error for invalid FD";
+
+ status = mCurrentTrack[0]->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+}
+
+TEST_P(WriterValidityTest, MalFormedDataTest) {
+ if (mDisableTest) return;
+ // Enable test for Ogg writer
+ ASSERT_NE(mWriterName, OGG) << "TODO(b/160105646)";
+ ALOGV("Test writer for malformed inputs");
+
+ string writerFormat = get<0>(GetParam());
+ inputId inpId = get<1>(GetParam());
+ bool addSourceFailExpected = get<2>(GetParam());
+ int32_t fd =
+ open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+ int32_t status = createWriter(fd);
+ ASSERT_EQ(status, (status_t)OK)
+ << "Failed to create writer for " << writerFormat << " output format";
+
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+ // Remove CSD data from input
+ mNumCsds[0] = 0;
+ status = addWriterSource(isAudio, param);
+ if (status != OK) {
+ ASSERT_TRUE(addSourceFailExpected)
+ << "Failed to add source for " << writerFormat << " writer";
+ ALOGV("%s writer failed to addSource after removing CSD from input", writerFormat.c_str());
+ return;
+ }
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status) << "Could not start " << writerFormat << "writer";
+
+ // Skip first few frames. These may contain sync frames also.
+ int32_t frameID = mInputFrameId[0] + mBufferInfo[0].size() / 4;
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], frameID, mCurrentTrack[0], 0,
+ mBufferInfo[0].size());
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ status = mCurrentTrack[0]->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+ Vector<String16> args;
+ status = mWriter->dump(fd, args);
+ ASSERT_EQ((status_t)OK, status) << "Failed to dump statistics from writer";
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+ close(fd);
+}
+
+// This test is specific to MPEG4Writer to test more APIs
+TEST_P(WriteFunctionalityTest, Mpeg4WriterTest) {
+ if (mDisableTest) return;
+ if (mWriterName != standardWriters::MPEG4) return;
+ ALOGV("Test MPEG4 writer specific APIs");
+
+ inputId inpId = get<1>(GetParam());
+ int32_t fd =
+ open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+ int32_t status = createWriter(fd);
+ ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for mpeg4 output format";
+
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+ ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+ status = addWriterSource(isAudio, param);
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for mpeg4 Writer";
+
+ // signal meta data for the writer
+ sp<MPEG4Writer> mp4writer = static_cast<MPEG4Writer *>(mWriter.get());
+ status = mp4writer->setInterleaveDuration(kDefaultInterleaveDuration);
+ ASSERT_EQ((status_t)OK, status) << "setInterleaveDuration failed";
+
+ status = mp4writer->setGeoData(kDefaultLatitudex10000, kDefaultLongitudex10000);
+ ASSERT_EQ((status_t)OK, status) << "setGeoData failed";
+
+ status = mp4writer->setCaptureRate(kDefaultFPS);
+ ASSERT_EQ((status_t)OK, status) << "setCaptureRate failed";
+
+ status = mWriter->start(mFileMeta.get());
+ ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+ status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+ mCurrentTrack[0], 0, mBufferInfo[0].size());
+ ASSERT_EQ((status_t)OK, status) << "mpeg4 writer failed";
+
+ status = mCurrentTrack[0]->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+ status = mWriter->stop();
+ ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+ mp4writer.clear();
+ close(fd);
+}
+
+class ListenerTest
+ : public WriterTest,
+ public ::testing::TestWithParam<tuple<
+ string /* writerFormat*/, inputId /* inputId0*/, inputId /* inputId1*/,
+ float /* FileSizeLimit*/, float /* FileDurationLimit*/, float /* BufferInterval*/>> {
+ public:
+ virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
};
TEST_P(ListenerTest, SetMaxFileLimitsTest) {
- if (mDisableTest) return;
+ // TODO(b/151892414): Enable test for other writers
+ if (mDisableTest || mWriterName != MPEG4) return;
ALOGV("Validates writer when max file limits are set");
- tuple<string, int32_t, float, float> params = GetParam();
- string writerFormat = get<0>(params);
+ string writerFormat = get<0>(GetParam());
string outputFile = OUTPUT_FILE_NAME;
int32_t fd =
open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -479,30 +919,42 @@
int32_t status = createWriter(fd);
ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
- string inputFile = gEnv->getRes();
- string inputInfo = gEnv->getRes();
- configFormat param;
- bool isAudio;
- int32_t inputFileIdx = get<1>(params);
- getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
- ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+ inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+ ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
- ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
- status = addWriterSource(isAudio, param);
- ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+ size_t inputFileSize = 0;
+ int64_t lastFrameTimeStampUs = INT_MAX;
+ int32_t numTracks = 1;
+ if (inpId[1] != UNUSED_ID) {
+ numTracks++;
+ }
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ string inputFile = gEnv->getRes();
+ string inputInfo = gEnv->getRes();
+ configFormat param;
+ bool isAudio;
+ getFileDetails(inputFile, inputInfo, param, isAudio, inpId[idx]);
+ ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
- // Read file properties
- struct stat buf;
- status = stat(inputFile.c_str(), &buf);
- ASSERT_EQ(0, status);
+ ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+ status = addWriterSource(isAudio, param, idx);
+ ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
- float fileSizeLimit = get<2>(params);
- float fileDurationLimit = get<3>(params);
+ // Read file properties
+ struct stat buf;
+ status = stat(inputFile.c_str(), &buf);
+ ASSERT_EQ(0, status);
+
+ inputFileSize += buf.st_size;
+ if (lastFrameTimeStampUs > mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs) {
+ lastFrameTimeStampUs = mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs;
+ }
+ }
+
+ float fileSizeLimit = get<3>(GetParam());
+ float fileDurationLimit = get<4>(GetParam());
int64_t maxFileSize = 0;
int64_t maxFileDuration = 0;
-
- size_t inputFileSize = buf.st_size;
- int64_t lastFrameTimeStampUs = mBufferInfo[mBufferInfo.size() - 1].timeUs;
if (fileSizeLimit > 0) {
maxFileSize = (int64_t)(fileSizeLimit * inputFileSize);
mWriter->setMaxFileSize(maxFileSize);
@@ -517,14 +969,33 @@
mWriter->setListener(listener);
status = mWriter->start(mFileMeta.get());
-
ASSERT_EQ((status_t)OK, status);
- status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
- mBufferInfo.size(), false, listener);
- ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+ float interval = get<5>(GetParam());
+ ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
+
+ size_t range = 0;
+ int32_t loopCount = 0;
+ int32_t offset[kMaxTrackCount]{};
+ while (loopCount < ceil(1.0 / interval)) {
+ for (int32_t idx = 0; idx < numTracks; idx++) {
+ range = mBufferInfo[idx].size() * interval;
+ status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+ mCurrentTrack[idx], offset[idx], range, false, listener);
+ ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+ offset[idx] += range;
+ }
+ loopCount++;
+ }
+
ASSERT_TRUE(mWriter->reachedEOS()) << "EOS not signalled.";
- mCurrentTrack->stop();
+ for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+ if (mCurrentTrack[idx]) {
+ mCurrentTrack[idx]->stop();
+ }
+ }
+
status = mWriter->stop();
ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
close(fd);
@@ -551,25 +1022,84 @@
// TODO: (b/150923387)
// Add WEBM input
-INSTANTIATE_TEST_SUITE_P(
- ListenerTestAll, ListenerTest,
- ::testing::Values(make_tuple("ogg", 0, 0.7, 0.3), make_tuple("aac", 1, 0.6, 0.7),
- make_tuple("mpeg4", 1, 0.4, 0.3), make_tuple("amrnb", 3, 0.2, 0.6),
- make_tuple("amrwb", 4, 0.5, 0.5), make_tuple("mpeg2Ts", 1, 0.2, 1)));
+INSTANTIATE_TEST_SUITE_P(ListenerTestAll, ListenerTest,
+ ::testing::Values(make_tuple("aac", AAC_1, UNUSED_ID, 0.6, 0.7, 1),
+ make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 0.2, 0.6, 1),
+ make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 0.5, 0.5, 1),
+ make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 0.2, 1, 1),
+ make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.4, 0.3, 0.25),
+ make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.3, 1, 0.5),
+ make_tuple("ogg", OPUS_1, UNUSED_ID, 0.7, 0.3, 1)));
// TODO: (b/144476164)
// Add AAC_ADTS, FLAC, AV1 input
-INSTANTIATE_TEST_SUITE_P(WriterTestAll, WriteFunctionalityTest,
- ::testing::Values(make_pair("ogg", 0), make_pair("webm", 0),
- make_pair("aac", 1), make_pair("mpeg4", 1),
- make_pair("amrnb", 3), make_pair("amrwb", 4),
- make_pair("webm", 5), make_pair("webm", 7),
- make_pair("webm", 8), make_pair("mpeg4", 9),
- make_pair("mpeg4", 10), make_pair("mpeg4", 12),
- make_pair("mpeg4", 13), make_pair("mpeg2Ts", 1),
- make_pair("mpeg2Ts", 9)));
+INSTANTIATE_TEST_SUITE_P(
+ WriterTestAll, WriteFunctionalityTest,
+ ::testing::Values(
+ make_tuple("aac", AAC_1, UNUSED_ID, 1),
+
+ make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 1),
+ make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 1),
+
+ // TODO(b/144902018): Enable test for mpeg2ts
+ // make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 1),
+ // make_tuple("mpeg2Ts", AVC_1, UNUSED_ID, 1),
+ // TODO(b/156355857): Add multitrack for mpeg2ts
+ // make_tuple("mpeg2Ts", AAC_1, AVC_1, 0.50),
+ // make_tuple("mpeg2Ts", AVC_1, AAC_1, 0.25),
+
+ make_tuple("mpeg4", AAC_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", AMR_NB_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", AMR_WB_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", AVC_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", H263_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", HEIC_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", HEVC_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", MPEG4_1, UNUSED_ID, 1),
+ make_tuple("mpeg4", AAC_1, AVC_1, 0.25),
+ make_tuple("mpeg4", AVC_1, AAC_1, 0.75),
+ make_tuple("mpeg4", AMR_WB_1, AAC_1, 0.75),
+ make_tuple("mpeg4", HEVC_1, AMR_WB_1, 0.25),
+ make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+ make_tuple("mpeg4", MPEG4_1, AAC_1, 0.75),
+ make_tuple("mpeg4", AMR_NB_1, AMR_WB_1, 0.25),
+ make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+ make_tuple("mpeg4", MPEG4_1, HEVC_1, 0.75),
+
+ make_tuple("ogg", OPUS_1, UNUSED_ID, 1),
+
+ make_tuple("webm", OPUS_1, UNUSED_ID, 1),
+ make_tuple("webm", VORBIS_1, UNUSED_ID, 1),
+ make_tuple("webm", VP8_1, UNUSED_ID, 1),
+ make_tuple("webm", VP9_1, UNUSED_ID, 1),
+ make_tuple("webm", VP8_1, OPUS_1, 0.50),
+ make_tuple("webm", VORBIS_1, VP8_1, 0.25)));
+
+INSTANTIATE_TEST_SUITE_P(
+ WriterValidityTest, WriterValidityTest,
+ ::testing::Values(
+ make_tuple("aac", AAC_1, true),
+
+ make_tuple("amrnb", AMR_NB_1, true),
+ make_tuple("amrwb", AMR_WB_1, true),
+
+ make_tuple("mpeg4", AAC_1, false),
+ make_tuple("mpeg4", AMR_NB_1, false),
+ make_tuple("mpeg4", AVC_1, false),
+ make_tuple("mpeg4", H263_1, false),
+ make_tuple("mpeg4", HEIC_1, false),
+ make_tuple("mpeg4", HEVC_1, false),
+ make_tuple("mpeg4", MPEG4_1, false),
+
+ make_tuple("ogg", OPUS_1, true),
+
+ make_tuple("webm", OPUS_1, false),
+ make_tuple("webm", VORBIS_1, true),
+ make_tuple("webm", VP8_1, false),
+ make_tuple("webm", VP9_1, false)));
int main(int argc, char **argv) {
+ ProcessState::self()->startThreadPool();
gEnv = new WriterTestEnvironment();
::testing::AddGlobalTestEnvironment(gEnv);
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index 5e19973..6b456fb 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -31,8 +31,15 @@
#define CODEC_CONFIG_FLAG 32
+constexpr uint32_t kMaxTrackCount = 2;
constexpr uint32_t kMaxCSDStrlen = 16;
constexpr uint32_t kMaxCount = 20;
+constexpr int32_t kMimeSize = 128;
+constexpr int32_t kDefaultInterleaveDuration = 0;
+// Geodata is set according to the ISO 6709 standard.
+constexpr int32_t kDefaultLatitudex10000 = 500000;
+constexpr int32_t kDefaultLongitudex10000 = 1000000;
+constexpr float kDefaultFPS = 30.0f;
struct BufferInfo {
int32_t size;
@@ -40,6 +47,14 @@
int64_t timeUs;
};
+struct configFormat {
+ char mime[kMimeSize];
+ int32_t width;
+ int32_t height;
+ int32_t sampleRate;
+ int32_t channelCount;
+};
+
int32_t sendBuffersToWriter(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
int32_t &inputFrameId, sp<MediaAdapter> ¤tTrack, int32_t offset,
int32_t range, bool isPaused = false,
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
index 185f824..35a5b11 100644
--- a/media/libstagefright/timedtext/TEST_MAPPING
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -1,7 +1,9 @@
// mappings for frameworks/av/media/libstagefright/timedtext
{
- "presubmit": [
- // TODO(b/148094059): unit tests not allowed to download content
- //{ "name": "TimedTextUnitTest" }
+ // tests which require dynamic content
+ // invoke with: atest -- --enable-module-dynamic-download=true
+ // TODO(b/148094059): unit tests not allowed to download content
+ "dynamic-presubmit": [
+ { "name": "TimedTextUnitTest" }
]
}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 36f8891..11e5077 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -16,6 +16,7 @@
cc_test {
name: "TimedTextUnitTest",
+ test_suites: ["device-tests"],
gtest: true,
srcs: [
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 3be5e74..dbdb43c 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -493,7 +493,7 @@
mPath(path),
mStatus(NO_INIT) {
// determine href_base
- std::string::size_type end = path.rfind("/");
+ std::string::size_type end = path.rfind('/');
if (end != std::string::npos) {
mHrefBase = path.substr(0, end + 1);
}
diff --git a/media/libstagefright/xmlparser/vts/Android.mk b/media/libstagefright/xmlparser/vts/Android.mk
deleted file mode 100644
index d5290ba..0000000
--- a/media/libstagefright/xmlparser/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaCodecs
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libstagefright/xmlparser/vts/AndroidTest.xml b/media/libstagefright/xmlparser/vts/AndroidTest.xml
deleted file mode 100644
index 97ee107..0000000
--- a/media/libstagefright/xmlparser/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaCodecs.">
- <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
- <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
- <option name="abort-on-push-failure" value="false"/>
- <option name="push-group" value="HostDrivenTest.push"/>
- <option name="push" value="DATA/etc/media_codecs.xsd->/data/local/tmp/media_codecs.xsd"/>
- </target_preparer>
- <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
- <option name="test-module-name" value="VtsValidateMediaCodecs"/>
- <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
- <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
- <option name="binary-test-type" value="gtest"/>
- <option name="test-timeout" value="30s"/>
- </test>
-</configuration>
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 2aefa7d..f7f0db7 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -14,6 +14,7 @@
cc_library {
name: "libwatchdog",
+ host_supported: true,
srcs: [
"Watchdog.cpp",
],
@@ -29,6 +30,12 @@
darwin: {
enabled: false,
},
+ linux_glibc: {
+ cflags: [
+ "-Dsigev_notify_thread_id=_sigev_un._tid",
+ ],
+ },
},
apex_available: ["com.android.media"],
+ min_sdk_version: "29",
}
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index a968890..ee7285d 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -15,13 +15,15 @@
srcs: ["main_mediaserver.cpp"],
shared_libs: [
- "libresourcemanagerservice",
+ "android.hardware.media.omx@1.0",
+ "libandroidicu",
+ "libfmq",
+ "libbinder",
+ "libhidlbase",
"liblog",
"libmediaplayerservice",
+ "libresourcemanagerservice",
"libutils",
- "libbinder",
- "libandroidicu",
- "android.hardware.media.omx@1.0",
],
static_libs: [
@@ -33,8 +35,10 @@
"frameworks/av/services/mediaresourcemanager",
],
- // back to 32-bit, b/126502613
- compile_multilib: "32",
+ // mediaserver has only been verified on 32-bit, see b/126502613
+ // use "prefer32" to *only* enable 64-bit builds on 64-bit-only lunch
+ // targets, which allows them to reach 'boot_complete'.
+ compile_multilib: "prefer32",
init_rc: ["mediaserver.rc"],
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 7b22b05..58e2d2a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,10 +18,10 @@
#define LOG_TAG "mediaserver"
//#define LOG_NDEBUG 0
-#include <aicu/AIcu.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
+#include <hidl/HidlTransportSupport.h>
#include <utils/Log.h>
#include "RegisterExtensions.h"
@@ -38,10 +38,11 @@
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm(defaultServiceManager());
ALOGI("ServiceManager: %p", sm.get());
- AIcu_initializeIcuOrDie();
MediaPlayerService::instantiate();
ResourceManagerService::instantiate();
registerExtensions();
+ ::android::hardware::configureRpcThreadpool(16, false);
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
+ ::android::hardware::joinRpcThreadpool();
}
diff --git a/media/mtp/Android.bp b/media/mtp/Android.bp
index 66a3139..e572249 100644
--- a/media/mtp/Android.bp
+++ b/media/mtp/Android.bp
@@ -52,5 +52,6 @@
"liblog",
"libusbhost",
],
+ header_libs: ["libcutils_headers"],
}
diff --git a/media/mtp/tests/Android.bp b/media/mtp/tests/Android.bp
deleted file mode 100644
index 0750208..0000000
--- a/media/mtp/tests/Android.bp
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// Copyright (C) 2017 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-cc_test {
- name: "mtp_ffs_handle_test",
- test_suites: ["device-tests"],
- srcs: ["MtpFfsHandle_test.cpp"],
- shared_libs: [
- "libbase",
- "libmtp",
- "liblog",
- ],
- cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- ],
-}
-
-cc_test {
- name: "posix_async_io_test",
- test_suites: ["device-tests"],
- srcs: ["PosixAsyncIO_test.cpp"],
- shared_libs: [
- "libbase",
- "libmtp",
- "liblog",
- ],
- cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- ],
-}
diff --git a/media/mtp/tests/AndroidTest.xml b/media/mtp/tests/AndroidTest.xml
deleted file mode 100644
index c1f4753..0000000
--- a/media/mtp/tests/AndroidTest.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<configuration description="Config for mtp_ffs_handle_test">
- <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
- <option name="cleanup" value="true" />
- <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
- </target_preparer>
- <option name="test-suite-tag" value="apct" />
- <test class="com.android.tradefed.testtype.GTest" >
- <option name="native-test-device-path" value="/data/local/tmp" />
- <option name="module-name" value="mtp_ffs_handle_test" />
- </test>
-</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandleTest/Android.bp b/media/mtp/tests/MtpFfsHandleTest/Android.bp
new file mode 100644
index 0000000..e393067
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/Android.bp
@@ -0,0 +1,32 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_test {
+ name: "mtp_ffs_handle_test",
+ test_suites: ["device-tests"],
+ srcs: ["MtpFfsHandle_test.cpp"],
+ shared_libs: [
+ "libbase",
+ "libmtp",
+ "liblog",
+ ],
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ ],
+}
+
diff --git a/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
new file mode 100644
index 0000000..38bab27
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Config for mtp_ffs_handle_test">
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
+ </target_preparer>
+ <option name="test-suite-tag" value="apct" />
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="mtp_ffs_handle_test" />
+ </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandle_test.cpp b/media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
similarity index 100%
rename from media/mtp/tests/MtpFfsHandle_test.cpp
rename to media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
new file mode 100644
index 0000000..9cd4669
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -0,0 +1,31 @@
+cc_fuzz {
+ name: "mtp_fuzzer",
+ srcs: [
+ "mtp_fuzzer.cpp",
+ "MtpMockDatabase.cpp",
+ ],
+ shared_libs: [
+ "libmtp",
+ "libbase",
+ "liblog",
+ "libutils",
+ ],
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ "-DMTP_DEVICE",
+ "-Wno-unused-parameter",
+ ],
+ dictionary: "mtp_fuzzer.dict",
+ corpus: ["corpus/*"],
+
+ fuzz_config: {
+
+ cc: ["jameswei@google.com"],
+ componentid: 1344,
+ acknowledgement: [
+ "Grant Hernandez of Google",
+ ],
+ },
+}
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
new file mode 100644
index 0000000..5d95aa2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include <log/log.h>
+
+#include "MtpDebug.h"
+#include "MtpMockDatabase.h"
+#include "MtpObjectInfo.h"
+
+namespace android {
+
+MtpMockDatabase::MtpMockDatabase() : mLastObjectHandle(0) {}
+
+MtpMockDatabase::~MtpMockDatabase() {
+ for (MtpObjectInfo* i : mObjects) {
+ delete i;
+ }
+ mObjects.clear();
+}
+
+void MtpMockDatabase::addObject(MtpObjectInfo* info) {
+ assert(hasStorage(info->storageID));
+
+ // we take ownership
+ mObjects.push_back(info);
+
+ return;
+}
+
+MtpObjectHandle MtpMockDatabase::allocateObjectHandle() {
+ // this is in sync with our mObjects database
+ return mLastObjectHandle++;
+}
+
+// Called from SendObjectInfo to reserve a database entry for the incoming file.
+MtpObjectHandle MtpMockDatabase::beginSendObject(const char* path, MtpObjectFormat format,
+ MtpObjectHandle parent, MtpStorageID storage) {
+ if (!hasStorage(storage)) {
+ ALOGW("%s: Tried to lookup storageID %u, but doesn't exist\n", __func__, storage);
+ return kInvalidObjectHandle;
+ }
+
+ ALOGD("MockDatabase %s: path=%s oformat=0x%04x parent_handle=%u "
+ "storage_id=%u\n",
+ __func__, path, format, parent, storage);
+
+ return mLastObjectHandle;
+}
+
+// Called to report success or failure of the SendObject file transfer.
+void MtpMockDatabase::endSendObject(MtpObjectHandle handle, bool succeeded) {
+ ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+// Called to rescan a file, such as after an edit.
+void MtpMockDatabase::rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format) {
+ ALOGD("MockDatabase %s: path=%s ohandle=%u, oformat=0x%04x\n", __func__, path, handle, format);
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+ MtpObjectHandle parent) {
+ ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+ parent);
+ return nullptr;
+}
+
+int MtpMockDatabase::getNumObjects(MtpStorageID storageID, MtpObjectFormat format,
+ MtpObjectHandle parent) {
+ ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+ parent);
+ // TODO: return MTP_RESPONSE_OK when it stops segfaulting
+ return 0;
+}
+
+// caller should delete[] the results from these
+// results can be NULL
+MtpObjectFormatList* MtpMockDatabase::getSupportedPlaybackFormats() {
+ ALOGD("MockDatabase %s\n", __func__);
+ return nullptr;
+}
+MtpObjectFormatList* MtpMockDatabase::getSupportedCaptureFormats() {
+ ALOGD("MockDatabase %s\n", __func__);
+ return nullptr;
+}
+MtpObjectPropertyList* MtpMockDatabase::getSupportedObjectProperties(MtpObjectFormat format) {
+ ALOGD("MockDatabase %s: oformat=0x%04x\n", __func__, format);
+ return nullptr;
+}
+MtpDevicePropertyList* MtpMockDatabase::getSupportedDeviceProperties() {
+ ALOGD("MockDatabase %s\n", __func__);
+ return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) {
+ ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+ MtpDebug::getObjectPropCodeName(property));
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) {
+ ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+ MtpDebug::getObjectPropCodeName(property));
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) {
+ ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) {
+ ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::resetDeviceProperty(MtpDeviceProperty property) {
+ ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+ uint32_t property, int groupCode, int depth,
+ MtpDataPacket& packet) {
+ ALOGD("MockDatabase %s: ohandle=%u format=%s property=%s groupCode=%d "
+ "depth=%d\n",
+ __func__, handle, MtpDebug::getFormatCodeName(format),
+ MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+ // used for the root
+ if (handle == kInvalidObjectHandle) {
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ } else {
+ if (mObjects.size() == 0) {
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+
+ // this is used to let the fuzzer make progress, otherwise
+ // it has to brute-force a 32-bit handle
+ MtpObjectHandle reducedHandle = handle % mObjects.size();
+ MtpObjectInfo* obj = mObjects[reducedHandle];
+
+ // make a copy, but make sure to maintain ownership of string pointers
+ info = *obj;
+
+ // fixup the response handle
+ info.mHandle = handle;
+
+ if (obj->mName) info.mName = strdup(obj->mName);
+ if (obj->mKeywords) info.mKeywords = strdup(obj->mKeywords);
+
+ return MTP_RESPONSE_OK;
+ }
+}
+
+void* MtpMockDatabase::getThumbnail(MtpObjectHandle handle, size_t& outThumbSize) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+ size_t allocSize = handle % 0x1000;
+ void* data = calloc(allocSize, sizeof(uint8_t));
+ if (!data) {
+ return nullptr;
+ } else {
+ ALOGD("MockDatabase %s\n", __func__);
+ outThumbSize = allocSize;
+ return data;
+ }
+}
+
+MtpResponseCode MtpMockDatabase::getObjectFilePath(MtpObjectHandle handle,
+ MtpStringBuffer& outFilePath,
+ int64_t& outFileLength,
+ MtpObjectFormat& outFormat) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+ if (mObjects.size() == 0) {
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+
+ // this is used to let the fuzzer make progress, otherwise
+ // it has to brute-force a 32-bit handle
+ MtpObjectHandle reducedHandle = handle % mObjects.size();
+ MtpObjectInfo* obj = mObjects[reducedHandle];
+ MtpStorage* storage = mStorage[obj->mStorageID];
+
+ // walk up the tree to build a full path of the object
+ MtpObjectHandle currentHandle = reducedHandle;
+ std::string path = "";
+
+ while (currentHandle != MTP_PARENT_ROOT) {
+ MtpObjectInfo* next = mObjects[currentHandle];
+
+ // prepend the name
+ if (path == "")
+ path = std::string(next->mName);
+ else
+ path = std::string(next->mName) + "/" + path;
+
+ currentHandle = next->mParent;
+ }
+
+ outFilePath.set(storage->getPath());
+ outFilePath.append("/");
+ outFilePath.append(path.c_str());
+
+ outFormat = obj->mFormat;
+
+ ALOGD("MockDatabase %s: get file %s\n", __func__, (const char*)outFilePath);
+
+ struct stat sstat;
+ // this should not happen unless our database view of the filesystem is out of
+ // sync
+ if (stat((const char*)outFilePath, &sstat) < 0) {
+ ALOGE("MockDatabase %s: unable to stat %s\n", __func__, (const char*)outFilePath);
+
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+
+ outFileLength = sstat.st_size;
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::beginDeleteObject(MtpObjectHandle handle) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+ return MTP_RESPONSE_OK;
+}
+void MtpMockDatabase::endDeleteObject(MtpObjectHandle handle, bool succeeded) {
+ ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+ return;
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectReferences(MtpObjectHandle handle) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+ return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references) {
+ ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+ return MTP_RESPONSE_OK;
+}
+
+MtpProperty* MtpMockDatabase::getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format) {
+ ALOGD("MockDatabase %s: property=%s format=%s\n", __func__,
+ MtpDebug::getObjectPropCodeName(property), MtpDebug::getFormatCodeName(format));
+
+ return nullptr;
+}
+
+MtpProperty* MtpMockDatabase::getDevicePropertyDesc(MtpDeviceProperty property) {
+ ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+ return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+ MtpStorageID newStorage) {
+ ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+ newStorage);
+ return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+ MtpStorageID oldStorage, MtpStorageID newStorage,
+ MtpObjectHandle handle, bool succeeded) {
+ ALOGD("MockDatabase %s: oldParent=%u newParent=%u oldStorage=%u newStorage=%u "
+ "ohandle=%u succeeded=%d\n",
+ __func__, oldParent, newParent, oldStorage, newStorage, handle, succeeded);
+ return;
+}
+
+MtpResponseCode MtpMockDatabase::beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+ MtpStorageID newStorage) {
+ ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+ newStorage);
+ return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endCopyObject(MtpObjectHandle handle, bool succeeded) {
+ ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+}; // namespace android
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
new file mode 100644
index 0000000..876719e
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef _MTP_MOCK_DATABASE_H
+#define _MTP_MOCK_DATABASE_H
+
+#include <map>
+
+#include "IMtpDatabase.h"
+#include "MtpStorage.h"
+
+namespace android {
+
+// In-memory IMtpDatabase implementation for the MTP fuzzer harness.
+// Storages are registered externally and are NOT owned (see addStorage());
+// object records are added via addObject() -- presumably owned by the
+// database, confirm against the destructor in MtpMockDatabase.cpp.
+class MtpMockDatabase : public IMtpDatabase {
+    // Registered storages, keyed by storage id (not owned).
+    std::map<MtpStorageID, MtpStorage*> mStorage;
+    // Object records registered through addObject().
+    std::vector<MtpObjectInfo*> mObjects;
+    // Counter backing allocateObjectHandle() -- TODO confirm exact semantics.
+    uint32_t mLastObjectHandle;
+
+public:
+    MtpMockDatabase();
+    virtual ~MtpMockDatabase();
+
+    // MtpFuzzer methods
+    void addStorage(MtpStorage* storage) {
+        // we don't own this
+        mStorage[storage->getStorageID()] = storage;
+    }
+
+    // True if a storage with the given id has been registered.
+    bool hasStorage(MtpStorageID storage) { return mStorage.find(storage) != mStorage.end(); }
+
+    void addObject(MtpObjectInfo* info);
+    MtpObjectHandle allocateObjectHandle();
+
+    // libmtp interface methods
+    // Called from SendObjectInfo to reserve a database entry for the incoming
+    // file.
+    MtpObjectHandle beginSendObject(const char* path, MtpObjectFormat format,
+                                    MtpObjectHandle parent, MtpStorageID storage);
+
+    // Called to report success or failure of the SendObject file transfer.
+    void endSendObject(MtpObjectHandle handle, bool succeeded);
+
+    // Called to rescan a file, such as after an edit.
+    void rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format);
+
+    MtpObjectHandleList* getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+                                       MtpObjectHandle parent);
+
+    int getNumObjects(MtpStorageID storageID, MtpObjectFormat format, MtpObjectHandle parent);
+
+    // caller should delete[] the results from these
+    // results can be NULL
+    MtpObjectFormatList* getSupportedPlaybackFormats();
+    MtpObjectFormatList* getSupportedCaptureFormats();
+    MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format);
+    MtpDevicePropertyList* getSupportedDeviceProperties();
+
+    MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode resetDeviceProperty(MtpDeviceProperty property);
+
+    MtpResponseCode getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+                                          uint32_t property, int groupCode, int depth,
+                                          MtpDataPacket& packet);
+
+    MtpResponseCode getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info);
+
+    void* getThumbnail(MtpObjectHandle handle, size_t& outThumbSize);
+
+    MtpResponseCode getObjectFilePath(MtpObjectHandle handle, MtpStringBuffer& outFilePath,
+                                      int64_t& outFileLength, MtpObjectFormat& outFormat);
+
+    MtpResponseCode beginDeleteObject(MtpObjectHandle handle);
+    void endDeleteObject(MtpObjectHandle handle, bool succeeded);
+
+    MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle);
+
+    MtpResponseCode setObjectReferences(MtpObjectHandle handle, MtpObjectHandleList* references);
+
+    MtpProperty* getObjectPropertyDesc(MtpObjectProperty property, MtpObjectFormat format);
+
+    MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property);
+
+    MtpResponseCode beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+
+    void endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                       MtpStorageID oldStorage, MtpStorageID newStorage, MtpObjectHandle handle,
+                       bool succeeded);
+
+    MtpResponseCode beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+    void endCopyObject(MtpObjectHandle handle, bool succeeded);
+};
+
+}; // namespace android
+
+#endif // _MTP_MOCK_DATABASE_H
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockHandle.h b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
new file mode 100644
index 0000000..111485c
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_MOCK_HANDLE_H
+#define _MTP_MOCK_HANDLE_H
+
+#include <vector>
+
+typedef std::vector<uint8_t> packet_t;
+
+namespace android {
+// IMtpHandle backed by an in-memory packet list instead of a usb endpoint.
+// read() hands the queued packets to the MtpServer one at a time; writes,
+// file transfers and events are faked.
+class MtpMockHandle : public IMtpHandle {
+private:
+    size_t mPacketNumber;  // index of the packet currently being consumed
+    size_t mPacketOffset;  // read offset within the current packet
+    std::vector<packet_t> mPackets;
+
+public:
+    MtpMockHandle() : mPacketNumber(0), mPacketOffset(0) {}
+
+    // Queue a packet for later read() calls; packets are consumed in order.
+    void add_packet(packet_t pkt) { mPackets.push_back(pkt); }
+
+    // Return number of bytes read/written, or -1 and errno is set
+    int read(void *data, size_t len) {
+        if (mPacketNumber >= mPackets.size()) {
+            return 0;
+        }
+        packet_t &pkt = mPackets[mPacketNumber];
+        // Bytes of the current packet not yet handed out.  The original code
+        // compared pkt.size() against len and copied pkt.size() bytes from
+        // pkt.data() + mPacketOffset, which after a partial read both read
+        // past the end of the packet and over-reported the amount read.
+        size_t remaining = pkt.size() - mPacketOffset;
+
+        ALOGD("%s: sz %zu, pkt %zu+%zu/%zu\n", __func__, len, mPacketNumber, mPacketOffset,
+              pkt.size());
+
+        size_t readAmt;
+        // remainder is bigger than what the caller can handle,
+        if (remaining > len) {
+            memcpy(data, pkt.data() + mPacketOffset, len);
+
+            mPacketOffset += len;
+            readAmt = len;
+            // remainder is equal or smaller than the caller buffer
+        } else {
+            memcpy(data, pkt.data() + mPacketOffset, remaining);
+
+            mPacketNumber++;
+            mPacketOffset = 0;
+            readAmt = remaining;
+        }
+
+        return readAmt;
+    }
+    int write(const void *data, size_t len) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, len);
+        // fake the write
+        return len;
+    }
+
+    // Return 0 if send/receive is successful, or -1 and errno is set
+    int receiveFile(mtp_file_range mfr, bool zero_packet) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendFile(mtp_file_range mfr) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendEvent(mtp_event me) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, me.length);
+        return 0;
+    }
+
+    // Return 0 if operation is successful, or -1 else
+    int start(bool ptp) { return 0; }
+
+    void close() {}
+
+    virtual ~MtpMockHandle() {}
+};
+}; // namespace android
+
+#endif // _MTP_MOCK_HANDLE_H
diff --git a/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
new file mode 100644
index 0000000..38f8ed2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
new file mode 100644
index 0000000..7759380
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
new file mode 100644
index 0000000..e88410f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
new file mode 100644
index 0000000..e283fb4
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
new file mode 100644
index 0000000..7627f88
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
new file mode 100644
index 0000000..f578462
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/unique_fd.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include "IMtpHandle.h"
+#include "MtpMockDatabase.h"
+#include "MtpMockHandle.h"
+#include "MtpObjectInfo.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpUtils.h"
+
+// Human-readable storage description handed to MtpStorage.
+const char* storage_desc = "Fuzz Storage";
+// prefer tmpfs for file operations to avoid wearing out flash
+const char* storage_path = "/storage/fuzzer/0";
+// Seed directory (relative to the fuzzer's cwd) mirrored into the storage
+// root on every run -- see MtpMockServer::createDatabaseFromSourceDir().
+const char* source_database = "srcdb/";
+
+namespace android {
+// Harness that wires a real MtpServer to mock I/O (MtpMockHandle) and a
+// mock object database (MtpMockDatabase) so fuzz input can drive a full
+// MTP session without usb hardware.
+class MtpMockServer {
+public:
+    std::unique_ptr<MtpMockHandle> mHandle;
+    std::unique_ptr<MtpStorage> mStorage;
+    std::unique_ptr<MtpMockDatabase> mDatabase;
+    std::unique_ptr<MtpServer> mMtp;
+    int mStorageId;
+
+    MtpMockServer(const char* storage_path) : mStorageId(0) {
+        bool ptp = false;
+        const char* manu = "Google";
+        const char* model = "Pixel 3XL";
+        const char* version = "1.0";
+        const char* serial = "ABDEF1231";
+
+        // This is unused in our harness
+        int controlFd = -1;
+
+        mHandle = std::make_unique<MtpMockHandle>();
+        mStorage = std::make_unique<MtpStorage>(mStorageId, storage_path, storage_desc, true,
+                                                0x200000000L);
+        mDatabase = std::make_unique<MtpMockDatabase>();
+        mDatabase->addStorage(mStorage.get());
+
+        mMtp = std::make_unique<MtpServer>(mDatabase.get(), controlFd, ptp, manu, model, version,
+                                           serial);
+        mMtp->addStorage(mStorage.get());
+
+        // clear the old handle first, so we don't leak memory
+        delete mMtp->mHandle;
+        // NOTE(review): mMtp->mHandle now aliases a pointer still owned by
+        // the mHandle unique_ptr -- confirm MtpServer never deletes its
+        // handle, otherwise this is a double free at teardown.
+        mMtp->mHandle = mHandle.get();
+    }
+
+    // Drive the MtpServer main loop over the queued mock packets.
+    void run() { mMtp->run(); }
+
+    // Recursively mirror fromPath into toPath, registering every file and
+    // folder with the mock database under parentHandle.  Returns -1 if the
+    // source directory cannot be opened, otherwise the accumulated results
+    // of makeFolder()/copyFile() -- NOTE(review): presumably 0 on full
+    // success; confirm the helpers' return convention in MtpUtils.
+    int createDatabaseFromSourceDir(const char* fromPath, const char* toPath,
+                                    MtpObjectHandle parentHandle) {
+        int ret = 0;
+        std::string fromPathStr(fromPath);
+        std::string toPathStr(toPath);
+
+        DIR* dir = opendir(fromPath);
+        if (!dir) {
+            ALOGE("opendir %s failed", fromPath);
+            return -1;
+        }
+        // Ensure both prefixes end with '/' so concatenation below is valid.
+        if (fromPathStr[fromPathStr.size() - 1] != '/') fromPathStr += '/';
+        if (toPathStr[toPathStr.size() - 1] != '/') toPathStr += '/';
+
+        struct dirent* entry;
+        while ((entry = readdir(dir))) {
+            const char* name = entry->d_name;
+
+            // ignore "." and ".."
+            if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+                continue;
+            }
+
+            std::string oldFile = fromPathStr + name;
+            std::string newFile = toPathStr + name;
+
+            if (entry->d_type == DT_DIR) {
+                ret += makeFolder(newFile.c_str());
+
+                // objectInfo is handed to the database via addObject();
+                // presumably freed there -- confirm in MtpMockDatabase.
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_ASSOCIATION; // folder
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+
+                // Recurse so children are parented to the new folder handle.
+                ret += createDatabaseFromSourceDir(oldFile.c_str(), newFile.c_str(),
+                                                   objectInfo->mHandle);
+            } else {
+                ret += copyFile(oldFile.c_str(), newFile.c_str());
+
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_TEXT;
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+            }
+        }
+
+        closedir(dir);
+        return ret;
+    }
+};
+}; // namespace android
+
+// Fuzzer entry point: resets the tmpfs-backed storage, splits the input
+// into MTP packets at '@' delimiters, queues them on the mock handle, and
+// runs a full MtpServer session over them.
+//
+// optnone keeps the harness itself out of the optimizer.  The attribute is
+// placed before the declarator: GCC rejects a GNU attribute after the
+// parameter list on a function *definition*, so this position is the
+// portable one (the symbol and signature are unchanged).
+extern "C" __attribute__((optnone)) int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    // reset our storage (from MtpUtils.h)
+    android::deletePath(storage_path);
+    android::makeFolder("/storage/fuzzer");
+    android::makeFolder(storage_path);
+
+    std::unique_ptr<android::MtpMockServer> mtp =
+            std::make_unique<android::MtpMockServer>(storage_path);
+
+    size_t off = 0;
+
+    // Packetize the input stream
+    for (size_t i = 0; i < size; i++) {
+        // A longer delimiter could be used, but this worked in practice
+        if (data[i] == '@') {
+            size_t pktsz = i - off;
+            if (pktsz > 0) {
+                packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
+                // insert into packet buffer
+                mtp->mHandle->add_packet(pkt);
+                // NOTE(review): off stays on the delimiter, so every packet
+                // after the first begins with '@', and bytes after the last
+                // '@' are dropped -- confirm against the corpus format
+                // before changing this.
+                off = i;
+            }
+        }
+    }
+
+    mtp->createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
+    mtp->run();
+
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
new file mode 100644
index 0000000..4c3f136
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
@@ -0,0 +1,74 @@
+mtp_operation_get_device_info="\x01\x10"
+mtp_operation_open_session="\x02\x10"
+mtp_operation_close_session="\x03\x10"
+mtp_operation_get_storage_ids="\x04\x10"
+mtp_operation_get_storage_info="\x05\x10"
+mtp_operation_get_num_objects="\x06\x10"
+mtp_operation_get_object_handles="\x07\x10"
+mtp_operation_get_object_info="\x08\x10"
+mtp_operation_get_object="\x09\x10"
+mtp_operation_get_thumb="\x0A\x10"
+mtp_operation_delete_object="\x0B\x10"
+mtp_operation_send_object_info="\x0C\x10"
+mtp_operation_send_object="\x0D\x10"
+mtp_operation_initiate_capture="\x0E\x10"
+mtp_operation_format_store="\x0F\x10"
+mtp_operation_reset_device="\x10\x10"
+mtp_operation_self_test="\x11\x10"
+mtp_operation_set_object_protection="\x12\x10"
+mtp_operation_power_down="\x13\x10"
+mtp_operation_get_device_prop_desc="\x14\x10"
+mtp_operation_get_device_prop_value="\x15\x10"
+mtp_operation_set_device_prop_value="\x16\x10"
+mtp_operation_reset_device_prop_value="\x17\x10"
+mtp_operation_terminate_open_capture="\x18\x10"
+mtp_operation_move_object="\x19\x10"
+mtp_operation_copy_object="\x1A\x10"
+mtp_operation_get_partial_object="\x1B\x10"
+mtp_operation_initiate_open_capture="\x1C\x10"
+mtp_operation_get_object_props_supported="\x01\x98"
+mtp_operation_get_object_prop_desc="\x02\x98"
+mtp_operation_get_object_prop_value="\x03\x98"
+mtp_operation_set_object_prop_value="\x04\x98"
+mtp_operation_get_object_prop_list="\x05\x98"
+mtp_operation_set_object_prop_list="\x06\x98"
+mtp_operation_get_interdependent_prop_desc="\x07\x98"
+mtp_operation_send_object_prop_list="\x08\x98"
+mtp_operation_get_object_references="\x10\x98"
+mtp_operation_set_object_references="\x11\x98"
+mtp_operation_skip="\x20\x98"
+mtp_operation_get_partial_object_64="\xC1\x95"
+mtp_operation_send_partial_object="\xC2\x95"
+mtp_operation_truncate_object="\xC3\x95"
+mtp_operation_begin_edit_object="\xC4\x95"
+mtp_operation_end_edit_object="\xC5\x95"
+
+# Association (for example, a folder)
+mtp_format_association="\x01\x30"
+
+# types
+mtp_type_undefined="\x00\x00"
+mtp_type_int8="\x01\x00"
+mtp_type_uint8="\x02\x00"
+mtp_type_int16="\x03\x00"
+mtp_type_uint16="\x04\x00"
+mtp_type_int32="\x05\x00"
+mtp_type_uint32="\x06\x00"
+mtp_type_int64="\x07\x00"
+mtp_type_uint64="\x08\x00"
+mtp_type_int128="\x09\x00"
+mtp_type_uint128="\x0A\x00"
+mtp_type_aint8="\x01\x40"
+mtp_type_auint8="\x02\x40"
+mtp_type_aint16="\x03\x40"
+mtp_type_auint16="\x04\x40"
+mtp_type_aint32="\x05\x40"
+mtp_type_auint32="\x06\x40"
+mtp_type_aint64="\x07\x40"
+mtp_type_auint64="\x08\x40"
+mtp_type_aint128="\x09\x40"
+mtp_type_auint128="\x0A\x40"
+mtp_type_str="\xFF\xFF"
+
+# also used for max size (>4GB)
+mtp_parent_root="\xFF\xFF\xFF\xFF"
diff --git a/media/mtp/tests/PosixAsyncIOTest/Android.bp b/media/mtp/tests/PosixAsyncIOTest/Android.bp
new file mode 100644
index 0000000..1d401b8
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/Android.bp
@@ -0,0 +1,31 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_test {
+ name: "posix_async_io_test",
+ test_suites: ["device-tests"],
+ srcs: ["PosixAsyncIO_test.cpp"],
+ shared_libs: [
+ "libbase",
+ "libmtp",
+ "liblog",
+ ],
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ ],
+}
diff --git a/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
new file mode 100644
index 0000000..cbb10fb
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Config for posix_async_io_test">
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="posix_async_io_test->/data/local/tmp/posix_async_io_test" />
+ </target_preparer>
+ <option name="test-suite-tag" value="apct" />
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="posix_async_io_test" />
+ </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/PosixAsyncIO_test.cpp b/media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
similarity index 100%
rename from media/mtp/tests/PosixAsyncIO_test.cpp
rename to media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 13db15c..ee4def5 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -35,6 +35,14 @@
cc_library_headers {
name: "media_ndk_headers",
vendor_available: true,
+ // TODO(b/153609531): remove when no longer needed.
+ native_bridge_supported: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ min_sdk_version: "29",
export_include_dirs: ["include"],
host_supported: true,
target: {
@@ -46,6 +54,7 @@
cc_library_shared {
name: "libmediandk",
+ llndk_stubs: "libmediandk.llndk",
srcs: [
"NdkJavaVMHelper.cpp",
@@ -107,7 +116,10 @@
export_header_lib_headers: ["jni_headers"],
- export_include_dirs: ["include"],
+ export_include_dirs: [
+ "include",
+ "include_platform",
+ ],
export_shared_lib_headers: [
"libgui",
@@ -126,7 +138,7 @@
}
llndk_library {
- name: "libmediandk",
+ name: "libmediandk.llndk",
symbol_file: "libmediandk.map.txt",
export_include_dirs: [
"include",
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index af21a99..1055dc4 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -19,7 +19,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "NdkMediaCodec"
-#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCodecPlatform.h>
#include <media/NdkMediaError.h>
#include <media/NdkMediaFormatPriv.h>
#include "NdkMediaCryptoPriv.h"
@@ -45,6 +45,10 @@
return AMEDIA_OK;
} else if (err == -EAGAIN) {
return (media_status_t) AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+ } else if (err == NO_MEMORY) {
+ return AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+ } else if (err == DEAD_OBJECT) {
+ return AMEDIACODEC_ERROR_RECLAIMED;
}
ALOGE("sf error code: %d", err);
return AMEDIA_ERROR_UNKNOWN;
@@ -255,7 +259,7 @@
break;
}
msg->findString("detail", &detail);
- ALOGE("Decoder reported error(0x%x), actionCode(%d), detail(%s)",
+ ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
err, actionCode, detail.c_str());
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
@@ -308,7 +312,11 @@
extern "C" {
-static AMediaCodec * createAMediaCodec(const char *name, bool name_is_type, bool encoder) {
+static AMediaCodec * createAMediaCodec(const char *name,
+ bool name_is_type,
+ bool encoder,
+ pid_t pid = android::MediaCodec::kNoPid,
+ uid_t uid = android::MediaCodec::kNoUid) {
AMediaCodec *mData = new AMediaCodec();
mData->mLooper = new ALooper;
mData->mLooper->setName("NDK MediaCodec_looper");
@@ -322,9 +330,20 @@
return NULL;
}
if (name_is_type) {
- mData->mCodec = android::MediaCodec::CreateByType(mData->mLooper, name, encoder);
+ mData->mCodec = android::MediaCodec::CreateByType(
+ mData->mLooper,
+ name,
+ encoder,
+ nullptr /* err */,
+ pid,
+ uid);
} else {
- mData->mCodec = android::MediaCodec::CreateByComponentName(mData->mLooper, name);
+ mData->mCodec = android::MediaCodec::CreateByComponentName(
+ mData->mLooper,
+ name,
+ nullptr /* err */,
+ pid,
+ uid);
}
if (mData->mCodec == NULL) { // failed to create codec
AMediaCodec_delete(mData);
@@ -344,17 +363,38 @@
EXPORT
AMediaCodec* AMediaCodec_createCodecByName(const char *name) {
- return createAMediaCodec(name, false, false);
+ return createAMediaCodec(name, false /* name_is_type */, false /* encoder */);
}
EXPORT
AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) {
- return createAMediaCodec(mime_type, true, false);
+ return createAMediaCodec(mime_type, true /* name_is_type */, false /* encoder */);
}
EXPORT
AMediaCodec* AMediaCodec_createEncoderByType(const char *name) {
- return createAMediaCodec(name, true, true);
+ return createAMediaCodec(name, true /* name_is_type */, true /* encoder */);
+}
+
+EXPORT
+// Like AMediaCodec_createCodecByName(), but attributes the codec instance
+// to the client {pid, uid} (see NdkMediaCodecPlatform.h for the contract).
+AMediaCodec* AMediaCodec_createCodecByNameForClient(const char *name,
+                                                    pid_t pid,
+                                                    uid_t uid) {
+    return createAMediaCodec(name, false /* name_is_type */, false /* encoder */, pid, uid);
+}
+
+EXPORT
+// Like AMediaCodec_createDecoderByType(), attributed to client {pid, uid}.
+AMediaCodec* AMediaCodec_createDecoderByTypeForClient(const char *mime_type,
+                                                      pid_t pid,
+                                                      uid_t uid) {
+    return createAMediaCodec(mime_type, true /* name_is_type */, false /* encoder */, pid, uid);
+}
+
+EXPORT
+// Like AMediaCodec_createEncoderByType(), attributed to client {pid, uid}.
+AMediaCodec* AMediaCodec_createEncoderByTypeForClient(const char *name,
+                                                      pid_t pid,
+                                                      uid_t uid) {
+    return createAMediaCodec(name, true /* name_is_type */, true /* encoder */, pid, uid);
+}
EXPORT
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 0da0740..0c65e9e 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -22,6 +22,7 @@
#include <media/NdkMediaExtractor.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormatPriv.h>
+#include "NdkJavaVMHelperPriv.h"
#include "NdkMediaDataSourcePriv.h"
@@ -63,7 +64,10 @@
AMediaExtractor* AMediaExtractor_new() {
ALOGV("ctor");
AMediaExtractor *mData = new AMediaExtractor();
- mData->mImpl = new NuMediaExtractor();
+ mData->mImpl = new NuMediaExtractor(
+ NdkJavaVMHelper::getJNIEnv() != nullptr
+ ? NuMediaExtractor::EntryPoint::NDK_WITH_JVM
+ : NuMediaExtractor::EntryPoint::NDK_NO_JVM );
return mData;
}
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 8680641..8e673ca 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -364,12 +364,14 @@
EXPORT const char* AMEDIAFORMAT_KEY_SAR_WIDTH = "sar-width";
EXPORT const char* AMEDIAFORMAT_KEY_SEI = "sei";
EXPORT const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT = "slice-height";
+EXPORT const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS = "slow-motion-markers";
EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
EXPORT const char* AMEDIAFORMAT_KEY_TARGET_TIME = "target-time";
EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT = "temporal-layer-count";
EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID = "temporal-layer-id";
EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING = "ts-schema";
EXPORT const char* AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA = "text-format-data";
+EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C = "thumbnail-csd-av1c";
EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC = "thumbnail-csd-hevc";
EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT = "thumbnail-height";
EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_TIME = "thumbnail-time";
@@ -382,6 +384,8 @@
EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
+EXPORT const char* AMEDIAFORMAT_KEY_XMP_OFFSET = "xmp-offset";
+EXPORT const char* AMEDIAFORMAT_KEY_XMP_SIZE = "xmp-size";
EXPORT const char* AMEDIAFORMAT_KEY_YEAR = "year";
} // extern "C"
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 8fb6a87..80d5d50 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -114,12 +114,12 @@
int32_t actionCode,
const char *detail);
-struct AMediaCodecOnAsyncNotifyCallback {
+typedef struct AMediaCodecOnAsyncNotifyCallback {
AMediaCodecOnAsyncInputAvailable onAsyncInputAvailable;
AMediaCodecOnAsyncOutputAvailable onAsyncOutputAvailable;
AMediaCodecOnAsyncFormatChanged onAsyncFormatChanged;
AMediaCodecOnAsyncError onAsyncError;
-};
+} AMediaCodecOnAsyncNotifyCallback;
#if __ANDROID_API__ >= 21
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index 14319c4..a1cd9e3 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -36,6 +36,7 @@
#ifndef _NDK_MEDIA_EXTRACTOR_H
#define _NDK_MEDIA_EXTRACTOR_H
+#include <stdbool.h>
#include <sys/cdefs.h>
#include <sys/types.h>
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index a094cfc..0b9024f 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -36,6 +36,7 @@
#ifndef _NDK_MEDIA_FORMAT_H
#define _NDK_MEDIA_FORMAT_H
+#include <stdbool.h>
#include <sys/cdefs.h>
#include <sys/types.h>
@@ -321,6 +322,13 @@
extern const char* AMEDIAFORMAT_KEY_LOW_LATENCY __INTRODUCED_IN(30);
#endif /* __ANDROID_API__ >= 30 */
+#if __ANDROID_API__ >= 31
+extern const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_XMP_OFFSET __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_XMP_SIZE __INTRODUCED_IN(31);
+#endif /* __ANDROID_API__ >= 31 */
+
__END_DECLS
#endif // _NDK_MEDIA_FORMAT_H
diff --git a/media/ndk/include_platform/media/NdkMediaCodecPlatform.h b/media/ndk/include_platform/media/NdkMediaCodecPlatform.h
new file mode 100644
index 0000000..608346d
--- /dev/null
+++ b/media/ndk/include_platform/media/NdkMediaCodecPlatform.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_PLATFORM_H
+#define _NDK_MEDIA_CODEC_PLATFORM_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+
+#include <media/NdkMediaCodec.h>
+
+__BEGIN_DECLS
+
+/**
+ * Special uid and pid values used with AMediaCodec_createCodecByNameForClient,
+ * AMediaCodec_createDecoderByTypeForClient and AMediaCodec_createEncoderByTypeForClient.
+ *
+ * Introduced in API 31.
+ */
+enum {
+ /**
+ * Uid value to indicate using calling uid.
+ */
+ AMEDIACODEC_CALLING_UID = -1,
+ /**
+ * Pid value to indicate using calling pid.
+ */
+ AMEDIACODEC_CALLING_PID = -1,
+};
+
+#if __ANDROID_API__ >= 31
+
+/**
+ * Create codec by name on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createCodecByName(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createCodecByName().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createCodecByNameForClient(const char *name,
+ pid_t pid,
+ uid_t uid) __INTRODUCED_IN(31);
+
+/**
+ * Create codec by mime type on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createDecoderByType(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createDecoderByType().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createDecoderByTypeForClient(const char *mime_type,
+ pid_t pid,
+ uid_t uid) __INTRODUCED_IN(31);
+
+/**
+ * Create encoder by MIME type on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createEncoderByType(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createEncoderByType().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createEncoderByTypeForClient(const char *mime_type,
+ pid_t pid,
+ uid_t uid) __INTRODUCED_IN(31);
+
+#endif // __ANDROID_API__ >= 31
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_PLATFORM_H
+
+/** @} */
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 29f1da8..237b66e 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -131,12 +131,14 @@
AMEDIAFORMAT_KEY_SAR_WIDTH; # var introduced=29
AMEDIAFORMAT_KEY_SEI; # var introduced=28
AMEDIAFORMAT_KEY_SLICE_HEIGHT; # var introduced=28
+ AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS; # var introduced=31
AMEDIAFORMAT_KEY_STRIDE; # var introduced=21
AMEDIAFORMAT_KEY_TARGET_TIME; # var introduced=29
AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT; # var introduced=29
AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID; # var introduced=28
AMEDIAFORMAT_KEY_TEMPORAL_LAYERING; # var introduced=28
AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA; # var introduced=29
+ AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C; # var introduced=31
AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC; # var introduced=29
AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT; # var introduced=29
AMEDIAFORMAT_KEY_THUMBNAIL_TIME; # var introduced=29
@@ -149,6 +151,8 @@
AMEDIAFORMAT_KEY_TRACK_ID; # var introduced=28
AMEDIAFORMAT_KEY_VALID_SAMPLES; # var introduced=29
AMEDIAFORMAT_KEY_WIDTH; # var introduced=21
+ AMEDIAFORMAT_KEY_XMP_OFFSET; # var introduced=31
+ AMEDIAFORMAT_KEY_XMP_SIZE; # var introduced=31
AMEDIAFORMAT_KEY_YEAR; # var introduced=29
AMediaCodecActionCode_isRecoverable; # introduced=28
AMediaCodecActionCode_isTransient; # introduced=28
@@ -163,8 +167,11 @@
AMediaCodecCryptoInfo_setPattern; # introduced=24
AMediaCodec_configure;
AMediaCodec_createCodecByName;
+ AMediaCodec_createCodecByNameForClient; # apex #introduced = 31
AMediaCodec_createDecoderByType;
+ AMediaCodec_createDecoderByTypeForClient; # apex #introduced = 31
AMediaCodec_createEncoderByType;
+ AMediaCodec_createEncoderByTypeForClient; # apex #introduced = 31
AMediaCodec_delete;
AMediaCodec_dequeueInputBuffer;
AMediaCodec_dequeueOutputBuffer;
diff --git a/media/tests/benchmark/README.md b/media/tests/benchmark/README.md
index 05fbe6f..047c289 100644
--- a/media/tests/benchmark/README.md
+++ b/media/tests/benchmark/README.md
@@ -1,7 +1,7 @@
# Benchmark tests
Benchmark app analyses the time taken by MediaCodec, MediaExtractor and MediaMuxer for given set of inputs. It is used to benchmark these modules on android devices.
-Benchmark results are emitted to logcat.
+Benchmark results are published as a CSV report.
This page describes steps to run the NDK and SDK layer test.
@@ -10,35 +10,49 @@
mmm frameworks/av/media/tests/benchmark/
```
-# NDK
-
-To run the test suite for measuring performance of the native layer, follow the following steps:
-
-The binaries will be created in the following path : $OUT/data/nativetest64/
-
-adb push $OUT/data/nativetest64/* /data/local/tmp/
-
-Eg. adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
-
-To run the binary, follow the commands mentioned below under each module.
-
-The resource file for the tests is taken from [here](https://drive.google.com/open?id=1ghMr17BBJ7n0pqbm7oREiTN_MNemJUqy)
+# Resources
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/tests/benchmark/MediaBenchmark.zip)
Download the MediaBenchmark.zip file, unzip and push it to /data/local/tmp/ on the device.
```
unzip MediaBenchmark.zip
-adb push MediaBenchmark /data/local/tmp
+adb push MediaBenchmark /data/local/tmp/MediaBenchmark/res/
```
+The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
+
+# NDK CLI Tests
+Note: [Benchmark Application](#BenchmarkApplication) now supports profiling both SDK and NDK APIs and that is the preferred way to benchmark codecs
+
+To run the test suite for measuring performance of the native layer, follow the following steps:
+
+The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
+
+To test 64-bit binary push binaries from nativetest64.
+
+adb push $OUT/data/nativetest64/* /data/local/tmp/. For example
+
+```
+adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
+```
+
+The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
+To test 32-bit binary push binaries from nativetest.
+
+adb push $OUT/data/nativetest/* /data/local/tmp/. For example
+
+```
+adb push $OUT/data/nativetest/extractorTest/extractorTest /data/local/tmp/
+```
+
+To run the binary, follow the commands mentioned below under each module.
+
## Extractor
The test extracts elementary stream and benchmarks the extractors available in NDK.
-The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
-
-The path to these files on the device is required to be given for the test.
-
```
adb shell /data/local/tmp/extractorTest -P /data/local/tmp/MediaBenchmark/res/
```
@@ -47,8 +61,6 @@
The test decodes input stream and benchmarks the decoders available in NDK.
-Setup steps are same as extractor.
-
```
adb shell /data/local/tmp/decoderTest -P /data/local/tmp/MediaBenchmark/res/
```
@@ -57,8 +69,6 @@
The test muxes elementary stream and benchmarks the muxers available in NDK.
-Setup steps are same as extractor.
-
```
adb shell /data/local/tmp/muxerTest -P /data/local/tmp/MediaBenchmark/res/
```
@@ -67,55 +77,82 @@
The test encodes input stream and benchmarks the encoders available in NDK.
-Setup steps are same as extractor.
-
```
adb shell /data/local/tmp/encoderTest -P /data/local/tmp/MediaBenchmark/res/
```
-# SDK
+# <a name="BenchmarkApplication"></a> Benchmark Application
+To run the test suite for measuring performance of the SDK and NDK APIs, follow the following steps:
+Benchmark Application can be run in two ways.
-To run the test suite for measuring performance of the SDK APIs, follow the following steps:
+## Steps to run with atest
+Note that atest command will install Benchmark application and push the required test files to the device as well.
+
+For running all the tests, run the following command
+```
+atest com.android.media.benchmark.tests -- --enable-module-dynamic-download=true
+```
+
+For running the tests individually, run the following atest commands:
+
+```
+atest com.android.media.benchmark.tests.ExtractorTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.DecoderTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.MuxerTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.EncoderTest -- --enable-module-dynamic-download=true
+```
+
+## Steps to run without atest
The apk will be created at the following path:
-$OUT/testcases/MediaBenchmarkTest/arm64/
-To get the resorce files for the test follow instructions given in [NDK](#NDK)
+The 64-bit apk will be created in the following path :
+$OUT/testcases/MediaBenchmarkTest/arm64/
For installing the apk, run the command:
```
adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm64/MediaBenchmarkTest.apk
```
-For running all the tests, run the command:
+The 32-bit apk will be created in the following path :
+$OUT/testcases/MediaBenchmarkTest/arm/
+
+For installing the apk, run the command:
+```
+adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm/MediaBenchmarkTest.apk
+```
+
+To get the resource files for the test follow instructions given in [Resources](#Resources)
+
+For running all the tests, run the following command
```
adb shell am instrument -w -r -e package com.android.media.benchmark.tests com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
```
## Extractor
-The test extracts elementary stream and benchmarks the extractors available in SDK.
+The test extracts elementary stream and benchmarks the extractors available in SDK and NDK.
```
adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.ExtractorTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
```
## Decoder
-The test decodes input stream and benchmarks the decoders available in SDK.
+The test decodes input stream and benchmarks the decoders available in SDK and NDK.
```
adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.DecoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
```
## Muxer
-The test muxes elementary stream and benchmarks different writers available in SDK.
+The test muxes elementary stream and benchmarks different writers available in SDK and NDK.
```
adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.MuxerTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
```
## Encoder
-The test encodes input stream and benchmarks the encoders available in SDK.
+The test encodes input stream and benchmarks the encoders available in SDK and NDK.
```
adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.EncoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
```
@@ -124,24 +161,27 @@
To run the test suite for measuring performance of the codec2 layer, follow the following steps:
The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
To test 64-bit binary push binaries from nativetest64.
adb push $(OUT)/data/nativetest64/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
To test 32-bit binary push binaries from nativetest.
adb push $(OUT)/data/nativetest/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
-To get the resource files for the test follow instructions given in [NDK](#NDK)
+To get the resource files for the test follow instructions given in [Resources](#Resources)
## C2 Decoder
The test decodes input stream and benchmarks the codec2 decoders available in device.
-Setup steps are same as [extractor](#extractor).
-
```
adb shell /data/local/tmp/C2DecoderTest -P /data/local/tmp/MediaBenchmark/res/
```
@@ -149,8 +189,95 @@
The test encodes input stream and benchmarks the codec2 encoders available in device.
-Setup steps are same as [extractor](#extractor).
-
```
adb shell /data/local/tmp/C2EncoderTest -P /data/local/tmp/MediaBenchmark/res/
```
+
+# Analysis
+
+The benchmark results are stored in a CSV file which can be used for analysis. These results are stored in following format:
+<app directory>/<module_name>.<timestamp>.csv
+
+Note: This timestamp is in nanoseconds and will change based on current system time.
+
+To find the location of the CSV file, look for the path in logs. Example log below -
+
+```
+com.android.media.benchmark D/DecoderTest: Saving Benchmark results in: /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv
+```
+
+This file can be pulled from the device using "adb pull" command.
+```
+adb pull /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv ./Decoder.1587732395387.csv
+```
+
+## CSV Columns
+
+Following columns are available in CSV.
+
+Note: All time values are in nanoseconds
+
+1. **currentTime** : The time recorded at the creation of the stats. This may be used to estimate time between consecutive test clips.
+
+2. **fileName**: The file being used as an input for the benchmark test.
+
+3. **operation**: The current operation on the input test vector i.e. Extract/Mux/Encode/Decode.
+
+4. **NDK/SDK**: The target APIs i.e. AMedia vs Media calls for the operation being performed.
+
+5. **sync/async**: This is specific to MediaCodec objects (i.e. Encoder and Decoder). It specifies the mode in which MediaCodec APIs are working. For async mode, callbacks are set. For sync mode, we have to poll the dequeueBuffer APIs to queue and dequeue input output buffers respectively.
+
+6. **setupTime**: The time taken to set up the MediaExtractor/Muxer/Codec instance.
+
+ * MediaCodec: includes setting async/sync mode, configuring with a format and codec.start
+
+ * MediaExtractor: includes AMediaExtractor_new and setDataSource.
+
+ * MediaMuxer: includes creating the object, adding track, and starting the muxer.
+
+7. **destroyTime**: The time taken to stop and close MediaExtractor/Muxer/Codec instance.
+
+8. **minimumTime**: The minimum time taken to extract/mux/encode/decode a frame.
+
+9. **maximumTime**: The maximum time taken to extract/mux/encode/decode a frame.
+
+10. **averageTime**: Average time taken to extract/mux/encode/decode per frame.
+
+ * MediaCodec: computed as the total time taken to encode/decode all frames divided by the number of frames encoded/decoded.
+
+ * MediaExtractor: computed as the total time taken to extract all frames divided by the number of frames extracted.
+
+ * MediaMuxer: computed as the total time taken to mux all frames divided by the number of frames muxed.
+
+11. **timeToProcess1SecContent**: The time required to process one second's worth of input data.
+
+12. **totalBytesProcessedPerSec**: The number of bytes extracted/muxed/decoded/encoded per second.
+
+13. **timeToFirstFrame**: The time taken to receive the first output frame.
+
+14. **totalSizeInBytes**: The total output size of the operation (in bytes).
+
+15. **totalTime**: The time taken to perform the complete operation (i.e. Extract/Mux/Decode/Encode) for respective test vector.
+
+
+## Muxer
+1. **componentName**: The format of the output Media file. Following muxers are currently supported:
+ * Ogg, Webm, 3gpp, and mp4.
+
+## Decoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+ * Video: H263, H264, H265, VPx, Mpeg4, Mpeg2, AV1
+ * Audio: AAC, Flac, Opus, MP3, Vorbis, GSM, AMR-NB/WB
+
+## Encoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+ * Video: H263, H264, H265, VPx, Mpeg4
+ * Audio: AAC, Flac, Opus, AMR-NB/WB
+
+## Common Failures
+On some devices, if a codec isn't supported some tests may report a failure like "codec not found for"
+
+For example: On mobile devices without support for mpeg2 decoder, following failure is observed:
+```
+Unable to create codec by mime: video/mpeg2
+```
diff --git a/media/utils/EventLogTags.logtags b/media/utils/EventLogTags.logtags
index 67f0ea8..c397f34 100644
--- a/media/utils/EventLogTags.logtags
+++ b/media/utils/EventLogTags.logtags
@@ -31,7 +31,7 @@
# 6: Percent
# Default value for data of type int/long is 2 (bytes).
#
-# See system/core/logcat/event.logtags for the master copy of the tags.
+# See system/core/logcat/event.logtags for the original definition of the tags.
# 61000 - 61199 reserved for audioserver
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..19225d3 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -27,6 +27,9 @@
namespace android {
+static constexpr int32_t INVALID_ADJ = -10000;
+static constexpr int32_t NATIVE_ADJ = -1000;
+
ProcessInfo::ProcessInfo() {}
bool ProcessInfo::getPriority(int pid, int* priority) {
@@ -35,8 +38,6 @@
size_t length = 1;
int32_t state;
- static const int32_t INVALID_ADJ = -10000;
- static const int32_t NATIVE_ADJ = -1000;
int32_t score = INVALID_ADJ;
status_t err = service->getProcessStatesAndOomScoresFromPids(length, &pid, &state, &score);
if (err != OK) {
@@ -45,8 +46,17 @@
}
ALOGV("pid %d state %d score %d", pid, state, score);
if (score <= NATIVE_ADJ) {
- ALOGE("pid %d invalid OOM adjustments value %d", pid, score);
- return false;
+ std::scoped_lock lock{mOverrideLock};
+
+ // If this process is not tracked by ActivityManagerService, look for overrides.
+ auto it = mOverrideMap.find(pid);
+ if (it != mOverrideMap.end()) {
+ ALOGI("pid %d invalid OOM score %d, override to %d", pid, score, it->second.oomScore);
+ score = it->second.oomScore;
+ } else {
+ ALOGE("pid %d invalid OOM score %d", pid, score);
+ return false;
+ }
}
// Use OOM adjustments value as the priority. Lower the value, higher the priority.
@@ -61,6 +71,26 @@
return (callingPid == getpid()) || (callingPid == pid) || (callingUid == AID_MEDIA);
}
+bool ProcessInfo::overrideProcessInfo(int pid, int procState, int oomScore) {
+ std::scoped_lock lock{mOverrideLock};
+
+ mOverrideMap.erase(pid);
+
+ // Disable the override if oomScore is set to NATIVE_ADJ or below.
+ if (oomScore <= NATIVE_ADJ) {
+ return false;
+ }
+
+ mOverrideMap.emplace(pid, ProcessInfoOverride{procState, oomScore});
+ return true;
+}
+
+void ProcessInfo::removeProcessInfoOverride(int pid) {
+ std::scoped_lock lock{mOverrideLock};
+
+ mOverrideMap.erase(pid);
+}
+
ProcessInfo::~ProcessInfo() {}
} // namespace android
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 87ea084..7d7433a 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -22,6 +22,7 @@
#include <binder/IServiceManager.h>
#include <binder/PermissionCache.h>
#include "mediautils/ServiceUtilities.h"
+#include <system/audio-hal-enums.h>
#include <iterator>
#include <algorithm>
@@ -61,8 +62,20 @@
return packages[0];
}
+static int32_t getOpForSource(audio_source_t source) {
+ switch (source) {
+ case AUDIO_SOURCE_HOTWORD:
+ return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
+ case AUDIO_SOURCE_DEFAULT:
+ default:
+ return AppOpsManager::OP_RECORD_AUDIO;
+ }
+}
+
static bool checkRecordingInternal(const String16& opPackageName, pid_t pid,
- uid_t uid, bool start) {
+ uid_t uid, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
// device is rooted security model is considered compromised.
// system_server loses its RECORD_AUDIO permission when a secondary
@@ -87,16 +100,21 @@
}
AppOpsManager appOps;
- const int32_t op = appOps.permissionToOpCode(sAndroidPermissionRecordAudio);
+ const int32_t op = getOpForSource(source);
if (start) {
- if (appOps.startOpNoThrow(op, uid, resolvedOpPackageName, /*startIfModeDefault*/ false)
- != AppOpsManager::MODE_ALLOWED) {
- ALOGE("Request denied by app op: %d", op);
+ if (int32_t mode = appOps.startOpNoThrow(
+ op, uid, resolvedOpPackageName, /*startIfModeDefault*/ false);
+ mode != AppOpsManager::MODE_ALLOWED) {
+ ALOGE("Request start for \"%s\" (uid %d) denied by app op: %d, mode: %d",
+ String8(resolvedOpPackageName).c_str(), uid, op, mode);
return false;
}
} else {
- if (appOps.checkOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
- ALOGE("Request denied by app op: %d", op);
+ // Always use OP_RECORD_AUDIO for checks at creation time.
+ if (int32_t mode = appOps.checkOp(op, uid, resolvedOpPackageName);
+ mode != AppOpsManager::MODE_ALLOWED) {
+ ALOGE("Request check for \"%s\" (uid %d) denied by app op: %d, mode: %d",
+ String8(resolvedOpPackageName).c_str(), uid, op, mode);
return false;
}
}
@@ -105,14 +123,14 @@
}
bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
- return checkRecordingInternal(opPackageName, pid, uid, /*start*/ false);
+ return checkRecordingInternal(opPackageName, pid, uid, /*start*/ false, AUDIO_SOURCE_DEFAULT);
}
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid) {
- return checkRecordingInternal(opPackageName, pid, uid, /*start*/ true);
+bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid, audio_source_t source) {
+ return checkRecordingInternal(opPackageName, pid, uid, /*start*/ true, source);
}
-void finishRecording(const String16& opPackageName, uid_t uid) {
+void finishRecording(const String16& opPackageName, uid_t uid, audio_source_t source) {
// Okay to not track in app ops as audio server is us and if
// device is rooted security model is considered compromised.
if (isAudioServerOrRootUid(uid)) return;
@@ -125,7 +143,8 @@
}
AppOpsManager appOps;
- const int32_t op = appOps.permissionToOpCode(sAndroidPermissionRecordAudio);
+
+ const int32_t op = getOpForSource(source);
appOps.finishOp(op, uid, resolvedOpPackageName);
}
@@ -145,6 +164,14 @@
return ok;
}
+bool captureTunerAudioInputAllowed(pid_t pid, uid_t uid) {
+ if (isAudioServerOrRootUid(uid)) return true;
+ static const String16 sCaptureTunerAudioInput("android.permission.CAPTURE_TUNER_AUDIO_INPUT");
+ bool ok = PermissionCache::checkPermission(sCaptureTunerAudioInput, pid, uid);
+ if (!ok) ALOGV("Request requires android.permission.CAPTURE_TUNER_AUDIO_INPUT");
+ return ok;
+}
+
bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid) {
if (isAudioServerOrRootUid(uid)) return true;
static const String16 sCaptureVoiceCommOutput(
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
new file mode 100644
index 0000000..ca1123c
--- /dev/null
+++ b/media/utils/fuzzers/Android.bp
@@ -0,0 +1,51 @@
+cc_defaults {
+ name: "libmediautils_fuzzer_defaults",
+ shared_libs: [
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ "-Wno-c++2a-extensions",
+ ],
+
+ header_libs: [
+ "bionic_libc_platform_headers",
+ "libmedia_headers",
+ ],
+
+ include_dirs: [
+ // For DEBUGGER_SIGNAL
+ "system/core/debuggerd/include",
+ ],
+}
+
+cc_fuzz {
+ name: "libmediautils_fuzzer_battery_notifier",
+ defaults: ["libmediautils_fuzzer_defaults"],
+ srcs: ["BatteryNotifierFuzz.cpp"],
+}
+
+cc_fuzz {
+ name: "libmediautils_fuzzer_scheduling_policy_service",
+ defaults: ["libmediautils_fuzzer_defaults"],
+ srcs: ["SchedulingPolicyServiceFuzz.cpp"],
+}
+
+cc_fuzz {
+ name: "libmediautils_fuzzer_service_utilities",
+ defaults: ["libmediautils_fuzzer_defaults"],
+ srcs: ["ServiceUtilitiesFuzz.cpp"],
+}
+
+cc_fuzz {
+ name: "libmediautils_fuzzer_time_check",
+ defaults: ["libmediautils_fuzzer_defaults"],
+ srcs: ["TimeCheckFuzz.cpp"],
+}
diff --git a/media/utils/fuzzers/BatteryNotifierFuzz.cpp b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
new file mode 100644
index 0000000..00b3cce
--- /dev/null
+++ b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include <utils/String8.h>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/BatteryNotifier.h"
+
+static constexpr int kMaxOperations = 30;
+static constexpr int kMaxStringLength = 500;
+using android::BatteryNotifier;
+
+std::vector<std::function<void(std::string /*flashlight_name*/, std::string /*camera_name*/,
+ uid_t /*video_id*/, uid_t /*audio_id*/, uid_t /*light_id*/,
+ uid_t /*camera_id*/)>>
+ operations = {
+ [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteResetVideo();
+ },
+ [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteResetAudio();
+ },
+ [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteResetFlashlight();
+ },
+ [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteResetCamera();
+ },
+ [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteStartVideo(video_id);
+ },
+ [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteStopVideo(video_id);
+ },
+ [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteStartAudio(audio_id);
+ },
+ [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+ BatteryNotifier::getInstance().noteStopAudio(audio_id);
+ },
+ [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+ android::String8 name(flashlight_name.c_str());
+ BatteryNotifier::getInstance().noteFlashlightOn(name, light_id);
+ },
+ [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+ android::String8 name(flashlight_name.c_str());
+ BatteryNotifier::getInstance().noteFlashlightOff(name, light_id);
+ },
+ [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+ android::String8 name(camera_name.c_str());
+ BatteryNotifier::getInstance().noteStartCamera(name, camera_id);
+ },
+ [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+ android::String8 name(camera_name.c_str());
+ BatteryNotifier::getInstance().noteStopCamera(name, camera_id);
+ },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider data_provider(data, size);
+ std::string camera_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+ std::string flashlight_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+ uid_t video_id = data_provider.ConsumeIntegral<uid_t>();
+ uid_t audio_id = data_provider.ConsumeIntegral<uid_t>();
+ uid_t light_id = data_provider.ConsumeIntegral<uid_t>();
+ uid_t camera_id = data_provider.ConsumeIntegral<uid_t>();
+ size_t ops_run = 0;
+ while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+ uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+ operations[op](flashlight_name, camera_name, video_id, audio_id, light_id, camera_id);
+ }
+ return 0;
+}
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
new file mode 100644
index 0000000..4521853
--- /dev/null
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "BatteryNotifierFuzzer"
+#include <binder/IBatteryStats.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <android/log.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include "fuzzer/FuzzedDataProvider.h"
+using android::IBatteryStats;
+using android::IBinder;
+using android::IInterface;
+using android::IServiceManager;
+using android::sp;
+using android::String16;
+using android::defaultServiceManager;
+using android::requestCpusetBoost;
+using android::requestPriority;
+sp<IBatteryStats> getBatteryService() {
+ sp<IBatteryStats> batteryStatService;
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ const String16 name("batterystats");
+ batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
+ if (batteryStatService == nullptr) {
+ ALOGW("batterystats service unavailable!");
+ return nullptr;
+ }
+ }
+ return batteryStatService;
+}
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider data_provider(data, size);
+ sp<IBatteryStats> batteryStatService = getBatteryService();
+ // There is some state here, but it's mostly focused around thread-safety, so
+ // we won't worry about order.
+ int32_t priority = data_provider.ConsumeIntegral<int32_t>();
+ bool is_for_app = data_provider.ConsumeBool();
+ bool async = data_provider.ConsumeBool();
+ requestPriority(getpid(), gettid(), priority, is_for_app, async);
+ // TODO: Verify and re-enable in AOSP (R).
+ // bool enable = data_provider.ConsumeBool();
+ // We are just using batterystats to avoid the need
+ // to register a new service.
+ // requestCpusetBoost(enable, IInterface::asBinder(batteryStatService));
+ return 0;
+}
+
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
new file mode 100644
index 0000000..f4c815c
--- /dev/null
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+
+#include <functional>
+#include <type_traits>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/ServiceUtilities.h"
+
+static constexpr int kMaxOperations = 50;
+static constexpr int kMaxStringLen = 256;
+
+const std::vector<std::function<void(FuzzedDataProvider*, android::MediaPackageManager)>>
+ operations = {
+ [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+ uid_t uid = data_provider->ConsumeIntegral<uid_t>();
+ pm.allowPlaybackCapture(uid);
+ },
+ [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+ int spaces = data_provider->ConsumeIntegral<int>();
+
+ // Dump everything into /dev/null
+ int fd = open("/dev/null", O_WRONLY);
+ pm.dump(fd, spaces);
+ close(fd);
+ },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider data_provider(data, size);
+ uid_t uid = data_provider.ConsumeIntegral<uid_t>();
+ pid_t pid = data_provider.ConsumeIntegral<pid_t>();
+ audio_source_t source = static_cast<audio_source_t>(data_provider
+ .ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
+
+ // There is no state here, and order is not significant,
+ // so we can simply call all of the target functions
+ android::isServiceUid(uid);
+ android::isAudioServerUid(uid);
+ android::isAudioServerOrSystemServerUid(uid);
+ android::isAudioServerOrMediaServerUid(uid);
+ std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+ android::String16 opPackageName(packageNameStr.c_str());
+ android::recordingAllowed(opPackageName, pid, uid);
+ android::startRecording(opPackageName, pid, uid, source);
+ android::finishRecording(opPackageName, uid, source);
+ android::captureAudioOutputAllowed(pid, uid);
+ android::captureMediaOutputAllowed(pid, uid);
+ android::captureHotwordAllowed(opPackageName, pid, uid);
+ android::modifyPhoneStateAllowed(uid, pid);
+ android::bypassInterruptionPolicyAllowed(uid, pid);
+ android::settingsAllowed();
+ android::modifyAudioRoutingAllowed();
+ android::modifyDefaultAudioEffectsAllowed();
+ android::dumpAllowed();
+
+ // MediaPackageManager does have state, so we need the fuzzer to decide order
+ android::MediaPackageManager packageManager;
+ size_t ops_run = 0;
+ while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+ uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+ operations[op](&data_provider, packageManager);
+ }
+
+ return 0;
+}
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
new file mode 100644
index 0000000..eeb6ba6
--- /dev/null
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <chrono>
+#include <thread>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/TimeCheck.h"
+
+static constexpr int kMaxStringLen = 256;
+
+// While it might be interesting to test long-running
+// jobs, it seems unlikely it'd lead to the types of crashes
+// we're looking for, and would mean a significant increase in fuzzer time.
+// Therefore, we are setting a low cap.
+static constexpr uint32_t kMaxTimeoutMs = 1000;
+static constexpr uint32_t kMinTimeoutMs = 200;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider data_provider(data, size);
+
+ // There are essentially 5 operations that we can access in this class
+ // 1. The time it takes to run this operation. As mentioned above,
+ // long-running tasks are not good for fuzzing, but there will be
+ // some change in the run time.
+ uint32_t timeoutMs =
+ data_provider.ConsumeIntegralInRange<uint32_t>(kMinTimeoutMs, kMaxTimeoutMs);
+ uint8_t pid_size = data_provider.ConsumeIntegral<uint8_t>();
+ std::vector<pid_t> pids(pid_size);
+ for (auto& pid : pids) {
+ pid = data_provider.ConsumeIntegral<pid_t>();
+ }
+
+ // 2. We also have setAudioHalPids, which is populated with the pids set
+ // above.
+ android::TimeCheck::setAudioHalPids(pids);
+ std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+
+ // 3. The constructor, which is fuzzed here:
+ android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+ // We will leave some buffer to avoid sleeping too long
+ uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
+
+ // We want to make sure we can cover the time out functionality.
+ if (sleep_amount_ms) {
+ auto ms = std::chrono::milliseconds(sleep_amount_ms);
+ std::this_thread::sleep_for(ms);
+ }
+
+ // 4. Finally, the destructor on TimeCheck. These seem to be the only factors
+ // in play.
+ return 0;
+}
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 212599a..276b471 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -24,6 +24,7 @@
#include <binder/PermissionController.h>
#include <cutils/multiuser.h>
#include <private/android_filesystem_config.h>
+#include <system/audio-hal-enums.h>
#include <map>
#include <optional>
@@ -79,10 +80,11 @@
}
bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid);
-void finishRecording(const String16& opPackageName, uid_t uid);
+bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid, audio_source_t source);
+void finishRecording(const String16& opPackageName, uid_t uid, audio_source_t source);
bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
bool captureMediaOutputAllowed(pid_t pid, uid_t uid);
+bool captureTunerAudioInputAllowed(pid_t pid, uid_t uid);
bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid);
bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
bool settingsAllowed();
diff --git a/services/OWNERS b/services/OWNERS
index 66a4bcb..f0b5e2f 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -5,3 +5,5 @@
gkasten@google.com
hunga@google.com
marcone@google.com
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 3873600..a005250 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -35,6 +35,11 @@
],
shared_libs: [
+ "audioflinger-aidl-unstable-cpp",
+ "audioclient-types-aidl-unstable-cpp",
+ "av-types-aidl-unstable-cpp",
+ "effect-aidl-unstable-cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudiohal",
"libaudioprocessing",
@@ -54,6 +59,7 @@
"libmediautils",
"libmemunreachable",
"libmedia_helper",
+ "libshmemcompat",
"libvibrator",
],
@@ -63,6 +69,7 @@
],
header_libs: [
+ "libaudioclient_headers",
"libaudiohal_headers",
"libmedia_headers",
],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f014209..78ad467 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -31,6 +31,7 @@
#include <sys/resource.h>
#include <thread>
+
#include <android/os/IExternalVibratorService.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
@@ -41,8 +42,10 @@
#include <media/audiohal/DevicesFactoryHalInterface.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/AudioParameter.h>
+#include <media/IAudioPolicyService.h>
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
+#include <mediautils/TimeCheck.h>
#include <memunreachable/memunreachable.h>
#include <utils/String16.h>
#include <utils/threads.h>
@@ -61,12 +64,15 @@
#include <system/audio_effects/effect_visualizer.h>
#include <system/audio_effects/effect_ns.h>
#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
#include <audio_utils/primitives.h>
#include <powermanager/PowerManager.h>
#include <media/IMediaLogService.h>
+#include <media/AidlConversion.h>
+#include <media/AudioValidator.h>
#include <media/nbaio/Pipe.h>
#include <media/nbaio/PipeReader.h>
#include <mediautils/BatteryNotifier.h>
@@ -80,6 +86,15 @@
#include "TypedLogger.h"
+#define VALUE_OR_FATAL(result) \
+ ({ \
+ auto _tmp = (result); \
+ LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
+ "Failed result (%d)", \
+ _tmp.error()); \
+ std::move(_tmp.value()); \
+ })
+
// ----------------------------------------------------------------------------
// Note: the following macro is used for extremely verbose logging message. In
@@ -97,6 +112,8 @@
namespace android {
+using media::IEffectClient;
+
static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
static const char kHardwareLockedString[] = "Hardware lock is taken\n";
static const char kClientLockedString[] = "Client lock is taken\n";
@@ -168,9 +185,15 @@
// ----------------------------------------------------------------------------
+void AudioFlinger::instantiate() {
+ sp<IServiceManager> sm(defaultServiceManager());
+ sm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ new AudioFlingerServerAdapter(new AudioFlinger()), false,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+}
+
AudioFlinger::AudioFlinger()
- : BnAudioFlinger(),
- mMediaLogNotifier(new AudioFlinger::MediaLogNotifier()),
+ : mMediaLogNotifier(new AudioFlinger::MediaLogNotifier()),
mPrimaryHardwareDev(NULL),
mAudioHwDevs(NULL),
mHardwareStatus(AUDIO_HW_IDLE),
@@ -194,7 +217,12 @@
mNextUniqueIds[use] = AUDIO_UNIQUE_ID_USE_MAX;
}
+#if 1
+ // FIXME See bug 165702394 and bug 168511485
+ const bool doLog = false;
+#else
const bool doLog = property_get_bool("ro.test_harness", false);
+#endif
if (doLog) {
mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters",
MemoryHeapBase::READ_ONLY);
@@ -398,7 +426,7 @@
return ret;
}
}
- return AudioMixer::HAPTIC_SCALE_MUTE;
+ return static_cast<int>(os::HapticScale::MUTE);
}
/* static */
@@ -684,8 +712,8 @@
sp<NBLog::Writer> AudioFlinger::newWriter_l(size_t size, const char *name)
{
- // If there is no memory allocated for logs, return a dummy writer that does nothing.
- // Similarly if we can't contact the media.log service, also return a dummy writer.
+ // If there is no memory allocated for logs, return a no-op writer that does nothing.
+ // Similarly if we can't contact the media.log service, also return a no-op writer.
if (mLogMemoryDealer == 0 || sMediaLogService == 0) {
return new NBLog::Writer();
}
@@ -711,7 +739,7 @@
}
}
// Even after garbage-collecting all old writers, there is still not enough memory,
- // so return a dummy writer
+ // so return a no-op writer
return new NBLog::Writer();
}
success:
@@ -739,10 +767,13 @@
// IAudioFlinger interface
-sp<IAudioTrack> AudioFlinger::createTrack(const CreateTrackInput& input,
- CreateTrackOutput& output,
- status_t *status)
+status_t AudioFlinger::createTrack(const media::CreateTrackRequest& _input,
+ media::CreateTrackResponse& _output)
{
+ // Local version of VALUE_OR_RETURN, specific to this method's calling conventions.
+ CreateTrackInput input = VALUE_OR_RETURN_STATUS(CreateTrackInput::fromAidl(_input));
+ CreateTrackOutput output;
+
sp<PlaybackThread::Track> track;
sp<TrackHandle> trackHandle;
sp<Client> client;
@@ -852,7 +883,8 @@
input.notificationsPerBuffer, input.speed,
input.sharedBuffer, sessionId, &output.flags,
callingPid, input.clientInfo.clientTid, clientUid,
- &lStatus, portId, input.audioTrackCallback);
+ &lStatus, portId, input.audioTrackCallback,
+ input.opPackageName);
LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
// we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
@@ -998,15 +1030,14 @@
AudioSystem::moveEffectsToIo(effectIds, effectThreadId);
}
- // return handle to client
- trackHandle = new TrackHandle(track);
+ output.audioTrack = new TrackHandle(track);
+ _output = VALUE_OR_FATAL(output.toAidl());
Exit:
if (lStatus != NO_ERROR && output.outputId != AUDIO_IO_HANDLE_NONE) {
AudioSystem::releaseOutput(portId);
}
- *status = lStatus;
- return trackHandle;
+ return lStatus;
}
uint32_t AudioFlinger::sampleRate(audio_io_handle_t ioHandle) const
@@ -1283,9 +1314,9 @@
}
// Now set the master mute in each playback thread. Playback threads
- // assigned to HALs which do not have master mute support will apply master
- // mute during the mix operation. Threads with HALs which do support master
- // mute will simply ignore the setting.
+ // assigned to HALs which do not have master mute support will apply master mute
+ // during the mix operation. Threads with HALs which do support master mute
+ // will simply ignore the setting.
Vector<VolumeInterface *> volumeInterfaces = getAllVolumeInterfaces_l();
for (size_t i = 0; i < volumeInterfaces.size(); i++) {
volumeInterfaces[i]->setMasterMute(muted);
@@ -1430,7 +1461,7 @@
}
-void AudioFlinger::broacastParametersToRecordThreads_l(const String8& keyValuePairs)
+void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
{
for (size_t i = 0; i < mRecordThreads.size(); i++) {
mRecordThreads.valueAt(i)->setParameters(keyValuePairs);
@@ -1588,7 +1619,7 @@
int value;
if ((param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) &&
(value != 0)) {
- broacastParametersToRecordThreads_l(filteredKeyValuePairs);
+ broadcastParametersToRecordThreads_l(filteredKeyValuePairs);
}
}
}
@@ -1765,7 +1796,7 @@
return BAD_VALUE;
}
-void AudioFlinger::registerClient(const sp<IAudioFlingerClient>& client)
+void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
{
Mutex::Autolock _l(mLock);
if (client == 0) {
@@ -1840,13 +1871,18 @@
void AudioFlinger::ioConfigChanged(audio_io_config_event event,
const sp<AudioIoDescriptor>& ioDesc,
- pid_t pid)
-{
+ pid_t pid) {
+ media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
+ legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
+ media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
+ legacy2aidl_audio_io_config_event_AudioIoConfigEvent(event));
+
Mutex::Autolock _l(mClientLock);
size_t size = mNotificationClients.size();
for (size_t i = 0; i < size; i++) {
if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
- mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(event, ioDesc);
+ mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
+ descAidl);
}
}
}
@@ -1920,7 +1956,7 @@
// ----------------------------------------------------------------------------
AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
- const sp<IAudioFlingerClient>& client,
+ const sp<media::IAudioFlingerClient>& client,
pid_t pid,
uid_t uid)
: mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
@@ -1975,10 +2011,12 @@
// ----------------------------------------------------------------------------
-sp<media::IAudioRecord> AudioFlinger::createRecord(const CreateRecordInput& input,
- CreateRecordOutput& output,
- status_t *status)
+status_t AudioFlinger::createRecord(const media::CreateRecordRequest& _input,
+ media::CreateRecordResponse& _output)
{
+ CreateRecordInput input = VALUE_OR_RETURN_STATUS(CreateRecordInput::fromAidl(_input));
+ CreateRecordOutput output;
+
sp<RecordThread::RecordTrack> recordTrack;
sp<RecordHandle> recordHandle;
sp<Client> client;
@@ -2068,8 +2106,8 @@
Mutex::Autolock _l(mLock);
RecordThread *thread = checkRecordThread_l(output.inputId);
if (thread == NULL) {
- ALOGE("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
- lStatus = BAD_VALUE;
+ ALOGW("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
+ lStatus = FAILED_TRANSACTION;
goto Exit;
}
@@ -2116,8 +2154,8 @@
output.buffers = recordTrack->getBuffers();
output.portId = portId;
- // return handle to client
- recordHandle = new RecordHandle(recordTrack);
+ output.audioRecord = new RecordHandle(recordTrack);
+ _output = VALUE_OR_FATAL(output.toAidl());
Exit:
if (lStatus != NO_ERROR) {
@@ -2135,8 +2173,7 @@
}
}
- *status = lStatus;
- return recordHandle;
+ return lStatus;
}
@@ -2308,6 +2345,11 @@
{
ALOGV(__func__);
+ status_t status = AudioValidator::validateAudioPortConfig(*config);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
audio_module_handle_t module;
if (config->type == AUDIO_PORT_TYPE_DEVICE) {
module = config->ext.device.hw_module;
@@ -2541,20 +2583,28 @@
return 0;
}
-status_t AudioFlinger::openOutput(audio_module_handle_t module,
- audio_io_handle_t *output,
- audio_config_t *config,
- const sp<DeviceDescriptorBase>& device,
- uint32_t *latencyMs,
- audio_output_flags_t flags)
+status_t AudioFlinger::openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* response)
{
+ audio_module_handle_t module = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_module_handle_t(request.module));
+ audio_config_t config = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioConfig_audio_config_t(request.config));
+ sp<DeviceDescriptorBase> device = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_DeviceDescriptorBase(request.device));
+ audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+
+ audio_io_handle_t output;
+ uint32_t latencyMs;
+
ALOGI("openOutput() this %p, module %d Device %s, SamplingRate %d, Format %#08x, "
"Channels %#x, flags %#x",
this, module,
device->toString().c_str(),
- config->sample_rate,
- config->format,
- config->channel_mask,
+ config.sample_rate,
+ config.format,
+ config.channel_mask,
flags);
audio_devices_t deviceType = device->type();
@@ -2566,11 +2616,11 @@
Mutex::Autolock _l(mLock);
- sp<ThreadBase> thread = openOutput_l(module, output, config, deviceType, address, flags);
+ sp<ThreadBase> thread = openOutput_l(module, &output, &config, deviceType, address, flags);
if (thread != 0) {
if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
- *latencyMs = playbackThread->latency();
+ latencyMs = playbackThread->latency();
// notify client processes of the new output creation
playbackThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
@@ -2590,6 +2640,11 @@
MmapThread *mmapThread = (MmapThread *)thread.get();
mmapThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
}
+ response->output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+ response->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+ response->latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(latencyMs));
+ response->flags = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
return NO_ERROR;
}
@@ -2742,22 +2797,36 @@
return NO_ERROR;
}
-status_t AudioFlinger::openInput(audio_module_handle_t module,
- audio_io_handle_t *input,
- audio_config_t *config,
- audio_devices_t *devices,
- const String8& address,
- audio_source_t source,
- audio_input_flags_t flags)
+status_t AudioFlinger::openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* response)
{
Mutex::Autolock _l(mLock);
- if (*devices == AUDIO_DEVICE_NONE) {
+ if (request.device.type == AUDIO_DEVICE_NONE) {
return BAD_VALUE;
}
+ audio_io_handle_t input = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_io_handle_t(request.input));
+ audio_config_t config = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioConfig_audio_config_t(request.config));
+ AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioDeviceTypeAddress(request.device));
+
sp<ThreadBase> thread = openInput_l(
- module, input, config, *devices, address, source, flags, AUDIO_DEVICE_NONE, String8{});
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(request.module)),
+ &input,
+ &config,
+ device.mType,
+ device.address().c_str(),
+ VALUE_OR_RETURN_STATUS(aidl2legacy_AudioSourceType_audio_source_t(request.source)),
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_input_flags_t_mask(request.flags)),
+ AUDIO_DEVICE_NONE,
+ String8{});
+
+ response->input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
+ response->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+ response->device = request.device;
if (thread != 0) {
// notify client processes of the new input creation
@@ -2771,7 +2840,7 @@
audio_io_handle_t *input,
audio_config_t *config,
audio_devices_t devices,
- const String8& address,
+ const char* address,
audio_source_t source,
audio_input_flags_t flags,
audio_devices_t outputDevice,
@@ -2801,7 +2870,7 @@
sp<DeviceHalInterface> inHwHal = inHwDev->hwDevice();
sp<StreamInHalInterface> inStream;
status_t status = inHwHal->openInputStream(
- *input, devices, &halconfig, flags, address.string(), source,
+ *input, devices, &halconfig, flags, address, source,
outputDevice, outputDeviceAddress, &inStream);
ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
", Format %#x, Channels %#x, flags %#x, status %d addr %s",
@@ -2811,7 +2880,7 @@
halconfig.format,
halconfig.channel_mask,
flags,
- status, address.string());
+ status, address);
// If the input could not be opened with the requested parameters and we can handle the
// conversion internally, try to open again with the proposed parameters.
@@ -2825,7 +2894,7 @@
ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
inStream.clear();
status = inHwHal->openInputStream(
- *input, devices, &halconfig, flags, address.string(), source,
+ *input, devices, &halconfig, flags, address, source,
outputDevice, outputDeviceAddress, &inStream);
// FIXME log this new status; HAL should not propose any further changes
}
@@ -3141,7 +3210,8 @@
// dumpToThreadLog_l() must be called with AudioFlinger::mLock held
void AudioFlinger::dumpToThreadLog_l(const sp<ThreadBase> &thread)
{
- audio_utils::FdToString fdToString;
+ constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
+ audio_utils::FdToString fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
const int fd = fdToString.fd();
if (fd >= 0) {
thread->dump(fd, {} /* args */);
@@ -3297,6 +3367,16 @@
return minThread;
}
+AudioFlinger::ThreadBase *AudioFlinger::hapticPlaybackThread_l() const {
+ for (size_t i = 0; i < mPlaybackThreads.size(); ++i) {
+ PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+ if (thread->hapticChannelMask() != AUDIO_CHANNEL_NONE) {
+ return thread;
+ }
+ }
+ return nullptr;
+}
+
sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
audio_session_t triggerSession,
audio_session_t listenerSession,
@@ -3427,23 +3507,29 @@
return status;
}
-sp<IEffect> AudioFlinger::createEffect(
- effect_descriptor_t *pDesc,
- const sp<IEffectClient>& effectClient,
- int32_t priority,
- audio_io_handle_t io,
- audio_session_t sessionId,
- const AudioDeviceTypeAddr& device,
- const String16& opPackageName,
- pid_t pid,
- bool probe,
- status_t *status,
- int *id,
- int *enabled)
-{
- status_t lStatus = NO_ERROR;
+status_t AudioFlinger::createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* response) {
+ const sp<IEffectClient>& effectClient = request.client;
+ const int32_t priority = request.priority;
+ const AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioDeviceTypeAddress(request.device));
+ const String16 opPackageName = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_string_view_String16(request.opPackageName));
+ pid_t pid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(request.pid));
+ const audio_session_t sessionId = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_session_t(request.sessionId));
+ audio_io_handle_t io = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_int32_t_audio_io_handle_t(request.output));
+ const effect_descriptor_t descIn = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_EffectDescriptor_effect_descriptor_t(request.desc));
+ const bool probe = request.probe;
+
sp<EffectHandle> handle;
- effect_descriptor_t desc;
+ effect_descriptor_t descOut;
+ int enabledOut = 0;
+ int idOut = -1;
+
+ status_t lStatus = NO_ERROR;
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
if (pid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
@@ -3455,12 +3541,7 @@
}
ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
- pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
-
- if (pDesc == NULL) {
- lStatus = BAD_VALUE;
- goto Exit;
- }
+ pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
if (mEffectsFactoryHal == 0) {
ALOGE("%s: no effects factory hal", __func__);
@@ -3517,7 +3598,7 @@
// otherwise no preference.
uint32_t preferredType = (sessionId == AUDIO_SESSION_OUTPUT_MIX ?
EFFECT_FLAG_TYPE_AUXILIARY : EFFECT_FLAG_TYPE_MASK);
- lStatus = getEffectDescriptor(&pDesc->uuid, &pDesc->type, preferredType, &desc);
+ lStatus = getEffectDescriptor(&descIn.uuid, &descIn.type, preferredType, &descOut);
if (lStatus < 0) {
ALOGW("createEffect() error %d from getEffectDescriptor", lStatus);
goto Exit;
@@ -3525,26 +3606,34 @@
// Do not allow auxiliary effects on a session different from 0 (output mix)
if (sessionId != AUDIO_SESSION_OUTPUT_MIX &&
- (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
+ (descOut.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
lStatus = INVALID_OPERATION;
goto Exit;
}
// check recording permission for visualizer
- if ((memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) &&
+ if ((memcmp(&descOut.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) &&
// TODO: Do we need to start/stop op - i.e. is there recording being performed?
!recordingAllowed(opPackageName, pid, callingUid)) {
lStatus = PERMISSION_DENIED;
goto Exit;
}
- // return effect descriptor
- *pDesc = desc;
+ const bool hapticPlaybackRequired = EffectModule::isHapticGenerator(&descOut.type);
+ if (hapticPlaybackRequired
+ && (sessionId == AUDIO_SESSION_DEVICE
+ || sessionId == AUDIO_SESSION_OUTPUT_MIX
+ || sessionId == AUDIO_SESSION_OUTPUT_STAGE)) {
+ // haptic-generating effect is only valid when the session id is a general session id
+ lStatus = INVALID_OPERATION;
+ goto Exit;
+ }
+
if (io == AUDIO_IO_HANDLE_NONE && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
// if the output returned by getOutputForEffect() is removed before we lock the
// mutex below, the call to checkPlaybackThread_l(io) below will detect it
// and we will exit safely
- io = AudioSystem::getOutputForEffect(&desc);
+ io = AudioSystem::getOutputForEffect(&descOut);
ALOGV("createEffect got output %d", io);
}
@@ -3554,15 +3643,15 @@
sp<Client> client = registerPid(pid);
ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
handle = mDeviceEffectManager.createEffect_l(
- &desc, device, client, effectClient, mPatchPanel.patches_l(),
- enabled, &lStatus, probe);
+ &descOut, device, client, effectClient, mPatchPanel.patches_l(),
+ &enabledOut, &lStatus, probe);
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
// remove local strong reference to Client with mClientLock held
Mutex::Autolock _cl(mClientLock);
client.clear();
} else {
// handle must be valid here, but check again to be safe.
- if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+ if (handle.get() != nullptr) idOut = handle->id();
}
goto Register;
}
@@ -3592,8 +3681,8 @@
// Detect if the effect is created after an AudioRecord is destroyed.
if (getOrphanEffectChain_l(sessionId).get() != nullptr) {
ALOGE("%s: effect %s with no specified io handle is denied because the AudioRecord"
- " for session %d no longer exists",
- __func__, desc.name, sessionId);
+ " for session %d no longer exists",
+ __func__, descOut.name, sessionId);
lStatus = PERMISSION_DENIED;
goto Exit;
}
@@ -3607,17 +3696,27 @@
if (io == AUDIO_IO_HANDLE_NONE && mPlaybackThreads.size() > 0) {
io = mPlaybackThreads.keyAt(0);
}
- ALOGV("createEffect() got io %d for effect %s", io, desc.name);
+ ALOGV("createEffect() got io %d for effect %s", io, descOut.name);
} else if (checkPlaybackThread_l(io) != nullptr) {
// allow only one effect chain per sessionId on mPlaybackThreads.
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
const audio_io_handle_t checkIo = mPlaybackThreads.keyAt(i);
- if (io == checkIo) continue;
+ if (io == checkIo) {
+ if (hapticPlaybackRequired
+ && mPlaybackThreads.valueAt(i)
+ ->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+ ALOGE("%s: haptic playback thread is required while the required playback "
+ "thread(io=%d) doesn't support", __func__, (int)io);
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ continue;
+ }
const uint32_t sessionType =
mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId);
if ((sessionType & ThreadBase::EFFECT_SESSION) != 0) {
ALOGE("%s: effect %s io %d denied because session %d effect exists on io %d",
- __func__, desc.name, (int)io, (int)sessionId, (int)checkIo);
+ __func__, descOut.name, (int) io, (int) sessionId, (int) checkIo);
android_errorWriteLog(0x534e4554, "123237974");
lStatus = BAD_VALUE;
goto Exit;
@@ -3649,15 +3748,34 @@
// create effect on selected output thread
bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+ ThreadBase *oriThread = nullptr;
+ if (hapticPlaybackRequired && thread->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+ ThreadBase *hapticThread = hapticPlaybackThread_l();
+ if (hapticThread == nullptr) {
+ ALOGE("%s haptic thread not found while it is required", __func__);
+ lStatus = INVALID_OPERATION;
+ goto Exit;
+ }
+ if (hapticThread != thread) {
+ // Force to use haptic thread for haptic-generating effect.
+ oriThread = thread;
+ thread = hapticThread;
+ }
+ }
handle = thread->createEffect_l(client, effectClient, priority, sessionId,
- &desc, enabled, &lStatus, pinned, probe);
+ &descOut, &enabledOut, &lStatus, pinned, probe);
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
// remove local strong reference to Client with mClientLock held
Mutex::Autolock _cl(mClientLock);
client.clear();
} else {
// handle must be valid here, but check again to be safe.
- if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+ if (handle.get() != nullptr) idOut = handle->id();
+ // Invalidate audio session when haptic playback is created.
+ if (hapticPlaybackRequired && oriThread != nullptr) {
+ // invalidateTracksForAudioSession will trigger locking the thread.
+ oriThread->invalidateTracksForAudioSession(sessionId);
+ }
}
}
@@ -3675,9 +3793,14 @@
handle.clear();
}
+ response->id = idOut;
+ response->enabled = enabledOut != 0;
+ response->effect = handle;
+ response->desc = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
+
Exit:
- *status = lStatus;
- return handle;
+ return lStatus;
}
status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
@@ -3926,10 +4049,109 @@
// ----------------------------------------------------------------------------
-status_t AudioFlinger::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+status_t AudioFlinger::onPreTransact(
+ TransactionCode code, const Parcel& /* data */, uint32_t /* flags */)
{
- return BnAudioFlinger::onTransact(code, data, reply, flags);
+ // make sure transactions reserved to AudioPolicyManager do not come from other processes
+ switch (code) {
+ case TransactionCode::SET_STREAM_VOLUME:
+ case TransactionCode::SET_STREAM_MUTE:
+ case TransactionCode::OPEN_OUTPUT:
+ case TransactionCode::OPEN_DUPLICATE_OUTPUT:
+ case TransactionCode::CLOSE_OUTPUT:
+ case TransactionCode::SUSPEND_OUTPUT:
+ case TransactionCode::RESTORE_OUTPUT:
+ case TransactionCode::OPEN_INPUT:
+ case TransactionCode::CLOSE_INPUT:
+ case TransactionCode::INVALIDATE_STREAM:
+ case TransactionCode::SET_VOICE_VOLUME:
+ case TransactionCode::MOVE_EFFECTS:
+ case TransactionCode::SET_EFFECT_SUSPENDED:
+ case TransactionCode::LOAD_HW_MODULE:
+ case TransactionCode::GET_AUDIO_PORT:
+ case TransactionCode::CREATE_AUDIO_PATCH:
+ case TransactionCode::RELEASE_AUDIO_PATCH:
+ case TransactionCode::LIST_AUDIO_PATCHES:
+ case TransactionCode::SET_AUDIO_PORT_CONFIG:
+ case TransactionCode::SET_RECORD_SILENCED:
+ ALOGW("%s: transaction %d received from PID %d",
+ __func__, code, IPCThreadState::self()->getCallingPid());
+ // return status only for non void methods
+ switch (code) {
+ case TransactionCode::SET_RECORD_SILENCED:
+ case TransactionCode::SET_EFFECT_SUSPENDED:
+ break;
+ default:
+ return INVALID_OPERATION;
+ }
+ return OK;
+ default:
+ break;
+ }
+
+ // make sure the following transactions come from system components
+ switch (code) {
+ case TransactionCode::SET_MASTER_VOLUME:
+ case TransactionCode::SET_MASTER_MUTE:
+ case TransactionCode::MASTER_MUTE:
+ case TransactionCode::SET_MODE:
+ case TransactionCode::SET_MIC_MUTE:
+ case TransactionCode::SET_LOW_RAM_DEVICE:
+ case TransactionCode::SYSTEM_READY:
+ case TransactionCode::SET_AUDIO_HAL_PIDS: {
+ if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
+ ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
+ __func__, code, IPCThreadState::self()->getCallingPid(),
+ IPCThreadState::self()->getCallingUid());
+ // return status only for non void methods
+ switch (code) {
+ case TransactionCode::SYSTEM_READY:
+ break;
+ default:
+ return INVALID_OPERATION;
+ }
+ return OK;
+ }
+ } break;
+ default:
+ break;
+ }
+
+ // List of relevant events that trigger log merging.
+ // Log merging should activate during audio activity of any kind. These are considered the
+ // most relevant events.
+ // TODO should select more wisely the items from the list
+ switch (code) {
+ case TransactionCode::CREATE_TRACK:
+ case TransactionCode::CREATE_RECORD:
+ case TransactionCode::SET_MASTER_VOLUME:
+ case TransactionCode::SET_MASTER_MUTE:
+ case TransactionCode::SET_MIC_MUTE:
+ case TransactionCode::SET_PARAMETERS:
+ case TransactionCode::CREATE_EFFECT:
+ case TransactionCode::SYSTEM_READY: {
+ requestLogMerge();
+ break;
+ }
+ default:
+ break;
+ }
+
+ std::string tag("IAudioFlinger command " +
+ std::to_string(static_cast<std::underlying_type_t<TransactionCode>>(code)));
+ TimeCheck check(tag.c_str());
+
+ // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
+ // - AudioFlinger can call into Audio Policy Service with its global mutex held
+ // - If this is the first time Audio Policy Service is queried from inside audioserver process
+ // this will trigger Audio Policy Manager initialization.
+ // - Audio Policy Manager initialization calls into AudioFlinger which will try to lock
+ // its global mutex and a deadlock will occur.
+ if (IPCThreadState::self()->getCallingPid() != getpid()) {
+ AudioSystem::get_audio_policy_service();
+ }
+
+ return OK;
}
} // namespace android
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index ef7b1ab..1cf1e67 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,17 +33,17 @@
#include <sys/types.h>
#include <limits.h>
+#include <android/media/BnAudioTrack.h>
+#include <android/media/IAudioFlingerClient.h>
#include <android/media/IAudioTrackCallback.h>
#include <android/os/BnExternalVibrationController.h>
-#include <android-base/macros.h>
+#include <android-base/macros.h>
#include <cutils/atomic.h>
#include <cutils/compiler.h>
-#include <cutils/properties.h>
+#include <cutils/properties.h>
#include <media/IAudioFlinger.h>
-#include <media/IAudioFlingerClient.h>
-#include <media/IAudioTrack.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <media/MmapStreamInterface.h>
@@ -98,8 +98,10 @@
#include <private/media/AudioTrackShared.h>
#include <vibrator/ExternalVibration.h>
+#include <vibrator/ExternalVibrationUtils.h>
#include "android/media/BnAudioRecord.h"
+#include "android/media/BnEffect.h"
namespace android {
@@ -121,25 +123,19 @@
#define INCLUDING_FROM_AUDIOFLINGER_H
-class AudioFlinger :
- public BinderService<AudioFlinger>,
- public BnAudioFlinger
+class AudioFlinger : public AudioFlingerServerAdapter::Delegate
{
- friend class BinderService<AudioFlinger>; // for AudioFlinger()
-
public:
- static const char* getServiceName() ANDROID_API { return "media.audio_flinger"; }
+ static void instantiate() ANDROID_API;
- virtual status_t dump(int fd, const Vector<String16>& args);
+ status_t dump(int fd, const Vector<String16>& args) override;
// IAudioFlinger interface, in binder opcode order
- virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
- CreateTrackOutput& output,
- status_t *status);
+ status_t createTrack(const media::CreateTrackRequest& input,
+ media::CreateTrackResponse& output) override;
- virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
- CreateRecordOutput& output,
- status_t *status);
+ status_t createRecord(const media::CreateRecordRequest& input,
+ media::CreateRecordResponse& output) override;
virtual uint32_t sampleRate(audio_io_handle_t ioHandle) const;
virtual audio_format_t format(audio_io_handle_t output) const;
@@ -175,17 +171,13 @@
virtual status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs);
virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const;
- virtual void registerClient(const sp<IAudioFlingerClient>& client);
+ virtual void registerClient(const sp<media::IAudioFlingerClient>& client);
virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask) const;
- virtual status_t openOutput(audio_module_handle_t module,
- audio_io_handle_t *output,
- audio_config_t *config,
- const sp<DeviceDescriptorBase>& device,
- uint32_t *latencyMs,
- audio_output_flags_t flags);
+ virtual status_t openOutput(const media::OpenOutputRequest& request,
+ media::OpenOutputResponse* response);
virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
audio_io_handle_t output2);
@@ -196,13 +188,8 @@
virtual status_t restoreOutput(audio_io_handle_t output);
- virtual status_t openInput(audio_module_handle_t module,
- audio_io_handle_t *input,
- audio_config_t *config,
- audio_devices_t *device,
- const String8& address,
- audio_source_t source,
- audio_input_flags_t flags);
+ virtual status_t openInput(const media::OpenInputRequest& request,
+ media::OpenInputResponse* response);
virtual status_t closeInput(audio_io_handle_t input);
@@ -231,19 +218,8 @@
uint32_t preferredTypeFlag,
effect_descriptor_t *descriptor) const;
- virtual sp<IEffect> createEffect(
- effect_descriptor_t *pDesc,
- const sp<IEffectClient>& effectClient,
- int32_t priority,
- audio_io_handle_t io,
- audio_session_t sessionId,
- const AudioDeviceTypeAddr& device,
- const String16& opPackageName,
- pid_t pid,
- bool probe,
- status_t *status /*non-NULL*/,
- int *id,
- int *enabled);
+ virtual status_t createEffect(const media::CreateEffectRequest& request,
+ media::CreateEffectResponse* response);
virtual status_t moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
audio_io_handle_t dstOutput);
@@ -264,7 +240,7 @@
struct audio_port *ports);
/* Get attributes for a given audio port */
- virtual status_t getAudioPort(struct audio_port *port);
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
/* Create an audio patch between several source and sink ports */
virtual status_t createAudioPatch(const struct audio_patch *patch,
@@ -290,11 +266,7 @@
virtual status_t setAudioHalPids(const std::vector<pid_t>& pids);
- virtual status_t onTransact(
- uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags);
+ status_t onPreTransact(TransactionCode code, const Parcel& data, uint32_t flags) override;
// end of IAudioFlinger interface
@@ -405,7 +377,7 @@
case AUDIO_CHANNEL_REPRESENTATION_POSITION: {
// Haptic channel mask is only applicable for channel position mask.
const uint32_t channelCount = audio_channel_count_from_out_mask(
- channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+ static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL));
const uint32_t maxChannelCount = kEnableExtendedChannels
? AudioMixer::MAX_NUM_CHANNELS : FCC_2;
if (channelCount < FCC_2 // mono is not supported at this time
@@ -488,12 +460,12 @@
class NotificationClient : public IBinder::DeathRecipient {
public:
NotificationClient(const sp<AudioFlinger>& audioFlinger,
- const sp<IAudioFlingerClient>& client,
+ const sp<media::IAudioFlingerClient>& client,
pid_t pid,
uid_t uid);
virtual ~NotificationClient();
- sp<IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
+ sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
pid_t getPid() const { return mPid; }
uid_t getUid() const { return mUid; }
@@ -506,7 +478,7 @@
const sp<AudioFlinger> mAudioFlinger;
const pid_t mPid;
const uid_t mUid;
- const sp<IAudioFlingerClient> mAudioFlingerClient;
+ const sp<media::IAudioFlingerClient> mAudioFlingerClient;
};
// --- MediaLogNotifier ---
@@ -539,6 +511,7 @@
const sp<MediaLogNotifier> mMediaLogNotifier;
// This is a helper that is called during incoming binder calls.
+ // Requests media.log to start merging log buffers
void requestLogMerge();
class TrackHandle;
@@ -624,27 +597,30 @@
}
// server side of the client's IAudioTrack
- class TrackHandle : public android::BnAudioTrack {
+ class TrackHandle : public android::media::BnAudioTrack {
public:
explicit TrackHandle(const sp<PlaybackThread::Track>& track);
virtual ~TrackHandle();
- virtual sp<IMemory> getCblk() const;
- virtual status_t start();
- virtual void stop();
- virtual void flush();
- virtual void pause();
- virtual status_t attachAuxEffect(int effectId);
- virtual status_t setParameters(const String8& keyValuePairs);
- virtual status_t selectPresentation(int presentationId, int programId);
- virtual media::VolumeShaper::Status applyVolumeShaper(
- const sp<media::VolumeShaper::Configuration>& configuration,
- const sp<media::VolumeShaper::Operation>& operation) override;
- virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
- virtual status_t getTimestamp(AudioTimestamp& timestamp);
- virtual void signal(); // signal playback thread for a change in control block
- virtual status_t onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+ binder::Status getCblk(std::optional<media::SharedFileRegion>* _aidl_return) override;
+ binder::Status start(int32_t* _aidl_return) override;
+ binder::Status stop() override;
+ binder::Status flush() override;
+ binder::Status pause() override;
+ binder::Status attachAuxEffect(int32_t effectId, int32_t* _aidl_return) override;
+ binder::Status setParameters(const std::string& keyValuePairs,
+ int32_t* _aidl_return) override;
+ binder::Status selectPresentation(int32_t presentationId, int32_t programId,
+ int32_t* _aidl_return) override;
+ binder::Status getTimestamp(media::AudioTimestampInternal* timestamp,
+ int32_t* _aidl_return) override;
+ binder::Status signal() override;
+ binder::Status applyVolumeShaper(const media::VolumeShaperConfiguration& configuration,
+ const media::VolumeShaperOperation& operation,
+ int32_t* _aidl_return) override;
+ binder::Status getVolumeShaperState(
+ int32_t id,
+ std::optional<media::VolumeShaperState>* _aidl_return) override;
private:
const sp<PlaybackThread::Track> mTrack;
@@ -659,7 +635,7 @@
int /*audio_session_t*/ triggerSession);
virtual binder::Status stop();
virtual binder::Status getActiveMicrophones(
- std::vector<media::MicrophoneInfo>* activeMicrophones);
+ std::vector<media::MicrophoneInfoData>* activeMicrophones);
virtual binder::Status setPreferredMicrophoneDirection(
int /*audio_microphone_direction_t*/ direction);
virtual binder::Status setPreferredMicrophoneFieldDimension(float zoom);
@@ -682,6 +658,7 @@
virtual status_t createMmapBuffer(int32_t minSizeFrames,
struct audio_mmap_buffer_info *info);
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNanos);
virtual status_t start(const AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *handle);
@@ -704,7 +681,7 @@
audio_io_handle_t *input,
audio_config_t *config,
audio_devices_t device,
- const String8& address,
+ const char* address,
audio_source_t source,
audio_input_flags_t flags,
audio_devices_t outputDevice,
@@ -756,6 +733,8 @@
sp<ThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId);
+ ThreadBase *hapticPlaybackThread_l() const;
+
void removeClient_l(pid_t pid);
void removeNotificationClient(pid_t pid);
@@ -782,7 +761,7 @@
std::vector< sp<EffectModule> > purgeStaleEffects_l();
- void broacastParametersToRecordThreads_l(const String8& keyValuePairs);
+ void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
void forwardParametersToDownstreamPatches_l(
audio_io_handle_t upStream, const String8& keyValuePairs,
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp
index dda164c..16b25f6 100644
--- a/services/audioflinger/AudioHwDevice.cpp
+++ b/services/audioflinger/AudioHwDevice.cpp
@@ -98,5 +98,9 @@
return mHwDevice->supportsAudioPatches(&result) == OK ? result : false;
}
+status_t AudioHwDevice::getAudioPort(struct audio_port_v7 *port) const {
+ return mHwDevice->getAudioPort(port);
+}
+
}; // namespace android
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index 6709d17..fc2c693 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -83,6 +83,8 @@
bool supportsAudioPatches() const;
+ status_t getAudioPort(struct audio_port_v7 *port) const;
+
private:
const audio_module_handle_t mHandle;
const char * const mModuleName;
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index d13cb8f..7e06096 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -164,6 +164,10 @@
stream = outStream;
mHalFormatHasProportionalFrames = audio_has_proportional_frames(config->format);
status = stream->getFrameSize(&mHalFrameSize);
+ LOG_ALWAYS_FATAL_IF(status != OK, "Error retrieving frame size from HAL: %d", status);
+ LOG_ALWAYS_FATAL_IF(mHalFrameSize <= 0, "Error frame size was %zu but must be greater than"
+ " zero", mHalFrameSize);
+
}
return status;
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 5ff7215..cecd52b 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -30,6 +30,8 @@
namespace android {
+using media::IEffectClient;
+
void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
const PatchPanel::Patch& patch) {
ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
@@ -115,10 +117,19 @@
status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
const effect_descriptor_t *desc) {
+ sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ if (effectsFactory == nullptr) {
+ return BAD_VALUE;
+ }
- if ((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
- && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC) {
- ALOGW("%s() non pre/post processing device effect %s", __func__, desc->name);
+ static const float sMinDeviceEffectHalVersion = 6.0;
+ float halVersion = effectsFactory->getHalVersion();
+
+ if (((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
+ && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC)
+ || halVersion < sMinDeviceEffectHalVersion) {
+ ALOGW("%s() non pre/post processing device effect %s or incompatible API version %f",
+ __func__, desc->name, halVersion);
return BAD_VALUE;
}
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 81e6065..d187df2 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -33,7 +33,7 @@
sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
const AudioDeviceTypeAddr& device,
const sp<AudioFlinger::Client>& client,
- const sp<IEffectClient>& effectClient,
+ const sp<media::IEffectClient>& effectClient,
const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
int *enabled,
status_t *status,
@@ -165,6 +165,7 @@
uint32_t sampleRate() const override { return 0; }
audio_channel_mask_t channelMask() const override { return AUDIO_CHANNEL_NONE; }
uint32_t channelCount() const override { return 0; }
+ audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
size_t frameCount() const override { return 0; }
uint32_t latency() const override { return 0; }
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3dfeb83..3ab7737 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -25,6 +25,7 @@
#include <utils/Log.h>
#include <system/audio_effects/effect_aec.h>
#include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
#include <system/audio_effects/effect_ns.h>
#include <system/audio_effects/effect_visualizer.h>
#include <audio_utils/channels.h>
@@ -33,6 +34,7 @@
#include <media/AudioContainers.h>
#include <media/AudioEffect.h>
#include <media/AudioDeviceTypeAddr.h>
+#include <media/ShmemCompat.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <mediautils/ServiceUtilities.h>
@@ -58,6 +60,28 @@
namespace android {
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
+
+namespace {
+
+// Append a POD value into a vector of bytes.
+template<typename T>
+void appendToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+ const uint8_t* ar(reinterpret_cast<const uint8_t*>(&value));
+ buffer->insert(buffer->end(), ar, ar + sizeof(T));
+}
+
+// Write a POD value into a vector of bytes (clears the previous buffer
+// content).
+template<typename T>
+void writeToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+ buffer->clear();
+ appendToBuffer(value, buffer);
+}
+
+} // namespace
+
// ----------------------------------------------------------------------------
// EffectBase implementation
// ----------------------------------------------------------------------------
@@ -292,6 +316,9 @@
}
}
+ // Prevent calls to process() and other functions on effect interface from now on.
+ // The effect engine will be released by the destructor when the last strong reference on
+ // this object is released which can happen after next process is called.
if (mHandles.size() == 0 && !mPinned) {
mState = DESTROYED;
}
@@ -565,20 +592,6 @@
}
-ssize_t AudioFlinger::EffectModule::removeHandle_l(EffectHandle *handle)
-{
- ssize_t status = EffectBase::removeHandle_l(handle);
-
- // Prevent calls to process() and other functions on effect interface from now on.
- // The effect engine will be released by the destructor when the last strong reference on
- // this object is released which can happen after next process is called.
- if (status == 0 && !mPinned) {
- mEffectInterface->close();
- }
-
- return status;
-}
-
bool AudioFlinger::EffectModule::updateState() {
Mutex::Autolock _l(mLock);
@@ -879,6 +892,11 @@
}
#endif
}
+ if (isHapticGenerator()) {
+ audio_channel_mask_t hapticChannelMask = mCallback->hapticChannelMask();
+ mConfig.inputCfg.channels |= hapticChannelMask;
+ mConfig.outputCfg.channels |= hapticChannelMask;
+ }
mInChannelCountRequested =
audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
mOutChannelCountRequested =
@@ -1160,11 +1178,10 @@
return remainder == 0 ? 0 : divisor - remainder;
}
-status_t AudioFlinger::EffectModule::command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData)
+status_t AudioFlinger::EffectModule::command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxReplySize,
+ std::vector<uint8_t>* reply)
{
Mutex::Autolock _l(mLock);
ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
@@ -1175,63 +1192,68 @@
if (mStatus != NO_ERROR) {
return mStatus;
}
+ if (maxReplySize < 0 || maxReplySize > EFFECT_PARAM_SIZE_MAX) {
+ return -EINVAL;
+ }
+ size_t cmdSize = cmdData.size();
+ const effect_param_t* param = cmdSize >= sizeof(effect_param_t)
+ ? reinterpret_cast<const effect_param_t*>(cmdData.data())
+ : nullptr;
if (cmdCode == EFFECT_CMD_GET_PARAM &&
- (sizeof(effect_param_t) > cmdSize ||
- ((effect_param_t *)pCmdData)->psize > cmdSize
- - sizeof(effect_param_t))) {
+ (param == nullptr || param->psize > cmdSize - sizeof(effect_param_t))) {
android_errorWriteLog(0x534e4554, "32438594");
android_errorWriteLog(0x534e4554, "33003822");
return -EINVAL;
}
if (cmdCode == EFFECT_CMD_GET_PARAM &&
- (*replySize < sizeof(effect_param_t) ||
- ((effect_param_t *)pCmdData)->psize > *replySize - sizeof(effect_param_t))) {
+ (maxReplySize < sizeof(effect_param_t) ||
+ param->psize > maxReplySize - sizeof(effect_param_t))) {
android_errorWriteLog(0x534e4554, "29251553");
return -EINVAL;
}
if (cmdCode == EFFECT_CMD_GET_PARAM &&
- (sizeof(effect_param_t) > *replySize
- || ((effect_param_t *)pCmdData)->psize > *replySize
- - sizeof(effect_param_t)
- || ((effect_param_t *)pCmdData)->vsize > *replySize
- - sizeof(effect_param_t)
- - ((effect_param_t *)pCmdData)->psize
- || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
- *replySize
- - sizeof(effect_param_t)
- - ((effect_param_t *)pCmdData)->psize
- - ((effect_param_t *)pCmdData)->vsize)) {
+ (sizeof(effect_param_t) > maxReplySize
+ || param->psize > maxReplySize - sizeof(effect_param_t)
+ || param->vsize > maxReplySize - sizeof(effect_param_t)
+ - param->psize
+ || roundUpDelta(param->psize, (uint32_t) sizeof(int)) >
+ maxReplySize
+ - sizeof(effect_param_t)
+ - param->psize
+ - param->vsize)) {
ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: reply size inconsistent");
android_errorWriteLog(0x534e4554, "32705438");
return -EINVAL;
}
if ((cmdCode == EFFECT_CMD_SET_PARAM
- || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED) && // DEFERRED not generally used
- (sizeof(effect_param_t) > cmdSize
- || ((effect_param_t *)pCmdData)->psize > cmdSize
- - sizeof(effect_param_t)
- || ((effect_param_t *)pCmdData)->vsize > cmdSize
- - sizeof(effect_param_t)
- - ((effect_param_t *)pCmdData)->psize
- || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
- cmdSize
- - sizeof(effect_param_t)
- - ((effect_param_t *)pCmdData)->psize
- - ((effect_param_t *)pCmdData)->vsize)) {
+ || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED)
+ && // DEFERRED not generally used
+ (param == nullptr
+ || param->psize > cmdSize - sizeof(effect_param_t)
+ || param->vsize > cmdSize - sizeof(effect_param_t)
+ - param->psize
+ || roundUpDelta(param->psize,
+ (uint32_t) sizeof(int)) >
+ cmdSize
+ - sizeof(effect_param_t)
+ - param->psize
+ - param->vsize)) {
android_errorWriteLog(0x534e4554, "30204301");
return -EINVAL;
}
+ uint32_t replySize = maxReplySize;
+ reply->resize(replySize);
status_t status = mEffectInterface->command(cmdCode,
cmdSize,
- pCmdData,
- replySize,
- pReplyData);
+ const_cast<uint8_t*>(cmdData.data()),
+ &replySize,
+ reply->data());
+ reply->resize(status == NO_ERROR ? replySize : 0);
if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
- uint32_t size = (replySize == NULL) ? 0 : *replySize;
for (size_t i = 1; i < mHandles.size(); i++) {
EffectHandle *h = mHandles[i];
if (h != NULL && !h->disconnected()) {
- h->commandExecuted(cmdCode, cmdSize, pCmdData, size, pReplyData);
+ h->commandExecuted(cmdCode, cmdData, *reply);
}
}
}
@@ -1522,6 +1544,41 @@
return mOffloaded;
}
+/*static*/
+bool AudioFlinger::EffectModule::isHapticGenerator(const effect_uuid_t *type) {
+ return memcmp(type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0;
+}
+
+bool AudioFlinger::EffectModule::isHapticGenerator() const {
+ return isHapticGenerator(&mDescriptor.type);
+}
+
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+{
+ if (mStatus != NO_ERROR) {
+ return mStatus;
+ }
+ if (!isHapticGenerator()) {
+ ALOGW("Should not set haptic intensity for effects that are not HapticGenerator");
+ return INVALID_OPERATION;
+ }
+
+ std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+ effect_param_t *param = (effect_param_t*) request.data();
+ param->psize = sizeof(int32_t);
+ param->vsize = sizeof(int32_t) * 2;
+ *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
+ *((int32_t*)param->data + 1) = id;
+ *((int32_t*)param->data + 2) = intensity;
+ std::vector<uint8_t> response;
+ status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+ if (status == NO_ERROR) {
+ LOG_ALWAYS_FATAL_IF(response.size() != 4);
+ status = *reinterpret_cast<const status_t*>(response.data());
+ }
+ return status;
+}
+
static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
std::stringstream ss;
@@ -1600,9 +1657,9 @@
#define LOG_TAG "AudioFlinger::EffectHandle"
AudioFlinger::EffectHandle::EffectHandle(const sp<EffectBase>& effect,
- const sp<AudioFlinger::Client>& client,
- const sp<IEffectClient>& effectClient,
- int32_t priority)
+ const sp<AudioFlinger::Client>& client,
+ const sp<media::IEffectClient>& effectClient,
+ int32_t priority)
: BnEffect(),
mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false)
@@ -1636,20 +1693,24 @@
return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
}
-status_t AudioFlinger::EffectHandle::enable()
+#define RETURN(code) \
+ *_aidl_return = (code); \
+ return Status::ok();
+
+Status AudioFlinger::EffectHandle::enable(int32_t* _aidl_return)
{
AutoMutex _l(mLock);
ALOGV("enable %p", this);
sp<EffectBase> effect = mEffect.promote();
if (effect == 0 || mDisconnected) {
- return DEAD_OBJECT;
+ RETURN(DEAD_OBJECT);
}
if (!mHasControl) {
- return INVALID_OPERATION;
+ RETURN(INVALID_OPERATION);
}
if (mEnabled) {
- return NO_ERROR;
+ RETURN(NO_ERROR);
}
mEnabled = true;
@@ -1657,54 +1718,55 @@
status_t status = effect->updatePolicyState();
if (status != NO_ERROR) {
mEnabled = false;
- return status;
+ RETURN(status);
}
effect->checkSuspendOnEffectEnabled(true, false /*threadLocked*/);
// checkSuspendOnEffectEnabled() can suspend this same effect when enabled
if (effect->suspended()) {
- return NO_ERROR;
+ RETURN(NO_ERROR);
}
status = effect->setEnabled(true, true /*fromHandle*/);
if (status != NO_ERROR) {
mEnabled = false;
}
- return status;
+ RETURN(status);
}
-status_t AudioFlinger::EffectHandle::disable()
+Status AudioFlinger::EffectHandle::disable(int32_t* _aidl_return)
{
ALOGV("disable %p", this);
AutoMutex _l(mLock);
sp<EffectBase> effect = mEffect.promote();
if (effect == 0 || mDisconnected) {
- return DEAD_OBJECT;
+ RETURN(DEAD_OBJECT);
}
if (!mHasControl) {
- return INVALID_OPERATION;
+ RETURN(INVALID_OPERATION);
}
if (!mEnabled) {
- return NO_ERROR;
+ RETURN(NO_ERROR);
}
mEnabled = false;
effect->updatePolicyState();
if (effect->suspended()) {
- return NO_ERROR;
+ RETURN(NO_ERROR);
}
status_t status = effect->setEnabled(false, true /*fromHandle*/);
- return status;
+ RETURN(status);
}
-void AudioFlinger::EffectHandle::disconnect()
+Status AudioFlinger::EffectHandle::disconnect()
{
ALOGV("%s %p", __FUNCTION__, this);
disconnect(true);
+ return Status::ok();
}
void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast)
@@ -1741,11 +1803,16 @@
}
}
-status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData)
+Status AudioFlinger::EffectHandle::getCblk(media::SharedFileRegion* _aidl_return) {
+ LOG_ALWAYS_FATAL_IF(!convertIMemoryToSharedFileRegion(mCblkMemory, _aidl_return));
+ return Status::ok();
+}
+
+Status AudioFlinger::EffectHandle::command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxResponseSize,
+ std::vector<uint8_t>* response,
+ int32_t* _aidl_return)
{
ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p",
cmdCode, mHasControl, mEffect.unsafe_get());
@@ -1765,49 +1832,46 @@
break;
}
android_errorWriteLog(0x534e4554, "62019992");
- return BAD_VALUE;
+ RETURN(BAD_VALUE);
}
if (cmdCode == EFFECT_CMD_ENABLE) {
- if (*replySize < sizeof(int)) {
+ if (maxResponseSize < sizeof(int)) {
android_errorWriteLog(0x534e4554, "32095713");
- return BAD_VALUE;
+ RETURN(BAD_VALUE);
}
- *(int *)pReplyData = NO_ERROR;
- *replySize = sizeof(int);
- return enable();
+ writeToBuffer(NO_ERROR, response);
+ return enable(_aidl_return);
} else if (cmdCode == EFFECT_CMD_DISABLE) {
- if (*replySize < sizeof(int)) {
+ if (maxResponseSize < sizeof(int)) {
android_errorWriteLog(0x534e4554, "32095713");
- return BAD_VALUE;
+ RETURN(BAD_VALUE);
}
- *(int *)pReplyData = NO_ERROR;
- *replySize = sizeof(int);
- return disable();
+ writeToBuffer(NO_ERROR, response);
+ return disable(_aidl_return);
}
AutoMutex _l(mLock);
sp<EffectBase> effect = mEffect.promote();
if (effect == 0 || mDisconnected) {
- return DEAD_OBJECT;
+ RETURN(DEAD_OBJECT);
}
// only get parameter command is permitted for applications not controlling the effect
if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) {
- return INVALID_OPERATION;
+ RETURN(INVALID_OPERATION);
}
// handle commands that are not forwarded transparently to effect engine
if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) {
if (mClient == 0) {
- return INVALID_OPERATION;
+ RETURN(INVALID_OPERATION);
}
- if (*replySize < sizeof(int)) {
+ if (maxResponseSize < sizeof(int)) {
android_errorWriteLog(0x534e4554, "32095713");
- return BAD_VALUE;
+ RETURN(BAD_VALUE);
}
- *(int *)pReplyData = NO_ERROR;
- *replySize = sizeof(int);
+ writeToBuffer(NO_ERROR, response);
// No need to trylock() here as this function is executed in the binder thread serving a
// particular client process: no risk to block the whole media server process or mixer
@@ -1820,10 +1884,10 @@
serverIndex > EFFECT_PARAM_BUFFER_SIZE) {
mCblk->serverIndex = 0;
mCblk->clientIndex = 0;
- return BAD_VALUE;
+ RETURN(BAD_VALUE);
}
status_t status = NO_ERROR;
- effect_param_t *param = NULL;
+ std::vector<uint8_t> param;
for (uint32_t index = serverIndex; index < clientIndex;) {
int *p = (int *)(mBuffer + index);
const int size = *p++;
@@ -1835,23 +1899,16 @@
break;
}
- // copy to local memory in case of client corruption b/32220769
- auto *newParam = (effect_param_t *)realloc(param, size);
- if (newParam == NULL) {
- ALOGW("command(): out of memory");
- status = NO_MEMORY;
- break;
- }
- param = newParam;
- memcpy(param, p, size);
+ std::copy(reinterpret_cast<const uint8_t*>(p),
+ reinterpret_cast<const uint8_t*>(p) + size,
+ std::back_inserter(param));
- int reply = 0;
- uint32_t rsize = sizeof(reply);
+ std::vector<uint8_t> replyBuffer;
status_t ret = effect->command(EFFECT_CMD_SET_PARAM,
- size,
param,
- &rsize,
- &reply);
+ sizeof(int),
+ &replyBuffer);
+ int reply = *reinterpret_cast<const int*>(replyBuffer.data());
// verify shared memory: server index shouldn't change; client index can't go back.
if (serverIndex != mCblk->serverIndex
@@ -1864,21 +1921,24 @@
// stop at first error encountered
if (ret != NO_ERROR) {
status = ret;
- *(int *)pReplyData = reply;
+ writeToBuffer(reply, response);
break;
} else if (reply != NO_ERROR) {
- *(int *)pReplyData = reply;
+ writeToBuffer(reply, response);
break;
}
index += size;
}
- free(param);
mCblk->serverIndex = 0;
mCblk->clientIndex = 0;
- return status;
+ RETURN(status);
}
- return effect->command(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+ status_t status = effect->command(cmdCode,
+ cmdData,
+ maxResponseSize,
+ response);
+ RETURN(status);
}
void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled)
@@ -1894,13 +1954,11 @@
}
void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData)
+ const std::vector<uint8_t>& cmdData,
+ const std::vector<uint8_t>& replyData)
{
if (mEffectClient != 0) {
- mEffectClient->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+ mEffectClient->commandExecuted(cmdCode, cmdData, replyData);
}
}
@@ -1913,13 +1971,6 @@
}
}
-status_t AudioFlinger::EffectHandle::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- return BnEffect::onTransact(code, data, reply, flags);
-}
-
-
void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
{
bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
@@ -2385,6 +2436,25 @@
}
}
+// containsHapticGeneratingEffect_l must be called with ThreadBase::mLock or EffectChain::mLock held
+bool AudioFlinger::EffectChain::containsHapticGeneratingEffect_l()
+{
+ for (size_t i = 0; i < mEffects.size(); ++i) {
+ if (mEffects[i]->isHapticGenerator()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+{
+ Mutex::Autolock _l(mLock);
+ for (size_t i = 0; i < mEffects.size(); ++i) {
+ mEffects[i]->setHapticIntensity(id, intensity);
+ }
+}
+
void AudioFlinger::EffectChain::syncHalEffectsState()
{
Mutex::Autolock _l(mLock);
@@ -2839,6 +2909,14 @@
return t->channelCount();
}
+audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::hapticChannelMask() const {
+ sp<ThreadBase> t = mThread.promote();
+ if (t == nullptr) {
+ return AUDIO_CHANNEL_NONE;
+ }
+ return t->hapticChannelMask();
+}
+
size_t AudioFlinger::EffectChain::EffectCallback::frameCount() const {
sp<ThreadBase> t = mThread.promote();
if (t == nullptr) {
@@ -2943,10 +3021,14 @@
Mutex::Autolock _l(mProxyLock);
if (status == NO_ERROR) {
for (auto& handle : mEffectHandles) {
+ Status bs;
if (enabled) {
- status = handle.second->enable();
+ bs = handle.second->enable(&status);
} else {
- status = handle.second->disable();
+ bs = handle.second->disable(&status);
+ }
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
}
}
}
@@ -3005,7 +3087,7 @@
__func__, port->type, port->ext.device.type,
port->ext.device.address, port->id, patch.isSoftware());
if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
- || port->ext.device.address != mDevice.mAddress) {
+ || port->ext.device.address != mDevice.address()) {
return NAME_NOT_FOUND;
}
status_t status = NAME_NOT_FOUND;
@@ -3054,10 +3136,14 @@
status = BAD_VALUE;
}
if (status == NO_ERROR || status == ALREADY_EXISTS) {
+ Status bs;
if (isEnabled()) {
- (*handle)->enable();
+ bs = (*handle)->enable(&status);
} else {
- (*handle)->disable();
+ bs = (*handle)->disable(&status);
+ }
+ if (!bs.isOk()) {
+ status = statusTFromBinderStatus(bs);
}
}
return status;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 2826297..03bdc60 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -36,6 +36,7 @@
virtual uint32_t sampleRate() const = 0;
virtual audio_channel_mask_t channelMask() const = 0;
virtual uint32_t channelCount() const = 0;
+ virtual audio_channel_mask_t hapticChannelMask() const = 0;
virtual size_t frameCount() const = 0;
// Non trivial methods usually implemented with help from ThreadBase:
@@ -132,11 +133,10 @@
void setSuspended(bool suspended);
bool suspended() const;
- virtual status_t command(uint32_t cmdCode __unused,
- uint32_t cmdSize __unused,
- void *pCmdData __unused,
- uint32_t *replySize __unused,
- void *pReplyData __unused) { return NO_ERROR; };
+ virtual status_t command(int32_t __unused,
+ const std::vector<uint8_t>& __unused,
+ int32_t __unused,
+ std::vector<uint8_t>* __unused) { return NO_ERROR; };
void setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
sp<EffectCallbackInterface>& callback() { return mCallback; }
@@ -144,7 +144,7 @@
status_t addHandle(EffectHandle *handle);
ssize_t disconnectHandle(EffectHandle *handle, bool unpinIfLast);
ssize_t removeHandle(EffectHandle *handle);
- virtual ssize_t removeHandle_l(EffectHandle *handle);
+ ssize_t removeHandle_l(EffectHandle *handle);
EffectHandle* controlHandle_l();
bool purgeHandles();
@@ -213,11 +213,10 @@
void process();
bool updateState();
- status_t command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData) override;
+ status_t command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxReplySize,
+ std::vector<uint8_t>* reply) override;
void reset_l();
status_t configure();
@@ -240,8 +239,6 @@
return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
}
- ssize_t removeHandle_l(EffectHandle *handle) override;
-
status_t setDevices(const AudioDeviceTypeAddrVector &devices);
status_t setInputDevice(const AudioDeviceTypeAddr &device);
status_t setVolume(uint32_t *left, uint32_t *right, bool controller);
@@ -257,6 +254,11 @@
sp<EffectModule> asEffectModule() override { return this; }
+ static bool isHapticGenerator(const effect_uuid_t* type);
+ bool isHapticGenerator() const;
+
+ status_t setHapticIntensity(int id, int intensity);
+
void dump(int fd, const Vector<String16>& args);
private:
@@ -316,32 +318,29 @@
// There is one EffectHandle object for each application controlling (or using)
// an effect module.
// The EffectHandle is obtained by calling AudioFlinger::createEffect().
-class EffectHandle: public android::BnEffect {
+class EffectHandle: public android::media::BnEffect {
public:
EffectHandle(const sp<EffectBase>& effect,
const sp<AudioFlinger::Client>& client,
- const sp<IEffectClient>& effectClient,
+ const sp<media::IEffectClient>& effectClient,
int32_t priority);
virtual ~EffectHandle();
virtual status_t initCheck();
// IEffect
- virtual status_t enable();
- virtual status_t disable();
- virtual status_t command(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t *replySize,
- void *pReplyData);
- virtual void disconnect();
-private:
- void disconnect(bool unpinIfLast);
-public:
- virtual sp<IMemory> getCblk() const { return mCblkMemory; }
- virtual status_t onTransact(uint32_t code, const Parcel& data,
- Parcel* reply, uint32_t flags);
+ android::binder::Status enable(int32_t* _aidl_return) override;
+ android::binder::Status disable(int32_t* _aidl_return) override;
+ android::binder::Status command(int32_t cmdCode,
+ const std::vector<uint8_t>& cmdData,
+ int32_t maxResponseSize,
+ std::vector<uint8_t>* response,
+ int32_t* _aidl_return) override;
+ android::binder::Status disconnect() override;
+ android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) override;
+private:
+ void disconnect(bool unpinIfLast);
// Give or take control of effect module
// - hasControl: true if control is given, false if removed
@@ -349,10 +348,8 @@
// - enabled: state of the effect when control is passed
void setControl(bool hasControl, bool signal, bool enabled);
void commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
- void *pCmdData,
- uint32_t replySize,
- void *pReplyData);
+ const std::vector<uint8_t>& cmdData,
+ const std::vector<uint8_t>& replyData);
void setEnabled(bool enabled);
bool enabled() const { return mEnabled; }
@@ -375,19 +372,20 @@
friend class AudioFlinger; // for mEffect, mHasControl, mEnabled
DISALLOW_COPY_AND_ASSIGN(EffectHandle);
- Mutex mLock; // protects IEffect method calls
- wp<EffectBase> mEffect; // pointer to controlled EffectModule
- sp<IEffectClient> mEffectClient; // callback interface for client notifications
- /*const*/ sp<Client> mClient; // client for shared memory allocation, see disconnect()
- sp<IMemory> mCblkMemory; // shared memory for control block
- effect_param_cblk_t* mCblk; // control block for deferred parameter setting via
- // shared memory
- uint8_t* mBuffer; // pointer to parameter area in shared memory
- int mPriority; // client application priority to control the effect
- bool mHasControl; // true if this handle is controlling the effect
- bool mEnabled; // cached enable state: needed when the effect is
- // restored after being suspended
- bool mDisconnected; // Set to true by disconnect()
+ Mutex mLock; // protects IEffect method calls
+ wp<EffectBase> mEffect; // pointer to controlled EffectModule
+ sp<media::IEffectClient> mEffectClient; // callback interface for client notifications
+ /*const*/ sp<Client> mClient; // client for shared memory allocation, see
+ // disconnect()
+ sp<IMemory> mCblkMemory; // shared memory for control block
+ effect_param_cblk_t* mCblk; // control block for deferred parameter setting via
+ // shared memory
+ uint8_t* mBuffer; // pointer to parameter area in shared memory
+ int mPriority; // client application priority to control the effect
+ bool mHasControl; // true if this handle is controlling the effect
+ bool mEnabled; // cached enable state: needed when the effect is
+ // restored after being suspended
+ bool mDisconnected; // Set to true by disconnect()
};
// the EffectChain class represents a group of effects associated to one audio session.
@@ -503,6 +501,10 @@
// isCompatibleWithThread_l() must be called with thread->mLock held
bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
+ bool containsHapticGeneratingEffect_l();
+
+ void setHapticIntensity_l(int id, int intensity);
+
sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
@@ -534,6 +536,7 @@
uint32_t sampleRate() const override;
audio_channel_mask_t channelMask() const override;
uint32_t channelCount() const override;
+ audio_channel_mask_t hapticChannelMask() const override;
size_t frameCount() const override;
uint32_t latency() const override;
@@ -685,6 +688,7 @@
uint32_t sampleRate() const override;
audio_channel_mask_t channelMask() const override;
uint32_t channelCount() const override;
+ audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
size_t frameCount() const override { return 0; }
uint32_t latency() const override { return 0; }
diff --git a/services/audioflinger/FastCapture.cpp b/services/audioflinger/FastCapture.cpp
index dd84bf2..d6d6e25 100644
--- a/services/audioflinger/FastCapture.cpp
+++ b/services/audioflinger/FastCapture.cpp
@@ -154,7 +154,7 @@
mReadBufferState = -1;
dumpState->mFrameCount = frameCount;
}
-
+ dumpState->mSilenced = current->mSilenceCapture;
}
void FastCapture::onWork()
@@ -208,6 +208,9 @@
mReadBufferState = frameCount;
}
if (mReadBufferState > 0) {
+ if (current->mSilenceCapture) {
+ memset(mReadBuffer, 0, mReadBufferState * Format_frameSize(mFormat));
+ }
ssize_t framesWritten = mPipeSink->write(mReadBuffer, mReadBufferState);
audio_track_cblk_t* cblk = current->mCblk;
if (fastPatchRecordBufferProvider != 0) {
diff --git a/services/audioflinger/FastCaptureDumpState.cpp b/services/audioflinger/FastCaptureDumpState.cpp
index 53eeba5..b8b3866 100644
--- a/services/audioflinger/FastCaptureDumpState.cpp
+++ b/services/audioflinger/FastCaptureDumpState.cpp
@@ -44,10 +44,11 @@
double periodSec = (double) mFrameCount / mSampleRate;
dprintf(fd, " FastCapture command=%s readSequence=%u framesRead=%u\n"
" readErrors=%u sampleRate=%u frameCount=%zu\n"
- " measuredWarmup=%.3g ms, warmupCycles=%u period=%.2f ms\n",
+ " measuredWarmup=%.3g ms, warmupCycles=%u period=%.2f ms\n"
+ " silenced: %s\n",
FastCaptureState::commandToString(mCommand), mReadSequence, mFramesRead,
mReadErrors, mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles,
- periodSec * 1e3);
+ periodSec * 1e3, mSilenced ? "true" : "false");
}
} // android
diff --git a/services/audioflinger/FastCaptureDumpState.h b/services/audioflinger/FastCaptureDumpState.h
index 6f9c4c3..a1b8706 100644
--- a/services/audioflinger/FastCaptureDumpState.h
+++ b/services/audioflinger/FastCaptureDumpState.h
@@ -35,6 +35,7 @@
uint32_t mReadErrors; // total number of read() errors
uint32_t mSampleRate;
size_t mFrameCount;
+ bool mSilenced = false; // capture is silenced
};
} // android
diff --git a/services/audioflinger/FastCaptureState.h b/services/audioflinger/FastCaptureState.h
index d287232..f949275 100644
--- a/services/audioflinger/FastCaptureState.h
+++ b/services/audioflinger/FastCaptureState.h
@@ -41,6 +41,8 @@
audio_format_t mFastPatchRecordFormat = AUDIO_FORMAT_INVALID;
AudioBufferProvider* mFastPatchRecordBufferProvider = nullptr; // a reference to a patch
// record in fast mode
+ bool mSilenceCapture = false; // request to silence capture for fast track.
+ // note: this also silences the normal mixer pipe
// Extends FastThreadState::Command
static const Command
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 3eacc8c..cd3c743 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -27,7 +27,6 @@
#include "Configuration.h"
#include <time.h>
-#include <utils/Debug.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <system/audio.h>
@@ -40,6 +39,7 @@
#include <audio_utils/channels.h>
#include <audio_utils/format.h>
#include <audio_utils/mono_blend.h>
+#include <cutils/bitops.h>
#include <media/AudioMixer.h>
#include "FastMixer.h"
#include "TypedLogger.h"
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index a42e09c..3f20282 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -24,7 +24,6 @@
#include <cpustats/ThreadCpuUsage.h>
#endif
#endif
-#include <utils/Debug.h>
#include <utils/Log.h>
#include "FastMixerDumpState.h"
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 396c797..857d3de 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -23,6 +23,7 @@
#include <media/ExtendedAudioBufferProvider.h>
#include <media/nbaio/NBAIO.h>
#include <media/nblog/NBLog.h>
+#include <vibrator/ExternalVibrationUtils.h>
#include "FastThreadState.h"
namespace android {
@@ -49,8 +50,7 @@
audio_format_t mFormat; // track format
int mGeneration; // increment when any field is assigned
bool mHapticPlaybackEnabled = false; // haptic playback is enabled or not
- AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE; // intensity of
- // haptic data
+ os::HapticScale mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
};
// Represents a single state of the fast mixer
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index d02d9e0..034d161 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
+gkasten@google.com
hunga@google.com
jmtrivi@google.com
mnaganov@google.com
-gkasten@google.com
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index b58fd8b..1e11660 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -25,6 +25,7 @@
#include "AudioFlinger.h"
#include <media/AudioParameter.h>
+#include <media/AudioValidator.h>
#include <media/DeviceDescriptorBase.h>
#include <media/PatchBuilder.h>
#include <mediautils/ServiceUtilities.h>
@@ -55,8 +56,12 @@
}
/* Get supported attributes for a given audio port */
-status_t AudioFlinger::getAudioPort(struct audio_port *port)
-{
+status_t AudioFlinger::getAudioPort(struct audio_port_v7 *port) {
+ status_t status = AudioValidator::validateAudioPort(*port);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
Mutex::Autolock _l(mLock);
return mPatchPanel.getAudioPort(port);
}
@@ -65,6 +70,11 @@
status_t AudioFlinger::createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle)
{
+ status_t status = AudioValidator::validateAudioPatch(*patch);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
Mutex::Autolock _l(mLock);
return mPatchPanel.createAudioPatch(patch, handle);
}
@@ -103,10 +113,22 @@
}
/* Get supported attributes for a given audio port */
-status_t AudioFlinger::PatchPanel::getAudioPort(struct audio_port *port __unused)
+status_t AudioFlinger::PatchPanel::getAudioPort(struct audio_port_v7 *port)
{
- ALOGV(__func__);
- return NO_ERROR;
+ if (port->type != AUDIO_PORT_TYPE_DEVICE) {
+ // Only query the HAL when the port is a device.
+ // TODO: implement getAudioPort for mix.
+ return INVALID_OPERATION;
+ }
+ AudioHwDevice* hwDevice = findAudioHwDeviceByModule(port->ext.device.hw_module);
+ if (hwDevice == nullptr) {
+ ALOGW("%s cannot find hw module %d", __func__, port->ext.device.hw_module);
+ return BAD_VALUE;
+ }
+ if (!hwDevice->supportsAudioPatches()) {
+ return INVALID_OPERATION;
+ }
+ return hwDevice->getAudioPort(port);
}
/* Connect a patch between several source and sink ports */
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 89d4eb1..2568dd3 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -52,7 +52,7 @@
struct audio_port *ports);
/* Get supported attributes for a given audio port */
- status_t getAudioPort(struct audio_port *port);
+ status_t getAudioPort(struct audio_port_v7 *port);
/* Create a patch between several source and sink ports */
status_t createAudioPatch(const struct audio_patch *patch,
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index d8eebf3..a4b8650 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -26,10 +26,11 @@
bool hasOpPlayAudio() const;
static sp<OpPlayAudioMonitor> createIfNeeded(
- uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType);
+ uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
+ const std::string& opPackageName);
private:
- OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
+ OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id, const String16& opPackageName);
void onFirstRef() override;
static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
@@ -49,10 +50,10 @@
void checkPlayAudioForUsage();
std::atomic_bool mHasOpPlayAudio;
- Vector<String16> mPackages;
const uid_t mUid;
const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
const int mId; // for logging purposes only
+ const String16 mOpPackageName;
};
// playback track
@@ -77,7 +78,8 @@
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
/** default behaviour is to start when there are as many frames
* ready as possible (aka. Buffer is full). */
- size_t frameCountToBeReady = SIZE_MAX);
+ size_t frameCountToBeReady = SIZE_MAX,
+ const std::string opPackageName = "");
virtual ~Track();
virtual status_t initCheck() const;
@@ -159,12 +161,12 @@
mHapticPlaybackEnabled = hapticPlaybackEnabled;
}
/** Return at what intensity to play haptics, used in mixer. */
- AudioMixer::haptic_intensity_t getHapticIntensity() const { return mHapticIntensity; }
+ os::HapticScale getHapticIntensity() const { return mHapticIntensity; }
/** Set intensity of haptic playback, should be set after querying vibrator service. */
- void setHapticIntensity(AudioMixer::haptic_intensity_t hapticIntensity) {
- if (AudioMixer::isValidHapticIntensity(hapticIntensity)) {
+ void setHapticIntensity(os::HapticScale hapticIntensity) {
+ if (os::isValidHapticScale(hapticIntensity)) {
mHapticIntensity = hapticIntensity;
- setHapticPlaybackEnabled(mHapticIntensity != AudioMixer::HAPTIC_SCALE_MUTE);
+ setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
}
}
sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
@@ -265,7 +267,7 @@
bool mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
// intensity to play haptic data
- AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE;
+ os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
class AudioVibrationController : public os::BnExternalVibrationController {
public:
explicit AudioVibrationController(Track* track) : mTrack(track) {}
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp
index c7aba79..0ce5681 100644
--- a/services/audioflinger/SpdifStreamOut.cpp
+++ b/services/audioflinger/SpdifStreamOut.cpp
@@ -39,7 +39,7 @@
, mSpdifEncoder(this, format)
, mApplicationFormat(AUDIO_FORMAT_DEFAULT)
, mApplicationSampleRate(0)
- , mApplicationChannelMask(0)
+ , mApplicationChannelMask(AUDIO_CHANNEL_NONE)
{
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 090e6d2..ab2bc32 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,6 +29,7 @@
#include <linux/futex.h>
#include <sys/stat.h>
#include <sys/syscall.h>
+#include <cutils/bitops.h>
#include <cutils/properties.h>
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
@@ -115,6 +116,8 @@
namespace android {
+using media::IEffectClient;
+
// retry counts for buffer fill timeout
// 50 * ~20msecs = 1 second
static const int8_t kMaxTrackRetries = 50;
@@ -1242,6 +1245,11 @@
return BAD_VALUE;
}
}
+
+ if (EffectModule::isHapticGenerator(&desc->type)) {
+ ALOGE("%s(): HapticGenerator is not supported in RecordThread", __func__);
+ return BAD_VALUE;
+ }
return NO_ERROR;
}
@@ -1261,6 +1269,12 @@
return NO_ERROR;
}
+ if (EffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
+ ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
+ __func__);
+ return BAD_VALUE;
+ }
+
switch (mType) {
case MIXER: {
#ifndef MULTICHANNEL_EFFECT_CHAIN
@@ -1900,9 +1914,8 @@
: AUDIO_DEVICE_NONE));
}
- // ++ operator does not compile
- for (audio_stream_type_t stream = AUDIO_STREAM_MIN; stream < AUDIO_STREAM_FOR_POLICY_CNT;
- stream = (audio_stream_type_t) (stream + 1)) {
+ for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+ const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
mStreamTypes[stream].volume = 0.0f;
mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
}
@@ -1932,7 +1945,7 @@
// here instead of constructor of PlaybackThread so that the onFirstRef
// callback would not be made on an incompletely constructed object.
if (mOutput->stream->setEventCallback(this) != OK) {
- ALOGE("Failed to add event callback");
+ ALOGD("Failed to add event callback");
}
}
run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
@@ -2066,7 +2079,8 @@
uid_t uid,
status_t *status,
audio_port_handle_t portId,
- const sp<media::IAudioTrackCallback>& callback)
+ const sp<media::IAudioTrackCallback>& callback,
+ const std::string& opPackageName)
{
size_t frameCount = *pFrameCount;
size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2091,12 +2105,6 @@
outputFlags = (audio_output_flags_t)(outputFlags | AUDIO_OUTPUT_FLAG_FAST);
}
- // Set DIRECT flag if current thread is DirectOutputThread. This can happen when the playback is
- // rerouted to direct output thread by dynamic audio policy.
- if (mType == DIRECT) {
- *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT);
- }
-
// Check if requested flags are compatible with output stream flags
if ((*flags & outputFlags) != *flags) {
ALOGW("createTrack_l(): mismatch between requested flags (%08x) and output flags (%08x)",
@@ -2350,10 +2358,21 @@
}
}
+ // Set DIRECT flag if current thread is DirectOutputThread. This can
+ // happen when the playback is rerouted to direct output thread by
+ // dynamic audio policy.
+ // Do NOT report the flag changes back to client, since the client
+ // doesn't explicitly request a direct flag.
+ audio_output_flags_t trackFlags = *flags;
+ if (mType == DIRECT) {
+ trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+ }
+
track = new Track(this, client, streamType, attr, sampleRate, format,
channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
- sessionId, creatorPid, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
+ sessionId, creatorPid, uid, trackFlags, TrackBase::TYPE_DEFAULT, portId,
+ SIZE_MAX /*frameCountToBeReady*/, opPackageName);
lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
if (lStatus != NO_ERROR) {
@@ -2365,7 +2384,7 @@
{
Mutex::Autolock _atCbL(mAudioTrackCbLock);
if (callback.get() != nullptr) {
- mAudioTrackCallbacks.emplace(callback);
+ mAudioTrackCallbacks.emplace(track, callback);
}
}
@@ -2532,15 +2551,17 @@
track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING;
}
- if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
- && mHapticChannelMask != AUDIO_CHANNEL_NONE) {
+ sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+ if (mHapticChannelMask != AUDIO_CHANNEL_NONE
+ && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+ || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
mLock.unlock();
const int intensity = AudioFlinger::onExternalVibrationStart(
track->getExternalVibration());
mLock.lock();
- track->setHapticIntensity(static_cast<AudioMixer::haptic_intensity_t>(intensity));
+ track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
// Haptic playback should be enabled by vibrator service.
if (track->getHapticPlaybackEnabled()) {
// Disable haptic playback of all active track to ensure only
@@ -2549,12 +2570,16 @@
t->setHapticPlaybackEnabled(false);
}
}
+
+ // Set haptic intensity for effect
+ if (chain != nullptr) {
+ chain->setHapticIntensity_l(track->id(), intensity);
+ }
}
track->mResetDone = false;
track->mPresentationCompleteFrames = 0;
mActiveTracks.add(track);
- sp<EffectChain> chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
track->sessionId());
@@ -2593,6 +2618,10 @@
mLocalLog.log("removeTrack_l (%p) %s", track.get(), result.string());
mTracks.remove(track);
+ {
+ Mutex::Autolock _atCbL(mAudioTrackCbLock);
+ mAudioTrackCallbacks.erase(track);
+ }
if (track->isFastTrack()) {
int index = track->mFastIndex;
ALOG_ASSERT(0 < index && index < (int)FastMixerState::sMaxFastTracks);
@@ -2688,8 +2717,8 @@
audio_utils::metadata::byteStringFromData(metadata);
std::vector metadataVec(metaDataStr.begin(), metaDataStr.end());
Mutex::Autolock _l(mAudioTrackCbLock);
- for (const auto& callback : mAudioTrackCallbacks) {
- callback->onCodecFormatChanged(metadataVec);
+ for (const auto& callbackPair : mAudioTrackCallbacks) {
+ callbackPair.second->onCodecFormatChanged(metadataVec);
}
}).detach();
}
@@ -2867,8 +2896,8 @@
(void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
}
- mHapticChannelMask = mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
- mChannelMask &= ~mHapticChannelMask;
+ mHapticChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+ mChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & ~mHapticChannelMask);
mHapticChannelCount = audio_channel_count_from_out_mask(mHapticChannelMask);
mChannelCount -= mHapticChannelCount;
@@ -3737,9 +3766,15 @@
// Determine which session to pick up haptic data.
// This must be done under the same lock as prepareTracks_l().
+ // The haptic data from the effect is at a higher priority than the one from track.
// TODO: Write haptic data directly to sink buffer when mixing.
if (mHapticChannelCount > 0 && effectChains.size() > 0) {
for (const auto& track : mActiveTracks) {
+ sp<EffectChain> effectChain = getEffectChain_l(track->sessionId());
+ if (effectChain != nullptr && effectChain->containsHapticGeneratingEffect_l()) {
+ activeHapticSessionId = track->sessionId();
+ break;
+ }
if (track->getHapticPlaybackEnabled()) {
activeHapticSessionId = track->sessionId();
break;
@@ -4109,13 +4144,20 @@
// remove from our tracks vector
removeTrack_l(track);
}
- if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
- && mHapticChannelCount > 0) {
+ if (mHapticChannelCount > 0 &&
+ ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+ || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
mLock.unlock();
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
AudioFlinger::onExternalVibrationStop(track->getExternalVibration());
mLock.lock();
+
+ // When the track is stopped, set the haptic intensity to MUTE
+ // for the HapticGenerator effect.
+ if (chain != nullptr) {
+ chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+ }
}
}
}
@@ -4204,7 +4246,7 @@
"Enumerated device type(%#x) must not be used "
"as it does not support audio patches",
patch->sinks[i].ext.device.type);
- type |= patch->sinks[i].ext.device.type;
+ type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
patch->sinks[i].ext.device.address));
}
@@ -4454,11 +4496,12 @@
// wrap the source side of the MonoPipe to make it an AudioBufferProvider
fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe));
fastTrack->mVolumeProvider = NULL;
- fastTrack->mChannelMask = mChannelMask | mHapticChannelMask; // mPipeSink channel mask for
- // audio to FastMixer
+ fastTrack->mChannelMask = static_cast<audio_channel_mask_t>(
+ mChannelMask | mHapticChannelMask); // mPipeSink channel mask for
+ // audio to FastMixer
fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
- fastTrack->mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE;
+ fastTrack->mHapticIntensity = os::HapticScale::NONE;
fastTrack->mGeneration++;
state->mFastTracksGen++;
state->mTrackMask = 1;
@@ -4469,7 +4512,8 @@
// specify sink channel mask when haptic channel mask present as it can not
// be calculated directly from channel count
state->mSinkChannelMask = mHapticChannelMask == AUDIO_CHANNEL_NONE
- ? AUDIO_CHANNEL_NONE : mChannelMask | mHapticChannelMask;
+ ? AUDIO_CHANNEL_NONE
+ : static_cast<audio_channel_mask_t>(mChannelMask | mHapticChannelMask);
state->mCommand = FastMixerState::COLD_IDLE;
// already done in constructor initialization list
//mFastMixerFutex = 0;
@@ -6061,10 +6105,6 @@
bool trackPaused = false;
bool trackStopped = false;
- if ((mType == DIRECT) && audio_is_linear_pcm(mFormat) && !usesHwAvSync()) {
- return !mStandby;
- }
-
// do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
// after a timeout and we will enter standby then.
if (mTracks.size() > 0) {
@@ -6890,7 +6930,7 @@
snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id);
mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
- if (mInput != nullptr && mInput->audioHwDev != nullptr) {
+ if (mInput->audioHwDev != nullptr) {
mIsMsdDevice = strcmp(
mInput->audioHwDev->moduleName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0;
}
@@ -7088,6 +7128,8 @@
// reference to a fast track which is about to be removed
sp<RecordTrack> fastTrackToRemove;
+ bool silenceFastCapture = false;
+
{ // scope for mLock
Mutex::Autolock _l(mLock);
@@ -7175,14 +7217,33 @@
__func__, activeTrackState, activeTrack->id(), size);
}
- activeTracks.add(activeTrack);
- i++;
-
if (activeTrack->isFastTrack()) {
ALOG_ASSERT(!mFastTrackAvail);
ALOG_ASSERT(fastTrack == 0);
+ // if the active fast track is silenced either:
+ // 1) silence the whole capture from fast capture buffer if this is
+ // the only active track
+ // 2) invalidate this track: this will cause the client to reconnect and possibly
+ // be invalidated again until unsilenced
+ if (activeTrack->isSilenced()) {
+ if (size > 1) {
+ activeTrack->invalidate();
+ ALOG_ASSERT(fastTrackToRemove == 0);
+ fastTrackToRemove = activeTrack;
+ removeTrack_l(activeTrack);
+ mActiveTracks.remove(activeTrack);
+ size--;
+ continue;
+ } else {
+ silenceFastCapture = true;
+ }
+ }
fastTrack = activeTrack;
}
+
+ activeTracks.add(activeTrack);
+ i++;
+
}
mActiveTracks.updatePowerState(this);
@@ -7256,6 +7317,10 @@
AUDIO_FORMAT_INVALID : fastTrack->format();
didModify = true;
}
+ if (state->mSilenceCapture != silenceFastCapture) {
+ state->mSilenceCapture = silenceFastCapture;
+ didModify = true;
+ }
sq->end(didModify);
if (didModify) {
sq->push(block);
@@ -7296,7 +7361,7 @@
const ssize_t availableToRead = mPipeSource->availableToRead();
if (availableToRead >= 0) {
- // PipeSource is the master clock. It is up to the AudioRecord client to keep up.
+ // PipeSource is the primary clock. It is up to the AudioRecord client to keep up.
LOG_ALWAYS_FATAL_IF((size_t)availableToRead > mPipeFramesP2,
"more frames to read than fifo size, %zd > %zu",
availableToRead, mPipeFramesP2);
@@ -7340,8 +7405,10 @@
// Update server timestamp with server stats
// systemTime() is optional if the hardware supports timestamps.
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] += framesRead;
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = lastIoEndNs;
+ if (framesRead >= 0) {
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] += framesRead;
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = lastIoEndNs;
+ }
// Update server timestamp with kernel stats
if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
@@ -7410,7 +7477,7 @@
(framesRead - part1) * mFrameSize);
}
}
- rear = mRsmpInRear += framesRead;
+ mRsmpInRear = audio_utils::safe_add_overflow(mRsmpInRear, (int32_t)framesRead);
size = activeTracks.size();
@@ -7851,7 +7918,8 @@
AutoMutex lock(mLock);
if (recordTrack->isInvalid()) {
recordTrack->clearSyncStartEvent();
- return INVALID_OPERATION;
+ ALOGW("%s track %d: invalidated before startInput", __func__, recordTrack->portId());
+ return DEAD_OBJECT;
}
if (mActiveTracks.indexOf(recordTrack) >= 0) {
if (recordTrack->mState == TrackBase::PAUSING) {
@@ -7881,7 +7949,8 @@
recordTrack->mState = TrackBase::STARTING_2;
// STARTING_2 forces destroy to call stopInput.
}
- return INVALID_OPERATION;
+ ALOGW("%s track %d: invalidated after startInput", __func__, recordTrack->portId());
+ return DEAD_OBJECT;
}
if (recordTrack->mState != TrackBase::STARTING_1) {
ALOGW("%s(%d): unsynchronized mState:%d change",
@@ -8408,13 +8477,14 @@
}
result = mInput->stream->getFrameSize(&mFrameSize);
LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+ LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
+ mFrameSize);
result = mInput->stream->getBufferSize(&mBufferSize);
LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving buffer size from HAL: %d", result);
mFrameCount = mBufferSize / mFrameSize;
- ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%lld, "
- "mBufferSize=%lld, mFrameCount=%lld",
- this, mChannelCount, mFormat, (long long)mFrameSize, (long long)mBufferSize,
- (long long)mFrameCount);
+ ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
+ "mBufferSize=%zu, mFrameCount=%zu",
+ this, mChannelCount, mFormat, mFrameSize, mBufferSize, mFrameCount);
// This is the formula for calculating the temporary buffer size.
// With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
// 1 full output buffer, regardless of the alignment of the available input.
@@ -8535,7 +8605,7 @@
// store new device and send to effects
mInDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
- mInDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+ mInDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
audio_port_handle_t deviceId = patch->sources[0].id;
for (size_t i = 0; i < mEffectChains.size(); i++) {
mEffectChains[i]->setInputDevice_l(inDeviceTypeAddr());
@@ -8614,6 +8684,7 @@
void AudioFlinger::RecordThread::updateOutDevices(const DeviceDescriptorBaseVector& outDevices)
{
+ Mutex::Autolock _l(mLock);
mOutDevices = outDevices;
mOutDeviceTypeAddrs = deviceTypeAddrsFromDescriptors(mOutDevices);
for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -8677,6 +8748,11 @@
return mThread->getMmapPosition(position);
}
+status_t AudioFlinger::MmapThreadHandle::getExternalPosition(uint64_t *position,
+ int64_t *timeNanos) {
+ return mThread->getExternalPosition(position, timeNanos);
+}
+
status_t AudioFlinger::MmapThreadHandle::start(const AudioClient& client,
const audio_attributes_t *attr, audio_port_handle_t *handle)
@@ -8712,7 +8788,6 @@
AudioFlinger::MmapThread::~MmapThread()
{
- releaseWakeLock_l();
}
void AudioFlinger::MmapThread::onFirstRef()
@@ -8762,7 +8837,6 @@
return NO_INIT;
}
mStandby = true;
- acquireWakeLock();
return mHalStream->createMmapBuffer(minSizeFrames, info);
}
@@ -8801,8 +8875,12 @@
status_t ret;
if (*handle == mPortId) {
- // for the first track, reuse portId and session allocated when the stream was opened
- return exitStandby();
+ // For the first track, reuse portId and session allocated when the stream was opened.
+ ret = exitStandby();
+ if (ret == NO_ERROR) {
+ acquireWakeLock();
+ }
+ return ret;
}
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
@@ -8923,6 +9001,7 @@
if (handle == mPortId) {
mHalStream->stop();
+ releaseWakeLock();
return NO_ERROR;
}
@@ -8990,6 +9069,8 @@
LOG_ALWAYS_FATAL_IF(!audio_is_linear_pcm(mFormat), "HAL format %#x is not linear pcm", mFormat);
result = mHalStream->getFrameSize(&mFrameSize);
LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving frame size from HAL: %d", result);
+ LOG_ALWAYS_FATAL_IF(mFrameSize <= 0, "Error frame size was %zu but must be greater than zero",
+ mFrameSize);
result = mHalStream->getBufferSize(&mBufferSize);
LOG_ALWAYS_FATAL_IF(result != OK, "Error retrieving buffer size from HAL: %d", result);
mFrameCount = mBufferSize / mFrameSize;
@@ -9163,7 +9244,7 @@
"Enumerated device type(%#x) must not be used "
"as it does not support audio patches",
patch->sinks[i].ext.device.type);
- type |= patch->sinks[i].ext.device.type;
+ type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
patch->sinks[i].ext.device.address));
}
@@ -9174,7 +9255,7 @@
deviceId = patch->sources[0].id;
numDevices = mPatch.num_sources;
sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
- sourceDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+ sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
}
for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9372,6 +9453,11 @@
return BAD_VALUE;
}
+ if (EffectModule::isHapticGenerator(&desc->type)) {
+ ALOGE("%s(): HapticGenerator is not supported for MmapThread", __func__);
+ return BAD_VALUE;
+ }
+
return NO_ERROR;
}
@@ -9624,6 +9710,20 @@
}
}
+status_t AudioFlinger::MmapPlaybackThread::getExternalPosition(uint64_t *position,
+ int64_t *timeNanos)
+{
+ if (mOutput == nullptr) {
+ return NO_INIT;
+ }
+ struct timespec timestamp;
+ status_t status = mOutput->getPresentationPosition(position, &timestamp);
+ if (status == NO_ERROR) {
+ *timeNanos = timestamp.tv_sec * NANOS_PER_SECOND + timestamp.tv_nsec;
+ }
+ return status;
+}
+
void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
{
MmapThread::dumpInternals_l(fd, args);
@@ -9728,4 +9828,13 @@
}
}
+status_t AudioFlinger::MmapCaptureThread::getExternalPosition(
+ uint64_t *position, int64_t *timeNanos)
+{
+ if (mInput == nullptr) {
+ return NO_INIT;
+ }
+ return mInput->getCapturePosition((int64_t*)position, timeNanos);
+}
+
} // namespace android
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index f81387e..014f2d7 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -272,6 +272,7 @@
// Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
// and returns the [normal mix] buffer's frame count.
virtual size_t frameCount() const = 0;
+ virtual audio_channel_mask_t hapticChannelMask() const { return AUDIO_CHANNEL_NONE; }
virtual uint32_t latency_l() const { return 0; }
virtual void setVolumeForOutput_l(float left __unused, float right __unused) const {}
@@ -348,7 +349,7 @@
sp<EffectHandle> createEffect_l(
const sp<AudioFlinger::Client>& client,
- const sp<IEffectClient>& effectClient,
+ const sp<media::IEffectClient>& effectClient,
int32_t priority,
audio_session_t sessionId,
effect_descriptor_t *desc,
@@ -478,6 +479,25 @@
void onEffectEnable(const sp<EffectModule>& effect);
void onEffectDisable();
+ // invalidateTracksForAudioSession_l must be called with holding mLock.
+ virtual void invalidateTracksForAudioSession_l(audio_session_t sessionId __unused) const { }
+ // Invalidate all the tracks with the given audio session.
+ void invalidateTracksForAudioSession(audio_session_t sessionId) const {
+ Mutex::Autolock _l(mLock);
+ invalidateTracksForAudioSession_l(sessionId);
+ }
+
+ template <typename T>
+ void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+ const T& tracks) const {
+ for (size_t i = 0; i < tracks.size(); ++i) {
+ const sp<TrackBase>& track = tracks[i];
+ if (sessionId == track->sessionId()) {
+ track->invalidate();
+ }
+ }
+ }
+
protected:
// entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -864,7 +884,8 @@
uid_t uid,
status_t *status /*non-NULL*/,
audio_port_handle_t portId,
- const sp<media::IAudioTrackCallback>& callback);
+ const sp<media::IAudioTrackCallback>& callback,
+ const std::string& opPackageName);
AudioStreamOut* getOutput() const;
AudioStreamOut* clearOutput();
@@ -939,6 +960,13 @@
&& outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
}
+ audio_channel_mask_t hapticChannelMask() const override {
+ return mHapticChannelMask;
+ }
+ bool supportsHapticPlayback() const {
+ return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
+ }
+
protected:
// updated by readOutputParameters_l()
size_t mNormalFrameCount; // normal mixer and effects
@@ -1061,6 +1089,11 @@
uint32_t trackCountForUid_l(uid_t uid) const;
+ void invalidateTracksForAudioSession_l(
+ audio_session_t sessionId) const override {
+ ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
+ }
+
private:
friend class AudioFlinger; // for numerous
@@ -1186,7 +1219,7 @@
Mutex mAudioTrackCbLock;
// Record of IAudioTrackCallback
- std::set<sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
+ std::map<sp<Track>, sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
private:
// The HAL output sink is treated as non-blocking, but current implementation is blocking
@@ -1791,6 +1824,7 @@
audio_port_handle_t *handle);
status_t stop(audio_port_handle_t handle);
status_t standby();
+ virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNaos) = 0;
// RefBase
virtual void onFirstRef();
@@ -1902,6 +1936,8 @@
virtual void toAudioPortConfig(struct audio_port_config *config);
+ status_t getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
protected:
void dumpInternals_l(int fd, const Vector<String16>& args) override;
@@ -1932,6 +1968,8 @@
virtual void toAudioPortConfig(struct audio_port_config *config);
+ status_t getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
protected:
AudioStreamIn* mInput;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 15c66fb..01d5345 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -373,10 +373,15 @@
bool mIsInvalid; // non-resettable latch, set by invalidate()
// It typically takes 5 threadloop mix iterations for latency to stabilize.
- static inline constexpr int32_t LOG_START_COUNTDOWN = 8;
- int32_t mLogStartCountdown = 0;
- int64_t mLogStartTimeNs = 0;
- int64_t mLogStartFrames = 0;
+ // However, this can be 12+ iterations for BT.
+ // To be sure, we wait for latency to dip (it usually increases at the start)
+ // to assess stability and then log to MediaMetrics.
+ // Rapid start / pause calls may cause inaccurate numbers.
+ static inline constexpr int32_t LOG_START_COUNTDOWN = 12;
+ int32_t mLogStartCountdown = 0; // Mixer period countdown
+ int64_t mLogStartTimeNs = 0; // Monotonic time at start()
+ int64_t mLogStartFrames = 0; // Timestamp frames at start()
+ double mLogLatencyMs = 0.; // Track the last log latency
TrackMetrics mTrackMetrics;
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 12bd341..af16448 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -68,6 +68,7 @@
}
void logConstructor(pid_t creatorPid, uid_t creatorUid,
+ const std::string& traits = {},
audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT) const {
// Once this item is logged by the server, the client can add properties.
// no lock required, all local or const variables.
@@ -76,7 +77,8 @@
.setUid(creatorUid)
.set(AMEDIAMETRICS_PROP_ALLOWUID, (int32_t)creatorUid)
.set(AMEDIAMETRICS_PROP_EVENT,
- AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR);
+ AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+ .set(AMEDIAMETRICS_PROP_TRAITS, traits);
// log streamType from the service, since client doesn't know chosen streamType.
if (streamType != AUDIO_STREAM_DEFAULT) {
item.set(AMEDIAMETRICS_PROP_STREAMTYPE, toString(streamType).c_str());
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f2b88aa..6049f62 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -54,6 +54,8 @@
namespace android {
+using aidl_utils::binderStatusFromStatusT;
+using binder::Status;
using media::VolumeShaper;
// ----------------------------------------------------------------------------
// TrackBase
@@ -319,64 +321,98 @@
mTrack->destroy();
}
-sp<IMemory> AudioFlinger::TrackHandle::getCblk() const {
- return mTrack->getCblk();
+Status AudioFlinger::TrackHandle::getCblk(
+ std::optional<media::SharedFileRegion>* _aidl_return) {
+ *_aidl_return = legacy2aidl_NullableIMemory_SharedFileRegion(mTrack->getCblk()).value();
+ return Status::ok();
}
-status_t AudioFlinger::TrackHandle::start() {
- return mTrack->start();
+Status AudioFlinger::TrackHandle::start(int32_t* _aidl_return) {
+ *_aidl_return = mTrack->start();
+ return Status::ok();
}
-void AudioFlinger::TrackHandle::stop() {
+Status AudioFlinger::TrackHandle::stop() {
mTrack->stop();
+ return Status::ok();
}
-void AudioFlinger::TrackHandle::flush() {
+Status AudioFlinger::TrackHandle::flush() {
mTrack->flush();
+ return Status::ok();
}
-void AudioFlinger::TrackHandle::pause() {
+Status AudioFlinger::TrackHandle::pause() {
mTrack->pause();
+ return Status::ok();
}
-status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId)
-{
- return mTrack->attachAuxEffect(EffectId);
+Status AudioFlinger::TrackHandle::attachAuxEffect(int32_t effectId,
+ int32_t* _aidl_return) {
+ *_aidl_return = mTrack->attachAuxEffect(effectId);
+ return Status::ok();
}
-status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) {
- return mTrack->setParameters(keyValuePairs);
+Status AudioFlinger::TrackHandle::setParameters(const std::string& keyValuePairs,
+ int32_t* _aidl_return) {
+ *_aidl_return = mTrack->setParameters(String8(keyValuePairs.c_str()));
+ return Status::ok();
}
-status_t AudioFlinger::TrackHandle::selectPresentation(int presentationId, int programId) {
- return mTrack->selectPresentation(presentationId, programId);
+Status AudioFlinger::TrackHandle::selectPresentation(int32_t presentationId, int32_t programId,
+ int32_t* _aidl_return) {
+ *_aidl_return = mTrack->selectPresentation(presentationId, programId);
+ return Status::ok();
}
-VolumeShaper::Status AudioFlinger::TrackHandle::applyVolumeShaper(
- const sp<VolumeShaper::Configuration>& configuration,
- const sp<VolumeShaper::Operation>& operation) {
- return mTrack->applyVolumeShaper(configuration, operation);
+Status AudioFlinger::TrackHandle::getTimestamp(media::AudioTimestampInternal* timestamp,
+ int32_t* _aidl_return) {
+ AudioTimestamp legacy;
+ *_aidl_return = mTrack->getTimestamp(legacy);
+ if (*_aidl_return != OK) {
+ return Status::ok();
+ }
+ *timestamp = legacy2aidl_AudioTimestamp_AudioTimestampInternal(legacy).value();
+ return Status::ok();
}
-sp<VolumeShaper::State> AudioFlinger::TrackHandle::getVolumeShaperState(int id) {
- return mTrack->getVolumeShaperState(id);
+Status AudioFlinger::TrackHandle::signal() {
+ mTrack->signal();
+ return Status::ok();
}
-status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp)
-{
- return mTrack->getTimestamp(timestamp);
+Status AudioFlinger::TrackHandle::applyVolumeShaper(
+ const media::VolumeShaperConfiguration& configuration,
+ const media::VolumeShaperOperation& operation,
+ int32_t* _aidl_return) {
+ sp<VolumeShaper::Configuration> conf = new VolumeShaper::Configuration();
+ *_aidl_return = conf->readFromParcelable(configuration);
+ if (*_aidl_return != OK) {
+ return Status::ok();
+ }
+
+ sp<VolumeShaper::Operation> op = new VolumeShaper::Operation();
+ *_aidl_return = op->readFromParcelable(operation);
+ if (*_aidl_return != OK) {
+ return Status::ok();
+ }
+
+ *_aidl_return = mTrack->applyVolumeShaper(conf, op);
+ return Status::ok();
}
-
-void AudioFlinger::TrackHandle::signal()
-{
- return mTrack->signal();
-}
-
-status_t AudioFlinger::TrackHandle::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- return BnAudioTrack::onTransact(code, data, reply, flags);
+Status AudioFlinger::TrackHandle::getVolumeShaperState(
+ int32_t id,
+ std::optional<media::VolumeShaperState>* _aidl_return) {
+ sp<VolumeShaper::State> legacy = mTrack->getVolumeShaperState(id);
+ if (legacy == nullptr) {
+ _aidl_return->reset();
+ return Status::ok();
+ }
+ media::VolumeShaperState aidl;
+ legacy->writeToParcelable(&aidl);
+ *_aidl_return = aidl;
+ return Status::ok();
}
// ----------------------------------------------------------------------------
@@ -386,11 +422,12 @@
// static
sp<AudioFlinger::PlaybackThread::OpPlayAudioMonitor>
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
- uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
+ uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType,
+ const std::string& opPackageName)
{
+ Vector <String16> packages;
+ getPackagesForUid(uid, packages);
if (isServiceUid(uid)) {
- Vector <String16> packages;
- getPackagesForUid(uid, packages);
if (packages.isEmpty()) {
ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
id,
@@ -410,12 +447,32 @@
id, attr.flags);
return nullptr;
}
- return new OpPlayAudioMonitor(uid, attr.usage, id);
+
+ String16 opPackageNameStr(opPackageName.c_str());
+ if (opPackageName.empty()) {
+ // If no package name is provided by the client, use the first associated with the uid
+ if (!packages.isEmpty()) {
+ opPackageNameStr = packages[0];
+ }
+ } else {
+ // If the provided package name is invalid, we force app ops denial by clearing the package
+ // name passed to OpPlayAudioMonitor
+ if (std::find_if(packages.begin(), packages.end(),
+ [&opPackageNameStr](const auto& package) {
+ return opPackageNameStr == package; }) == packages.end()) {
+ ALOGW("The package name(%s) provided does not correspond to the uid %d, "
+ "force muting the track", opPackageName.c_str(), uid);
+ // Set package name as an empty string so that hasOpPlayAudio will always return false.
+ opPackageNameStr = String16("");
+ }
+ }
+ return new OpPlayAudioMonitor(uid, attr.usage, id, opPackageNameStr);
}
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
- uid_t uid, audio_usage_t usage, int id)
- : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id)
+ uid_t uid, audio_usage_t usage, int id, const String16& opPackageName)
+ : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id),
+ mOpPackageName(opPackageName)
{
}
@@ -429,11 +486,10 @@
void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
{
- getPackagesForUid(mUid, mPackages);
checkPlayAudioForUsage();
- if (!mPackages.isEmpty()) {
+ if (mOpPackageName.size() != 0) {
mOpCallback = new PlayAudioOpCallback(this);
- mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mPackages[0], mOpCallback);
+ mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mOpPackageName, mOpCallback);
}
}
@@ -446,18 +502,11 @@
// - not called from PlayAudioOpCallback because the callback is not installed in this case
void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::checkPlayAudioForUsage()
{
- if (mPackages.isEmpty()) {
+ if (mOpPackageName.size() == 0) {
mHasOpPlayAudio.store(false);
} else {
- bool hasIt = true;
- for (const String16& packageName : mPackages) {
- const int32_t mode = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
- mUsage, mUid, packageName);
- if (mode != AppOpsManager::MODE_ALLOWED) {
- hasIt = false;
- break;
- }
- }
+ bool hasIt = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
+ mUsage, mUid, mOpPackageName) == AppOpsManager::MODE_ALLOWED;
ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasIt ? "not " : "");
mHasOpPlayAudio.store(hasIt);
}
@@ -511,7 +560,8 @@
audio_output_flags_t flags,
track_type type,
audio_port_handle_t portId,
- size_t frameCountToBeReady)
+ size_t frameCountToBeReady,
+ const std::string opPackageName)
: TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -534,7 +584,8 @@
mPresentationCompleteFrames(0),
mFrameMap(16 /* sink-frame-to-track-frame map memory */),
mVolumeHandler(new media::VolumeHandler(sampleRate)),
- mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr, id(), streamType)),
+ mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
+ uid, attr, id(), streamType, opPackageName)),
// mSinkTimestamp
mFrameCountToBeReady(frameCountToBeReady),
mFastIndex(-1),
@@ -595,14 +646,18 @@
+ "_" + std::to_string(mId) + "_T");
#endif
- if (channelMask & AUDIO_CHANNEL_HAPTIC_ALL) {
+ if (thread->supportsHapticPlayback()) {
+ // If the track is attached to haptic playback thread, it is potentially to have
+ // HapticGenerator effect, which will generate haptic data, on the track. In that case,
+ // external vibration is always created for all tracks attached to haptic playback thread.
mAudioVibrationController = new AudioVibrationController(this);
mExternalVibration = new os::ExternalVibration(
- mUid, "" /* pkg */, mAttr, mAudioVibrationController);
+ mUid, opPackageName, mAttr, mAudioVibrationController);
}
// Once this item is logged by the server, the client can add properties.
- mTrackMetrics.logConstructor(creatorPid, uid, streamType);
+ const char * const traits = sharedBuffer == 0 ? "" : "static";
+ mTrackMetrics.logConstructor(creatorPid, uid, traits, streamType);
}
AudioFlinger::PlaybackThread::Track::~Track()
@@ -804,7 +859,7 @@
status_t status = mServerProxy->obtainBuffer(&buf);
buffer->frameCount = buf.mFrameCount;
buffer->raw = buf.mRaw;
- if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused()) {
+ if (buf.mFrameCount == 0 && !isStopping() && !isStopped() && !isPaused() && !isOffloaded()) {
ALOGV("%s(%d): underrun, framesReady(%zu) < framesDesired(%zd), state: %d",
__func__, mId, buf.mFrameCount, desiredFrames, mState);
mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
@@ -935,6 +990,11 @@
// initial state-stopping. next state-pausing.
// What if resume is called ?
+ if (state == FLUSHED) {
+ // avoid underrun glitches when starting after flush
+ reset();
+ }
+
if (state == PAUSED || state == PAUSING) {
if (mResumeToStopping) {
// happened we need to resume to STOPPING_1
@@ -978,7 +1038,8 @@
mLogStartCountdown = LOG_START_COUNTDOWN;
mLogStartTimeNs = systemTime();
mLogStartFrames = mAudioTrackServerProxy->getTimestamp()
- .mPosition[ExtendedTimestamp::LOCATION_SERVER];
+ .mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ mLogLatencyMs = 0.;
}
if (status == NO_ERROR || status == ALREADY_EXISTS) {
@@ -1514,23 +1575,31 @@
mServerLatencyFromTrack.store(useTrackTimestamp);
mServerLatencyMs.store(latencyMs);
- if (mLogStartCountdown > 0) {
- if (--mLogStartCountdown == 0) {
+ if (mLogStartCountdown > 0
+ && local.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] > 0
+ && local.mPosition[ExtendedTimestamp::LOCATION_KERNEL] > 0)
+ {
+ if (mLogStartCountdown > 1) {
+ --mLogStartCountdown;
+ } else if (latencyMs < mLogLatencyMs) { // wait for latency to stabilize (dip)
+ mLogStartCountdown = 0;
// startup is the difference in times for the current timestamp and our start
double startUpMs =
- (local.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] - mLogStartTimeNs) * 1e-6;
+ (local.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] - mLogStartTimeNs) * 1e-6;
// adjust for frames played.
- startUpMs -= (local.mPosition[ExtendedTimestamp::LOCATION_SERVER] - mLogStartFrames)
- * 1e3 / mSampleRate;
- ALOGV("%s: logging localTime:%lld, startTime:%lld"
- " localPosition:%lld, startPosition:%lld",
- __func__,
- (long long)local.mTimeNs[ExtendedTimestamp::LOCATION_SERVER],
+ startUpMs -= (local.mPosition[ExtendedTimestamp::LOCATION_KERNEL] - mLogStartFrames)
+ * 1e3 / mSampleRate;
+ ALOGV("%s: latencyMs:%lf startUpMs:%lf"
+ " localTime:%lld startTime:%lld"
+ " localPosition:%lld startPosition:%lld",
+ __func__, latencyMs, startUpMs,
+ (long long)local.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
(long long)mLogStartTimeNs,
- (long long)local.mPosition[ExtendedTimestamp::LOCATION_SERVER],
+ (long long)local.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
(long long)mLogStartFrames);
mTrackMetrics.logLatencyAndStartup(latencyMs, startUpMs);
}
+ mLogLatencyMs = latencyMs;
}
}
@@ -2064,7 +2133,7 @@
binder::Status AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
int /*audio_session_t*/ triggerSession) {
ALOGV("%s()", __func__);
- return binder::Status::fromStatusT(
+ return binderStatusFromStatusT(
mRecordTrack->start((AudioSystem::sync_event_t)event, (audio_session_t) triggerSession));
}
@@ -2079,22 +2148,27 @@
}
binder::Status AudioFlinger::RecordHandle::getActiveMicrophones(
- std::vector<media::MicrophoneInfo>* activeMicrophones) {
+ std::vector<media::MicrophoneInfoData>* activeMicrophones) {
ALOGV("%s()", __func__);
- return binder::Status::fromStatusT(
- mRecordTrack->getActiveMicrophones(activeMicrophones));
+ std::vector<media::MicrophoneInfo> mics;
+ status_t status = mRecordTrack->getActiveMicrophones(&mics);
+ activeMicrophones->resize(mics.size());
+ for (size_t i = 0; status == OK && i < mics.size(); ++i) {
+ status = mics[i].writeToParcelable(&activeMicrophones->at(i));
+ }
+ return binderStatusFromStatusT(status);
}
binder::Status AudioFlinger::RecordHandle::setPreferredMicrophoneDirection(
int /*audio_microphone_direction_t*/ direction) {
ALOGV("%s()", __func__);
- return binder::Status::fromStatusT(mRecordTrack->setPreferredMicrophoneDirection(
+ return binderStatusFromStatusT(mRecordTrack->setPreferredMicrophoneDirection(
static_cast<audio_microphone_direction_t>(direction)));
}
binder::Status AudioFlinger::RecordHandle::setPreferredMicrophoneFieldDimension(float zoom) {
ALOGV("%s()", __func__);
- return binder::Status::fromStatusT(mRecordTrack->setPreferredMicrophoneFieldDimension(zoom));
+ return binderStatusFromStatusT(mRecordTrack->setPreferredMicrophoneFieldDimension(zoom));
}
// ----------------------------------------------------------------------------
@@ -2216,7 +2290,8 @@
RecordThread *recordThread = (RecordThread *)thread.get();
return recordThread->start(this, event, triggerSession);
} else {
- return BAD_VALUE;
+ ALOGW("%s track %d: thread was destroyed", __func__, portId());
+ return DEAD_OBJECT;
}
}
diff --git a/services/audioflinger/TypedLogger.h b/services/audioflinger/TypedLogger.h
index 6ef19bf..feb71e3 100644
--- a/services/audioflinger/TypedLogger.h
+++ b/services/audioflinger/TypedLogger.h
@@ -80,7 +80,7 @@
// TODO Permit disabling of logging at compile-time.
-// TODO A non-nullptr dummy implementation that is a nop would be faster than checking for nullptr
+// TODO A non-nullptr stub implementation that is a nop would be faster than checking for nullptr
// in the case when logging is enabled at compile-time and enabled at runtime, but it might be
// slower than nullptr check when logging is enabled at compile-time and disabled at runtime.
@@ -129,8 +129,8 @@
namespace android {
extern "C" {
-// TODO consider adding a thread_local NBLog::Writer tlDummyNBLogWriter and then
-// initialize below tlNBLogWriter to &tlDummyNBLogWriter to remove the need to
+// TODO consider adding a thread_local NBLog::Writer tlStubNBLogWriter and then
+// initialize below tlNBLogWriter to &tlStubNBLogWriter to remove the need to
// check for nullptr every time. Also reduces the need to add a new logging macro above
// each time we want to log a new type.
extern thread_local NBLog::Writer *tlNBLogWriter;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8d0e5db..f753836 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -220,16 +220,16 @@
virtual status_t dump(int fd) = 0;
virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
- virtual bool isOffloadSupported(const audio_offload_info_t& offloadInfo) = 0;
+ virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& offloadInfo) = 0;
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
const audio_attributes_t& attributes) = 0;
virtual status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation) = 0;
- virtual status_t getAudioPort(struct audio_port *port) = 0;
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
virtual status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
uid_t uid) = 0;
@@ -250,12 +250,12 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
- virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
= 0;
virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
virtual status_t setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices) = 0;
+ const AudioDeviceTypeAddrVector& devices) = 0;
virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
virtual status_t startAudioSource(const struct audio_port_config *source,
@@ -295,13 +295,36 @@
virtual bool isCallScreenModeSupported() = 0;
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device) = 0;
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role) = 0;
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) = 0;
+
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ virtual status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices) = 0;
+
+ virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role) = 0;
+
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices) = 0;
};
@@ -421,6 +444,8 @@
// sessions to be preempted on modules that do not support sound trigger
// recognition concurrently with audio capture.
virtual void setSoundTriggerCaptureState(bool active) = 0;
+
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
};
extern "C" AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface);
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index a8483fa..da9d32f 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,3 +1,2 @@
jmtrivi@google.com
-krocard@google.com
mnaganov@google.com
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 7c8ce83..736f8b2 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -126,6 +126,7 @@
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
case AUDIO_DEVICE_OUT_USB_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
return DEVICE_CATEGORY_HEADSET;
case AUDIO_DEVICE_OUT_HEARING_AID:
return DEVICE_CATEGORY_HEARING_AID;
@@ -139,6 +140,7 @@
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
case AUDIO_DEVICE_OUT_USB_ACCESSORY:
case AUDIO_DEVICE_OUT_REMOTE_SUBMIX:
+ case AUDIO_DEVICE_OUT_BLE_SPEAKER:
default:
return DEVICE_CATEGORY_SPEAKER;
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index 6f47abc..a40f6aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -72,7 +72,7 @@
const struct audio_port_config *srcConfig = NULL) const;
virtual sp<AudioPort> getAudioPort() const { return mProfile; }
- void toAudioPort(struct audio_port *port) const;
+ void toAudioPort(struct audio_port_v7 *port) const;
void setPreemptedSessions(const SortedVector<audio_session_t>& sessions);
SortedVector<audio_session_t> getPreemptedSessions() const;
bool hasPreemptedSession(audio_session_t session) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 39d1140..5153dce 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -182,6 +182,7 @@
* Active ref count of the client will be incremented/decremented through setActive API
*/
virtual void setClientActive(const sp<TrackClientDescriptor>& client, bool active);
+ bool isClientActive(const sp<TrackClientDescriptor>& client);
bool isActive(uint32_t inPastMs) const;
bool isActive(VolumeSource volumeSource = VOLUME_SOURCE_NONE,
@@ -260,7 +261,7 @@
const struct audio_port_config *srcConfig = NULL) const;
virtual sp<AudioPort> getAudioPort() const { return mPolicyAudioPort->asAudioPort(); }
- virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
audio_module_handle_t getModuleHandle() const;
@@ -357,7 +358,7 @@
virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig = NULL) const;
- virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
status_t open(const audio_config_t *config,
const DeviceVector &devices,
@@ -431,7 +432,7 @@
virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig = NULL) const;
- virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
const sp<SourceClientDescriptor> mSource;
@@ -498,11 +499,6 @@
*/
bool isA2dpOffloadedOnPrimary() const;
- /**
- * returns true if A2DP is supported (either via hardware offload or software encoding)
- */
- bool isA2dpSupported() const;
-
sp<SwAudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const;
sp<SwAudioOutputDescriptor> getPrimaryOutput() const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 395bc70..cf1f64c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -33,6 +33,15 @@
namespace android {
+// This class gathers together various bits of AudioPolicyManager
+// configuration, which are usually filled out as a result of parsing
+// the audio_policy_configuration.xml file.
+//
+// Note that AudioPolicyConfig doesn't own some of the data,
+// it simply proxies access to the fields of AudioPolicyManager
+// class. Be careful about the fields that are references,
+// e.g. 'mOutputDevices'. This also means that it's impossible
+// to implement "deep copying" of this class without re-designing it.
class AudioPolicyConfig
{
public:
@@ -40,14 +49,24 @@
DeviceVector &outputDevices,
DeviceVector &inputDevices,
sp<DeviceDescriptor> &defaultOutputDevice)
- : mEngineLibraryNameSuffix(kDefaultEngineLibraryNameSuffix),
- mHwModules(hwModules),
+ : mHwModules(hwModules),
mOutputDevices(outputDevices),
mInputDevices(inputDevices),
- mDefaultOutputDevice(defaultOutputDevice),
- mIsSpeakerDrcEnabled(false),
- mIsCallScreenModeSupported(false)
- {}
+ mDefaultOutputDevice(defaultOutputDevice) {
+ clear();
+ }
+
+ void clear() {
+ mSource = {};
+ mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+ mHwModules.clear();
+ mOutputDevices.clear();
+ mInputDevices.clear();
+ mDefaultOutputDevice.clear();
+ mIsSpeakerDrcEnabled = false;
+ mIsCallScreenModeSupported = false;
+ mSurroundFormats.clear();
+ }
const std::string& getSource() const {
return mSource;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index b82305d..c6bdb04 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -101,7 +101,7 @@
* An example of failure is when there are already rules in place to restrict
* a mix to the given uid (i.e. when a MATCH_UID rule was set for it).
*/
- status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+ status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
status_t removeUidDeviceAffinities(uid_t uid);
status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
@@ -115,7 +115,7 @@
* An example of failure is when there are already rules in place to restrict
* a mix to the given userId (i.e. when a MATCH_USERID rule was set for it).
*/
- status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+ status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
status_t removeUserIdDeviceAffinities(int userId);
status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0c5d1d0..80afe9d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -35,6 +35,7 @@
namespace android {
+class AudioPolicyMix;
class DeviceDescriptor;
class HwAudioOutputDescriptor;
class SwAudioOutputDescriptor;
@@ -90,11 +91,12 @@
product_strategy_t strategy, VolumeSource volumeSource,
audio_output_flags_t flags,
bool isPreferredDeviceForExclusiveUse,
- std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs) :
+ std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs,
+ wp<AudioPolicyMix> primaryMix) :
ClientDescriptor(portId, uid, sessionId, attributes, config, preferredDeviceId,
isPreferredDeviceForExclusiveUse),
mStream(stream), mStrategy(strategy), mVolumeSource(volumeSource), mFlags(flags),
- mSecondaryOutputs(std::move(secondaryOutputs)) {}
+ mSecondaryOutputs(std::move(secondaryOutputs)), mPrimaryMix(primaryMix) {}
~TrackClientDescriptor() override = default;
using ClientDescriptor::dump;
@@ -108,6 +110,12 @@
return mSecondaryOutputs;
};
VolumeSource volumeSource() const { return mVolumeSource; }
+ const sp<AudioPolicyMix> getPrimaryMix() const {
+ return mPrimaryMix.promote();
+ };
+ bool hasLostPrimaryMix() const {
+ return mPrimaryMix.unsafe_get() && !mPrimaryMix.promote();
+ }
void setActive(bool active) override
{
@@ -136,7 +144,7 @@
const VolumeSource mVolumeSource;
const audio_output_flags_t mFlags;
const std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
-
+ const wp<AudioPolicyMix> mPrimaryMix;
/**
* required for duplicating thread, prevent from removing active client from an output
* involved in a duplication.
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index dd1499c..7c712e3 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -85,6 +85,7 @@
// AudioPort
virtual void toAudioPort(struct audio_port *port) const;
+ virtual void toAudioPort(struct audio_port_v7 *port) const;
void importAudioPortAndPickAudioProfile(const sp<PolicyAudioPort>& policyPort,
bool force = false);
@@ -94,6 +95,13 @@
void dump(String8 *dst, int spaces, int index, bool verbose = true) const;
private:
+ template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+ || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+ void toAudioPortInternal(T* port) const {
+ DeviceDescriptorBase::toAudioPort(port);
+ port->ext.device.hw_module = getModuleHandle();
+ }
+
std::string mTagName; // Unique human readable identifier for a device port found in conf file.
FormatVector mEncodedFormats;
audio_format_t mCurrentEncodedFormat;
@@ -146,6 +154,15 @@
// 4) the combination of all devices is invalid for selection
sp<DeviceDescriptor> getDeviceForOpening() const;
+ // Return the device descriptor that matches the given AudioDeviceTypeAddr
+ sp<DeviceDescriptor> getDeviceFromDeviceTypeAddr(
+ const AudioDeviceTypeAddr& deviceTypeAddr) const;
+
+ // Return the device vector that contains device descriptor whose AudioDeviceTypeAddr appears
+ // in the given AudioDeviceTypeAddrVector
+ DeviceVector getDevicesFromDeviceTypeAddrVec(
+ const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const;
+
// If there are devices with the given type and the devices to add is not empty,
// remove all the devices with the given type and add all the devices to add.
void replaceDevicesByType(audio_devices_t typeToRemove, const DeviceVector &devicesToAdd);
@@ -248,7 +265,9 @@
return String8("");
}
- std::string toString() const;
+ // Return a string to describe the DeviceVector. The sensitive information will only be
+ // added to the string if `includeSensitiveInfo` is true.
+ std::string toString(bool includeSensitiveInfo = false) const;
void dump(String8 *dst, const String8 &tag, int spaces = 0, bool verbose = true) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c4eab30..59eee52 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -72,6 +72,9 @@
audio_io_handle_t dstOutput);
void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
+ audio_io_handle_t getIoForSession(audio_session_t sessionId,
+ const effect_uuid_t *effectType = nullptr);
+
void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
private:
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 5f551d5..621c630 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -131,7 +131,7 @@
bool devicesSupportEncodedFormats(DeviceTypeSet deviceTypes) const
{
if (deviceTypes.empty()) {
- return true; // required for isOffloadSupported() check
+ return true; // required for getOffloadSupport() check
}
DeviceVector deviceList =
mSupportedDevices.getDevicesFromTypes(deviceTypes);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index b963121..7016a08 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -92,7 +92,7 @@
dstConfig->ext.mix.usecase.source = source();
}
-void AudioInputDescriptor::toAudioPort(struct audio_port *port) const
+void AudioInputDescriptor::toAudioPort(struct audio_port_v7 *port) const
{
ALOG_ASSERT(mProfile != 0, "toAudioPort() called on input with null profile %d", mIoHandle);
@@ -516,7 +516,7 @@
dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
dst->appendFormat(" Format: %d\n", mFormat);
dst->appendFormat(" Channels: %08x\n", mChannelMask);
- dst->appendFormat(" Devices %s\n", mDevice->toString().c_str());
+ dst->appendFormat(" Devices %s\n", mDevice->toString(true /*includeSensitiveInfo*/).c_str());
mEnabledEffects.dump(dst, 1 /*spaces*/, false /*verbose*/);
dst->append(" AudioRecord Clients:\n");
ClientMapHandler<RecordClientDescriptor>::dump(dst);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d5272bc..c4d7340 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -123,6 +123,12 @@
client->setActive(active);
}
+bool AudioOutputDescriptor::isClientActive(const sp<TrackClientDescriptor>& client)
+{
+ return client != nullptr &&
+ std::find(begin(mActiveClients), end(mActiveClients), client) != end(mActiveClients);
+}
+
bool AudioOutputDescriptor::isActive(VolumeSource vs, uint32_t inPastMs, nsecs_t sysTime) const
{
return (vs == VOLUME_SOURCE_NONE) ?
@@ -209,7 +215,7 @@
dstConfig->ext.mix.usecase.stream = AUDIO_STREAM_DEFAULT;
}
-void AudioOutputDescriptor::toAudioPort(struct audio_port *port) const
+void AudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
{
// Should not be called for duplicated ports, see SwAudioOutputDescriptor::toAudioPortConfig.
mPolicyAudioPort->asAudioPort()->toAudioPort(port);
@@ -245,7 +251,7 @@
dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
dst->appendFormat(" Format: %08x\n", mFormat);
dst->appendFormat(" Channels: %08x\n", mChannelMask);
- dst->appendFormat(" Devices: %s\n", devices().toString().c_str());
+ dst->appendFormat(" Devices: %s\n", devices().toString(true /*includeSensitiveInfo*/).c_str());
dst->appendFormat(" Global active count: %u\n", mGlobalActiveCount);
for (const auto &iter : mRoutingActivities) {
dst->appendFormat(" Product Strategy id: %d", iter.first);
@@ -400,8 +406,7 @@
dstConfig->ext.mix.handle = mIoHandle;
}
-void SwAudioOutputDescriptor::toAudioPort(
- struct audio_port *port) const
+void SwAudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
{
ALOG_ASSERT(!isDuplicated(), "toAudioPort() called on duplicated output %d", mIoHandle);
@@ -648,8 +653,7 @@
mSource->srcDevice()->toAudioPortConfig(dstConfig, srcConfig);
}
-void HwAudioOutputDescriptor::toAudioPort(
- struct audio_port *port) const
+void HwAudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
{
mSource->srcDevice()->toAudioPort(port);
}
@@ -690,7 +694,9 @@
const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
if (outputDesc->isActive(volumeSource, inPastMs, sysTime)
&& (!(outputDesc->devices()
- .containsDeviceAmongTypes(getAllOutRemoteDevices())))) {
+ .containsDeviceAmongTypes(getAllOutRemoteDevices())
+ || outputDesc->devices()
+ .onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)))) {
return true;
}
}
@@ -722,7 +728,11 @@
const sp<SwAudioOutputDescriptor> otherDesc = valueAt(i);
if (desc->sharesHwModuleWith(otherDesc) &&
otherDesc->isStrategyActive(ps, inPastMs, sysTime)) {
- return true;
+ if (desc == otherDesc
+ || !otherDesc->devices()
+ .onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
+ return true;
+ }
}
}
return false;
@@ -758,11 +768,6 @@
return false;
}
-bool SwAudioOutputCollection::isA2dpSupported() const
-{
- return (isA2dpOffloadedOnPrimary() || (getA2dpOutput() != 0));
-}
-
sp<SwAudioOutputDescriptor> SwAudioOutputCollection::getPrimaryOutput() const
{
for (size_t i = 0; i < size(); i++) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index fc1a59f..fc1d0e2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -28,7 +28,7 @@
void AudioPolicyMix::dump(String8 *dst, int spaces, int index) const
{
- dst->appendFormat("%*sAudio Policy Mix %d:\n", spaces, "", index + 1);
+ dst->appendFormat("%*sAudio Policy Mix %d (%p):\n", spaces, "", index + 1, this);
std::string mixTypeLiteral;
if (!MixTypeConverter::toString(mMixType, mixTypeLiteral)) {
ALOGE("%s: failed to convert mix type %d", __FUNCTION__, mMixType);
@@ -44,6 +44,9 @@
dst->appendFormat("%*s- device address: %s\n", spaces, "", mDeviceAddress.string());
+ dst->appendFormat("%*s- output: %d\n", spaces, "",
+ mOutput == nullptr ? 0 : mOutput->mIoHandle);
+
int indexCriterion = 0;
for (const auto &criterion : mCriteria) {
dst->appendFormat("%*s- Criterion %d: ", spaces + 2, "", indexCriterion++);
@@ -460,7 +463,7 @@
}
status_t AudioPolicyMixCollection::setUidDeviceAffinities(uid_t uid,
- const Vector<AudioDeviceTypeAddr>& devices) {
+ const AudioDeviceTypeAddrVector& devices) {
// verify feasibility: for each player mix: if it already contains a
// "match uid" rule for this uid, return an error
// (adding a uid-device affinity would result in contradictory rules)
@@ -562,7 +565,7 @@
}
status_t AudioPolicyMixCollection::setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices) {
+ const AudioDeviceTypeAddrVector& devices) {
// verify feasibility: for each player mix: if it already contains a
// "match userId" rule for this userId, return an error
// (adding a userId-device affinity would result in contradictory rules)
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 95822b9..afc4d01 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -23,6 +23,7 @@
#include <TypeConverter.h>
#include "AudioOutputDescriptor.h"
#include "AudioPatch.h"
+#include "AudioPolicyMix.h"
#include "ClientDescriptor.h"
#include "DeviceDescriptor.h"
#include "HwModule.h"
@@ -55,6 +56,12 @@
ClientDescriptor::dump(dst, spaces, index);
dst->appendFormat("%*s- Stream: %d flags: %08x\n", spaces, "", mStream, mFlags);
dst->appendFormat("%*s- Refcount: %d\n", spaces, "", mActivityCount);
+ dst->appendFormat("%*s- DAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
+ dst->appendFormat("%*s- DAP Secondary Outputs:\n", spaces, "");
+ for (auto desc : mSecondaryOutputs) {
+ dst->appendFormat("%*s - %d\n", spaces, "",
+ desc.promote() == nullptr ? 0 : desc.promote()->mIoHandle);
+ }
}
std::string TrackClientDescriptor::toShortString() const
@@ -88,7 +95,7 @@
TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
{config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
- {} /* Sources do not support secondary outputs*/), mSrcDevice(srcDevice)
+ {} /* Sources do not support secondary outputs*/, nullptr), mSrcDevice(srcDevice)
{
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index a29e60e..30b739c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -155,8 +155,12 @@
void DeviceDescriptor::toAudioPort(struct audio_port *port) const
{
ALOGV("DeviceDescriptor::toAudioPort() handle %d type %08x", mId, mDeviceTypeAddr.mType);
- DeviceDescriptorBase::toAudioPort(port);
- port->ext.device.hw_module = getModuleHandle();
+ toAudioPortInternal(port);
+}
+
+void DeviceDescriptor::toAudioPort(struct audio_port_v7 *port) const {
+ ALOGV("DeviceDescriptor::toAudioPort() v7 handle %d type %08x", mId, mDeviceTypeAddr.mType);
+ toAudioPortInternal(port);
}
void DeviceDescriptor::importAudioPortAndPickAudioProfile(
@@ -227,6 +231,7 @@
{
bool added = false;
for (const auto& device : devices) {
+ ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
added = true;
}
@@ -238,6 +243,7 @@
ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
{
+ ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
ssize_t ret = indexOf(item);
if (ret < 0) {
@@ -375,7 +381,7 @@
if (isEmpty()) {
// Return nullptr if this collection is empty.
return nullptr;
- } else if (areAllOfSameDeviceType(types(), audio_is_input_device)) {
+ } else if (areAllOfSameDeviceType(types(), audio_call_is_input_device)) {
// For input case, return the first one when there is only one device.
return size() > 1 ? nullptr : *begin();
} else if (areAllOfSameDeviceType(types(), audio_is_output_device)) {
@@ -388,6 +394,24 @@
return nullptr;
}
+sp<DeviceDescriptor> DeviceVector::getDeviceFromDeviceTypeAddr(
+ const AudioDeviceTypeAddr& deviceTypeAddr) const {
+ return getDevice(deviceTypeAddr.mType, String8(deviceTypeAddr.getAddress()),
+ AUDIO_FORMAT_DEFAULT);
+}
+
+DeviceVector DeviceVector::getDevicesFromDeviceTypeAddrVec(
+ const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const {
+ DeviceVector devices;
+ for (const auto& deviceTypeAddr : deviceTypeAddrVector) {
+ sp<DeviceDescriptor> device = getDeviceFromDeviceTypeAddr(deviceTypeAddr);
+ if (device != nullptr) {
+ devices.add(device);
+ }
+ }
+ return devices;
+}
+
void DeviceVector::replaceDevicesByType(
audio_devices_t typeToRemove, const DeviceVector &devicesToAdd) {
DeviceVector devicesToRemove = getDevicesFromType(typeToRemove);
@@ -408,7 +432,7 @@
}
}
-std::string DeviceVector::toString() const
+std::string DeviceVector::toString(bool includeSensitiveInfo) const
{
if (isEmpty()) {
return {"AUDIO_DEVICE_NONE"};
@@ -418,7 +442,7 @@
if (device != *begin()) {
result += ";";
}
- result += device->toString();
+ result += device->toString(includeSensitiveInfo);
}
return result + "}";
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 415962a..843f5da 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -202,6 +202,19 @@
}
}
+audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
+ const effect_uuid_t *effectType)
+{
+ for (size_t i = 0; i < size(); ++i) {
+ sp<EffectDescriptor> effect = valueAt(i);
+ if (effect->mSession == sessionId && (effectType == nullptr ||
+ memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
+ return effect->mIo;
+ }
+ }
+ return AUDIO_IO_HANDLE_NONE;
+}
+
EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
{
EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index bf1a0f7..ae92b40 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -17,7 +17,7 @@
#define LOG_TAG "APM::IOProfile"
//#define LOG_NDEBUG 0
-#include <system/audio-base.h>
+#include <system/audio.h>
#include "IOProfile.h"
#include "HwModule.h"
#include "TypeConverter.h"
@@ -112,12 +112,11 @@
dst->append(portStr.c_str());
dst->appendFormat(" - flags: 0x%04x", getFlags());
- std::string flagsLiteral;
- if (getRole() == AUDIO_PORT_ROLE_SINK) {
- InputFlagConverter::maskToString(getFlags(), flagsLiteral);
- } else if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
- OutputFlagConverter::maskToString(getFlags(), flagsLiteral);
- }
+ std::string flagsLiteral =
+ getRole() == AUDIO_PORT_ROLE_SINK ?
+ toString(static_cast<audio_input_flags_t>(getFlags())) :
+ getRole() == AUDIO_PORT_ROLE_SOURCE ?
+ toString(static_cast<audio_output_flags_t>(getFlags())) : "";
if (!flagsLiteral.empty()) {
dst->appendFormat(" (%s)", flagsLiteral.c_str());
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 883e713..0cc3a68 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -254,9 +254,8 @@
constexpr void (*xmlDeleter)(T* t);
template <>
constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
-// http://b/111067277 - Add back constexpr when we switch to C++17.
template <>
-auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
/** @return a unique_ptr with the correct deleter for the libxml2 object. */
template <class T>
@@ -337,7 +336,7 @@
std::string mode = getXmlAttribute(cur, Attributes::mode);
if (!mode.empty()) {
- gain->setMode(GainModeConverter::maskFromString(mode));
+ gain->setMode(GainModeConverter::maskFromString(mode, " "));
}
std::string channelsLiteral = getXmlAttribute(cur, Attributes::channelMask);
@@ -501,7 +500,7 @@
AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
audio_devices_t type = AUDIO_DEVICE_NONE;
- if (!deviceFromString(typeName, type) ||
+ if (!DeviceConverter::fromString(typeName, type) ||
(!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
(!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
ALOGW("%s: bad type %08x", __func__, type);
@@ -804,7 +803,9 @@
status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config)
{
PolicySerializer serializer;
- return serializer.deserialize(fileName, config);
+ status_t status = serializer.deserialize(fileName, config);
+ if (status != OK) config->clear();
+ return status;
}
} // namespace android
diff --git a/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..2d323f6
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- A2dp Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+ <mixPorts>
+ <mixPort name="a2dp output" role="source"/>
+ <mixPort name="a2dp input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="BT A2DP Out"
+ sources="a2dp output"/>
+ <route type="mix" sink="BT A2DP Headphones"
+ sources="a2dp output"/>
+ <route type="mix" sink="BT A2DP Speaker"
+ sources="a2dp output"/>
+ <route type="mix" sink="a2dp input"
+ sources="BT A2DP In"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..d59ad70
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Input Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+ <mixPorts>
+ <mixPort name="a2dp input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="a2dp input"
+ sources="BT A2DP In"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/config/audio_policy_configuration.xml b/services/audiopolicy/config/audio_policy_configuration.xml
index b28381b..dcdc035 100644
--- a/services/audiopolicy/config/audio_policy_configuration.xml
+++ b/services/audiopolicy/config/audio_policy_configuration.xml
@@ -91,7 +91,7 @@
<!-- Output devices declaration, i.e. Sink DEVICE PORT -->
<devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
- samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
</devicePort>
<devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
diff --git a/services/audiopolicy/config/audio_policy_configuration_7_0.xml b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..31c8954
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <!-- version section contains a “version” tag in the form “major.minor” e.g version=”1.0” -->
+
+    <!-- Global configuration Declaration -->
+ <globalConfiguration speaker_drc_enabled="true"/>
+
+
+ <!-- Modules section:
+ There is one section per audio HW module present on the platform.
+ Each module section will contains two mandatory tags for audio HAL “halVersion” and “name”.
+ The module names are the same as in current .conf file:
+ “primary”, “A2DP”, “remote_submix”, “USB”
+ Each module will contain the following sections:
+ “devicePorts”: a list of device descriptors for all input and output devices accessible via this
+ module.
+ This contains both permanently attached devices and removable devices.
+ “mixPorts”: listing all output and input streams exposed by the audio HAL
+ “routes”: list of possible connections between input and output devices or between stream and
+ devices.
+ "route": is defined by an attribute:
+ -"type": <mux|mix> means all sources are mutual exclusive (mux) or can be mixed (mix)
+ -"sink": the sink involved in this route
+                -"sources": all the sources that can be connected to the sink via this route
+ “attachedDevices”: permanently attached devices.
+ The attachedDevices section is a list of devices names. The names correspond to device names
+ defined in <devicePorts> section.
+ “defaultOutputDevice”: device to be used by default when no policy rule applies
+ -->
+ <modules>
+ <!-- Primary Audio HAL -->
+ <module name="primary" halVersion="3.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ <item>Built-In Mic</item>
+ <item>Built-In Back Mic</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="deep_buffer" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DEEP_BUFFER">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="compressed_offload" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+ <profile name="" format="AUDIO_FORMAT_MP3"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+ <profile name="" format="AUDIO_FORMAT_AAC"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+ <profile name="" format="AUDIO_FORMAT_AAC_LC"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+ </mixPort>
+ <mixPort name="voice_tx" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </mixPort>
+ <mixPort name="voice_rx" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
+ <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </devicePort>
+ <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="gain_1" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-8400"
+ maxValueMB="4000"
+ defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="Wired Headset" type="AUDIO_DEVICE_OUT_WIRED_HEADSET" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="Wired Headphones" type="AUDIO_DEVICE_OUT_WIRED_HEADPHONE" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </devicePort>
+ <devicePort tagName="BT SCO Headset" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </devicePort>
+ <devicePort tagName="BT SCO Car Kit" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </devicePort>
+ <devicePort tagName="Telephony Tx" type="AUDIO_DEVICE_OUT_TELEPHONY_TX" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+ </devicePort>
+
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="Wired Headset Mic" type="AUDIO_DEVICE_IN_WIRED_HEADSET" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ </devicePort>
+ <devicePort tagName="Telephony Rx" type="AUDIO_DEVICE_IN_TELEPHONY_RX" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ </devicePort>
+ </devicePorts>
+ <!-- route declaration, i.e. list all available sources for a given sink -->
+ <routes>
+ <route type="mix" sink="Earpiece"
+ sources="primary output,deep_buffer,BT SCO Headset Mic"/>
+ <route type="mix" sink="Speaker"
+ sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+ <route type="mix" sink="Wired Headset"
+ sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+ <route type="mix" sink="Wired Headphones"
+ sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic"/>
+ <route type="mix" sink="Telephony Tx"
+ sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic, voice_tx"/>
+ <route type="mix" sink="voice_rx"
+ sources="Telephony Rx"/>
+ </routes>
+
+ </module>
+
+ <!-- A2dp Input Audio HAL -->
+ <xi:include href="a2dp_in_audio_policy_configuration_7_0.xml"/>
+
+ <!-- Usb Audio HAL -->
+ <xi:include href="usb_audio_policy_configuration.xml"/>
+
+ <!-- Remote Submix Audio HAL -->
+ <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+ <!-- Bluetooth Audio HAL -->
+ <xi:include href="bluetooth_audio_policy_configuration_7_0.xml"/>
+
+ <!-- MSD Audio HAL (optional) -->
+ <xi:include href="msd_audio_policy_configuration_7_0.xml"/>
+
+ </modules>
+ <!-- End of Modules section -->
+
+ <!-- Volume section:
+ IMPORTANT NOTE: Volume tables have been moved to engine configuration.
+ Keep it here for legacy.
+ Engine will fallback on these files if none are provided by engine.
+ -->
+
+ <xi:include href="audio_policy_volumes.xml"/>
+ <xi:include href="default_volume_tables.xml"/>
+
+ <!-- End of Volume section -->
+
+ <!-- Surround Sound configuration -->
+
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+ <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..2dffe02
--- /dev/null
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.0">
+ <mixPorts>
+ <!-- A2DP Audio Ports -->
+ <mixPort name="a2dp output" role="source"/>
+ <!-- Hearing AIDs Audio Ports -->
+ <mixPort name="hearing aid output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="24000 16000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <!-- A2DP Audio Ports -->
+ <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000 88200 96000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000 88200 96000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100 48000 88200 96000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <!-- Hearing AIDs Audio Ports -->
+ <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="BT A2DP Out"
+ sources="a2dp output"/>
+ <route type="mix" sink="BT A2DP Headphones"
+ sources="a2dp output"/>
+ <route type="mix" sink="BT A2DP Speaker"
+ sources="a2dp output"/>
+ <route type="mix" sink="BT Hearing Aid Out"
+ sources="hearing aid output"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..8c364e4
--- /dev/null
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
+<module name="hearing_aid" halVersion="2.0">
+ <mixPorts>
+ <mixPort name="hearing aid output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="24000 16000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="BT Hearing Aid Out" sources="hearing aid output"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..f167f0b
--- /dev/null
+++ b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2017-2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- Multi Stream Decoder Audio Policy Configuration file -->
+<module name="msd" halVersion="2.0">
+ <attachedDevices>
+ <item>MS12 Input</item>
+ <item>MS12 Output</item>
+ </attachedDevices>
+ <mixPorts>
+ <mixPort name="ms12 input" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="ms12 compressed input" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+ <profile name="" format="AUDIO_FORMAT_AC3"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_AC4"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ </mixPort>
+ <!-- The HW AV Sync flag is not required, but is recommended -->
+ <mixPort name="ms12 output" role="sink" flags="AUDIO_INPUT_FLAG_HW_AV_SYNC AUDIO_INPUT_FLAG_DIRECT">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_AC3"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="MS12 Input" type="AUDIO_DEVICE_OUT_BUS" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_AC3"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_AC4"
+ samplingRates="32000 44100 48000"
+ channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+ </devicePort>
+ <devicePort tagName="MS12 Output" type="AUDIO_DEVICE_IN_BUS" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="MS12 Input" sources="ms12 input,ms12 compressed input"/>
+ <route type="mix" sink="ms12 output" sources="MS12 Output"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..68a56b2
--- /dev/null
+++ b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Default Primary Audio HAL Module Audio Policy Configuration include file -->
+<module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ <item>Built-In Mic</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="44100" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+ </devicePort>
+
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Speaker"
+ sources="primary output"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
old mode 100755
new mode 100644
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
old mode 100755
new mode 100644
index 7f339dc..3e42e2d
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -93,13 +93,13 @@
void dump(String8 *dst) const override;
- status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device) override;
+ status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
- status_t removePreferredDeviceForStrategy(product_strategy_t strategy) override;
+ status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
- status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) const override;
+ status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+ AudioDeviceTypeAddrVector &devices) const override;
engineConfig::ParsingResult loadAudioPolicyEngineConfig();
@@ -127,11 +127,52 @@
status_t restoreOriginVolumeCurve(audio_stream_type_t stream);
+ status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
+
+ status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
+
+ /**
+     * Remove devices role for capture preset. When `forceMatched` is true, the devices to be
+     * removed must all be present as role for the capture preset. Otherwise, only devices that
+     * have been set as role for the capture preset will be removed.
+ */
+ status_t doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector& devices,
+ bool forceMatched=true);
+
+ status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector& devices) override;
+
+ status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role) override;
+
+ status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role, AudioDeviceTypeAddrVector &devices) const override;
+
+ DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const override;
+
private:
+    /**
+     * Get media devices that have the given role
+     *
+     * @param role the audio devices role
+ * @param availableDevices all available devices
+ * @param devices the DeviceVector to store devices as the given role
+     * @return NO_ERROR if all devices associated to the given role are present in available devices
+     *         NAME_NOT_FOUND if there is no strategy for media or there are no devices associated
+     *         with the given role
+     *         NOT_ENOUGH_DATA if not all devices as given role are present in available devices
+ */
+ status_t getMediaDevicesForRole(device_role_t role, const DeviceVector& availableDevices,
+ DeviceVector& devices) const;
+
AudioPolicyManagerObserver *mApmObserver = nullptr;
ProductStrategyMap mProductStrategies;
ProductStrategyPreferredRoutingMap mProductStrategyPreferredDevices;
+ CapturePresetDevicesRoleMap mCapturePresetDevicesRole;
VolumeGroupMap mVolumeGroups;
LastRemovableMediaDevices mLastRemovableMediaDevices;
audio_mode_t mPhoneState = AUDIO_MODE_NORMAL; /**< current phone state. */
diff --git a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
old mode 100755
new mode 100644
index a3053a4..d7f8b1e
--- a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
+++ b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
@@ -36,6 +36,8 @@
void setRemovableMediaDevices(sp<DeviceDescriptor> desc, audio_policy_dev_state_t state);
std::vector<audio_devices_t> getLastRemovableMediaDevices(
device_out_group_t group = GROUP_NONE) const;
+ sp<DeviceDescriptor> getLastRemovableMediaDevice(
+ const DeviceVector& excludedDevices, device_out_group_t group = GROUP_NONE) const;
private:
struct DeviceGroupDescriptor {
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 3ebe7d1..c505456 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -28,8 +28,11 @@
#include <utils/String8.h>
#include <media/AudioAttributes.h>
#include <media/AudioContainers.h>
+#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioPolicy.h>
+#include <vector>
+
namespace android {
/**
@@ -164,7 +167,8 @@
product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
};
-class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t, AudioDeviceTypeAddr>
+class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t,
+ AudioDeviceTypeAddrVector>
{
public:
void dump(String8 *dst, int spaces = 0) const;
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 1bc7fe3..2137dd0 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -339,8 +339,8 @@
return NO_ERROR;
}
-status_t EngineBase::setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device)
+status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
{
// verify strategy exists
if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
@@ -348,11 +348,24 @@
return BAD_VALUE;
}
- mProductStrategyPreferredDevices[strategy] = device;
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ mProductStrategyPreferredDevices[strategy] = devices;
+ break;
+ case DEVICE_ROLE_DISABLED:
+ // TODO: support set devices role as disabled for strategy.
+ ALOGI("%s no implemented for role as %d", __func__, role);
+ break;
+ case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set device role as none for a strategy.
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
return NO_ERROR;
}
-status_t EngineBase::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
{
// verify strategy exists
if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
@@ -360,32 +373,260 @@
return BAD_VALUE;
}
- if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
- // no preferred device was set
- return NAME_NOT_FOUND;
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
+ // no preferred device was set
+ return NAME_NOT_FOUND;
+ }
+ break;
+ case DEVICE_ROLE_DISABLED:
+ // TODO: support remove devices role as disabled for strategy.
+ ALOGI("%s no implemented for role as %d", __func__, role);
+ break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as it makes no sense to remove devices with
+ // role as DEVICE_ROLE_NONE for a strategy
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
}
return NO_ERROR;
}
-status_t EngineBase::getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) const
+status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+ AudioDeviceTypeAddrVector &devices) const
{
// verify strategy exists
if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
ALOGE("%s unknown strategy %u", __func__, strategy);
return BAD_VALUE;
}
- // preferred device for this strategy?
- auto devIt = mProductStrategyPreferredDevices.find(strategy);
- if (devIt == mProductStrategyPreferredDevices.end()) {
- ALOGV("%s no preferred device for strategy %u", __func__, strategy);
- return NAME_NOT_FOUND;
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED: {
+ // preferred device for this strategy?
+ auto devIt = mProductStrategyPreferredDevices.find(strategy);
+ if (devIt == mProductStrategyPreferredDevices.end()) {
+ ALOGV("%s no preferred device for strategy %u", __func__, strategy);
+ return NAME_NOT_FOUND;
+ }
+
+ devices = devIt->second;
+ } break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+status_t EngineBase::setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ // verify if the audio source is valid
+ if (!audio_is_valid_audio_source(audioSource)) {
+ ALOGE("%s unknown audio source %u", __func__, audioSource);
}
- device = devIt->second;
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ mCapturePresetDevicesRole[audioSource][role] = devices;
+ // When the devices are set as preferred devices, remove them from the disabled devices.
+ doRemoveDevicesRoleForCapturePreset(
+ audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
+ break;
+ case DEVICE_ROLE_DISABLED:
+ // TODO: support setting devices role as disabled for capture preset.
+ ALOGI("%s no implemented for role as %d", __func__, role);
+ break;
+ case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set device role as none
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
return NO_ERROR;
}
+status_t EngineBase::addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ // verify if the audio source is valid
+ if (!audio_is_valid_audio_source(audioSource)) {
+ ALOGE("%s unknown audio source %u", __func__, audioSource);
+ }
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ mCapturePresetDevicesRole[audioSource][role] = excludeDeviceTypeAddrsFrom(
+ mCapturePresetDevicesRole[audioSource][role], devices);
+ for (const auto& device : devices) {
+ mCapturePresetDevicesRole[audioSource][role].push_back(device);
+ }
+ // When the devices are set as preferred devices, remove them from the disabled devices.
+ doRemoveDevicesRoleForCapturePreset(
+ audioSource, DEVICE_ROLE_DISABLED, devices, false /*forceMatched*/);
+ break;
+ case DEVICE_ROLE_DISABLED:
+ // TODO: support setting devices role as disabled for capture preset.
+ ALOGI("%s no implemented for role as %d", __func__, role);
+ break;
+ case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as there is no need to set device role as none
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+status_t EngineBase::removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
+ return doRemoveDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t EngineBase::doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector& devices, bool forceMatched)
+{
+ // verify if the audio source is valid
+ if (!audio_is_valid_audio_source(audioSource)) {
+ ALOGE("%s unknown audio source %u", __func__, audioSource);
+ }
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ case DEVICE_ROLE_DISABLED: {
+ if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
+ mCapturePresetDevicesRole[audioSource].count(role) == 0) {
+ return NAME_NOT_FOUND;
+ }
+ AudioDeviceTypeAddrVector remainingDevices = excludeDeviceTypeAddrsFrom(
+ mCapturePresetDevicesRole[audioSource][role], devices);
+ if (forceMatched && remainingDevices.size() !=
+ mCapturePresetDevicesRole[audioSource][role].size() - devices.size()) {
+ // There are some devices from `devicesToRemove` that are not shown in the cached record
+ return BAD_VALUE;
+ }
+ mCapturePresetDevicesRole[audioSource][role] = remainingDevices;
+ if (mCapturePresetDevicesRole[audioSource][role].empty()) {
+ // Remove the role when device list is empty
+ mCapturePresetDevicesRole[audioSource].erase(role);
+ }
+ } break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as it makes no sense to remove devices with
+ // role as DEVICE_ROLE_NONE
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+status_t EngineBase::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role)
+{
+ // verify if the audio source is valid
+ if (!audio_is_valid_audio_source(audioSource)) {
+ ALOGE("%s unknown audio source %u", __func__, audioSource);
+ }
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ if (mCapturePresetDevicesRole.count(audioSource) == 0 ||
+ mCapturePresetDevicesRole[audioSource].erase(role) == 0) {
+ // no preferred device for the given audio source
+ return NAME_NOT_FOUND;
+ }
+ break;
+ case DEVICE_ROLE_DISABLED:
+ // TODO: support remove devices role as disabled for strategy.
+ ALOGI("%s no implemented for role as %d", __func__, role);
+ break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as it makes no sense to remove devices with
+ // role as DEVICE_ROLE_NONE for a strategy
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+status_t EngineBase::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role, AudioDeviceTypeAddrVector &devices) const
+{
+ // verify if the audio source is valid
+ if (!audio_is_valid_audio_source(audioSource)) {
+ ALOGE("%s unknown audio source %u", __func__, audioSource);
+ return BAD_VALUE;
+ }
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ case DEVICE_ROLE_DISABLED: {
+ if (mCapturePresetDevicesRole.count(audioSource) == 0) {
+ return NAME_NOT_FOUND;
+ }
+ auto devIt = mCapturePresetDevicesRole.at(audioSource).find(role);
+ if (devIt == mCapturePresetDevicesRole.at(audioSource).end()) {
+ ALOGV("%s no devices role(%d) for capture preset %u", __func__, role, audioSource);
+ return NAME_NOT_FOUND;
+ }
+
+ devices = devIt->second;
+ } break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as the DEVICE_ROLE_NONE is never set
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+status_t EngineBase::getMediaDevicesForRole(device_role_t role,
+ const DeviceVector& availableDevices, DeviceVector& devices) const
+{
+ product_strategy_t strategy = getProductStrategyByName("STRATEGY_MEDIA" /*name*/);
+ if (strategy == PRODUCT_STRATEGY_NONE) {
+ strategy = getProductStrategyForStream(AUDIO_STREAM_MUSIC);
+ }
+ if (strategy == PRODUCT_STRATEGY_NONE) {
+ return NAME_NOT_FOUND;
+ }
+ AudioDeviceTypeAddrVector deviceAddrVec;
+ status_t status = getDevicesForRoleAndStrategy(strategy, role, deviceAddrVec);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ devices = availableDevices.getDevicesFromDeviceTypeAddrVec(deviceAddrVec);
+ return deviceAddrVec.size() == devices.size() ? NO_ERROR : NOT_ENOUGH_DATA;
+}
+
+DeviceVector EngineBase::getActiveMediaDevices(const DeviceVector& availableDevices) const
+{
+ // The priority of active devices as follows:
+ // 1: the available preferred devices for media
+ // 2: the latest connected removable media device that is enabled
+ DeviceVector activeDevices;
+ if (getMediaDevicesForRole(
+ DEVICE_ROLE_PREFERRED, availableDevices, activeDevices) != NO_ERROR) {
+ activeDevices.clear();
+ DeviceVector disabledDevices;
+ getMediaDevicesForRole(DEVICE_ROLE_DISABLED, availableDevices, disabledDevices);
+ sp<DeviceDescriptor> device =
+ mLastRemovableMediaDevices.getLastRemovableMediaDevice(disabledDevices);
+ if (device != nullptr) {
+ activeDevices.add(device);
+ }
+ }
+ return activeDevices;
+}
+
void EngineBase::dump(String8 *dst) const
{
mProductStrategies.dump(dst, 2);
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 981582e..d39eff6 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -26,8 +26,8 @@
{"STRATEGY_PHONE",
{
{"phone", AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
- {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT, 0,
- ""}},
+ {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}},
},
{"sco", AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_SCO,
@@ -39,10 +39,11 @@
{
{"ring", AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, 0, ""}}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
},
{"alarm", AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
- {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, 0, ""}},
+ {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}},
}
},
},
@@ -58,7 +59,7 @@
{
{"", AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, 0, ""}}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
}
},
},
@@ -66,15 +67,16 @@
{
{"", AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
{
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT, 0, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, 0, ""}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
}
}
},
@@ -83,21 +85,25 @@
{
{"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
{{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, 0, ""}}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
},
{"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
{
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, 0, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT, 0, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, 0, ""},
- {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}
},
},
{"system", AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, 0, ""}}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
}
},
},
@@ -106,7 +112,7 @@
{"", AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
{
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, 0, ""}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
}
}
},
@@ -114,7 +120,8 @@
{"STRATEGY_CALL_ASSISTANT",
{
{"", AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
- {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""}}
+ {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}}
}
},
},
@@ -136,14 +143,16 @@
{"rerouting",
{
{"", AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
- {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+ {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}}
}
},
},
{"patch",
{
{"", AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
- {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+ {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""}}
}
},
}
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
old mode 100755
new mode 100644
index 87b6aaf..b3f8947
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -55,6 +55,17 @@
return ret;
}
+sp<DeviceDescriptor> LastRemovableMediaDevices::getLastRemovableMediaDevice(
+ const DeviceVector& excludedDevices, device_out_group_t group) const {
+ for (auto iter = mMediaDevices.begin(); iter != mMediaDevices.end(); ++iter) {
+ if ((group == GROUP_NONE || group == getDeviceOutGroup((iter->desc)->type())) &&
+ !excludedDevices.contains(iter->desc)) {
+ return iter->desc;
+ }
+ }
+ return nullptr;
+}
+
device_out_group_t LastRemovableMediaDevices::getDeviceOutGroup(audio_devices_t device) const
{
switch (device) {
@@ -69,6 +80,11 @@
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
+ // TODO (b/122931261): remove when preferred device for strategy media will be used instead of
+ // AUDIO_POLICY_FORCE_NO_BT_A2DP.
+ case AUDIO_DEVICE_OUT_HEARING_AID:
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_SPEAKER:
return GROUP_BT_A2DP;
default:
return GROUP_NONE;
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 151c7bb..060568a 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -321,10 +321,11 @@
void ProductStrategyPreferredRoutingMap::dump(android::String8* dst, int spaces) const {
dst->appendFormat("\n%*sPreferred devices per product strategy dump:", spaces, "");
for (const auto& iter : *this) {
- dst->appendFormat("\n%*sStrategy %u dev:%08x addr:%s",
+ dst->appendFormat("\n%*sStrategy %u %s",
spaces + 2, "",
(uint32_t) iter.first,
- iter.second.mType, iter.second.mAddress.c_str());
+ dumpAudioDeviceTypeAddrVector(iter.second, true /*includeSensitiveInfo*/)
+ .c_str());
}
dst->appendFormat("\n");
}
diff --git a/services/audiopolicy/engine/config/TEST_MAPPING b/services/audiopolicy/engine/config/TEST_MAPPING
new file mode 100644
index 0000000..06ce111
--- /dev/null
+++ b/services/audiopolicy/engine/config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "audiopolicy_engineconfig_tests"
+ }
+ ]
+}
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 5d22c24..c565926 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -111,6 +111,8 @@
*/
ParsingResult parse(const char* path = DEFAULT_PATH);
android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
+// Exposed for testing.
+android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups);
} // namespace engineConfig
} // namespace android
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 4842cb2..7cfef5b 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -228,7 +228,8 @@
std::string flags = getXmlAttribute(cur, "value");
ALOGV("%s flags %s", __FUNCTION__, flags.c_str());
- attributes.flags = AudioFlagConverter::maskFromString(flags, " ");
+ attributes.flags = static_cast<audio_flags_mask_t>(
+ AudioFlagConverter::maskFromString(flags, " "));
}
if (!xmlStrcmp(cur->name, (const xmlChar *)("Bundle"))) {
std::string bundleKey = getXmlAttribute(cur, "key");
@@ -588,6 +589,7 @@
}
}
}
+ VolumeGroups tempVolumeGroups = volumeGroups;
for (const auto &volumeMapIter : legacyVolumeMap) {
// In order to let AudioService setting the min and max (compatibility), set Min and Max
// to -1 except for private streams
@@ -598,8 +600,10 @@
}
int indexMin = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 0 : -1;
int indexMax = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 100 : -1;
- volumeGroups.push_back({ volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
+ tempVolumeGroups.push_back(
+ { volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
}
+ std::swap(tempVolumeGroups, volumeGroups);
return NO_ERROR;
}
@@ -694,35 +698,14 @@
return deserializeLegacyVolumeCollection(doc, cur, volumeGroups, nbSkippedElements);
}
-static const int gApmXmlConfigFilePathMaxLength = 128;
-
-static constexpr const char *apmXmlConfigFileName = "audio_policy_configuration.xml";
-static constexpr const char *apmA2dpOffloadDisabledXmlConfigFileName =
- "audio_policy_configuration_a2dp_offload_disabled.xml";
-
android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups) {
- char audioPolicyXmlConfigFile[gApmXmlConfigFilePathMaxLength];
- std::vector<const char *> fileNames;
- status_t ret;
-
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- fileNames.push_back(apmA2dpOffloadDisabledXmlConfigFileName);
+ if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+ !audioPolicyXmlConfigFile.empty()) {
+ return parseLegacyVolumeFile(audioPolicyXmlConfigFile.c_str(), volumeGroups);
+ } else {
+ ALOGE("No readable audio policy config file found");
+ return BAD_VALUE;
}
- fileNames.push_back(apmXmlConfigFileName);
-
- for (const char* fileName : fileNames) {
- for (const auto& path : audio_get_configuration_paths()) {
- snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
- "%s/%s", path.c_str(), fileName);
- ret = parseLegacyVolumeFile(audioPolicyXmlConfigFile, volumeGroups);
- if (ret == NO_ERROR) {
- return ret;
- }
- }
- }
- return BAD_VALUE;
}
} // namespace engineConfig
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
new file mode 100644
index 0000000..6b0774f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -0,0 +1,25 @@
+cc_test {
+ name: "audiopolicy_engineconfig_tests",
+
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libmedia_helper",
+ "libutils",
+ "libxml2",
+ ],
+ static_libs: [
+ "libaudiopolicyengine_config",
+ ],
+
+ srcs: ["engineconfig_tests.cpp"],
+
+ data: [":audiopolicy_engineconfig_files"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
new file mode 100644
index 0000000..f61e02f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "APM_Test"
+#include <android-base/file.h>
+#include <log/log.h>
+
+#include "EngineConfig.h"
+
+using namespace android;
+
+TEST(EngineConfigTestInit, LegacyVolumeGroupsLoadingIsTransactional) {
+ engineConfig::VolumeGroups groups;
+ ASSERT_TRUE(groups.empty());
+ status_t status = engineConfig::parseLegacyVolumeFile(
+ (base::GetExecutableDirectory() + "/test_invalid_apm_volume_tables.xml").c_str(),
+ groups);
+ ASSERT_NE(NO_ERROR, status);
+ EXPECT_TRUE(groups.empty());
+ status = engineConfig::parseLegacyVolumeFile(
+ (base::GetExecutableDirectory() + "/test_apm_volume_tables.xml").c_str(),
+ groups);
+ ASSERT_EQ(NO_ERROR, status);
+ EXPECT_FALSE(groups.empty());
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
new file mode 100644
index 0000000..0aee0e9
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -0,0 +1,7 @@
+filegroup {
+ name: "audiopolicy_engineconfig_files",
+ srcs: [
+ "test_apm_volume_tables.xml",
+ "test_invalid_apm_volume_tables.xml",
+ ],
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
new file mode 100644
index 0000000..16126b6
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+ <volumes>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+ <point>0,-4200</point>
+ <point>33,-2800</point>
+ <point>66,-1400</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2400</point>
+ <point>33,-1600</point>
+ <point>66,-800</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ </volumes>
+ <volumes>
+ <reference name="FULL_SCALE_VOLUME_CURVE">
+ <!-- Full Scale reference Volume Curve -->
+ <point>0,0</point>
+ <point>100,0</point>
+ </reference>
+ </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
new file mode 100644
index 0000000..3ec5d10
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- This file uses a non-existent audio stream name. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+ <volumes>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+ <point>0,-4200</point>
+ <point>33,-2800</point>
+ <point>66,-1400</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_NON_EXISTING" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2400</point>
+ <point>33,-1600</point>
+ <point>66,-800</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_RING" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_ALARM" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2970</point>
+ <point>33,-2010</point>
+ <point>66,-1020</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_NOTIFICATION" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_BLUETOOTH_SCO" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_ENFORCED_AUDIBLE" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_DTMF" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ </volumes>
+ <volumes>
+ <reference name="FULL_SCALE_VOLUME_CURVE">
+ <!-- Full Scale reference Volume Curve -->
+ <point>0,0</point>
+ <point>100,0</point>
+ </reference>
+ </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index dfb20b5..a9b536b 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -34,6 +34,8 @@
using DeviceStrategyMap = std::map<product_strategy_t, DeviceVector>;
using StrategyVector = std::vector<product_strategy_t>;
using VolumeGroupVector = std::vector<volume_group_t>;
+using CapturePresetDevicesRoleMap =
+ std::map<audio_source_t, std::map<device_role_t, AudioDeviceTypeAddrVector>>;
/**
* This interface is dedicated to the policy manager that a Policy Engine shall implement.
@@ -293,37 +295,120 @@
virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) const = 0;
/**
- * @brief setPreferredDeviceForStrategy sets the default device to be used for a
- * strategy when available
+ * @brief setDevicesRoleForStrategy sets devices role for a strategy when available. To remove
+ * devices role, removeDevicesRoleForStrategy must be called. When devices role is set
+ * successfully, previously set devices for the same role and strategy will be removed.
* @param strategy the audio strategy whose routing will be affected
- * @param device the audio device to route to when available
- * @return BAD_VALUE if the strategy is invalid,
- * or NO_ERROR if the preferred device was set
+ * @param role the role of the devices for the strategy. All device roles are defined at
+ * system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+ * for setting.
+ * @param devices the audio devices to be set
+ * @return BAD_VALUE if the strategy or role is invalid,
+ * or NO_ERROR if the role of the devices for strategy was set
*/
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device) = 0;
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
/**
- * @brief removePreferredDeviceForStrategy removes the preferred device previously set
+ * @brief removeDevicesRoleForStrategy removes the role of device(s) previously set
* for the given strategy
* @param strategy the audio strategy whose routing will be affected
- * @return BAD_VALUE if the strategy is invalid,
- * or NO_ERROR if the preferred device was removed
+ * @param role the role of the devices for strategy
+ * @return BAD_VALUE if the strategy or role is invalid,
+ * or NO_ERROR if the devices for this role was removed
*/
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role) = 0;
/**
- * @brief getPreferredDeviceForStrategy queries which device is set as the
- * preferred device for the given strategy
+ * @brief getDevicesForRoleAndStrategy queries which devices have the specified role for the
+ * specified strategy
* @param strategy the strategy to query
- * @param device returns configured as the preferred device if one was set
- * @return BAD_VALUE if the strategy is invalid,
- * or NAME_NOT_FOUND if no preferred device was set
- * or NO_ERROR if the device parameter was initialized to the preferred device
+ * @param role the role of the devices to query
+ * @param devices returns list of devices with matching role for the specified strategy.
+ * DEVICE_ROLE_NONE is invalid as input.
+ * @return BAD_VALUE if the strategy or role is invalid,
+ * or NAME_NOT_FOUND if no device for the role and strategy was set
+ * or NO_ERROR if the devices parameter contains a list of devices
*/
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) const = 0;
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+ AudioDeviceTypeAddrVector &devices) const = 0;
+ /**
+ * @brief setDevicesRoleForCapturePreset sets devices role for a capture preset when available.
+ * To remove devices role, removeDevicesRoleForCapturePreset must be called. Calling
+ * clearDevicesRoleForCapturePreset will remove all devices as role. When devices role is set
+ * successfully, previously set devices for the same role and capture preset will be removed.
+ * @param audioSource the audio capture preset whose routing will be affected
+ * @param role the role of the devices for the capture preset. All device roles are defined at
+ * system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+ * for setting.
+ * @param devices the audio devices to be set
+ * @return BAD_VALUE if the capture preset or role is invalid,
+ * or NO_ERROR if the role of the devices for capture preset was set
+ */
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ /**
+ * @brief addDevicesRoleForCapturePreset adds devices role for a capture preset when available.
+ * To remove devices role, removeDevicesRoleForCapturePreset must be called. Calling
+ * clearDevicesRoleForCapturePreset will remove all devices as role.
+ * @param audioSource the audio capture preset whose routing will be affected
+ * @param role the role of the devices for the capture preset. All device roles are defined at
+ * system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+ * for setting.
+ * @param devices the audio devices to be added
+ * @return BAD_VALUE if the capture preset or role is invalid,
+ * or NO_ERROR if the role of the devices for capture preset was added
+ */
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ /**
+ * @brief removeDevicesRoleForCapturePreset removes the role of device(s) previously set
+ * for the given capture preset
+ * @param audioSource the audio capture preset whose routing will be affected
+ * @param role the role of the devices for the capture preset
+ * @param devices the devices to be removed
+ * @return BAD_VALUE if 1) the capture preset is invalid, 2) role is invalid or 3) the list of
+ * devices to be removed are not all present as role for a capture preset
+ * or NO_ERROR if the devices for this role was removed
+ */
+ virtual status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role, const AudioDeviceTypeAddrVector& devices) = 0;
+
+ /**
+ * @brief clearDevicesRoleForCapturePreset removes the role of all device(s) previously set
+ * for the given capture preset
+ * @param audioSource the audio capture preset whose routing will be affected
+ * @param role the role of the devices for the capture preset
+ * @return BAD_VALUE if the capture preset or role is invalid,
+ * or NO_ERROR if the devices for this role was removed
+ */
+ virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role);
+
+ /**
+ * @brief getDevicesForRoleAndCapturePreset queries which devices have the specified role for
+ * the specified capture preset
+ * @param audioSource the capture preset to query
+ * @param role the role of the devices to query
+ * @param devices returns list of devices with matching role for the specified capture preset.
+ * DEVICE_ROLE_NONE is invalid as input.
+ * @return BAD_VALUE if the capture preset or role is invalid,
+ * or NAME_NOT_FOUND if no device for the role and capture preset was set
+ * or NO_ERROR if the devices parameter contains a list of devices
+ */
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role, AudioDeviceTypeAddrVector &devices) const = 0;
+
+ /**
+ * @brief getActiveMediaDevices returns which devices will most likely to be used for media
+ * @param availableDevices all available devices
+ * @return collection of active devices
+ */
+ virtual DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const = 0;
virtual void dump(String8 *dst) const = 0;
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
</ProductStrategy>
<ProductStrategy name="STRATEGY_MEDIA">
+ <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+ <Attributes>
+ <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+ <Usage value="AUDIO_USAGE_ASSISTANT"/>
+ </Attributes>
+ </AttributesGroup>
<AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
<Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
<Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
<Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
<Attributes></Attributes>
</AttributesGroup>
- <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
- <Attributes>
- <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
- <Usage value="AUDIO_USAGE_ASSISTANT"/>
- </Attributes>
- </AttributesGroup>
<AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
<Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
</AttributesGroup>
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
index f91f8d7..f8a6fc0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
@@ -45,7 +45,7 @@
bool InputSource::sendToHW(string & /*error*/)
{
- uint32_t applicableInputDevice;
+ audio_devices_t applicableInputDevice;
blackboardRead(&applicableInputDevice, sizeof(applicableInputDevice));
return mPolicyPluginInterface->setDeviceForInputSource(mId, applicableInputDevice);
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
index 244f082..6c8eb65 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
@@ -32,7 +32,7 @@
struct Device
{
- uint32_t applicableDevice; /**< applicable device for this strategy. */
+ audio_devices_t applicableDevice; /**< applicable device for this strategy. */
char deviceAddress[mMaxStringSize]; /**< device address associated with this strategy. */
} __attribute__((packed));
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
index aa06ae3..f4645e6 100644
--- a/services/audiopolicy/engineconfigurable/src/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
@@ -51,7 +51,7 @@
mApplicableDevices = devices;
return NO_ERROR;
}
- devices |= AUDIO_DEVICE_BIT_IN;
+ devices = static_cast<audio_devices_t>(devices | AUDIO_DEVICE_BIT_IN);
if (!audio_is_input_device(devices)) {
ALOGE("%s: trying to set an invalid device 0x%X for input source %s",
__FUNCTION__, devices, getName().c_str());
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 9a7fa8f..5083b14 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -52,13 +52,19 @@
def findBitPos(decimal):
pos = 0
i = 1
- while i != decimal:
+ while i < decimal:
i = i << 1
pos = pos + 1
if pos == 32:
return -1
- return pos
+ # TODO: b/168065706. This is just to fix the build. That the problem of devices with
+ # multiple bits set must be addressed more generally in the configurable audio policy
+ # and parameter framework.
+ if i > decimal:
+ logging.info("Device:{} which has multiple bits set is skipped. b/168065706".format(decimal))
+ return -2
+ return pos
def generateXmlStructureFile(componentTypeDict, structureTypesFile, outputFile):
@@ -74,10 +80,12 @@
if bitparameters_node is not None:
ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
for key, value in ordered_values.items():
- value_node = ET.SubElement(bitparameters_node, "BitParameter")
- value_node.set('Name', key)
- value_node.set('Size', "1")
- value_node.set('Pos', str(findBitPos(value)))
+ pos = findBitPos(value)
+ if pos >= 0:
+ value_node = ET.SubElement(bitparameters_node, "BitParameter")
+ value_node.set('Name', key)
+ value_node.set('Size', "1")
+ value_node.set('Pos', str(pos))
enum_parameter_node = component_type.find("EnumParameter")
if enum_parameter_node is not None:
@@ -118,9 +126,9 @@
ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
criteria_pattern = re.compile(
- r"\s*(?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
- r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
- r"(?P<values>(?:0[xX])?[0-9a-fA-F]+)")
+ r"\s*V\((?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
+ r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+ r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])[0-9a-fA-F]+|[0-9]+)")
logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
@@ -156,6 +164,13 @@
logging.debug("type:{}, literal:{}, values:{}.".format(component_type_name, component_type_literal, component_type_numerical_value))
+ if "stub" not in all_component_types["OutputDevicesMask"]:
+ all_component_types["OutputDevicesMask"]["stub"] = 0x40000000
+ logging.info("added stub output device mask")
+ if "stub" not in all_component_types["InputDevicesMask"]:
+ all_component_types["InputDevicesMask"]["stub"] = 0x40000000
+ logging.info("added stub input device mask")
+
# Transform input source in inclusive criterion
shift = len(all_component_types['OutputDevicesMask'])
if shift > 32:
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
</ProductStrategy>
<ProductStrategy name="STRATEGY_MEDIA">
+ <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+ <Attributes>
+ <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+ <Usage value="AUDIO_USAGE_ASSISTANT"/>
+ </Attributes>
+ </AttributesGroup>
<AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
<Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
<Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
<Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
<Attributes></Attributes>
</AttributesGroup>
- <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
- <Attributes>
- <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
- <Usage value="AUDIO_USAGE_ASSISTANT"/>
- </Attributes>
- </AttributesGroup>
<AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
<Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
</AttributesGroup>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
old mode 100755
new mode 100644
index b14d2bb..159ca08
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -184,16 +184,7 @@
break;
case STRATEGY_DTMF:
- if (!isInCall()) {
- // when off call, DTMF strategy follows the same rules as MEDIA strategy
- devices = getDevicesForStrategyInt(
- STRATEGY_MEDIA, availableOutputDevices, availableInputDevices, outputs);
- break;
- }
- // when in call, DTMF and PHONE strategies follow the same rules
- FALLTHROUGH_INTENDED;
-
- case STRATEGY_PHONE:
+ case STRATEGY_PHONE: {
// Force use of only devices on primary output if:
// - in call AND
// - cannot route from voice call RX OR
@@ -216,75 +207,24 @@
availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
- String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
- ((availPrimaryInputDevices.getDevice(
- txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
- (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
+ String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
+ ((availPrimaryInputDevices.getDevice(
+ txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
+ (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
availableOutputDevices = availPrimaryOutputDevices;
}
}
- // for phone strategy, we first consider the forced use and then the available devices by
- // order of priority
- switch (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION)) {
- case AUDIO_POLICY_FORCE_BT_SCO:
- if (!isInCall() || strategy != STRATEGY_DTMF) {
- devices = availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT);
- if (!devices.isEmpty()) break;
- }
- devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET, AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
- if (!devices.isEmpty()) break;
- // if SCO device is requested but no SCO device is available, fall back to default case
- FALLTHROUGH_INTENDED;
-
- default: // FORCE_NONE
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
- if (!devices.isEmpty()) break;
- // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
- if (!isInCall() &&
- (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
- outputs.isA2dpSupported()) {
- devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
- AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES});
- if (!devices.isEmpty()) break;
- }
- devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_WIRED_HEADPHONE, AUDIO_DEVICE_OUT_WIRED_HEADSET,
- AUDIO_DEVICE_OUT_LINE, AUDIO_DEVICE_OUT_USB_HEADSET,
- AUDIO_DEVICE_OUT_USB_DEVICE});
- if (!devices.isEmpty()) break;
- if (!isInCall()) {
- devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_USB_ACCESSORY, AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
- AUDIO_DEVICE_OUT_AUX_DIGITAL, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET});
- if (!devices.isEmpty()) break;
- }
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_EARPIECE);
- break;
-
- case AUDIO_POLICY_FORCE_SPEAKER:
- // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
- // A2DP speaker when forcing to speaker output
- if (!isInCall() &&
- (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
- outputs.isA2dpSupported()) {
- devices = availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER);
- if (!devices.isEmpty()) break;
- }
- if (!isInCall()) {
- devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_USB_ACCESSORY, AUDIO_DEVICE_OUT_USB_DEVICE,
- AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_AUX_DIGITAL,
- AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET});
- if (!devices.isEmpty()) break;
- }
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
- break;
- }
- break;
+ devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
+ if (!devices.isEmpty()) break;
+ devices = availableOutputDevices.getFirstDevicesFromTypes({
+ AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
+ AUDIO_DEVICE_OUT_WIRED_HEADSET,
+ AUDIO_DEVICE_OUT_LINE,
+ AUDIO_DEVICE_OUT_USB_HEADSET,
+ AUDIO_DEVICE_OUT_USB_DEVICE});
+ if (!devices.isEmpty()) break;
+ devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_EARPIECE);
+ } break;
case STRATEGY_SONIFICATION:
@@ -327,7 +267,8 @@
}
}
// Use both Bluetooth SCO and phone default output when ringing in normal mode
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) == AUDIO_POLICY_FORCE_BT_SCO) {
+ if (audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
+ availableOutputDevices, STRATEGY_PHONE))) {
if (strategy == STRATEGY_SONIFICATION) {
devices.replaceDevicesByType(
AUDIO_DEVICE_OUT_SPEAKER,
@@ -386,18 +327,13 @@
STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
break;
}
- // FIXME: Find a better solution to prevent routing to BT hearing aid(b/122931261).
- if ((devices2.isEmpty()) &&
- (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
- devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
- }
+
if ((devices2.isEmpty()) &&
(getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) == AUDIO_POLICY_FORCE_SPEAKER)) {
devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
}
if (devices2.isEmpty() && (getLastRemovableMediaDevices().size() > 0)) {
- if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
- outputs.isA2dpSupported()) {
+ if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
// Get the last connected device of wired and bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
getLastRemovableMediaDevices());
@@ -452,22 +388,26 @@
devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_TELEPHONY_TX);
break;
+ case STRATEGY_NONE:
+ // Happens when internal strategies are processed ("rerouting", "patch"...)
+ break;
+
default:
- ALOGW("getDevicesForStrategy() unknown strategy: %d", strategy);
+ ALOGW("%s unknown strategy: %d", __func__, strategy);
break;
}
if (devices.isEmpty()) {
- ALOGV("getDevicesForStrategy() no device found for strategy %d", strategy);
+ ALOGV("%s no device found for strategy %d", __func__, strategy);
sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
if (defaultOutputDevice != nullptr) {
devices.add(defaultOutputDevice);
}
ALOGE_IF(devices.isEmpty(),
- "getDevicesForStrategy() no default device defined");
+ "%s no default device defined", __func__);
}
- ALOGVV("getDevices ForStrategy() strategy %d, device %s",
+ ALOGVV("%s strategy %d, device %s", __func__,
strategy, dumpDeviceTypes(devices.types()).c_str());
return devices;
}
@@ -502,20 +442,24 @@
}
}
+ audio_devices_t commDeviceType =
+ getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE);
+
switch (inputSource) {
case AUDIO_SOURCE_DEFAULT:
case AUDIO_SOURCE_MIC:
device = availableDevices.getDevice(
AUDIO_DEVICE_IN_BLUETOOTH_A2DP, String8(""), AUDIO_FORMAT_DEFAULT);
if (device != nullptr) break;
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO) {
+ if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
device = availableDevices.getDevice(
AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
if (device != nullptr) break;
}
device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
- AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+ AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+ AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
case AUDIO_SOURCE_VOICE_COMMUNICATION:
@@ -528,26 +472,30 @@
availableDevices = availablePrimaryDevices;
}
- switch (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION)) {
- case AUDIO_POLICY_FORCE_BT_SCO:
+ if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
// if SCO device is requested but no SCO device is available, fall back to default case
device = availableDevices.getDevice(
AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
if (device != nullptr) {
break;
}
- FALLTHROUGH_INTENDED;
-
+ }
+ switch (commDeviceType) {
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ device = availableDevices.getDevice(
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ break;
+ case AUDIO_DEVICE_OUT_SPEAKER:
+ device = availableDevices.getFirstExistingDevice({
+ AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
+ break;
default: // FORCE_NONE
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
- case AUDIO_POLICY_FORCE_SPEAKER:
- device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC});
- break;
}
break;
@@ -560,14 +508,15 @@
LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
availableDevices = availablePrimaryDevices;
}
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO) {
+ if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
device = availableDevices.getDevice(
AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
if (device != nullptr) break;
}
device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
- AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+ AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+ AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
case AUDIO_SOURCE_CAMCORDER:
// For a device without built-in mic, adding usb device
@@ -609,6 +558,7 @@
ALOGE_IF(device == nullptr,
"getDeviceForInputSource() no default device defined");
}
+
ALOGV_IF(device != nullptr,
"getDeviceForInputSource()input source %d, device %08x",
inputSource, device->type());
@@ -626,31 +576,66 @@
}
}
-DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
- DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+product_strategy_t Engine::getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const {
+ for (const auto& strategyMap : mLegacyStrategyMap) {
+ if (strategyMap.second == legacyStrategy) {
+ return strategyMap.first;
+ }
+ }
+ return PRODUCT_STRATEGY_NONE;
+}
- // check if this strategy has a preferred device that is available,
- // if yes, give priority to it
- AudioDeviceTypeAddr preferredStrategyDevice;
- const status_t status = getPreferredDeviceForStrategy(strategy, preferredStrategyDevice);
+audio_devices_t Engine::getPreferredDeviceTypeForLegacyStrategy(
+ const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const {
+ product_strategy_t strategy = getProductStrategyFromLegacy(legacyStrategy);
+ DeviceVector devices = getPreferredAvailableDevicesForProductStrategy(
+ availableOutputDevices, strategy);
+ if (devices.size() > 0) {
+ return devices[0]->type();
+ }
+ return AUDIO_DEVICE_NONE;
+}
+
+DeviceVector Engine::getPreferredAvailableDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
+ DeviceVector preferredAvailableDevVec = {};
+ AudioDeviceTypeAddrVector preferredStrategyDevices;
+ const status_t status = getDevicesForRoleAndStrategy(
+ strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
if (status == NO_ERROR) {
// there is a preferred device, is it available?
- sp<DeviceDescriptor> preferredAvailableDevDescr = availableOutputDevices.getDevice(
- preferredStrategyDevice.mType,
- String8(preferredStrategyDevice.mAddress.c_str()),
- AUDIO_FORMAT_DEFAULT);
- if (preferredAvailableDevDescr != nullptr) {
- ALOGVV("%s using pref device 0x%08x/%s for strategy %u",
- __func__, preferredStrategyDevice.mType,
- preferredStrategyDevice.mAddress.c_str(), strategy);
- return DeviceVector(preferredAvailableDevDescr);
+ preferredAvailableDevVec =
+ availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+ if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+ ALOGVV("%s using pref device %s for strategy %u",
+ __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+ return preferredAvailableDevVec;
}
}
+ return preferredAvailableDevVec;
+}
+
+DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
+ mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
+
+ // When not in call, STRATEGY_PHONE and STRATEGY_DTMF follow STRATEGY_MEDIA
+ if (!isInCall() && (legacyStrategy == STRATEGY_PHONE || legacyStrategy == STRATEGY_DTMF)) {
+ legacyStrategy = STRATEGY_MEDIA;
+ strategy = getProductStrategyFromLegacy(STRATEGY_MEDIA);
+ }
+ // check if this strategy has a preferred device that is available,
+ // if yes, give priority to it.
+ DeviceVector preferredAvailableDevVec =
+ getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, strategy);
+ if (!preferredAvailableDevVec.isEmpty()) {
+ return preferredAvailableDevVec;
+ }
DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
- auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
- mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
+
return getDevicesForStrategyInt(legacyStrategy,
availableOutputDevices,
availableInputDevices, outputs);
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index bb9e2df..6214fe7 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -83,6 +83,12 @@
sp<DeviceDescriptor> getDeviceForInputSource(audio_source_t inputSource) const;
+ product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
+ audio_devices_t getPreferredDeviceTypeForLegacyStrategy(
+ const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
+ DeviceVector getPreferredAvailableDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+
DeviceStrategyMap mDevicesForStrategies;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2a9a4c4..69f9a69 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -29,29 +29,26 @@
#define ALOGVV(a...) do { } while(0)
#endif
-#define AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH 128
-#define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
-#define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
- "audio_policy_configuration_a2dp_offload_disabled.xml"
-#define AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME \
- "audio_policy_configuration_bluetooth_legacy_hal.xml"
-
#include <algorithm>
#include <inttypes.h>
#include <math.h>
#include <set>
#include <unordered_set>
#include <vector>
+
+#include <Serializer.h>
+#include <cutils/bitops.h>
#include <cutils/properties.h>
-#include <utils/Log.h>
#include <media/AudioParameter.h>
+#include <policy.h>
#include <private/android_filesystem_config.h>
#include <system/audio.h>
#include <system/audio_config.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <utils/Log.h>
+
#include "AudioPolicyManager.h"
-#include <Serializer.h>
#include "TypeConverter.h"
-#include <policy.h>
namespace android {
@@ -209,6 +206,9 @@
// Reset active device codec
device->setEncodedFormat(AUDIO_FORMAT_DEFAULT);
+ // remove device from mReportedFormatsMap cache
+ mReportedFormatsMap.erase(device);
+
} break;
default:
@@ -337,6 +337,9 @@
mAvailableInputDevices.remove(device);
checkInputsForDevice(device, state);
+
+ // remove device from mReportedFormatsMap cache
+ mReportedFormatsMap.erase(device);
} break;
default:
@@ -461,7 +464,16 @@
}
}
}
-
+ auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ // mute media strategies and delay device switch by the largest
+ // This avoid sending the music tail into the earpiece or headset.
+ setStrategyMute(musicStrategy, true, desc);
+ setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
+ mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
+ nullptr, true /*fromCache*/).types());
+ }
// Toggle the device state: UNAVAILABLE -> AVAILABLE
// This will force reading again the device configuration
status = setDeviceConnectionState(device,
@@ -780,16 +792,7 @@
}
updateCallAndOutputRouting(forceVolumeReeval, delayMs);
-
- for (const auto& activeDesc : mInputs.getActiveInputs()) {
- auto newDevice = getNewInputDevice(activeDesc);
- // Force new input selection if the new device can not be reached via current input
- if (activeDesc->mProfile->getSupportedDevices().contains(newDevice)) {
- setInputDevice(activeDesc->mIoHandle, newDevice);
- } else {
- closeInput(activeDesc->mIoHandle);
- }
- }
+ updateInputRouting();
}
void AudioPolicyManager::setSystemProperty(const char* property, const char* value)
@@ -893,7 +896,8 @@
// Only honor audibility enforced when required. The client will be
// forced to reconnect if the forced usage changes.
if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
- dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+ dstAttr->flags = static_cast<audio_flags_mask_t>(
+ dstAttr->flags & ~AUDIO_FLAG_AUDIBILITY_ENFORCED);
}
return NO_ERROR;
@@ -925,7 +929,7 @@
return status;
}
if (auto it = mAllowedCapturePolicies.find(uid); it != end(mAllowedCapturePolicies)) {
- resultAttr->flags |= it->second;
+ resultAttr->flags = static_cast<audio_flags_mask_t>(resultAttr->flags | it->second);
}
*stream = mEngine->getStreamTypeForAttributes(*resultAttr);
@@ -1102,14 +1106,15 @@
};
*portId = PolicyAudioPort::getNextUniqueId();
+ sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
sp<TrackClientDescriptor> clientDesc =
new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
sanitizedRequestedPortId, *stream,
mEngine->getProductStrategyForAttributes(resultAttr),
toVolumeSource(resultAttr),
*flags, isRequestedDeviceForExclusiveUse,
- std::move(weakSecondaryOutputDescs));
- sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
+ std::move(weakSecondaryOutputDescs),
+ outputDesc->mPolicyMix);
outputDesc->addClient(clientDesc);
ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
@@ -1242,7 +1247,8 @@
// Discard haptic channel mask when forcing muting haptic channels.
audio_channel_mask_t channelMask = forceMutingHaptic
- ? (config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL) : config->channel_mask;
+ ? static_cast<audio_channel_mask_t>(config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL)
+ : config->channel_mask;
// open a direct output if required by specified parameters
//force direct flag if offload flag is set: offloading implies a direct output stream
@@ -1298,7 +1304,8 @@
// at this stage we should ignore the DIRECT flag as no direct output could be found earlier
*flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
- output = selectOutput(outputs, *flags, config->format, channelMask, config->sample_rate);
+ output = selectOutput(
+ outputs, *flags, config->format, channelMask, config->sample_rate, session);
}
ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
"sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -1471,14 +1478,26 @@
}
audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
- audio_output_flags_t flags,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- uint32_t samplingRate)
+ audio_output_flags_t flags,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ uint32_t samplingRate,
+ audio_session_t sessionId)
{
LOG_ALWAYS_FATAL_IF(!(format == AUDIO_FORMAT_INVALID || audio_is_linear_pcm(format)),
"%s called with format %#x", __func__, format);
+ // Return the output that haptic-generating attached to when 1) session id is specified,
+ // 2) haptic-generating effect exists for given session id and 3) the output that
+ // haptic-generating effect attached to is in given outputs.
+ if (sessionId != AUDIO_SESSION_NONE) {
+ audio_io_handle_t hapticGeneratingOutput = mEffects.getIoForSession(
+ sessionId, FX_IID_HAPTICGENERATOR);
+ if (outputs.indexOf(hapticGeneratingOutput) >= 0) {
+ return hapticGeneratingOutput;
+ }
+ }
+
// Flags disqualifying an output: the match must happen before calling selectOutput()
static const audio_output_flags_t kExcludedFlags = (audio_output_flags_t)
(AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1764,7 +1783,8 @@
checkAndSetVolume(curves, client->volumeSource(),
curves.getVolumeIndex(outputDesc->devices().types()),
outputDesc,
- outputDesc->devices().types());
+ outputDesc->devices().types(), 0 /*delay*/,
+ outputDesc->useHwGain() /*force*/);
// update the outputs if starting an output with a stream that can affect notification
// routing
@@ -1931,6 +1951,12 @@
ALOGV("releaseOutput() %d", outputDesc->mIoHandle);
+ sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
+ if (outputDesc->isClientActive(client)) {
+ ALOGW("releaseOutput() inactivates portId %d in good faith", portId);
+ stopOutput(portId);
+ }
+
if (outputDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
if (outputDesc->mDirectOpenCount <= 0) {
ALOGW("releaseOutput() invalid open count %d for output %d",
@@ -1942,9 +1968,7 @@
mpClientInterface->onAudioPortListUpdate();
}
}
- // stopOutput() needs to be successfully called before releaseOutput()
- // otherwise there may be inaccurate stream reference counts.
- // This is checked in outputDesc->removeClient below.
+
outputDesc->removeClient(portId);
}
@@ -2255,7 +2279,7 @@
sp<AudioInputDescriptor> inputDesc = mInputs.getInputForClient(portId);
if (inputDesc == 0) {
ALOGW("%s no input for client %d", __FUNCTION__, portId);
- return BAD_VALUE;
+ return DEAD_OBJECT;
}
audio_io_handle_t input = inputDesc->mIoHandle;
sp<RecordClientDescriptor> client = inputDesc->getClient(portId);
@@ -2877,7 +2901,7 @@
{
ALOGV("registerPolicyMixes() %zu mix(es)", mixes.size());
status_t res = NO_ERROR;
-
+ bool checkOutputs = false;
sp<HwModule> rSubmixModule;
// examine each mix's route type
for (size_t i = 0; i < mixes.size(); i++) {
@@ -2996,11 +3020,16 @@
i, type, address.string());
res = INVALID_OPERATION;
break;
+ } else {
+ checkOutputs = true;
}
}
}
if (res != NO_ERROR) {
unregisterPolicyMixes(mixes);
+ } else if (checkOutputs) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
}
return res;
}
@@ -3009,6 +3038,7 @@
{
ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
status_t res = NO_ERROR;
+ bool checkOutputs = false;
sp<HwModule> rSubmixModule;
// examine each mix's route type
for (const auto& mix : mixes) {
@@ -3049,9 +3079,15 @@
if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
continue;
+ } else {
+ checkOutputs = true;
}
}
}
+ if (res == NO_ERROR && checkOutputs) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+ }
return res;
}
@@ -3070,16 +3106,16 @@
// Returns true if all devices types match the predicate and are supported by one HW module
bool AudioPolicyManager::areAllDevicesSupported(
- const Vector<AudioDeviceTypeAddr>& devices,
+ const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
const char *context) {
for (size_t i = 0; i < devices.size(); i++) {
sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
- devices[i].mType, devices[i].mAddress.c_str(), String8(),
+ devices[i].mType, devices[i].getAddress(), String8(),
AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
- ALOGE("%s: device type %#x address %s not supported or not an output device",
- context, devices[i].mType, devices[i].mAddress.c_str());
+ ALOGE("%s: device type %#x address %s not supported or not match predicate",
+ context, devices[i].mType, devices[i].getAddress());
return false;
}
}
@@ -3087,7 +3123,7 @@
}
status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
- const Vector<AudioDeviceTypeAddr>& devices) {
+ const AudioDeviceTypeAddrVector& devices) {
ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
return BAD_VALUE;
@@ -3119,25 +3155,35 @@
return res;
}
-status_t AudioPolicyManager::setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device) {
- ALOGV("%s() strategy=%d device=%08x addr=%s", __FUNCTION__,
- strategy, device.mType, device.mAddress.c_str());
- Vector<AudioDeviceTypeAddr> devices;
- devices.add(device);
+status_t AudioPolicyManager::setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) {
+ ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
+ dumpAudioDeviceTypeAddrVector(devices).c_str());
+
if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
return BAD_VALUE;
}
- status_t status = mEngine->setPreferredDeviceForStrategy(strategy, device);
+ status_t status = mEngine->setDevicesRoleForStrategy(strategy, role, devices);
if (status != NO_ERROR) {
- ALOGW("Engine could not set preferred device %08x %s for strategy %d",
- device.mType, device.mAddress.c_str(), strategy);
+ ALOGW("Engine could not set preferred devices %s for strategy %d role %d",
+ dumpAudioDeviceTypeAddrVector(devices).c_str(), strategy, role);
return status;
}
checkForDeviceAndOutputChanges();
- updateCallAndOutputRouting();
+
+ bool forceVolumeReeval = false;
+ // FIXME: workaround for truncated touch sounds
+ // to be removed when the problem is handled by system UI
+ uint32_t delayMs = 0;
+ if (strategy == mCommunnicationStrategy) {
+ forceVolumeReeval = true;
+ delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+ updateInputRouting();
+ }
+ updateCallAndOutputRouting(forceVolumeReeval, delayMs);
return NO_ERROR;
}
@@ -3148,6 +3194,8 @@
if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
waitMs = updateCallRouting(newDevices, delayMs);
+ // Only apply special touch sound delay once
+ delayMs = 0;
}
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
@@ -3157,6 +3205,8 @@
// preventing the force re-routing in case of default dev that distinguishes on address.
// Let's give back to engine full device choice decision however.
waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
+ // Only apply special touch sound delay once
+ delayMs = 0;
}
if (forceVolumeReeval && !newDevices.isEmpty()) {
applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
@@ -3164,30 +3214,124 @@
}
}
-status_t AudioPolicyManager::removePreferredDeviceForStrategy(product_strategy_t strategy)
-{
- ALOGI("%s() strategy=%d", __FUNCTION__, strategy);
+void AudioPolicyManager::updateInputRouting() {
+ for (const auto& activeDesc : mInputs.getActiveInputs()) {
+ auto newDevice = getNewInputDevice(activeDesc);
+ // Force new input selection if the new device can not be reached via current input
+ if (activeDesc->mProfile->getSupportedDevices().contains(newDevice)) {
+ setInputDevice(activeDesc->mIoHandle, newDevice);
+ } else {
+ closeInput(activeDesc->mIoHandle);
+ }
+ }
+}
- status_t status = mEngine->removePreferredDeviceForStrategy(strategy);
+status_t AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role)
+{
+ ALOGI("%s() strategy=%d role=%d", __func__, strategy, role);
+
+ status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role);
if (status != NO_ERROR) {
- ALOGW("Engine could not remove preferred device for strategy %d", strategy);
+ ALOGV("Engine could not remove preferred device for strategy %d status %d",
+ strategy, status);
return status;
}
checkForDeviceAndOutputChanges();
- updateCallAndOutputRouting();
+
+ bool forceVolumeReeval = false;
+ // FIXME: workaround for truncated touch sounds
+ // to be removed when the problem is handled by system UI
+ uint32_t delayMs = 0;
+ if (strategy == mCommunnicationStrategy) {
+ forceVolumeReeval = true;
+ delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+ updateInputRouting();
+ }
+ updateCallAndOutputRouting(forceVolumeReeval, delayMs);
return NO_ERROR;
}
-status_t AudioPolicyManager::getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device) {
- return mEngine->getPreferredDeviceForStrategy(strategy, device);
+status_t AudioPolicyManager::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices) {
+ return mEngine->getDevicesForRoleAndStrategy(strategy, role, devices);
+}
+
+status_t AudioPolicyManager::setDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+ ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+ dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+ if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+ return BAD_VALUE;
+ }
+ status_t status = mEngine->setDevicesRoleForCapturePreset(audioSource, role, devices);
+ ALOGW_IF(status != NO_ERROR,
+ "Engine could not set preferred devices %s for audio source %d role %d",
+ dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+ return status;
+}
+
+status_t AudioPolicyManager::addDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+ ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+ dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+ if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+ return BAD_VALUE;
+ }
+ status_t status = mEngine->addDevicesRoleForCapturePreset(audioSource, role, devices);
+ ALOGW_IF(status != NO_ERROR,
+ "Engine could not add preferred devices %s for audio source %d role %d",
+ dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+ updateInputRouting();
+ return status;
+}
+
+status_t AudioPolicyManager::removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+ ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
+ dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+ if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+ return BAD_VALUE;
+ }
+
+ status_t status = mEngine->removeDevicesRoleForCapturePreset(
+ audioSource, role, devices);
+ ALOGW_IF(status != NO_ERROR,
+ "Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
+
+ updateInputRouting();
+ return status;
+}
+
+status_t AudioPolicyManager::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role) {
+ ALOGV("%s() audioSource=%d role=%d", __func__, audioSource, role);
+
+ status_t status = mEngine->clearDevicesRoleForCapturePreset(audioSource, role);
+ ALOGW_IF(status != NO_ERROR,
+ "Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
+
+ updateInputRouting();
+ return status;
+}
+
+status_t AudioPolicyManager::getDevicesForRoleAndCapturePreset(
+ audio_source_t audioSource, device_role_t role, AudioDeviceTypeAddrVector &devices) {
+ return mEngine->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
}
status_t AudioPolicyManager::setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices) {
- ALOGI("%s() userId=%d num devices %zu", __FUNCTION__, userId, devices.size());\
+ const AudioDeviceTypeAddrVector& devices) {
+ ALOGI("%s() userId=%d num devices %zu", __func__, userId, devices.size());
if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
return BAD_VALUE;
}
@@ -3246,7 +3390,9 @@
}
dst->appendFormat(" TTS output %savailable\n", mTtsOutputAvailable ? "" : "not ");
dst->appendFormat(" Master mono: %s\n", mMasterMono ? "on" : "off");
+ dst->appendFormat(" Communication Strategy: %d\n", mCommunnicationStrategy);
dst->appendFormat(" Config source: %s\n", mConfig.getSource().c_str()); // getConfig not const
+
mAvailableOutputDevices.dump(dst, String8("Available output"));
mAvailableInputDevices.dump(dst, String8("Available input"));
mHwModulesAll.dump(dst);
@@ -3283,38 +3429,38 @@
// This function checks for the parameters which can be offloaded.
// This can be enhanced depending on the capability of the DSP and policy
// of the system.
-bool AudioPolicyManager::isOffloadSupported(const audio_offload_info_t& offloadInfo)
+audio_offload_mode_t AudioPolicyManager::getOffloadSupport(const audio_offload_info_t& offloadInfo)
{
- ALOGV("isOffloadSupported: SR=%u, CM=0x%x, Format=0x%x, StreamType=%d,"
+ ALOGV("%s: SR=%u, CM=0x%x, Format=0x%x, StreamType=%d,"
" BitRate=%u, duration=%" PRId64 " us, has_video=%d",
- offloadInfo.sample_rate, offloadInfo.channel_mask,
+ __func__, offloadInfo.sample_rate, offloadInfo.channel_mask,
offloadInfo.format,
offloadInfo.stream_type, offloadInfo.bit_rate, offloadInfo.duration_us,
offloadInfo.has_video);
if (mMasterMono) {
- return false; // no offloading if mono is set.
+ return AUDIO_OFFLOAD_NOT_SUPPORTED; // no offloading if mono is set.
}
// Check if offload has been disabled
if (property_get_bool("audio.offload.disable", false /* default_value */)) {
- ALOGV("offload disabled by audio.offload.disable");
- return false;
+ ALOGV("%s: offload disabled by audio.offload.disable", __func__);
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
// Check if stream type is music, then only allow offload as of now.
if (offloadInfo.stream_type != AUDIO_STREAM_MUSIC)
{
- ALOGV("isOffloadSupported: stream_type != MUSIC, returning false");
- return false;
+ ALOGV("%s: stream_type != MUSIC, returning NOT_SUPPORTED", __func__);
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
//TODO: enable audio offloading with video when ready
const bool allowOffloadWithVideo =
property_get_bool("audio.offload.video", false /* default_value */);
if (offloadInfo.has_video && !allowOffloadWithVideo) {
- ALOGV("isOffloadSupported: has_video == true, returning false");
- return false;
+ ALOGV("%s: has_video == true, returning NOT_SUPPORTED", __func__);
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
//If duration is less than minimum value defined in property, return false
@@ -3322,13 +3468,14 @@
"audio.offload.min.duration.secs", -1 /* default_value */);
if (min_duration_secs >= 0) {
if (offloadInfo.duration_us < min_duration_secs * 1000000LL) {
- ALOGV("Offload denied by duration < audio.offload.min.duration.secs(=%d)",
- min_duration_secs);
- return false;
+ ALOGV("%s: Offload denied by duration < audio.offload.min.duration.secs(=%d)",
+ __func__, min_duration_secs);
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
} else if (offloadInfo.duration_us < OFFLOAD_DEFAULT_MIN_DURATION_SECS * 1000000) {
- ALOGV("Offload denied by duration < default min(=%u)", OFFLOAD_DEFAULT_MIN_DURATION_SECS);
- return false;
+ ALOGV("%s: Offload denied by duration < default min(=%u)",
+ __func__, OFFLOAD_DEFAULT_MIN_DURATION_SECS);
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
// Do not allow offloading if one non offloadable effect is enabled. This prevents from
@@ -3338,7 +3485,7 @@
// This may prevent offloading in rare situations where effects are left active by apps
// in the background.
if (mEffects.isNonOffloadableEffectEnabled()) {
- return false;
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
// See if there is a profile to support this.
@@ -3349,8 +3496,14 @@
offloadInfo.channel_mask,
AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD,
true /* directOnly */);
- ALOGV("isOffloadSupported() profile %sfound", profile != 0 ? "" : "NOT ");
- return (profile != 0);
+ ALOGV("%s: profile %sfound", __func__, profile != 0 ? "" : "NOT ");
+ if (profile == nullptr) {
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
+ }
+ if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD) != 0) {
+ return AUDIO_OFFLOAD_GAPLESS_SUPPORTED;
+ }
+ return AUDIO_OFFLOAD_SUPPORTED;
}
bool AudioPolicyManager::isDirectOutputSupported(const audio_config_base_t& config,
@@ -3374,15 +3527,15 @@
status_t AudioPolicyManager::listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation)
{
- if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
- generation == NULL) {
+ if (num_ports == nullptr || (*num_ports != 0 && ports == nullptr) ||
+ generation == nullptr) {
return BAD_VALUE;
}
ALOGV("listAudioPorts() role %d type %d num_ports %d ports %p", role, type, *num_ports, ports);
- if (ports == NULL) {
+ if (ports == nullptr) {
*num_ports = 0;
}
@@ -3440,7 +3593,7 @@
return NO_ERROR;
}
-status_t AudioPolicyManager::getAudioPort(struct audio_port *port)
+status_t AudioPolicyManager::getAudioPort(struct audio_port_v7 *port)
{
if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
return BAD_VALUE;
@@ -3820,7 +3973,11 @@
ALOGE("%s output not found for id %d", __func__, patch->sources[0].id);
return BAD_VALUE;
}
- // Reset handle so that setOutputDevice will force new AF patch to reach the sink
+ if (patchDesc->getHandle() != outputDesc->getPatchHandle()) {
+ // force SwOutput patch removal as AF counter part patch has already gone.
+ ALOGV("%s reset patch handle on Output as different from SWBridge", __func__);
+ removeAudioPatch(outputDesc->getPatchHandle());
+ }
outputDesc->setPatchHandle(AUDIO_PATCH_HANDLE_NONE);
setOutputDevices(outputDesc,
getNewOutputDevices(outputDesc, true /*fromCache*/),
@@ -4218,14 +4375,28 @@
// checkOutputsForDevice().
for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
sp<DeviceDescriptor> device = mAvailableOutputDevices[i];
- FormatVector supportedFormats =
- device->getAudioPort()->getAudioProfiles().getSupportedFormats();
- for (size_t j = 0; j < supportedFormats.size(); j++) {
- if (mConfig.getSurroundFormats().count(supportedFormats[j]) != 0) {
- formats.insert(supportedFormats[j]);
+ audio_devices_t deviceType = device->type();
+ // Enabling/disabling formats are applied to only HDMI devices. So, this function
+ // returns formats reported by HDMI devices.
+ if (deviceType != AUDIO_DEVICE_OUT_HDMI) {
+ continue;
+ }
+ // Formats reported by sink devices
+ std::unordered_set<audio_format_t> formatset;
+ if (auto it = mReportedFormatsMap.find(device); it != mReportedFormatsMap.end()) {
+ formatset.insert(it->second.begin(), it->second.end());
+ }
+
+ // Formats hard-coded in the in policy configuration file (if any).
+ FormatVector encodedFormats = device->encodedFormats();
+ formatset.insert(encodedFormats.begin(), encodedFormats.end());
+ // Filter the formats which are supported by the vendor hardware.
+ for (auto it = formatset.begin(); it != formatset.end(); ++it) {
+ if (mConfig.getSurroundFormats().count(*it) != 0) {
+ formats.insert(*it);
} else {
for (const auto& pair : mConfig.getSurroundFormats()) {
- if (pair.second.count(supportedFormats[j]) != 0) {
+ if (pair.second.count(*it) != 0) {
formats.insert(pair.first);
break;
}
@@ -4424,37 +4595,15 @@
}
static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
- char audioPolicyXmlConfigFile[AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH];
- std::vector<const char*> fileNames;
- status_t ret;
-
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
- if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // Both BluetoothAudio@2.0 and BluetoothA2dp@1.0 (Offlaod) are disabled, and uses
- // the legacy hardware module for A2DP and hearing aid.
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
- } else if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+ !audioPolicyXmlConfigFile.empty()) {
+ status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
+ if (ret == NO_ERROR) {
+ config.setSource(audioPolicyXmlConfigFile);
}
- } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false)) {
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
+ return ret;
}
- fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
-
- for (const char* fileName : fileNames) {
- for (const auto& path : audio_get_configuration_paths()) {
- snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
- "%s/%s", path.c_str(), fileName);
- ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile, &config);
- if (ret == NO_ERROR) {
- config.setSource(audioPolicyXmlConfigFile);
- return ret;
- }
- }
- }
- return ret;
+ return BAD_VALUE;
}
AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
@@ -4538,6 +4687,9 @@
// Silence ALOGV statements
property_set("log.tag." LOG_TAG, "D");
+ mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
+ mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
+
updateDevicesAndOutputs();
return status;
}
@@ -4745,7 +4897,15 @@
}
if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
- // first list already open outputs that can be routed to this device
+ // first call getAudioPort to get the supported attributes from the HAL
+ struct audio_port_v7 port = {};
+ device->toAudioPort(&port);
+ status_t status = mpClientInterface->getAudioPort(&port);
+ if (status == NO_ERROR) {
+ device->importAudioPort(port);
+ }
+
+ // then list already open outputs that can be routed to this device
for (size_t i = 0; i < mOutputs.size(); i++) {
desc = mOutputs.valueAt(i);
if (!desc->isDuplicated() && desc->supportsDevice(device)
@@ -4807,8 +4967,8 @@
deviceType, address.string(), profile.get(), profile->getName().c_str());
desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- status_t status = desc->open(nullptr, DeviceVector(device),
- AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+ status = desc->open(nullptr, DeviceVector(device),
+ AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
if (status == NO_ERROR) {
// Here is where the out_set_parameters() for card & device gets called
@@ -4832,9 +4992,8 @@
config.offload_info.channel_mask = config.channel_mask;
config.offload_info.format = config.format;
- status_t status = desc->open(&config, DeviceVector(device),
- AUDIO_STREAM_DEFAULT,
- AUDIO_OUTPUT_FLAG_NONE, &output);
+ status = desc->open(&config, DeviceVector(device),
+ AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
if (status != NO_ERROR) {
output = AUDIO_IO_HANDLE_NONE;
}
@@ -4864,8 +5023,8 @@
// open a duplicating output thread for the new output and the primary output
sp<SwAudioOutputDescriptor> dupOutputDesc =
new SwAudioOutputDescriptor(NULL, mpClientInterface);
- status_t status = dupOutputDesc->openDuplicating(mPrimaryOutput, desc,
- &duplicatedOutput);
+ status = dupOutputDesc->openDuplicating(mPrimaryOutput, desc,
+ &duplicatedOutput);
if (status == NO_ERROR) {
// add duplicated output descriptor
addOutput(duplicatedOutput, dupOutputDesc);
@@ -5222,32 +5381,38 @@
SortedVector<audio_io_handle_t> srcOutputs = getOutputsForDevices(oldDevices, mPreviousOutputs);
SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
- // also take into account external policy-related changes: add all outputs which are
- // associated with policies in the "before" and "after" output vectors
- ALOGVV("%s(): policy related outputs", __func__);
- bool hasDynamicPolicy = false;
- for (size_t i = 0 ; i < mPreviousOutputs.size() ; i++) {
- const sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueAt(i);
- if (desc != 0 && desc->mPolicyMix != NULL) {
- srcOutputs.add(desc->mIoHandle);
- hasDynamicPolicy = true;
- ALOGVV(" previous outputs: adding %d", desc->mIoHandle);
+ uint32_t maxLatency = 0;
+ bool invalidate = false;
+ // take into account dynamic audio policies related changes: if a client is now associated
+ // to a different policy mix than at creation time, invalidate corresponding stream
+ for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+ const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
+ if (desc->isDuplicated()) {
+ continue;
}
- }
- for (size_t i = 0 ; i < mOutputs.size() ; i++) {
- const sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
- if (desc != 0 && desc->mPolicyMix != NULL) {
- dstOutputs.add(desc->mIoHandle);
- hasDynamicPolicy = true;
- ALOGVV(" new outputs: adding %d", desc->mIoHandle);
+ for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
+ if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
+ continue;
+ }
+ sp<AudioPolicyMix> primaryMix;
+ status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
+ client->flags(), primaryMix, nullptr);
+ if (status != OK) {
+ continue;
+ }
+ if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+ invalidate = true;
+ if (desc->isStrategyActive(psId)) {
+ maxLatency = desc->latency();
+ }
+ break;
+ }
}
}
- if (srcOutputs != dstOutputs) {
+ if (srcOutputs != dstOutputs || invalidate) {
// get maximum latency of all source outputs to determine the minimum mute time guaranteeing
// audio from invalidated tracks will be rendered when unmuting
- uint32_t maxLatency = 0;
- bool invalidate = hasDynamicPolicy;
for (audio_io_handle_t srcOut : srcOutputs) {
sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
if (desc == nullptr) continue;
@@ -5266,7 +5431,8 @@
client->flags(),
client->config().format,
client->config().channel_mask,
- client->config().sample_rate);
+ client->config().sample_rate,
+ client->session());
if (newOutput != srcOut) {
invalidate = true;
break;
@@ -5359,6 +5525,17 @@
}
}
+bool AudioPolicyManager::isScoRequestedForComm() const {
+ AudioDeviceTypeAddrVector devices;
+ mEngine->getDevicesForRoleAndStrategy(mCommunnicationStrategy, DEVICE_ROLE_PREFERRED, devices);
+ for (const auto &device : devices) {
+ if (audio_is_bluetooth_out_sco_device(device.mType)) {
+ return true;
+ }
+ }
+ return false;
+}
+
void AudioPolicyManager::checkA2dpSuspend()
{
audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput();
@@ -5370,23 +5547,21 @@
bool isScoConnected =
(mAvailableInputDevices.types().count(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) != 0 ||
!Intersection(mAvailableOutputDevices.types(), getAudioDeviceOutAllScoSet()).empty());
+ bool isScoRequested = isScoRequestedForComm();
// if suspended, restore A2DP output if:
// ((SCO device is NOT connected) ||
- // ((forced usage communication is NOT SCO) && (forced usage for record is NOT SCO) &&
+ // ((SCO is not requested) &&
// (phone state is NOT in call) && (phone state is NOT ringing)))
//
// if not suspended, suspend A2DP output if:
// (SCO device is connected) &&
- // ((forced usage for communication is SCO) || (forced usage for record is SCO) ||
+ // ((SCO is requested) ||
// ((phone state is in call) || (phone state is ringing)))
//
if (mA2dpSuspended) {
if (!isScoConnected ||
- ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) !=
- AUDIO_POLICY_FORCE_BT_SCO) &&
- (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) !=
- AUDIO_POLICY_FORCE_BT_SCO) &&
+ (!isScoRequested &&
(mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) &&
(mEngine->getPhoneState() != AUDIO_MODE_RINGTONE))) {
@@ -5395,10 +5570,7 @@
}
} else {
if (isScoConnected &&
- ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) ==
- AUDIO_POLICY_FORCE_BT_SCO) ||
- (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) ==
- AUDIO_POLICY_FORCE_BT_SCO) ||
+ (isScoRequested ||
(mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) ||
(mEngine->getPhoneState() == AUDIO_MODE_RINGTONE))) {
@@ -5423,6 +5595,12 @@
}
}
+ // Do not retrieve engine device for outputs through MSD
+ // TODO: support explicit routing requests by resetting MSD patch to engine device.
+ if (outputDesc->devices() == getMsdAudioOutDevices()) {
+ return outputDesc->devices();
+ }
+
// Honor explicit routing requests only if no client using default routing is active on this
// input: a specific app can not force routing for other apps by setting a preferred device.
bool active; // unused
@@ -5510,8 +5688,8 @@
}
DeviceVector activeDevices;
DeviceVector devices;
- for (audio_stream_type_t curStream = AUDIO_STREAM_MIN; curStream < AUDIO_STREAM_PUBLIC_CNT;
- curStream = (audio_stream_type_t) (curStream + 1)) {
+ for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_PUBLIC_CNT; ++i) {
+ const audio_stream_type_t curStream{static_cast<audio_stream_type_t>(i)};
if (!streamsMatchForvolume(stream, curStream)) {
continue;
}
@@ -5625,7 +5803,7 @@
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
const uint32_t latency = desc->latency() * 2;
- if (latency > maxLatency) {
+ if (desc->isActive(latency * 2) && latency > maxLatency) {
maxLatency = latency;
}
}
@@ -5747,15 +5925,6 @@
DeviceVector filteredDevices = outputDesc->filterSupportedDevices(devices);
DeviceVector prevDevices = outputDesc->devices();
- // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
- // output profile or if new device is not supported AND previous device(s) is(are) still
- // available (otherwise reset device must be done on the output)
- if (!devices.isEmpty() && filteredDevices.isEmpty() &&
- !mAvailableOutputDevices.filter(prevDevices).empty()) {
- ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
- return 0;
- }
-
ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
if (!filteredDevices.isEmpty()) {
@@ -5770,6 +5939,17 @@
muteWaitMs = 0;
}
+ // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
+ // output profile or if new device is not supported AND previous device(s) is(are) still
+ // available (otherwise reset device must be done on the output)
+ if (!devices.isEmpty() && filteredDevices.isEmpty() &&
+ !mAvailableOutputDevices.filter(prevDevices).empty()) {
+ ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
+ // restore previous device after evaluating strategy mute state
+ outputDesc->setDevices(prevDevices);
+ return muteWaitMs;
+ }
+
// Do not change the routing if:
// the requested device is AUDIO_DEVICE_NONE
// OR the requested device is the same as current device
@@ -6006,7 +6186,8 @@
if (!Intersection(deviceTypes,
{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
AUDIO_DEVICE_OUT_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
- AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID}).empty() &&
+ AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID,
+ AUDIO_DEVICE_OUT_BLE_HEADSET}).empty() &&
((volumeSource == alarmVolumeSrc ||
volumeSource == ringVolumeSrc) ||
(volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION)) ||
@@ -6101,16 +6282,17 @@
bool isVoiceVolSrc = callVolSrc == volumeSource;
bool isBtScoVolSrc = btScoVolSrc == volumeSource;
- audio_policy_forced_cfg_t forceUseForComm =
- mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION);
+ bool isScoRequested = isScoRequestedForComm();
// do not change in call volume if bluetooth is connected and vice versa
// if sco and call follow same curves, bypass forceUseForComm
if ((callVolSrc != btScoVolSrc) &&
- ((isVoiceVolSrc && forceUseForComm == AUDIO_POLICY_FORCE_BT_SCO) ||
- (isBtScoVolSrc && forceUseForComm != AUDIO_POLICY_FORCE_BT_SCO))) {
- ALOGV("%s cannot set volume group %d volume with force use = %d for comm", __func__,
- volumeSource, forceUseForComm);
- return INVALID_OPERATION;
+ ((isVoiceVolSrc && isScoRequested) ||
+ (isBtScoVolSrc && !isScoRequested))) {
+ ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
+ volumeSource, isScoRequested ? " " : " not ");
+ // Do not return an error here as AudioService will always set both voice call
+ // and bluetooth SCO volumes due to stream aliasing.
+ return NO_ERROR;
}
if (deviceTypes.empty()) {
deviceTypes = outputDesc->devices().types();
@@ -6118,9 +6300,8 @@
float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
if (outputDesc->isFixedVolume(deviceTypes) ||
- // Force VoIP volume to max for bluetooth SCO
-
- ((isVoiceVolSrc || isBtScoVolSrc) &&
+ // Force VoIP volume to max for bluetooth SCO device except if muted
+ (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
volumeDb = 0.0f;
}
@@ -6422,6 +6603,7 @@
return;
}
FormatVector formats = formatsFromString(reply.string());
+ mReportedFormatsMap[devDesc] = formats;
if (device == AUDIO_DEVICE_OUT_HDMI
|| isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
modifySurroundFormats(devDesc, &formats);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index b588f89..4e745bd 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -225,7 +225,7 @@
status_t dump(int fd) override;
status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) override;
- virtual bool isOffloadSupported(const audio_offload_info_t& offloadInfo);
+ virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& offloadInfo);
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
const audio_attributes_t& attributes);
@@ -233,9 +233,9 @@
virtual status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation);
- virtual status_t getAudioPort(struct audio_port *port);
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
virtual status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
uid_t uid) {
@@ -263,17 +263,42 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
virtual status_t setUidDeviceAffinities(uid_t uid,
- const Vector<AudioDeviceTypeAddr>& devices);
+ const AudioDeviceTypeAddrVector& devices);
virtual status_t removeUidDeviceAffinities(uid_t uid);
virtual status_t setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices);
+ const AudioDeviceTypeAddrVector& devices);
virtual status_t removeUserIdDeviceAffinities(int userId);
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device);
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device);
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role);
+
+
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices);
+
+ virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role);
+
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices);
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
@@ -532,6 +557,11 @@
void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
/**
+ * @brief updates routing for all inputs.
+ */
+ void updateInputRouting();
+
+ /**
* @brief checkOutputForAttributes checks and if necessary changes outputs used for the
* given audio attributes.
* must be called every time a condition that affects the output choice for a given
@@ -608,7 +638,8 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_format_t format = AUDIO_FORMAT_INVALID,
audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE,
- uint32_t samplingRate = 0);
+ uint32_t samplingRate = 0,
+ audio_session_t sessionId = AUDIO_SESSION_NONE);
// samplingRate, format, channelMask are in/out and so may be modified
sp<IOProfile> getInputProfile(const sp<DeviceDescriptor> & device,
uint32_t& samplingRate,
@@ -787,6 +818,13 @@
std::unordered_set<audio_format_t> mManualSurroundFormats;
std::unordered_map<uid_t, audio_flags_mask_t> mAllowedCapturePolicies;
+
+ // The map of device descriptor and formats reported by the device.
+ std::map<wp<DeviceDescriptor>, FormatVector> mReportedFormatsMap;
+
+ // Cached product strategy ID corresponding to legacy strategy STRATEGY_PHONE
+ product_strategy_t mCommunnicationStrategy;
+
private:
void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
@@ -938,10 +976,11 @@
sp<AudioPatch> *patchDescPtr);
bool areAllDevicesSupported(
- const Vector<AudioDeviceTypeAddr>& devices,
+ const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
const char* context);
+ bool isScoRequestedForComm() const;
};
};
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 8a7a1b2..ceddb7e 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -15,6 +15,7 @@
shared_libs: [
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudiopolicymanager",
"libaudioutils",
@@ -28,6 +29,9 @@
"libmediautils",
"libsensorprivacy",
"libutils",
+ "audioclient-types-aidl-unstable-cpp",
+ "audioflinger-aidl-unstable-cpp",
+ "audiopolicy-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
],
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 9fa7a53..90b93e2 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -50,7 +50,22 @@
ALOGW("%s: could not get AudioFlinger", __func__);
return PERMISSION_DENIED;
}
- return af->openOutput(module, output, config, device, latencyMs, flags);
+
+ media::OpenOutputRequest request;
+ media::OpenOutputResponse response;
+
+ request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+ request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*config));
+ request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
+ request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+
+ status_t status = af->openOutput(request, &response);
+ if (status == OK) {
+ *output = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.output));
+ *config = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioConfig_audio_config_t(response.config));
+ *latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
+ }
+ return status;
}
audio_io_handle_t AudioPolicyService::AudioPolicyClient::openDuplicateOutput(
@@ -111,7 +126,22 @@
return PERMISSION_DENIED;
}
- return af->openInput(module, input, config, device, address, source, flags);
+ AudioDeviceTypeAddr deviceTypeAddr(*device, address.c_str());
+
+ media::OpenInputRequest request;
+ request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+ request.input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
+ request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*config));
+ request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(deviceTypeAddr));
+ request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
+ request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+
+ media::OpenInputResponse response;
+ status_t status = af->openInput(request, &response);
+ if (status == OK) {
+ *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(response.input));
+ }
+ return status;
}
status_t AudioPolicyService::AudioPolicyClient::closeInput(audio_io_handle_t input)
@@ -246,4 +276,14 @@
mAudioPolicyService->mCaptureStateNotifier.setCaptureState(active);
}
+status_t AudioPolicyService::AudioPolicyClient::getAudioPort(struct audio_port_v7 *port)
+{
+ sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+ if (af == 0) {
+ ALOGW("%s: could not get AudioFlinger", __func__);
+ return PERMISSION_DENIED;
+ }
+ return af->getAudioPort(port);
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 1ec0c5e..b738633 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -121,8 +121,8 @@
Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
for (size_t i = 0; i < effects.size(); i++) {
EffectDesc *effect = effects[i];
- sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, -1, 0,
- 0, audioSession, input);
+ sp<AudioEffect> fx = new AudioEffect(String16("android"));
+ fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGW("addInputEffects(): failed to create Fx %s on source %d",
@@ -270,8 +270,8 @@
Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
for (size_t i = 0; i < effects.size(); i++) {
EffectDesc *effect = effects[i];
- sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, 0, 0, 0,
- audioSession, output);
+ sp<AudioEffect> fx = new AudioEffect(String16("android"));
+ fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGE("addOutputSessionEffects(): failed to create Fx %s on session %d",
@@ -970,11 +970,11 @@
for (const auto& deviceEffectsIter : mDeviceEffects) {
const auto& deviceEffects = deviceEffectsIter.second;
for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
- auto fx = std::make_unique<AudioEffect>(
- EFFECT_UUID_NULL, String16("android"), &effectDesc->mUuid, 0, nullptr,
- nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
- AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
- deviceEffects->getDeviceAddress()});
+ auto fx = std::make_unique<AudioEffect>(String16("android"));
+ fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
+ nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
+ AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
+ deviceEffects->getDeviceAddress()});
status_t status = fx->initCheck();
if (status != NO_ERROR && status != ALREADY_EXISTS) {
ALOGE("%s(): failed to create Fx %s on port type=%d address=%s", __func__,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 9577160..10bf707 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -244,11 +244,12 @@
uid = callingUid;
}
if (!mPackageManager.allowPlaybackCapture(uid)) {
- attr->flags |= AUDIO_FLAG_NO_MEDIA_PROJECTION;
+ attr->flags = static_cast<audio_flags_mask_t>(attr->flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
}
if (((attr->flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
&& !bypassInterruptionPolicyAllowed(pid, uid)) {
- attr->flags &= ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE);
+ attr->flags = static_cast<audio_flags_mask_t>(
+ attr->flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
}
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
@@ -453,8 +454,9 @@
}
// check calling permissions.
- // Capturing from FM_TUNER source is controlled by captureAudioOutputAllowed() only as this
- // does not affect users privacy as does capturing from an actual microphone.
+ // Capturing from FM_TUNER source is controlled by captureTunerAudioInputAllowed() and
+ // captureAudioOutputAllowed() (deprecated) as this does not affect users privacy
+ // as does capturing from an actual microphone.
if (!(recordingAllowed(opPackageName, pid, uid) || attr->source == AUDIO_SOURCE_FM_TUNER)) {
ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
__func__, uid, pid);
@@ -465,9 +467,14 @@
if ((inputSource == AUDIO_SOURCE_VOICE_UPLINK ||
inputSource == AUDIO_SOURCE_VOICE_DOWNLINK ||
inputSource == AUDIO_SOURCE_VOICE_CALL ||
- inputSource == AUDIO_SOURCE_ECHO_REFERENCE||
- inputSource == AUDIO_SOURCE_FM_TUNER) &&
- !canCaptureOutput) {
+ inputSource == AUDIO_SOURCE_ECHO_REFERENCE)
+ && !canCaptureOutput) {
+ return PERMISSION_DENIED;
+ }
+
+ if (inputSource == AUDIO_SOURCE_FM_TUNER
+ && !captureTunerAudioInputAllowed(pid, uid)
+ && !canCaptureOutput) {
return PERMISSION_DENIED;
}
@@ -546,7 +553,7 @@
}
std::string AudioPolicyService::getDeviceTypeStrForPortId(audio_port_handle_t portId) {
- struct audio_port port = {};
+ struct audio_port_v7 port = {};
port.id = portId;
status_t status = mAudioPolicyManager->getAudioPort(&port);
if (status == NO_ERROR && port.type == AUDIO_PORT_TYPE_DEVICE) {
@@ -572,7 +579,8 @@
}
// check calling permissions
- if (!(startRecording(client->opPackageName, client->pid, client->uid)
+ if (!(startRecording(client->opPackageName, client->pid, client->uid,
+ client->attributes.source)
|| client->attributes.source == AUDIO_SOURCE_FM_TUNER)) {
ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
__func__, client->uid, client->pid);
@@ -593,7 +601,7 @@
}
// including successes gets very verbose
- // but once we cut over to westworld, log them all.
+ // but once we cut over to statsd, log them all.
if (status != NO_ERROR) {
static constexpr char kAudioPolicy[] = "audiopolicy";
@@ -660,7 +668,8 @@
client->active = false;
client->startTimeNs = 0;
updateUidStates_l();
- finishRecording(client->opPackageName, client->uid);
+ finishRecording(client->opPackageName, client->uid,
+ client->attributes.source);
}
return status;
@@ -686,7 +695,8 @@
updateUidStates_l();
// finish the recording app op
- finishRecording(client->opPackageName, client->uid);
+ finishRecording(client->opPackageName, client->uid,
+ client->attributes.source);
AutoCallerClear acc;
return mAudioPolicyManager->stopInput(portId);
}
@@ -1085,15 +1095,15 @@
return mAudioPolicyManager->setAllowedCapturePolicy(uid, capturePolicy);
}
-bool AudioPolicyService::isOffloadSupported(const audio_offload_info_t& info)
+audio_offload_mode_t AudioPolicyService::getOffloadSupport(const audio_offload_info_t& info)
{
if (mAudioPolicyManager == NULL) {
ALOGV("mAudioPolicyManager == NULL");
- return false;
+ return AUDIO_OFFLOAD_NOT_SUPPORTED;
}
Mutex::Autolock _l(mLock);
AutoCallerClear acc;
- return mAudioPolicyManager->isOffloadSupported(info);
+ return mAudioPolicyManager->getOffloadSupport(info);
}
bool AudioPolicyService::isDirectOutputSupported(const audio_config_base_t& config,
@@ -1116,7 +1126,7 @@
status_t AudioPolicyService::listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation)
{
Mutex::Autolock _l(mLock);
@@ -1127,7 +1137,7 @@
return mAudioPolicyManager->listAudioPorts(role, type, num_ports, ports, generation);
}
-status_t AudioPolicyService::getAudioPort(struct audio_port *port)
+status_t AudioPolicyService::getAudioPort(struct audio_port_v7 *port)
{
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
@@ -1257,7 +1267,7 @@
}
status_t AudioPolicyService::setUidDeviceAffinities(uid_t uid,
- const Vector<AudioDeviceTypeAddr>& devices) {
+ const AudioDeviceTypeAddrVector& devices) {
Mutex::Autolock _l(mLock);
if(!modifyAudioRoutingAllowed()) {
return PERMISSION_DENIED;
@@ -1282,7 +1292,7 @@
}
status_t AudioPolicyService::setUserIdDeviceAffinities(int userId,
- const Vector<AudioDeviceTypeAddr>& devices) {
+ const AudioDeviceTypeAddrVector& devices) {
Mutex::Autolock _l(mLock);
if(!modifyAudioRoutingAllowed()) {
return PERMISSION_DENIED;
@@ -1494,33 +1504,36 @@
return mAudioPolicyManager->isCallScreenModeSupported();
}
-status_t AudioPolicyService::setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device)
+status_t AudioPolicyService::setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
{
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
Mutex::Autolock _l(mLock);
- return mAudioPolicyManager->setPreferredDeviceForStrategy(strategy, device);
+ return mAudioPolicyManager->setDevicesRoleForStrategy(strategy, role, devices);
}
-status_t AudioPolicyService::removePreferredDeviceForStrategy(product_strategy_t strategy)
+status_t AudioPolicyService::removeDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role)
{
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
Mutex::Autolock _l(mLock);
- return mAudioPolicyManager->removePreferredDeviceForStrategy(strategy);
+ return mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role);
}
-status_t AudioPolicyService::getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device)
+status_t AudioPolicyService::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices)
{
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
Mutex::Autolock _l(mLock);
- return mAudioPolicyManager->getPreferredDeviceForStrategy(strategy, device);
+ return mAudioPolicyManager->getDevicesForRoleAndStrategy(strategy, role, devices);
}
status_t AudioPolicyService::registerSoundTriggerCaptureStateListener(
@@ -1531,4 +1544,55 @@
return NO_ERROR;
}
+status_t AudioPolicyService::setDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mAudioPolicyManager->setDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::addDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mAudioPolicyManager->addDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mAudioPolicyManager->removeDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t AudioPolicyService::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mAudioPolicyManager->clearDevicesRoleForCapturePreset(audioSource, role);
+}
+
+status_t AudioPolicyService::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mAudioPolicyManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 9b61e74..d71a317 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -35,6 +35,7 @@
#include <utils/threads.h>
#include "AudioPolicyService.h"
#include <hardware_legacy/power.h>
+#include <media/AidlConversion.h>
#include <media/AudioEffect.h>
#include <media/AudioParameter.h>
#include <mediautils/ServiceUtilities.h>
@@ -111,7 +112,7 @@
// A notification client is always registered by AudioSystem when the client process
// connects to AudioPolicyService.
-void AudioPolicyService::registerClient(const sp<IAudioPolicyServiceClient>& client)
+void AudioPolicyService::registerClient(const sp<media::IAudioPolicyServiceClient>& client)
{
if (client == 0) {
ALOGW("%s got NULL client", __FUNCTION__);
@@ -293,10 +294,11 @@
return mAudioCommandThread->setAudioPortConfigCommand(config, delayMs);
}
-AudioPolicyService::NotificationClient::NotificationClient(const sp<AudioPolicyService>& service,
- const sp<IAudioPolicyServiceClient>& client,
- uid_t uid,
- pid_t pid)
+AudioPolicyService::NotificationClient::NotificationClient(
+ const sp<AudioPolicyService>& service,
+ const sp<media::IAudioPolicyServiceClient>& client,
+ uid_t uid,
+ pid_t pid)
: mService(service), mUid(uid), mPid(pid), mAudioPolicyServiceClient(client),
mAudioPortCallbacksEnabled(false), mAudioVolumeGroupCallbacksEnabled(false)
{
@@ -342,7 +344,8 @@
const String8& regId, int32_t state)
{
if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
- mAudioPolicyServiceClient->onDynamicPolicyMixStateUpdate(regId, state);
+ mAudioPolicyServiceClient->onDynamicPolicyMixStateUpdate(
+ legacy2aidl_String8_string(regId).value(), state);
}
}
@@ -357,8 +360,37 @@
audio_source_t source)
{
if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
- mAudioPolicyServiceClient->onRecordingConfigurationUpdate(event, clientInfo,
- clientConfig, clientEffects, deviceConfig, effects, patchHandle, source);
+ status_t status = [&]() -> status_t {
+ int32_t eventAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(event));
+ media::RecordClientInfo clientInfoAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_record_client_info_t_RecordClientInfo(*clientInfo));
+ media::AudioConfigBase clientConfigAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_config_base_t_AudioConfigBase(*clientConfig));
+ std::vector<media::EffectDescriptor> clientEffectsAidl = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<media::EffectDescriptor>>(
+ clientEffects,
+ legacy2aidl_effect_descriptor_t_EffectDescriptor));
+ media::AudioConfigBase deviceConfigAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_config_base_t_AudioConfigBase(*deviceConfig));
+ std::vector<media::EffectDescriptor> effectsAidl = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<media::EffectDescriptor>>(
+ effects,
+ legacy2aidl_effect_descriptor_t_EffectDescriptor));
+ int32_t patchHandleAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_patch_handle_t_int32_t(patchHandle));
+ media::AudioSourceType sourceAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_source_t_AudioSourceType(source));
+ return aidl_utils::statusTFromBinderStatus(
+ mAudioPolicyServiceClient->onRecordingConfigurationUpdate(eventAidl,
+ clientInfoAidl,
+ clientConfigAidl,
+ clientEffectsAidl,
+ deviceConfigAidl,
+ effectsAidl,
+ patchHandleAidl,
+ sourceAidl));
+ }();
+ ALOGW_IF(status != OK, "onRecordingConfigurationUpdate() failed: %d", status);
}
}
@@ -453,7 +485,7 @@
sp<AudioRecordClient> topActive;
sp<AudioRecordClient> latestActive;
sp<AudioRecordClient> topSensitiveActive;
- sp<AudioRecordClient> latestSensitiveActive;
+ sp<AudioRecordClient> latestSensitiveActiveOrComm;
nsecs_t topStartNs = 0;
nsecs_t latestStartNs = 0;
@@ -467,6 +499,7 @@
bool rttCallActive = (isInCall || isInCommunication)
&& mUidPolicy->isRttEnabled();
bool onlyHotwordActive = true;
+ bool isPhoneStateOwnerActive = false;
// if Sensor Privacy is enabled then all recordings should be silenced.
if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
@@ -488,12 +521,13 @@
}
bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
- // Clients capturing for Accessibility services are not considered
+ // Clients capturing for Accessibility services or virtual sources are not considered
// for top or latest active to avoid masking regular clients started before
- if (!isAccessibility) {
+ if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
bool isPrivacySensitive =
(current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
+
if (appState == APP_STATE_TOP) {
if (isPrivacySensitive) {
if (current->startTimeNs > topSensitiveStartNs) {
@@ -515,9 +549,15 @@
if (!(current->attributes.source == AUDIO_SOURCE_HOTWORD
|| ((isA11yOnTop || rttCallActive) && isAssistant))) {
if (isPrivacySensitive) {
- if (current->startTimeNs > latestSensitiveStartNs) {
- latestSensitiveActive = current;
- latestSensitiveStartNs = current->startTimeNs;
+ // if audio mode is IN_COMMUNICATION, make sure the audio mode owner
+ // is marked latest sensitive active even if another app qualifies.
+ if (current->startTimeNs > latestSensitiveStartNs
+ || (isInCommunication && current->uid == mPhoneStateOwnerUid)) {
+ if (!isInCommunication || latestSensitiveActiveOrComm == nullptr
+ || latestSensitiveActiveOrComm->uid != mPhoneStateOwnerUid) {
+ latestSensitiveActiveOrComm = current;
+ latestSensitiveStartNs = current->startTimeNs;
+ }
}
isSensitiveActive = true;
} else {
@@ -531,6 +571,9 @@
if (current->attributes.source != AUDIO_SOURCE_HOTWORD) {
onlyHotwordActive = false;
}
+ if (current->uid == mPhoneStateOwnerUid) {
+ isPhoneStateOwnerActive = true;
+ }
}
// if no active client with UI on Top, consider latest active as top
@@ -539,8 +582,15 @@
topStartNs = latestStartNs;
}
if (topSensitiveActive == nullptr) {
- topSensitiveActive = latestSensitiveActive;
+ topSensitiveActive = latestSensitiveActiveOrComm;
topSensitiveStartNs = latestSensitiveStartNs;
+ } else if (latestSensitiveActiveOrComm != nullptr) {
+ // if audio mode is IN_COMMUNICATION, favor audio mode owner over an app with
+ // foreground UI in case both are capturing with privacy sensitive flag.
+ if (isInCommunication && latestSensitiveActiveOrComm->uid == mPhoneStateOwnerUid) {
+ topSensitiveActive = latestSensitiveActiveOrComm;
+ topSensitiveStartNs = latestSensitiveStartNs;
+ }
}
// If both privacy sensitive and regular capture are active:
@@ -567,8 +617,8 @@
auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
bool canCaptureCall = recordClient->canCaptureOutput;
bool canCaptureCommunication = recordClient->canCaptureOutput
- || recordClient->uid == mPhoneStateOwnerUid
- || isServiceUid(mPhoneStateOwnerUid);
+ || !isPhoneStateOwnerActive
+ || recordClient->uid == mPhoneStateOwnerUid;
return !(isInCall && !canCaptureCall)
&& !(isInCommunication && !canCaptureCommunication);
};
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 869a963..c0e29ee 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -192,16 +192,16 @@
virtual status_t setVoiceVolume(float volume, int delayMs = 0);
status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
status_t setAllowedCapturePolicy(uint_t uid, audio_flags_mask_t capturePolicy) override;
- virtual bool isOffloadSupported(const audio_offload_info_t &config);
+ virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t &config);
virtual bool isDirectOutputSupported(const audio_config_base_t& config,
const audio_attributes_t& attributes);
virtual status_t listAudioPorts(audio_port_role_t role,
audio_port_type_t type,
unsigned int *num_ports,
- struct audio_port *ports,
+ struct audio_port_v7 *ports,
unsigned int *generation);
- virtual status_t getAudioPort(struct audio_port *port);
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
virtual status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle);
virtual status_t releaseAudioPatch(audio_patch_handle_t handle);
@@ -210,7 +210,7 @@
unsigned int *generation);
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
- virtual void registerClient(const sp<IAudioPolicyServiceClient>& client);
+ virtual void registerClient(const sp<media::IAudioPolicyServiceClient>& client);
virtual void setAudioPortCallbacksEnabled(bool enabled);
@@ -226,19 +226,41 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
- virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+ virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
virtual status_t removeUidDeviceAffinities(uid_t uid);
- virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
- const AudioDeviceTypeAddr &device);
+ virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
- virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
+ virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices);
- virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
- AudioDeviceTypeAddr &device);
- virtual status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+ virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t removeDevicesRoleForCapturePreset(
+ audio_source_t audioSource, device_role_t role,
+ const AudioDeviceTypeAddrVector& devices);
+
+ virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+ device_role_t role);
+
+ virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+ device_role_t role,
+ AudioDeviceTypeAddrVector &devices);
+
+ virtual status_t setUserIdDeviceAffinities(int userId,
+ const AudioDeviceTypeAddrVector& devices);
virtual status_t removeUserIdDeviceAffinities(int userId);
@@ -743,6 +765,8 @@
void setSoundTriggerCaptureState(bool active) override;
+ status_t getAudioPort(struct audio_port_v7 *port) override;
+
private:
AudioPolicyService *mAudioPolicyService;
};
@@ -751,7 +775,7 @@
class NotificationClient : public IBinder::DeathRecipient {
public:
NotificationClient(const sp<AudioPolicyService>& service,
- const sp<IAudioPolicyServiceClient>& client,
+ const sp<media::IAudioPolicyServiceClient>& client,
uid_t uid, pid_t pid);
virtual ~NotificationClient();
@@ -783,12 +807,12 @@
NotificationClient(const NotificationClient&);
NotificationClient& operator = (const NotificationClient&);
- const wp<AudioPolicyService> mService;
- const uid_t mUid;
- const pid_t mPid;
- const sp<IAudioPolicyServiceClient> mAudioPolicyServiceClient;
- bool mAudioPortCallbacksEnabled;
- bool mAudioVolumeGroupCallbacksEnabled;
+ const wp<AudioPolicyService> mService;
+ const uid_t mUid;
+ const pid_t mPid;
+ const sp<media::IAudioPolicyServiceClient> mAudioPolicyServiceClient;
+ bool mAudioPortCallbacksEnabled;
+ bool mAudioVolumeGroupCallbacksEnabled;
};
class AudioClient : public virtual RefBase {
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index efdb241..daedf31 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -18,7 +18,10 @@
"libxml2",
],
- static_libs: ["libaudiopolicycomponents"],
+ static_libs: [
+ "libaudiopolicycomponents",
+ "libgmock"
+ ],
header_libs: [
"libaudiopolicycommon",
@@ -42,6 +45,7 @@
cc_test {
name: "audio_health_tests",
+ require_root: true,
shared_libs: [
"libaudiofoundation",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index e1721ea..433a6ff 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -75,6 +75,10 @@
status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
int /*delayMs*/) override {
+ auto iter = mActivePatches.find(*handle);
+ if (iter != mActivePatches.end()) {
+ mActivePatches.erase(*handle);
+ }
*handle = mNextPatchHandle++;
mActivePatches.insert(std::make_pair(*handle, *patch));
return NO_ERROR;
@@ -117,6 +121,8 @@
size_t getAudioPortListUpdateCount() const { return mAudioPortListUpdateCount; }
+ virtual void addSupportedFormat(audio_format_t /* format */) {}
+
private:
audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
new file mode 100644
index 0000000..a5ad9b1
--- /dev/null
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <set>
+
+#include <system/audio.h>
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include "AudioPolicyTestClient.h"
+
+namespace android {
+
+class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
+public:
+ String8 getParameters(audio_io_handle_t /* ioHandle */, const String8& /* keys*/ ) override {
+ return mAudioParameters.toString();
+ }
+
+ void addSupportedFormat(audio_format_t format) override {
+ mAudioParameters.add(
+ String8(AudioParameter::keyStreamSupportedFormats),
+ String8(audio_format_to_string(format)));
+ mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+ mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
+ }
+
+private:
+ AudioParameter mAudioParameters;
+};
+
+} // namespace android
\ No newline at end of file
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index c628e70..fa6b90f 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -87,6 +87,9 @@
audio_session_t sessionId __unused,
bool suspended __unused) {}
void setSoundTriggerCaptureState(bool active __unused) override {};
+ status_t getAudioPort(struct audio_port_v7 *port __unused) override {
+ return INVALID_OPERATION;
+ };
};
} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 8bab020..be860e5 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -29,6 +29,7 @@
using AudioPolicyManager::getOutputs;
using AudioPolicyManager::getAvailableOutputDevices;
using AudioPolicyManager::getAvailableInputDevices;
+ using AudioPolicyManager::setSurroundFormatEnabled;
uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
};
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index b5c67a1..ca2f0c6 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -16,6 +16,7 @@
#define LOG_TAG "AudioPolicy_Boot_Test"
+#include <string>
#include <unordered_set>
#include <gtest/gtest.h>
@@ -33,7 +34,7 @@
unsigned int numPorts;
unsigned int generation1;
unsigned int generation;
- struct audio_port *audioPorts = NULL;
+ struct audio_port_v7 *audioPorts = nullptr;
int attempts = 10;
do {
if (attempts-- < 0) {
@@ -42,13 +43,14 @@
}
numPorts = 0;
ASSERT_EQ(NO_ERROR, AudioSystem::listAudioPorts(
- AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, NULL, &generation1));
+ AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1));
if (numPorts == 0) {
free(audioPorts);
GTEST_FAIL() << "Number of audio ports should not be zero";
}
- audioPorts = (struct audio_port *)realloc(audioPorts, numPorts * sizeof(struct audio_port));
+ audioPorts = (struct audio_port_v7 *)realloc(
+ audioPorts, numPorts * sizeof(struct audio_port_v7));
status_t status = AudioSystem::listAudioPorts(
AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, audioPorts, &generation);
if (status != NO_ERROR) {
@@ -74,3 +76,43 @@
ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
}
}
+
+TEST(AudioHealthTest, ConnectSupportedDevice) {
+ AudioPolicyManagerTestClient client;
+ AudioPolicyTestManager manager(&client);
+ manager.loadConfig();
+ ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+
+ DeviceVector devices;
+ for (const auto& hwModule : manager.getConfig().getHwModules()) {
+ for (const auto& profile : hwModule->getOutputProfiles()) {
+ devices.merge(profile->getSupportedDevices());
+ }
+ for (const auto& profile : hwModule->getInputProfiles()) {
+ devices.merge(profile->getSupportedDevices());
+ }
+ }
+ for (const auto& device : devices) {
+ if (!audio_is_bluetooth_out_sco_device(device->type()) &&
+ !audio_is_bluetooth_in_sco_device(device->type())) {
+ // There are two reasons to only test connecting BT devices.
+ // 1) It is easier to construct a fake address.
+ // 2) This test will be run in presubmit. In that case, it makes sense to make the test
+ // processing time short.
+ continue;
+ }
+ std::string address = "11:22:33:44:55:66";
+ ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+ ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+ device->type(), AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(),
+ "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+ ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+ ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+ device->type(), AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, address.c_str(),
+ "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+ ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+ }
+}
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a0074bc..889efac 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -20,6 +20,7 @@
#include <unistd.h>
#include <gtest/gtest.h>
+#include <gmock/gmock.h>
#define LOG_TAG "APM_Test"
#include <Serializer.h>
@@ -32,10 +33,12 @@
#include "AudioPolicyInterface.h"
#include "AudioPolicyManagerTestClient.h"
+#include "AudioPolicyManagerTestClientForHdmi.h"
#include "AudioPolicyTestClient.h"
#include "AudioPolicyTestManager.h"
using namespace android;
+using testing::UnorderedElementsAre;
TEST(AudioPolicyManagerTestInit, EngineFailure) {
AudioPolicyTestClient client;
@@ -56,6 +59,34 @@
ASSERT_EQ(NO_INIT, manager.initCheck());
}
+// Verifies that a failure while loading a config doesn't leave
+// the APM config in a "dirty" state. Since the AudioPolicyConfig object
+// is a proxy for the data hosted by APM, it isn't possible
+// to "deep copy" it, and thus we have to test its elements
+// individually.
+TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
+ AudioPolicyTestClient client;
+ AudioPolicyTestManager manager(&client);
+ ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
+ ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+ ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+ status_t status = deserializeAudioPolicyFile(
+ (base::GetExecutableDirectory() +
+ "/test_invalid_audio_policy_configuration.xml").c_str(),
+ &manager.getConfig());
+ ASSERT_NE(NO_ERROR, status);
+ EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
+ EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+ EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+ status = deserializeAudioPolicyFile(
+ (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
+ &manager.getConfig());
+ ASSERT_EQ(NO_ERROR, status);
+ EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
+ EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
+ EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
+}
+
class PatchCountCheck {
public:
@@ -87,7 +118,7 @@
void getOutputForAttr(
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
- int channelMask,
+ audio_channel_mask_t channelMask,
int sampleRate,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
@@ -98,7 +129,7 @@
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
- int channelMask,
+ audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
audio_port_handle_t *portId = nullptr);
@@ -107,15 +138,16 @@
// Tries to find a device port. If 'foundPort' isn't nullptr,
// will generate a failure if the port hasn't been found.
bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
- const std::string &address, audio_port *foundPort);
+ const std::string &address, audio_port_v7 *foundPort);
static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
+ virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
std::unique_ptr<AudioPolicyManagerTestClient> mClient;
std::unique_ptr<AudioPolicyTestManager> mManager;
};
void AudioPolicyManagerTest::SetUp() {
- mClient.reset(new AudioPolicyManagerTestClient);
+ mClient.reset(getClient());
mManager.reset(new AudioPolicyTestManager(mClient.get()));
SetUpManagerConfig(); // Subclasses may want to customize the config.
ASSERT_EQ(NO_ERROR, mManager->initialize());
@@ -164,7 +196,7 @@
void AudioPolicyManagerTest::getOutputForAttr(
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
- int channelMask,
+ audio_channel_mask_t channelMask,
int sampleRate,
audio_output_flags_t flags,
audio_io_handle_t *output,
@@ -194,7 +226,7 @@
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
- int channelMask,
+ audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags,
audio_port_handle_t *portId) {
@@ -214,7 +246,7 @@
}
bool AudioPolicyManagerTest::findDevicePort(audio_port_role_t role,
- audio_devices_t deviceType, const std::string &address, audio_port *foundPort) {
+ audio_devices_t deviceType, const std::string &address, audio_port_v7 *foundPort) {
uint32_t numPorts = 0;
uint32_t generation1;
status_t ret;
@@ -224,7 +256,7 @@
if (HasFailure()) return false;
uint32_t generation2;
- struct audio_port ports[numPorts];
+ struct audio_port_v7 ports[numPorts];
ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation2);
EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
EXPECT_EQ(generation1, generation2) << "Generations changed during ports retrieval";
@@ -638,6 +670,165 @@
ASSERT_EQ(INVALID_OPERATION, ret);
}
+class AudioPolicyManagerTestForHdmi
+ : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+ void SetUp() override;
+ std::string getConfigFile() override { return sTvConfig; }
+ std::map<audio_format_t, bool> getSurroundFormatsHelper(bool reported);
+ std::unordered_set<audio_format_t> getFormatsFromPorts();
+ AudioPolicyManagerTestClient* getClient() override {
+ return new AudioPolicyManagerTestClientForHdmi;
+ }
+ void TearDown() override;
+
+ static const std::string sTvConfig;
+
+};
+
+const std::string AudioPolicyManagerTestForHdmi::sTvConfig =
+ AudioPolicyManagerTestForHdmi::sExecutableDir +
+ "test_settop_box_surround_configuration.xml";
+
+void AudioPolicyManagerTestForHdmi::SetUp() {
+ AudioPolicyManagerTest::SetUp();
+ mClient->addSupportedFormat(AUDIO_FORMAT_E_AC3);
+ mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
+}
+
+void AudioPolicyManagerTestForHdmi::TearDown() {
+ mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
+ AudioPolicyManagerTest::TearDown();
+}
+
+std::map<audio_format_t, bool>
+ AudioPolicyManagerTestForHdmi::getSurroundFormatsHelper(bool reported) {
+ unsigned int numSurroundFormats = 0;
+ std::map<audio_format_t, bool> surroundFormatsMap;
+ status_t ret = mManager->getSurroundFormats(
+ &numSurroundFormats, nullptr /* surroundFormats */,
+ nullptr /* surroundFormatsEnabled */, reported);
+ EXPECT_EQ(NO_ERROR, ret);
+ if (ret != NO_ERROR) {
+ return surroundFormatsMap;
+ }
+ audio_format_t surroundFormats[numSurroundFormats];
+ memset(surroundFormats, 0, sizeof(audio_format_t) * numSurroundFormats);
+ bool surroundFormatsEnabled[numSurroundFormats];
+ memset(surroundFormatsEnabled, 0, sizeof(bool) * numSurroundFormats);
+ ret = mManager->getSurroundFormats(
+ &numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
+ EXPECT_EQ(NO_ERROR, ret);
+ if (ret != NO_ERROR) {
+ return surroundFormatsMap;
+ }
+ for (int i = 0; i< numSurroundFormats; i++) {
+ surroundFormatsMap[surroundFormats[i]] = surroundFormatsEnabled[i];
+ }
+ return surroundFormatsMap;
+}
+
+std::unordered_set<audio_format_t>
+ AudioPolicyManagerTestForHdmi::getFormatsFromPorts() {
+ uint32_t numPorts = 0;
+ uint32_t generation1;
+ status_t ret;
+ std::unordered_set<audio_format_t> formats;
+ ret = mManager->listAudioPorts(
+ AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1);
+ EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
+ if (ret != NO_ERROR) {
+ return formats;
+ }
+ struct audio_port_v7 ports[numPorts];
+ ret = mManager->listAudioPorts(
+ AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation1);
+ EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
+ if (ret != NO_ERROR) {
+ return formats;
+ }
+ for (const auto &port : ports) {
+ for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+ formats.insert(port.audio_profiles[i].format);
+ }
+ }
+ return formats;
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi, GetSurroundFormatsReturnsSupportedFormats) {
+ mManager->setForceUse(
+ AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
+ auto surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+ GetSurroundFormatsReturnsManipulatedFormats) {
+ mManager->setForceUse(
+ AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+ status_t ret =
+ mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+ ASSERT_EQ(NO_ERROR, ret);
+ auto surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+ ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+
+ ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+ ASSERT_EQ(NO_ERROR, ret);
+ surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+ ASSERT_TRUE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+
+ ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+ ASSERT_EQ(NO_ERROR, ret);
+ surroundFormats = getSurroundFormatsHelper(false /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+ ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+ ListAudioPortsReturnManipulatedHdmiFormats) {
+ mManager->setForceUse(
+ AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+ ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/));
+ auto formats = getFormatsFromPorts();
+ ASSERT_EQ(0, formats.count(AUDIO_FORMAT_E_AC3));
+
+ ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/));
+ formats = getFormatsFromPorts();
+ ASSERT_EQ(1, formats.count(AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+ GetReportedSurroundFormatsReturnsHdmiReportedFormats) {
+ mManager->setForceUse(
+ AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
+ auto surroundFormats = getSurroundFormatsHelper(true /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+ GetReportedSurroundFormatsReturnsNonManipulatedHdmiReportedFormats) {
+ mManager->setForceUse(
+ AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+ status_t ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+ ASSERT_EQ(NO_ERROR, ret);
+ auto surroundFormats = getSurroundFormatsHelper(true /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+
+ ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+ ASSERT_EQ(NO_ERROR, ret);
+ surroundFormats = getSurroundFormatsHelper(true /*reported*/);
+ ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+}
+
class AudioPolicyManagerTestDPNoRemoteSubmixModule : public AudioPolicyManagerTestDynamicPolicy {
protected:
std::string getConfigFile() override { return sPrimaryOnlyConfig; }
@@ -684,7 +875,7 @@
{AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE}
};
- struct audio_port mInjectionPort;
+ struct audio_port_v7 mInjectionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -701,13 +892,14 @@
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
ASSERT_EQ(NO_ERROR, ret);
- struct audio_port extractionPort;
+ struct audio_port_v7 extractionPort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX,
mMixAddress, &extractionPort));
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_source_t source = AUDIO_SOURCE_REMOTE_SUBMIX;
- audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, 0, ""};
+ audio_attributes_t attr = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
std::string tags = "addr=" + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
@@ -757,9 +949,9 @@
AudioPolicyManagerTestDPPlaybackReRouting,
testing::Values(
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, 0, ""}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
)
);
@@ -768,47 +960,47 @@
AudioPolicyManagerTestDPPlaybackReRouting,
testing::Values(
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
)
);
@@ -817,41 +1009,41 @@
AudioPolicyManagerTestDPPlaybackReRouting,
testing::Values(
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, 0, ""},
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, 0, ""}
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
)
);
@@ -869,7 +1061,7 @@
{AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_VOICE_COMMUNICATION, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}
};
- struct audio_port mExtractionPort;
+ struct audio_port_v7 mExtractionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -886,13 +1078,14 @@
AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
ASSERT_EQ(NO_ERROR, ret);
- struct audio_port injectionPort;
+ struct audio_port_v7 injectionPort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
mMixAddress, &injectionPort));
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
- audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, 0, ""};
+ audio_attributes_t attr =
+ {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
@@ -941,17 +1134,19 @@
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, 0, ""},
+ AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
+ "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, 0, "addr=remote_submix_media"},
+ AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
+ "addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, 0, ""},
+ AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, 0, ""},
+ AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, 0,
+ AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
"addr=remote_submix_media"}
)
);
@@ -962,14 +1157,15 @@
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, 0, ""},
+ AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, 0, ""},
+ AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, 0,
+ AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
"addr=remote_submix_media"},
(audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, 0, "addr=remote_submix_media"}
+ AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
+ "addr=remote_submix_media"}
)
);
@@ -1033,7 +1229,7 @@
type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
address.c_str(), name.c_str(), AUDIO_FORMAT_DEFAULT));
- audio_port devicePort;
+ audio_port_v7 devicePort;
const audio_port_role_t role = audio_is_output_device(type)
? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
ASSERT_TRUE(findDevicePort(role, type, address, &devicePort));
@@ -1094,7 +1290,7 @@
flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
- audio_port port = {};
+ audio_port_v7 port = {};
outDesc->toAudioPort(&port);
mManager->releaseOutput(portId);
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
@@ -1176,7 +1372,7 @@
findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", nullptr));
mClient->swapAllowedModuleNames({"primary", "r_submix"});
mManager->onNewAudioModulesAvailable();
- struct audio_port port;
+ struct audio_port_v7 port;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", &port));
}
@@ -1188,3 +1384,109 @@
EXPECT_GT(mClient->getAudioPortListUpdateCount(), prevAudioPortListUpdateCount);
EXPECT_GT(mManager->getAudioPortGeneration(), prevAudioPortGeneration);
}
+
+using DevicesRoleForCapturePresetParam = std::tuple<audio_source_t, device_role_t>;
+
+class AudioPolicyManagerDevicesRoleForCapturePresetTest
+ : public AudioPolicyManagerTestWithConfigurationFile,
+ public testing::WithParamInterface<DevicesRoleForCapturePresetParam> {
+protected:
+    // The `inputDevice` and `inputDevice2` indicate the audio device types to be used for
+    // setting device roles. They must be declared in the test_audio_policy_configuration.xml
+ AudioDeviceTypeAddr inputDevice = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_BUILTIN_MIC, "");
+ AudioDeviceTypeAddr inputDevice2 = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_HDMI, "");
+};
+
+TEST_P(AudioPolicyManagerDevicesRoleForCapturePresetTest, DevicesRoleForCapturePreset) {
+ const audio_source_t audioSource = std::get<0>(GetParam());
+ const device_role_t role = std::get<1>(GetParam());
+
+ // Test invalid device when setting
+ const AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+ const AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+ ASSERT_EQ(BAD_VALUE,
+ mManager->setDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+ ASSERT_EQ(BAD_VALUE,
+ mManager->addDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+ AudioDeviceTypeAddrVector devices;
+ ASSERT_EQ(NAME_NOT_FOUND,
+ mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ ASSERT_TRUE(devices.empty());
+ ASSERT_EQ(BAD_VALUE,
+ mManager->removeDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+
+ // Without setting, call get/remove/clear must fail
+ ASSERT_EQ(NAME_NOT_FOUND,
+ mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ ASSERT_EQ(NAME_NOT_FOUND,
+ mManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
+ ASSERT_EQ(NAME_NOT_FOUND,
+ mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+
+ // Test set/get devices role
+ const AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+ ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ EXPECT_THAT(devices, UnorderedElementsAre(inputDevice));
+
+ // Test setting will change the previously set devices
+ const AudioDeviceTypeAddrVector inputDevices2 = {inputDevice2};
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices2));
+ devices.clear();
+ ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+ // Test add devices
+ ASSERT_EQ(NO_ERROR,
+ mManager->addDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+ devices.clear();
+ ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ EXPECT_THAT(devices, UnorderedElementsAre(inputDevice, inputDevice2));
+
+ // Test remove devices
+ ASSERT_EQ(NO_ERROR,
+ mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+ devices.clear();
+ ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+ EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+ // Test remove devices that are not set as the device role
+ ASSERT_EQ(BAD_VALUE,
+ mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+
+ // Test clear devices
+ ASSERT_EQ(NO_ERROR,
+ mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+ devices.clear();
+ ASSERT_EQ(NAME_NOT_FOUND,
+ mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+}
+
+INSTANTIATE_TEST_CASE_P(
+ DevicesRoleForCapturePresetOperation,
+ AudioPolicyManagerDevicesRoleForCapturePresetTest,
+ testing::Values(
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_MIC, DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_UPLINK,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_DOWNLINK,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_CALL, DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_CAMCORDER, DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_RECOGNITION,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_COMMUNICATION,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_REMOTE_SUBMIX,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_UNPROCESSED, DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_PERFORMANCE,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_ECHO_REFERENCE,
+ DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_FM_TUNER, DEVICE_ROLE_PREFERRED}),
+ DevicesRoleForCapturePresetParam({AUDIO_SOURCE_HOTWORD, DEVICE_ROLE_PREFERRED})
+ )
+ );
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index d9476d9..2f6e925 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -3,6 +3,8 @@
srcs: [
"test_audio_policy_configuration.xml",
"test_audio_policy_primary_only_configuration.xml",
+ "test_invalid_audio_policy_configuration.xml",
"test_tv_apm_configuration.xml",
+ "test_settop_box_surround_configuration.xml",
],
}
diff --git a/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
new file mode 100644
index 0000000..25641d5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- This file contains an unnamed device port in the "r_submix" module section. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+
+ <modules>
+ <!-- Primary module -->
+ <module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ <item>Built-In Mic</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,16000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+ </devicePort>
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ </devicePort>
+ <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+ </devicePort>
+ <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+ </devicePort>
+ <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"
+ role="sink" address="hfp_client_out">
+ </devicePort>
+ <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+ role="source" address="hfp_client_in">
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Speaker"
+ sources="primary output"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic,Hdmi-In Mic"/>
+ <route type="mix" sink="Hdmi"
+ sources="primary output"/>
+ <route type="mix" sink="BT SCO"
+ sources="mixport_bt_hfp_output"/>
+ <route type="mix" sink="mixport_bt_hfp_input"
+ sources="BT SCO Headset Mic"/>
+ </routes>
+ </module>
+
+ <!-- Remote Submix module -->
+ <module name="r_submix" halVersion="2.0">
+ <attachedDevices>
+ <item>Remote Submix In</item>
+ </attachedDevices>
+ <mixPorts>
+ <mixPort name="r_submix output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="r_submix input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+            <!-- This port deliberately omits the "tagName" attribute to make the config invalid. -->
+ <devicePort type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Remote Submix Out"
+ sources="r_submix output"/>
+ <route type="mix" sink="r_submix input"
+ sources="Remote Submix In"/>
+ </routes>
+ </module>
+ </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml b/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml
new file mode 100644
index 0000000..6f7375e
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ ~ Copyright (C) 2020 The Android Open Source Project
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="false"/>
+ <modules>
+ <module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Stub</item>
+ </attachedDevices>
+ <defaultOutputDevice>Stub</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary pcm" role="source"
+ flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="multichannel output" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT">
+ <profile name="" />
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Stub" type="AUDIO_DEVICE_OUT_STUB" role="sink" />
+ <devicePort tagName="HDMI" type="AUDIO_DEVICE_OUT_HDMI" role="sink" />
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Stub" sources="primary pcm"/>
+ <route type="mix" sink="HDMI" sources="primary pcm,multichannel output"/>
+ </routes>
+ </module>
+ </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 501d922..8cb40e0 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -30,7 +30,6 @@
"common/CameraProviderManager.cpp",
"common/DepthPhotoProcessor.cpp",
"common/FrameProcessorBase.cpp",
- "api1/CameraClient.cpp",
"api1/Camera2Client.cpp",
"api1/client2/Parameters.cpp",
"api1/client2/FrameProcessor.cpp",
@@ -46,7 +45,6 @@
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
- "device1/CameraHardwareInterface.cpp",
"device3/BufferUtils.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3OfflineSession.cpp",
@@ -54,7 +52,7 @@
"device3/Camera3IOStreamBase.cpp",
"device3/Camera3InputStream.cpp",
"device3/Camera3OutputStream.cpp",
- "device3/Camera3DummyStream.cpp",
+ "device3/Camera3FakeStream.cpp",
"device3/Camera3SharedOutputStream.cpp",
"device3/StatusTracker.cpp",
"device3/Camera3BufferManager.cpp",
@@ -71,11 +69,13 @@
"hidl/Convert.cpp",
"hidl/HidlCameraDeviceUser.cpp",
"hidl/HidlCameraService.cpp",
+ "utils/CameraServiceProxyWrapper.cpp",
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
"utils/AutoConditionLock.cpp",
"utils/ExifUtils.cpp",
"utils/SessionConfigurationUtils.cpp",
+ "utils/SessionStatsBuilder.cpp",
"utils/TagMonitor.cpp",
"utils/LatencyHistogram.cpp",
],
@@ -117,12 +117,13 @@
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
"android.frameworks.cameraservice.service@2.1",
+ "android.frameworks.cameraservice.service@2.2",
"android.frameworks.cameraservice.device@2.0",
+ "android.frameworks.cameraservice.device@2.1",
"android.hardware.camera.common@1.0",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
- "android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index e629cdd..ccdd9e5 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -59,9 +59,8 @@
if (mProviderManager->supportSetTorchMode(cameraId.string())) {
mFlashControl = new ProviderFlashControl(mProviderManager);
} else {
- // Only HAL1 devices do not support setTorchMode
- mFlashControl =
- new CameraHardwareInterfaceFlashControl(mProviderManager, mCallbacks);
+ ALOGE("Flashlight control not supported by this device!");
+ return NO_INIT;
}
return OK;
@@ -309,271 +308,4 @@
}
// ProviderFlashControl implementation ends
-/////////////////////////////////////////////////////////////////////
-// CameraHardwareInterfaceFlashControl implementation begins
-// Flash control for camera module <= v2.3 and camera HAL v1
-/////////////////////////////////////////////////////////////////////
-
-CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
- sp<CameraProviderManager> manager,
- CameraProviderManager::StatusListener* callbacks) :
- mProviderManager(manager),
- mCallbacks(callbacks),
- mTorchEnabled(false) {
-}
-
-CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
- disconnectCameraDevice();
-
- mSurface.clear();
- mSurfaceTexture.clear();
- mProducer.clear();
- mConsumer.clear();
-
- if (mTorchEnabled) {
- if (mCallbacks) {
- ALOGV("%s: notify the framework that torch was turned off",
- __FUNCTION__);
- mCallbacks->onTorchStatusChanged(mCameraId, TorchModeStatus::AVAILABLE_OFF);
- }
- }
-}
-
-status_t CameraHardwareInterfaceFlashControl::setTorchMode(
- const String8& cameraId, bool enabled) {
- Mutex::Autolock l(mLock);
-
- // pre-check
- status_t res;
- if (enabled) {
- bool hasFlash = false;
- // Check if it has a flash unit and leave camera device open.
- res = hasFlashUnitLocked(cameraId, &hasFlash, /*keepDeviceOpen*/true);
- // invalid camera?
- if (res) {
- // hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to
- // another camera device.
- return res == BAD_INDEX ? BAD_INDEX : -EINVAL;
- }
- // no flash unit?
- if (!hasFlash) {
- // Disconnect camera device if it has no flash.
- disconnectCameraDevice();
- return -ENOSYS;
- }
- } else if (mDevice == NULL || cameraId != mCameraId) {
- // disabling the torch mode of an un-opened or different device.
- return OK;
- } else {
- // disabling the torch mode of currently opened device
- disconnectCameraDevice();
- mTorchEnabled = false;
- mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_OFF);
- return OK;
- }
-
- res = startPreviewAndTorch();
- if (res) {
- return res;
- }
-
- mTorchEnabled = true;
- mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_ON);
- return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnit(
- const String8& cameraId, bool *hasFlash) {
- Mutex::Autolock l(mLock);
- // Close device after checking if it has a flash unit.
- return hasFlashUnitLocked(cameraId, hasFlash, /*keepDeviceOpen*/false);
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked(
- const String8& cameraId, bool *hasFlash, bool keepDeviceOpen) {
- bool closeCameraDevice = false;
-
- if (!hasFlash) {
- return BAD_VALUE;
- }
-
- status_t res;
- if (mDevice == NULL) {
- // Connect to camera device to query if it has a flash unit.
- res = connectCameraDevice(cameraId);
- if (res) {
- return res;
- }
- // Close camera device only when it is just opened and the caller doesn't want to keep
- // the camera device open.
- closeCameraDevice = !keepDeviceOpen;
- }
-
- if (cameraId != mCameraId) {
- return BAD_INDEX;
- }
-
- const char *flashMode =
- mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
- if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
- *hasFlash = true;
- } else {
- *hasFlash = false;
- }
-
- if (closeCameraDevice) {
- res = disconnectCameraDevice();
- if (res != OK) {
- ALOGE("%s: Failed to disconnect camera device. %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
- }
- }
-
- return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::startPreviewAndTorch() {
- status_t res = OK;
- res = mDevice->startPreview();
- if (res) {
- ALOGE("%s: start preview failed. %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
- }
-
- mParameters.set(CameraParameters::KEY_FLASH_MODE,
- CameraParameters::FLASH_MODE_TORCH);
-
- return mDevice->setParameters(mParameters);
-}
-
-status_t CameraHardwareInterfaceFlashControl::getSmallestSurfaceSize(
- int32_t *width, int32_t *height) {
- if (!width || !height) {
- return BAD_VALUE;
- }
-
- int32_t w = INT32_MAX;
- int32_t h = 1;
- Vector<Size> sizes;
-
- mParameters.getSupportedPreviewSizes(sizes);
- for (size_t i = 0; i < sizes.size(); i++) {
- Size s = sizes[i];
- if (w * h > s.width * s.height) {
- w = s.width;
- h = s.height;
- }
- }
-
- if (w == INT32_MAX) {
- return NAME_NOT_FOUND;
- }
-
- *width = w;
- *height = h;
-
- return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::initializePreviewWindow(
- const sp<CameraHardwareInterface>& device, int32_t width, int32_t height) {
- status_t res;
- BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
- mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
- true, true);
- if (mSurfaceTexture == NULL) {
- return NO_MEMORY;
- }
-
- int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = mSurfaceTexture->setDefaultBufferSize(width, height);
- if (res) {
- return res;
- }
- res = mSurfaceTexture->setDefaultBufferFormat(format);
- if (res) {
- return res;
- }
-
- mSurface = new Surface(mProducer, /*useAsync*/ true);
- if (mSurface == NULL) {
- return NO_MEMORY;
- }
-
- res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
- if (res) {
- ALOGE("%s: Unable to connect to native window", __FUNCTION__);
- return res;
- }
-
- return device->setPreviewWindow(mSurface);
-}
-
-status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
- const String8& cameraId) {
- sp<CameraHardwareInterface> device =
- new CameraHardwareInterface(cameraId.string());
-
- status_t res = device->initialize(mProviderManager);
- if (res) {
- ALOGE("%s: initializing camera %s failed", __FUNCTION__,
- cameraId.string());
- return res;
- }
-
- // need to set __get_memory in set_callbacks().
- device->setCallbacks(NULL, NULL, NULL, NULL, NULL);
-
- mParameters = device->getParameters();
-
- int32_t width, height;
- res = getSmallestSurfaceSize(&width, &height);
- if (res) {
- ALOGE("%s: failed to get smallest surface size for camera %s",
- __FUNCTION__, cameraId.string());
- return res;
- }
-
- res = initializePreviewWindow(device, width, height);
- if (res) {
- ALOGE("%s: failed to initialize preview window for camera %s",
- __FUNCTION__, cameraId.string());
- return res;
- }
-
- mCameraId = cameraId;
- mDevice = device;
- return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() {
- if (mDevice == NULL) {
- return OK;
- }
-
- if (mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
- // There is a flash, turn if off.
- // (If there isn't one, leave the parameter null)
- mParameters.set(CameraParameters::KEY_FLASH_MODE,
- CameraParameters::FLASH_MODE_OFF);
- mDevice->setParameters(mParameters);
- }
- mDevice->stopPreview();
- status_t res = native_window_api_disconnect(mSurface.get(),
- NATIVE_WINDOW_API_CAMERA);
- if (res) {
- ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- }
- mDevice->setPreviewWindow(NULL);
- mDevice->release();
- mDevice = NULL;
-
- return OK;
-}
-// CameraHardwareInterfaceFlashControl implementation ends
-
}
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 1baaba2..b97fa5f 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -23,8 +23,6 @@
#include <utils/SortedVector.h>
#include "common/CameraProviderManager.h"
#include "common/CameraDeviceBase.h"
-#include "device1/CameraHardwareInterface.h"
-
namespace android {
@@ -124,59 +122,6 @@
Mutex mLock;
};
-/**
- * Flash control for camera module <= v2.3 and camera HAL v1
- */
-class CameraHardwareInterfaceFlashControl : public FlashControlBase {
- public:
- CameraHardwareInterfaceFlashControl(
- sp<CameraProviderManager> manager,
- CameraProviderManager::StatusListener* callbacks);
- virtual ~CameraHardwareInterfaceFlashControl();
-
- // FlashControlBase
- status_t setTorchMode(const String8& cameraId, bool enabled);
- status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-
- private:
- // connect to a camera device
- status_t connectCameraDevice(const String8& cameraId);
-
- // disconnect and free mDevice
- status_t disconnectCameraDevice();
-
- // initialize the preview window
- status_t initializePreviewWindow(const sp<CameraHardwareInterface>& device,
- int32_t width, int32_t height);
-
- // start preview and enable torch
- status_t startPreviewAndTorch();
-
- // get the smallest surface
- status_t getSmallestSurfaceSize(int32_t *width, int32_t *height);
-
- // protected by mLock
- // If this function opens camera device in order to check if it has a flash unit, the
- // camera device will remain open if keepDeviceOpen is true and the camera device will be
- // closed if keepDeviceOpen is false. If camera device is already open when calling this
- // function, keepDeviceOpen is ignored.
- status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
-
- sp<CameraProviderManager> mProviderManager;
- CameraProviderManager::StatusListener* mCallbacks;
- sp<CameraHardwareInterface> mDevice;
- String8 mCameraId;
- CameraParameters mParameters;
- bool mTorchEnabled;
-
- sp<IGraphicBufferProducer> mProducer;
- sp<IGraphicBufferConsumer> mConsumer;
- sp<GLConsumer> mSurfaceTexture;
- sp<Surface> mSurface;
-
- Mutex mLock;
-};
-
} // namespace android
#endif
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 683955b..b4c0da3 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -21,6 +21,7 @@
#include <algorithm>
#include <climits>
#include <stdio.h>
+#include <cstdlib>
#include <cstring>
#include <ctime>
#include <string>
@@ -37,7 +38,6 @@
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/PermissionController.h>
@@ -70,12 +70,12 @@
#include <system/camera.h>
#include "CameraService.h"
-#include "api1/CameraClient.h"
#include "api1/Camera2Client.h"
#include "api2/CameraDeviceClient.h"
#include "utils/CameraTraces.h"
#include "utils/TagMonitor.h"
#include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
namespace {
const char* kPermissionServiceName = "permission";
@@ -88,7 +88,6 @@
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using hardware::ICamera;
using hardware::ICameraClient;
-using hardware::ICameraServiceProxy;
using hardware::ICameraServiceListener;
using hardware::camera::common::V1_0::CameraDeviceStatus;
using hardware::camera::common::V1_0::TorchModeStatus;
@@ -135,9 +134,6 @@
static constexpr int32_t kVendorClientState = 1;
const String8 CameraService::kOfflineDevice("offline-");
-Mutex CameraService::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
-
CameraService::CameraService() :
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0),
@@ -179,7 +175,7 @@
// This needs to be last call in this function, so that it's as close to
// ServiceManager::addService() as possible.
- CameraService::pingCameraServiceProxy();
+ CameraServiceProxyWrapper::pingCameraServiceProxy();
ALOGI("CameraService pinged cameraservice proxy");
}
@@ -229,29 +225,6 @@
return OK;
}
-sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
-#ifndef __BRILLO__
- Mutex::Autolock al(sProxyMutex);
- if (sCameraServiceProxy == nullptr) {
- sp<IServiceManager> sm = defaultServiceManager();
- // Use checkService because cameraserver normally starts before the
- // system server and the proxy service. So the long timeout that getService
- // has before giving up is inappropriate.
- sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
- if (binder != nullptr) {
- sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
- }
- }
-#endif
- return sCameraServiceProxy;
-}
-
-void CameraService::pingCameraServiceProxy() {
- sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return;
- proxyBinder->pingForUserUpdate();
-}
-
void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
Mutex::Autolock lock(mStatusListenerLock);
@@ -467,10 +440,21 @@
logDeviceRemoved(idCombo,
String8::format("Device status changed to %d", newStatus));
}
-
+ // Avoid calling getSystemCameraKind() with mStatusListenerLock held (b/141756275)
+ SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+ if (getSystemCameraKind(id, &deviceKind) != OK) {
+ ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, id.string());
+ return;
+ }
String16 id16(id), physicalId16(physicalId);
Mutex::Autolock lock(mStatusListenerLock);
for (auto& listener : mListenerList) {
+ if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
+ listener->getListenerPid(), listener->getListenerUid())) {
+ ALOGV("Skipping discovery callback for system-only camera device %s",
+ id.c_str());
+ continue;
+ }
listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
id16, physicalId16);
}
@@ -485,9 +469,6 @@
clientToDisconnect->notifyError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
CaptureResultExtras{});
- // Ensure not in binder RPC so client disconnect PID checks work correctly
- LOG_ALWAYS_FATAL_IF(CameraThreadState::getCallingPid() != getpid(),
- "onDeviceStatusChanged must be called from the camera service process!");
clientToDisconnect->disconnect();
}
}
@@ -671,9 +652,15 @@
status_t res = mCameraProviderManager->getCameraCharacteristics(
String8(cameraId).string(), cameraInfo);
if (res != OK) {
- return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
- "characteristics for device %s: %s (%d)", String8(cameraId).string(),
- strerror(-res), res);
+ if (res == NAME_NOT_FOUND) {
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
+ "characteristics for unknown device %s: %s (%d)", String8(cameraId).string(),
+ strerror(-res), res);
+ } else {
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+ "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+ strerror(-res), res);
+ }
}
SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
if (getSystemCameraKind(String8(cameraId), &deviceKind) != OK) {
@@ -794,35 +781,26 @@
Status CameraService::makeClient(const sp<CameraService>& cameraService,
const sp<IInterface>& cameraCb, const String16& packageName,
- const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
- int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+ const std::optional<String16>& featureId, const String8& cameraId,
+ int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client) {
- if (halVersion < 0 || halVersion == deviceVersion) {
- // Default path: HAL version is unspecified by caller, create CameraClient
- // based on device version reported by the HAL.
- switch(deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_1_0:
- if (effectiveApiLevel == API_1) { // Camera1 API route
- sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new CameraClient(cameraService, tmp, packageName, featureId,
- api1CameraId, facing, clientPid, clientUid,
- getpid());
- } else { // Camera2 API route
- ALOGW("Camera using old HAL version: %d", deviceVersion);
- return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
- "Camera device \"%s\" HAL version %d does not support camera2 API",
- cameraId.string(), deviceVersion);
- }
+ // Create CameraClient based on device version reported by the HAL.
+ switch(deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_1_0:
+ ALOGE("Camera using old HAL version: %d", deviceVersion);
+ return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+ "Camera device \"%s\" HAL version %d no longer supported",
+ cameraId.string(), deviceVersion);
break;
- case CAMERA_DEVICE_API_VERSION_3_0:
- case CAMERA_DEVICE_API_VERSION_3_1:
- case CAMERA_DEVICE_API_VERSION_3_2:
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_4:
- case CAMERA_DEVICE_API_VERSION_3_5:
- case CAMERA_DEVICE_API_VERSION_3_6:
+ case CAMERA_DEVICE_API_VERSION_3_0:
+ case CAMERA_DEVICE_API_VERSION_3_1:
+ case CAMERA_DEVICE_API_VERSION_3_2:
+ case CAMERA_DEVICE_API_VERSION_3_3:
+ case CAMERA_DEVICE_API_VERSION_3_4:
+ case CAMERA_DEVICE_API_VERSION_3_5:
+ case CAMERA_DEVICE_API_VERSION_3_6:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -836,32 +814,12 @@
cameraId, facing, clientPid, clientUid, servicePid);
}
break;
- default:
+ default:
// Should not be reachable
ALOGE("Unknown camera device HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
"Camera device \"%s\" has unknown HAL version %d",
cameraId.string(), deviceVersion);
- }
- } else {
- // A particular HAL version is requested by caller. Create CameraClient
- // based on the requested HAL version.
- if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 &&
- halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
- // Only support higher HAL version device opened as HAL1.0 device.
- sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new CameraClient(cameraService, tmp, packageName, featureId,
- api1CameraId, facing, clientPid, clientUid,
- servicePid);
- } else {
- // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
- ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
- " opened as HAL %x device", halVersion, deviceVersion,
- CAMERA_DEVICE_API_VERSION_1_0);
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
- cameraId.string(), deviceVersion, halVersion);
- }
}
return Status::ok();
}
@@ -949,7 +907,6 @@
sp<Client> tmp = nullptr;
if (!(ret = connectHelper<ICameraClient,Client>(
sp<ICameraClient>{nullptr}, id, cameraId,
- static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
internalPackageName, {}, uid, USE_CALLING_PID,
API_1, /*shimUpdateOnly*/ true, /*out*/ tmp)
).isOk()) {
@@ -1366,7 +1323,12 @@
Mutex::Autolock l(mLogLock);
mEventLog.add(msg);
- return -EBUSY;
+ auto current = mActiveClientManager.get(cameraId);
+ if (current != nullptr) {
+ return -EBUSY; // CAMERA_IN_USE
+ } else {
+ return -EUSERS; // MAX_CAMERAS_IN_USE
+ }
}
for (auto& i : evicted) {
@@ -1463,34 +1425,7 @@
String8 id = cameraIdIntToStr(api1CameraId);
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
- CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, {},
- clientUid, clientPid, API_1, /*shimUpdateOnly*/ false, /*out*/client);
-
- if(!ret.isOk()) {
- logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
- ret.toString8());
- return ret;
- }
-
- *device = client;
- return ret;
-}
-
-Status CameraService::connectLegacy(
- const sp<ICameraClient>& cameraClient,
- int api1CameraId, int halVersion,
- const String16& clientPackageName,
- int clientUid,
- /*out*/
- sp<ICamera>* device) {
-
- ATRACE_CALL();
- String8 id = cameraIdIntToStr(api1CameraId);
-
- Status ret = Status::ok();
- sp<Client> client = nullptr;
- ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,
- clientPackageName, {}, clientUid, USE_CALLING_PID, API_1,
+ clientPackageName, {}, clientUid, clientPid, API_1,
/*shimUpdateOnly*/ false, /*out*/client);
if(!ret.isOk()) {
@@ -1532,8 +1467,9 @@
int cUid = CameraThreadState::getCallingUid();
SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
- ALOGE("%s: Invalid camera id %s, ", __FUNCTION__, cameraId.c_str());
- return true;
+ // This isn't a known camera ID, so it's not a system camera
+ ALOGV("%s: Unknown camera id %s, ", __FUNCTION__, cameraId.c_str());
+ return false;
}
// (1) Cameraserver trying to connect, accept.
@@ -1582,8 +1518,7 @@
clientPackageNameAdj = String16(vendorClient.c_str());
}
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
- /*api1CameraId*/-1,
- CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageNameAdj, clientFeatureId,
+ /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, /*out*/client);
if(!ret.isOk()) {
@@ -1598,7 +1533,7 @@
template<class CALLBACK, class CLIENT>
Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
- int api1CameraId, int halVersion, const String16& clientPackageName,
+ int api1CameraId, const String16& clientPackageName,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly,
/*out*/sp<CLIENT>& device) {
@@ -1608,12 +1543,15 @@
int originalClientPid = 0;
- ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
+ ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
"Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
- (halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
static_cast<int>(effectiveApiLevel));
+ nsecs_t openTimeNs = systemTime();
+
sp<CLIENT> client = nullptr;
+ int facing = -1;
+ bool isNdk = (clientPackageName.size() == 0);
{
// Acquire mServiceLock and prevent other clients from connecting
std::unique_ptr<AutoConditionLock> lock =
@@ -1627,7 +1565,7 @@
cameraId.string(), clientName8.string(), clientPid);
}
- // Enforce client permissions and do basic sanity checks
+ // Enforce client permissions and do basic validity checks
if(!(ret = validateConnectLocked(cameraId, clientName8,
/*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
return ret;
@@ -1658,6 +1596,10 @@
return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
"Higher-priority client using camera, ID \"%s\" currently unavailable",
cameraId.string());
+ case -EUSERS:
+ return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
+ "Too many cameras already open, cannot open camera \"%s\"",
+ cameraId.string());
default:
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
"Unexpected error %s (%d) opening camera \"%s\"",
@@ -1674,7 +1616,6 @@
// give flashlight a chance to close devices if necessary.
mFlashlight->prepareDeviceOpen(cameraId);
- int facing = -1;
int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing);
if (facing == -1) {
ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.string());
@@ -1686,7 +1627,7 @@
if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
cameraId, api1CameraId, facing,
clientPid, clientUid, getpid(),
- halVersion, deviceVersion, effectiveApiLevel,
+ deviceVersion, effectiveApiLevel,
/*out*/&tmp)).isOk()) {
return ret;
}
@@ -1754,11 +1695,18 @@
// Otherwise, add client to active clients list
finishConnectLocked(client, partial);
}
+
+ client->setImageDumpMask(mImageDumpMask);
} // lock is destroyed, allow further connect calls
// Important: release the mutex here so the client can call back into the service from its
// destructor (can be at the end of the call)
device = client;
+
+ int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
+ CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+ effectiveApiLevel, isNdk, openLatencyMs);
+
return ret;
}
@@ -2916,14 +2864,6 @@
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
- int apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1;
- if (canCastToApiClient(API_2)) {
- apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2;
- }
- // Transition device state to OPEN
- sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
- mCameraIdStr, mCameraFacing, mClientPackageName, apiLevel);
-
sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
// Notify listeners of camera open/close status
@@ -2952,14 +2892,6 @@
// Transition to PRESENT if the camera is not in either of the rejected states
sCameraService->updateStatus(StatusInternal::PRESENT,
mCameraIdStr, rejected);
-
- int apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1;
- if (canCastToApiClient(API_2)) {
- apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2;
- }
- // Transition device state to CLOSED
- sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
- mCameraIdStr, mCameraFacing, mClientPackageName, apiLevel);
}
// Always stop watching, even if no camera op is active
if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
@@ -3728,9 +3660,14 @@
__FUNCTION__, cameraId.string());
return;
}
+
+ // Collect the logical cameras without holding mStatusLock in updateStatus
+ // as that can lead to a deadlock(b/162192331).
+ auto logicalCameraIds = getLogicalCameras(cameraId);
// Update the status for this camera state, then send the onStatusChangedCallbacks to each
// of the listeners with both the mStatusLock and mStatusListenerLock held
- state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3]
+ state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3,
+ &logicalCameraIds]
(const String8& cameraId, StatusInternal status) {
if (status != StatusInternal::ENUMERATING) {
@@ -3750,14 +3687,14 @@
}
Mutex::Autolock lock(mStatusListenerLock);
-
- notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId);
+ notifyPhysicalCameraStatusLocked(mapToInterface(status), String16(cameraId),
+ logicalCameraIds, deviceKind);
for (auto& listener : mListenerList) {
bool isVendorListener = listener->isVendorListener();
if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
listener->getListenerPid(), listener->getListenerUid()) ||
- (isVendorListener && !supportsHAL3)) {
+ (isVendorListener && !supportsHAL3)) {
ALOGV("Skipping discovery callback for system-only camera/HAL1 device %s",
cameraId.c_str());
continue;
@@ -3834,14 +3771,6 @@
onStatusUpdatedLocked(cameraId, status);
}
-void CameraService::updateProxyDeviceState(int newState,
- const String8& cameraId, int facing, const String16& clientName, int apiLevel) {
- sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return;
- String16 id(cameraId);
- proxyBinder->notifyCameraState(id, newState, facing, clientName, apiLevel);
-}
-
status_t CameraService::getTorchStatusLocked(
const String8& cameraId,
TorchModeStatus *status) const {
@@ -3869,7 +3798,9 @@
return OK;
}
-void CameraService::notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId) {
+std::list<String16> CameraService::getLogicalCameras(
+ const String8& physicalCameraId) {
+ std::list<String16> retList;
Mutex::Autolock lock(mCameraStatesLock);
for (const auto& state : mCameraStates) {
std::vector<std::string> physicalCameraIds;
@@ -3877,20 +3808,39 @@
// This is not a logical multi-camera.
continue;
}
- if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), cameraId.c_str())
+ if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
== physicalCameraIds.end()) {
// cameraId is not a physical camera of this logical multi-camera.
continue;
}
- String16 id16(state.first), physicalId16(cameraId);
+ retList.emplace_back(String16(state.first));
+ }
+ return retList;
+}
+
+void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
+ const String16& physicalCameraId, const std::list<String16>& logicalCameraIds,
+ SystemCameraKind deviceKind) {
+ // mStatusListenerLock is expected to be locked
+ for (const auto& logicalCameraId : logicalCameraIds) {
for (auto& listener : mListenerList) {
+ // Note: we check only the deviceKind of the physical camera id
+ // since, logical camera ids and their physical camera ids are
+ // guaranteed to have the same system camera kind.
+ if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
+ listener->getListenerPid(), listener->getListenerUid())) {
+ ALOGV("Skipping discovery callback for system-only camera device %s",
+ String8(physicalCameraId).c_str());
+ continue;
+ }
listener->getListener()->onPhysicalCameraStatusChanged(status,
- id16, physicalId16);
+ logicalCameraId, physicalCameraId);
}
}
}
+
void CameraService::blockClientsForUid(uid_t uid) {
const auto clients = mActiveClientManager.getAll();
for (auto& current : clients) {
@@ -3933,6 +3883,10 @@
return handleSetRotateAndCrop(args);
} else if (args.size() >= 1 && args[0] == String16("get-rotate-and-crop")) {
return handleGetRotateAndCrop(out);
+ } else if (args.size() >= 2 && args[0] == String16("set-image-dump-mask")) {
+ return handleSetImageDumpMask(args);
+ } else if (args.size() >= 1 && args[0] == String16("get-image-dump-mask")) {
+ return handleGetImageDumpMask(out);
} else if (args.size() == 1 && args[0] == String16("help")) {
printHelp(out);
return NO_ERROR;
@@ -4032,6 +3986,30 @@
return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
}
+status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
+ char *endPtr;
+ errno = 0;
+ String8 maskString8 = String8(args[1]);
+ long maskValue = strtol(maskString8.c_str(), &endPtr, 10);
+
+ if (errno != 0) return BAD_VALUE;
+ if (endPtr != maskString8.c_str() + maskString8.size()) return BAD_VALUE;
+ if (maskValue < 0 || maskValue > 1) return BAD_VALUE;
+
+ Mutex::Autolock lock(mServiceLock);
+
+ mImageDumpMask = maskValue;
+
+ return OK;
+}
+
+status_t CameraService::handleGetImageDumpMask(int out) {
+ Mutex::Autolock lock(mServiceLock);
+
+ return dprintf(out, "Image dump mask: %d\n", mImageDumpMask);
+}
+
+
status_t CameraService::printHelp(int out) {
return dprintf(out, "Camera service commands:\n"
" get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
@@ -4040,6 +4018,9 @@
" set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
" Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
" get-rotate-and-crop returns the current override rotate-and-crop value\n"
+ " set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
+ " Valid values 0=OFF, 1=ON for JPEG\n"
+ " get-image-dump-mask returns the current image-dump-mask value\n"
" help print this message\n");
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 590f5eb..43b03e6 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -20,7 +20,6 @@
#include <android/hardware/BnCameraService.h>
#include <android/hardware/BnSensorPrivacyListener.h>
#include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/ICameraServiceProxy.h>
#include <cutils/multiuser.h>
#include <utils/Vector.h>
@@ -49,6 +48,7 @@
#include <set>
#include <string>
+#include <list>
#include <map>
#include <memory>
#include <optional>
@@ -70,7 +70,6 @@
public virtual CameraProviderManager::StatusListener
{
friend class BinderService<CameraService>;
- friend class CameraClient;
friend class CameraOfflineSessionClient;
public:
class Client;
@@ -134,12 +133,6 @@
/*out*/
sp<hardware::ICamera>* device);
- virtual binder::Status connectLegacy(const sp<hardware::ICameraClient>& cameraClient,
- int32_t cameraId, int32_t halVersion,
- const String16& clientPackageName, int32_t clientUid,
- /*out*/
- sp<hardware::ICamera>* device);
-
virtual binder::Status connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
@@ -217,16 +210,6 @@
void loadSoundLocked(sound_kind kind);
void decreaseSoundRef();
void increaseSoundRef();
- /**
- * Update the state of a given camera device (open/close/active/idle) with
- * the camera proxy service in the system service
- */
- static void updateProxyDeviceState(
- int newState,
- const String8& cameraId,
- int facing,
- const String16& clientName,
- int apiLevel);
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
@@ -415,6 +398,8 @@
// Check what API level is used for this client. This is used to determine which
// superclass this can be cast to.
virtual bool canCastToApiClient(apiLevel level) const;
+
+ void setImageDumpMask(int /*mask*/) { }
protected:
// Initialized in constructor
@@ -728,7 +713,7 @@
// Single implementation shared between the various connect calls
template<class CALLBACK, class CLIENT>
binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
- int api1CameraId, int halVersion, const String16& clientPackageName,
+ int api1CameraId, const String16& clientPackageName,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, /*out*/sp<CLIENT>& device);
@@ -1006,7 +991,13 @@
hardware::camera::common::V1_0::TorchModeStatus status);
// notify physical camera status when the physical camera is public.
- void notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId);
+ // Expects mStatusListenerLock to be locked.
+ void notifyPhysicalCameraStatusLocked(int32_t status, const String16& physicalCameraId,
+ const std::list<String16>& logicalCameraIds, SystemCameraKind deviceKind);
+
+ // get list of logical cameras which are backed by physicalCameraId
+ std::list<String16> getLogicalCameras(const String8& physicalCameraId);
+
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
@@ -1047,6 +1038,12 @@
// Get the rotate-and-crop AUTO override behavior
status_t handleGetRotateAndCrop(int out);
+ // Set the mask for image dump to disk
+ status_t handleSetImageDumpMask(const Vector<String16>& args);
+
+ // Get the mask for image dump to disk
+ status_t handleGetImageDumpMask(int out);
+
// Prints the shell command help
status_t printHelp(int out);
@@ -1058,7 +1055,7 @@
static binder::Status makeClient(const sp<CameraService>& cameraService,
const sp<IInterface>& cameraCb, const String16& packageName,
const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
- int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
+ int facing, int clientPid, uid_t clientUid, int servicePid,
int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client);
@@ -1069,13 +1066,6 @@
static StatusInternal mapToInternal(hardware::camera::common::V1_0::CameraDeviceStatus status);
static int32_t mapToInterface(StatusInternal status);
- // Guard mCameraServiceProxy
- static Mutex sProxyMutex;
- // Cached interface to the camera service proxy in system service
- static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
-
- static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
- static void pingCameraServiceProxy();
void broadcastTorchModeStatus(const String8& cameraId,
hardware::camera::common::V1_0::TorchModeStatus status);
@@ -1095,6 +1085,9 @@
// Current override rotate-and-crop mode
uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+
+ // Current image dump mask
+ uint8_t mImageDumpMask = 0;
};
} // namespace android
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index 6fdac68..ca6cc58 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -1,7 +1,12 @@
{
"presubmit": [
{
- "name": "cameraservice_test"
+ "name": "cameraservice_test"
+ }
+ ],
+ "imports": [
+ {
+ "path": "frameworks/av/camera"
}
]
}
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index e01e86d..662b58f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -34,6 +34,7 @@
#include "api1/client2/CallbackProcessor.h"
#include "api1/client2/ZslProcessor.h"
#include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
@@ -396,6 +397,7 @@
binder::Status Camera2Client::disconnect() {
ATRACE_CALL();
+ nsecs_t startTime = systemTime();
Mutex::Autolock icl(mBinderSerializationLock);
binder::Status res = binder::Status::ok();
@@ -457,6 +459,9 @@
CameraService::Client::disconnect();
+ int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
+ CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+
return res;
}
@@ -1777,6 +1782,14 @@
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
__FUNCTION__, errorCode, resultExtras.requestId);
+
+ if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
+ (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ mLatestFailedRequestId = resultExtras.requestId;
+ mLatestRequestSignal.signal();
+ }
mCaptureSequencer->notifyError(errorCode, resultExtras);
return;
default:
@@ -2303,7 +2316,7 @@
status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
Mutex::Autolock l(mLatestRequestMutex);
- while (mLatestRequestId != requestId) {
+ while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
nsecs_t startTime = systemTime();
auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -2312,7 +2325,7 @@
timeout -= (systemTime() - startTime);
}
- return OK;
+ return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
}
void Camera2Client::notifyRequestId(int32_t requestId) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index c5f0428..f8da0b6 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -237,6 +237,7 @@
mutable Mutex mLatestRequestMutex;
Condition mLatestRequestSignal;
int32_t mLatestRequestId = -1;
+ int32_t mLatestFailedRequestId = -1;
status_t waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout);
status_t waitUntilCurrentRequestIdLocked();
};
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
deleted file mode 100644
index b860ceb..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ /dev/null
@@ -1,1208 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "CameraClient"
-//#define LOG_NDEBUG 0
-
-#include <cutils/atomic.h>
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <media/hardware/HardwareAPI.h>
-
-#include "api1/CameraClient.h"
-#include "device1/CameraHardwareInterface.h"
-#include "CameraService.h"
-#include "utils/CameraThreadState.h"
-
-namespace android {
-
-#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
-#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
-
-CameraClient::CameraClient(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
- int cameraId, int cameraFacing,
- int clientPid, int clientUid,
- int servicePid):
- Client(cameraService, cameraClient, clientPackageName, clientFeatureId,
- String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
- clientUid, servicePid)
-{
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
-
- mHardware = NULL;
- mMsgEnabled = 0;
- mSurface = 0;
- mPreviewWindow = 0;
- mDestructionStarted = false;
-
- // Callback is disabled by default
- mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
- mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
- mPlayShutterSound = true;
- LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
-}
-
-status_t CameraClient::initialize(sp<CameraProviderManager> manager,
- const String8& /*monitorTags*/) {
- int callingPid = CameraThreadState::getCallingPid();
- status_t res;
-
- LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
-
- // Verify ops permissions
- res = startCameraOps();
- if (res != OK) {
- return res;
- }
-
- char camera_device_name[10];
- snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
-
- mHardware = new CameraHardwareInterface(camera_device_name);
- res = mHardware->initialize(manager);
- if (res != OK) {
- ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- mHardware.clear();
- return res;
- }
-
- mHardware->setCallbacks(notifyCallback,
- dataCallback,
- dataCallbackTimestamp,
- handleCallbackTimestampBatch,
- (void *)(uintptr_t)mCameraId);
-
- // Enable zoom, error, focus, and metadata messages by default
- enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
- CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
-
- LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
- return OK;
-}
-
-
-// tear down the client
-CameraClient::~CameraClient() {
- mDestructionStarted = true;
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this);
-
- disconnect();
- LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this);
-}
-
-status_t CameraClient::dump(int fd, const Vector<String16>& args) {
- return BasicClient::dump(fd, args);
-}
-
-status_t CameraClient::dumpClient(int fd, const Vector<String16>& args) {
- const size_t SIZE = 256;
- char buffer[SIZE];
-
- size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n",
- mCameraId,
- (getRemoteCallback() != NULL ?
- IInterface::asBinder(getRemoteCallback()).get() : NULL),
- mClientUid);
- len = (len > SIZE - 1) ? SIZE - 1 : len;
- write(fd, buffer, len);
-
- len = snprintf(buffer, SIZE, "Latest set parameters:\n");
- len = (len > SIZE - 1) ? SIZE - 1 : len;
- write(fd, buffer, len);
-
- mLatestSetParameters.dump(fd, args);
-
- const char *enddump = "\n\n";
- write(fd, enddump, strlen(enddump));
-
- sp<CameraHardwareInterface> hardware = mHardware;
- if (hardware != nullptr) {
- return hardware->dump(fd, args);
- }
- ALOGI("%s: camera device closed already, skip dumping", __FUNCTION__);
- return OK;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::checkPid() const {
- int callingPid = CameraThreadState::getCallingPid();
- if (callingPid == mClientPid) return NO_ERROR;
-
- ALOGW("attempt to use a locked camera from a different process"
- " (old pid %d, new pid %d)", mClientPid, callingPid);
- return EBUSY;
-}
-
-status_t CameraClient::checkPidAndHardware() const {
- if (mHardware == 0) {
- ALOGE("attempt to use a camera after disconnect() (pid %d)",
- CameraThreadState::getCallingPid());
- return INVALID_OPERATION;
- }
- status_t result = checkPid();
- if (result != NO_ERROR) return result;
- return NO_ERROR;
-}
-
-status_t CameraClient::lock() {
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("lock (pid %d)", callingPid);
- Mutex::Autolock lock(mLock);
-
- // lock camera to this client if the the camera is unlocked
- if (mClientPid == 0) {
- mClientPid = callingPid;
- return NO_ERROR;
- }
-
- // returns NO_ERROR if the client already owns the camera, EBUSY otherwise
- return checkPid();
-}
-
-status_t CameraClient::unlock() {
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("unlock (pid %d)", callingPid);
- Mutex::Autolock lock(mLock);
-
- // allow anyone to use camera (after they lock the camera)
- status_t result = checkPid();
- if (result == NO_ERROR) {
- if (mHardware->recordingEnabled()) {
- ALOGE("Not allowed to unlock camera during recording.");
- return INVALID_OPERATION;
- }
- mClientPid = 0;
- LOG1("clear mRemoteCallback (pid %d)", callingPid);
- // we need to remove the reference to ICameraClient so that when the app
- // goes away, the reference count goes to 0.
- mRemoteCallback.clear();
- }
- return result;
-}
-
-// connect a new client to the camera
-status_t CameraClient::connect(const sp<hardware::ICameraClient>& client) {
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("connect E (pid %d)", callingPid);
- Mutex::Autolock lock(mLock);
-
- if (mClientPid != 0 && checkPid() != NO_ERROR) {
- ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)",
- mClientPid, callingPid);
- return EBUSY;
- }
-
- if (mRemoteCallback != 0 &&
- (IInterface::asBinder(client) == IInterface::asBinder(mRemoteCallback))) {
- LOG1("Connect to the same client");
- return NO_ERROR;
- }
-
- mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
- mClientPid = callingPid;
- mRemoteCallback = client;
-
- LOG1("connect X (pid %d)", callingPid);
- return NO_ERROR;
-}
-
-static void disconnectWindow(const sp<ANativeWindow>& window) {
- if (window != 0) {
- status_t result = native_window_api_disconnect(window.get(),
- NATIVE_WINDOW_API_CAMERA);
- if (result != NO_ERROR) {
- ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result),
- result);
- }
- }
-}
-
-binder::Status CameraClient::disconnect() {
- int callingPid = CameraThreadState::getCallingPid();
- LOG1("disconnect E (pid %d)", callingPid);
- Mutex::Autolock lock(mLock);
-
- binder::Status res = binder::Status::ok();
- // Allow both client and the cameraserver to disconnect at all times
- if (callingPid != mClientPid && callingPid != mServicePid) {
- ALOGW("different client - don't disconnect");
- return res;
- }
-
- // Make sure disconnect() is done once and once only, whether it is called
- // from the user directly, or called by the destructor.
- if (mHardware == 0) return res;
-
- LOG1("hardware teardown");
- // Before destroying mHardware, we must make sure it's in the
- // idle state.
- // Turn off all messages.
- disableMsgType(CAMERA_MSG_ALL_MSGS);
- mHardware->stopPreview();
- sCameraService->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
- mCameraIdStr, mCameraFacing, mClientPackageName,
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
- mHardware->cancelPicture();
- // Release the hardware resources.
- mHardware->release();
-
- // Release the held ANativeWindow resources.
- if (mPreviewWindow != 0) {
- disconnectWindow(mPreviewWindow);
- mPreviewWindow = 0;
- mHardware->setPreviewWindow(mPreviewWindow);
- }
- mHardware.clear();
-
- CameraService::Client::disconnect();
-
- LOG1("disconnect X (pid %d)", callingPid);
-
- return res;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
- const sp<ANativeWindow>& window) {
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- // return if no change in surface.
- if (binder == mSurface) {
- return NO_ERROR;
- }
-
- if (window != 0) {
- result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
- if (result != NO_ERROR) {
- ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
- result);
- return result;
- }
- }
-
- // If preview has been already started, register preview buffers now.
- if (mHardware->previewEnabled()) {
- if (window != 0) {
- mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- mHardware->setPreviewTransform(mOrientation);
- result = mHardware->setPreviewWindow(window);
- }
- }
-
- if (result == NO_ERROR) {
- // Everything has succeeded. Disconnect the old window and remember the
- // new window.
- disconnectWindow(mPreviewWindow);
- mSurface = binder;
- mPreviewWindow = window;
- } else {
- // Something went wrong after we connected to the new window, so
- // disconnect here.
- disconnectWindow(window);
- }
-
- return result;
-}
-
-// set the buffer consumer that the preview will use
-status_t CameraClient::setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) {
- LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
- CameraThreadState::getCallingPid());
-
- sp<IBinder> binder;
- sp<ANativeWindow> window;
- if (bufferProducer != 0) {
- binder = IInterface::asBinder(bufferProducer);
- // Using controlledByApp flag to ensure that the buffer queue remains in
- // async mode for the old camera API, where many applications depend
- // on that behavior.
- window = new Surface(bufferProducer, /*controlledByApp*/ true);
- }
- return setPreviewWindow(binder, window);
-}
-
-// set the preview callback flag to affect how the received frames from
-// preview are handled.
-void CameraClient::setPreviewCallbackFlag(int callback_flag) {
- LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, CameraThreadState::getCallingPid());
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return;
-
- mPreviewCallbackFlag = callback_flag;
- if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
- enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- } else {
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
-}
-
-status_t CameraClient::setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) {
- (void)callbackProducer;
- ALOGE("%s: Unimplemented!", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-// start preview mode
-status_t CameraClient::startPreview() {
- LOG1("startPreview (pid %d)", CameraThreadState::getCallingPid());
- return startCameraMode(CAMERA_PREVIEW_MODE);
-}
-
-// start recording mode
-status_t CameraClient::startRecording() {
- LOG1("startRecording (pid %d)", CameraThreadState::getCallingPid());
- return startCameraMode(CAMERA_RECORDING_MODE);
-}
-
-// start preview or recording
-status_t CameraClient::startCameraMode(camera_mode mode) {
- LOG1("startCameraMode(%d)", mode);
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- switch(mode) {
- case CAMERA_PREVIEW_MODE:
- if (mSurface == 0 && mPreviewWindow == 0) {
- LOG1("mSurface is not set yet.");
- // still able to start preview in this case.
- }
- return startPreviewMode();
- case CAMERA_RECORDING_MODE:
- if (mSurface == 0 && mPreviewWindow == 0) {
- ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
- return INVALID_OPERATION;
- }
- return startRecordingMode();
- default:
- return UNKNOWN_ERROR;
- }
-}
-
-status_t CameraClient::startPreviewMode() {
- LOG1("startPreviewMode");
- status_t result = NO_ERROR;
-
- // if preview has been enabled, nothing needs to be done
- if (mHardware->previewEnabled()) {
- return NO_ERROR;
- }
-
- if (mPreviewWindow != 0) {
- mHardware->setPreviewScalingMode(
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- mHardware->setPreviewTransform(mOrientation);
- }
- mHardware->setPreviewWindow(mPreviewWindow);
- result = mHardware->startPreview();
- if (result == NO_ERROR) {
- sCameraService->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE,
- mCameraIdStr, mCameraFacing, mClientPackageName,
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
- }
- return result;
-}
-
-status_t CameraClient::startRecordingMode() {
- LOG1("startRecordingMode");
- status_t result = NO_ERROR;
-
- // if recording has been enabled, nothing needs to be done
- if (mHardware->recordingEnabled()) {
- return NO_ERROR;
- }
-
- // if preview has not been started, start preview first
- if (!mHardware->previewEnabled()) {
- result = startPreviewMode();
- if (result != NO_ERROR) {
- return result;
- }
- }
-
- // start recording mode
- enableMsgType(CAMERA_MSG_VIDEO_FRAME);
- sCameraService->playSound(CameraService::SOUND_RECORDING_START);
- result = mHardware->startRecording();
- if (result != NO_ERROR) {
- ALOGE("mHardware->startRecording() failed with status %d", result);
- }
- return result;
-}
-
-// stop preview mode
-void CameraClient::stopPreview() {
- LOG1("stopPreview (pid %d)", CameraThreadState::getCallingPid());
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return;
-
-
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- mHardware->stopPreview();
- sCameraService->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
- mCameraIdStr, mCameraFacing, mClientPackageName,
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
- mPreviewBuffer.clear();
-}
-
-// stop recording mode
-void CameraClient::stopRecording() {
- LOG1("stopRecording (pid %d)", CameraThreadState::getCallingPid());
- {
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return;
-
- disableMsgType(CAMERA_MSG_VIDEO_FRAME);
- mHardware->stopRecording();
- sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
-
- mPreviewBuffer.clear();
- }
-
- {
- Mutex::Autolock l(mAvailableCallbackBuffersLock);
- if (!mAvailableCallbackBuffers.empty()) {
- mAvailableCallbackBuffers.clear();
- }
- }
-}
-
-// release a recording frame
-void CameraClient::releaseRecordingFrame(const sp<IMemory>& mem) {
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return;
- if (mem == nullptr) {
- android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26164272",
- CameraThreadState::getCallingUid(), nullptr, 0);
- return;
- }
-
- mHardware->releaseRecordingFrame(mem);
-}
-
-void CameraClient::releaseRecordingFrameHandle(native_handle_t *handle) {
- if (handle == nullptr) return;
- Mutex::Autolock lock(mLock);
- sp<IMemory> dataPtr;
- {
- Mutex::Autolock l(mAvailableCallbackBuffersLock);
- if (!mAvailableCallbackBuffers.empty()) {
- dataPtr = mAvailableCallbackBuffers.back();
- mAvailableCallbackBuffers.pop_back();
- }
- }
-
- if (dataPtr == nullptr) {
- ALOGE("%s: %d: No callback buffer available. Dropping a native handle.", __FUNCTION__,
- __LINE__);
- native_handle_close(handle);
- native_handle_delete(handle);
- return;
- } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
- ALOGE("%s: %d: Callback buffer size doesn't match VideoNativeHandleMetadata", __FUNCTION__,
- __LINE__);
- native_handle_close(handle);
- native_handle_delete(handle);
- return;
- }
-
- if (mHardware != nullptr) {
- VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
- metadata->eType = kMetadataBufferTypeNativeHandleSource;
- metadata->pHandle = handle;
- mHardware->releaseRecordingFrame(dataPtr);
- }
-}
-
-void CameraClient::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
- Mutex::Autolock lock(mLock);
- bool disconnected = (mHardware == nullptr);
- size_t n = handles.size();
- std::vector<sp<IMemory>> frames;
- if (!disconnected) {
- frames.reserve(n);
- }
- bool error = false;
- for (auto& handle : handles) {
- sp<IMemory> dataPtr;
- {
- Mutex::Autolock l(mAvailableCallbackBuffersLock);
- if (!mAvailableCallbackBuffers.empty()) {
- dataPtr = mAvailableCallbackBuffers.back();
- mAvailableCallbackBuffers.pop_back();
- }
- }
-
- if (dataPtr == nullptr) {
- ALOGE("%s: %d: No callback buffer available. Dropping frames.", __FUNCTION__,
- __LINE__);
- error = true;
- break;
- } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
- ALOGE("%s: %d: Callback buffer must be VideoNativeHandleMetadata", __FUNCTION__,
- __LINE__);
- error = true;
- break;
- }
-
- if (!disconnected) {
- VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
- metadata->eType = kMetadataBufferTypeNativeHandleSource;
- metadata->pHandle = handle;
- frames.push_back(dataPtr);
- }
- }
-
- if (error) {
- for (auto& handle : handles) {
- native_handle_close(handle);
- native_handle_delete(handle);
- }
- } else if (!disconnected) {
- mHardware->releaseRecordingFrameBatch(frames);
- }
- return;
-}
-
-status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
- LOG1("setVideoBufferMode: %d", videoBufferMode);
- bool enableMetadataInBuffers = false;
-
- if (videoBufferMode == VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA) {
- enableMetadataInBuffers = true;
- } else if (videoBufferMode != VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
- ALOGE("%s: %d: videoBufferMode %d is not supported.", __FUNCTION__, __LINE__,
- videoBufferMode);
- return BAD_VALUE;
- }
-
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) {
- return UNKNOWN_ERROR;
- }
-
- return mHardware->storeMetaDataInBuffers(enableMetadataInBuffers);
-}
-
-bool CameraClient::previewEnabled() {
- LOG1("previewEnabled (pid %d)", CameraThreadState::getCallingPid());
-
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return false;
- return mHardware->previewEnabled();
-}
-
-bool CameraClient::recordingEnabled() {
- LOG1("recordingEnabled (pid %d)", CameraThreadState::getCallingPid());
-
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) return false;
- return mHardware->recordingEnabled();
-}
-
-status_t CameraClient::autoFocus() {
- LOG1("autoFocus (pid %d)", CameraThreadState::getCallingPid());
-
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- return mHardware->autoFocus();
-}
-
-status_t CameraClient::cancelAutoFocus() {
- LOG1("cancelAutoFocus (pid %d)", CameraThreadState::getCallingPid());
-
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- return mHardware->cancelAutoFocus();
-}
-
-// take a picture - image is returned in callback
-status_t CameraClient::takePicture(int msgType) {
- LOG1("takePicture (pid %d): 0x%x", CameraThreadState::getCallingPid(), msgType);
-
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
- (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
- ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
- " cannot be both enabled");
- return BAD_VALUE;
- }
-
- // We only accept picture related message types
- // and ignore other types of messages for takePicture().
- int picMsgType = msgType
- & (CAMERA_MSG_SHUTTER |
- CAMERA_MSG_POSTVIEW_FRAME |
- CAMERA_MSG_RAW_IMAGE |
- CAMERA_MSG_RAW_IMAGE_NOTIFY |
- CAMERA_MSG_COMPRESSED_IMAGE);
-
- enableMsgType(picMsgType);
-
- return mHardware->takePicture();
-}
-
-// set preview/capture parameters - key/value pairs
-status_t CameraClient::setParameters(const String8& params) {
- LOG1("setParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- mLatestSetParameters = CameraParameters(params);
- CameraParameters p(params);
- return mHardware->setParameters(p);
-}
-
-// get preview/capture parameters - key/value pairs
-String8 CameraClient::getParameters() const {
- Mutex::Autolock lock(mLock);
- // The camera service can unconditionally get the parameters at all times
- if (CameraThreadState::getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) {
- return String8();
- }
-
- String8 params(mHardware->getParameters().flatten());
- LOG1("getParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
- return params;
-}
-
-// enable shutter sound
-status_t CameraClient::enableShutterSound(bool enable) {
- LOG1("enableShutterSound (pid %d)", CameraThreadState::getCallingPid());
-
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- if (enable) {
- mPlayShutterSound = true;
- return OK;
- }
-
- mPlayShutterSound = false;
- return OK;
-}
-
-status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
- LOG1("sendCommand (pid %d)", CameraThreadState::getCallingPid());
- int orientation;
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
- // Mirror the preview if the camera is front-facing.
- orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
- if (orientation == -1) return BAD_VALUE;
-
- if (mOrientation != orientation) {
- mOrientation = orientation;
- if (mPreviewWindow != 0) {
- mHardware->setPreviewTransform(mOrientation);
- }
- }
- return OK;
- } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
- switch (arg1) {
- case 0:
- return enableShutterSound(false);
- case 1:
- return enableShutterSound(true);
- default:
- return BAD_VALUE;
- }
- return OK;
- } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
- sCameraService->playSound(CameraService::SOUND_RECORDING_START);
- } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
- // Silently ignore this command
- return INVALID_OPERATION;
- } else if (cmd == CAMERA_CMD_PING) {
- // If mHardware is 0, checkPidAndHardware will return error.
- return OK;
- }
-
- return mHardware->sendCommand(cmd, arg1, arg2);
-}
-
-// ----------------------------------------------------------------------------
-
-void CameraClient::enableMsgType(int32_t msgType) {
- android_atomic_or(msgType, &mMsgEnabled);
- mHardware->enableMsgType(msgType);
-}
-
-void CameraClient::disableMsgType(int32_t msgType) {
- android_atomic_and(~msgType, &mMsgEnabled);
- mHardware->disableMsgType(msgType);
-}
-
-#define CHECK_MESSAGE_INTERVAL 10 // 10ms
-bool CameraClient::lockIfMessageWanted(int32_t msgType) {
- int sleepCount = 0;
- while (mMsgEnabled & msgType) {
- if (mLock.tryLock() == NO_ERROR) {
- if (sleepCount > 0) {
- LOG1("lockIfMessageWanted(%d): waited for %d ms",
- msgType, sleepCount * CHECK_MESSAGE_INTERVAL);
- }
-
- // If messages are no longer enabled after acquiring lock, release and drop message
- if ((mMsgEnabled & msgType) == 0) {
- mLock.unlock();
- break;
- }
-
- return true;
- }
- if (sleepCount++ == 0) {
- LOG1("lockIfMessageWanted(%d): enter sleep", msgType);
- }
- usleep(CHECK_MESSAGE_INTERVAL * 1000);
- }
- ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType);
- return false;
-}
-
-sp<CameraClient> CameraClient::getClientFromCookie(void* user) {
- String8 cameraId = String8::format("%d", (int)(intptr_t) user);
- auto clientDescriptor = sCameraService->mActiveClientManager.get(cameraId);
- if (clientDescriptor != nullptr) {
- return sp<CameraClient>{
- static_cast<CameraClient*>(clientDescriptor->getValue().get())};
- }
- return sp<CameraClient>{nullptr};
-}
-
-// Callback messages can be dispatched to internal handlers or pass to our
-// client's callback functions, depending on the message type.
-//
-// notifyCallback:
-// CAMERA_MSG_SHUTTER handleShutter
-// (others) c->notifyCallback
-// dataCallback:
-// CAMERA_MSG_PREVIEW_FRAME handlePreviewData
-// CAMERA_MSG_POSTVIEW_FRAME handlePostview
-// CAMERA_MSG_RAW_IMAGE handleRawPicture
-// CAMERA_MSG_COMPRESSED_IMAGE handleCompressedPicture
-// (others) c->dataCallback
-// dataCallbackTimestamp
-// (others) c->dataCallbackTimestamp
-
-void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
- int32_t ext2, void* user) {
- LOG2("notifyCallback(%d)", msgType);
-
- sp<CameraClient> client = getClientFromCookie(user);
- if (client.get() == nullptr) return;
-
- if (!client->lockIfMessageWanted(msgType)) return;
-
- switch (msgType) {
- case CAMERA_MSG_SHUTTER:
- // ext1 is the dimension of the yuv picture.
- client->handleShutter();
- break;
- default:
- client->handleGenericNotify(msgType, ext1, ext2);
- break;
- }
-}
-
-void CameraClient::dataCallback(int32_t msgType,
- const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
- LOG2("dataCallback(%d)", msgType);
-
- sp<CameraClient> client = getClientFromCookie(user);
- if (client.get() == nullptr) return;
-
- if (!client->lockIfMessageWanted(msgType)) return;
- if (dataPtr == 0 && metadata == NULL) {
- ALOGE("Null data returned in data callback");
- client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
- return;
- }
-
- switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
- case CAMERA_MSG_PREVIEW_FRAME:
- client->handlePreviewData(msgType, dataPtr, metadata);
- break;
- case CAMERA_MSG_POSTVIEW_FRAME:
- client->handlePostview(dataPtr);
- break;
- case CAMERA_MSG_RAW_IMAGE:
- client->handleRawPicture(dataPtr);
- break;
- case CAMERA_MSG_COMPRESSED_IMAGE:
- client->handleCompressedPicture(dataPtr);
- break;
- default:
- client->handleGenericData(msgType, dataPtr, metadata);
- break;
- }
-}
-
-void CameraClient::dataCallbackTimestamp(nsecs_t timestamp,
- int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
- LOG2("dataCallbackTimestamp(%d)", msgType);
-
- sp<CameraClient> client = getClientFromCookie(user);
- if (client.get() == nullptr) return;
-
- if (!client->lockIfMessageWanted(msgType)) return;
-
- if (dataPtr == 0) {
- ALOGE("Null data returned in data with timestamp callback");
- client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
- return;
- }
-
- client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
-}
-
-void CameraClient::handleCallbackTimestampBatch(
- int32_t msgType, const std::vector<HandleTimestampMessage>& msgs, void* user) {
- LOG2("dataCallbackTimestampBatch");
- sp<CameraClient> client = getClientFromCookie(user);
- if (client.get() == nullptr) return;
- if (!client->lockIfMessageWanted(msgType)) return;
-
- sp<hardware::ICameraClient> c = client->mRemoteCallback;
- client->mLock.unlock();
- if (c != 0 && msgs.size() > 0) {
- size_t n = msgs.size();
- std::vector<nsecs_t> timestamps;
- std::vector<native_handle_t*> handles;
- timestamps.reserve(n);
- handles.reserve(n);
- for (auto& msg : msgs) {
- native_handle_t* handle = nullptr;
- if (msg.dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
- ALOGE("%s: dataPtr does not contain VideoNativeHandleMetadata!", __FUNCTION__);
- return;
- }
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(msg.dataPtr->unsecurePointer());
- if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
- handle = metadata->pHandle;
- }
-
- if (handle == nullptr) {
- ALOGE("%s: VideoNativeHandleMetadata type mismatch or null handle passed!",
- __FUNCTION__);
- return;
- }
- {
- Mutex::Autolock l(client->mAvailableCallbackBuffersLock);
- client->mAvailableCallbackBuffers.push_back(msg.dataPtr);
- }
- timestamps.push_back(msg.timestamp);
- handles.push_back(handle);
- }
- c->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
- }
-}
-
-// snapshot taken callback
-void CameraClient::handleShutter(void) {
- if (mPlayShutterSound) {
- sCameraService->playSound(CameraService::SOUND_SHUTTER);
- }
-
- sp<hardware::ICameraClient> c = mRemoteCallback;
- if (c != 0) {
- mLock.unlock();
- c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0);
- if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return;
- }
- disableMsgType(CAMERA_MSG_SHUTTER);
-
- // Shutters only happen in response to takePicture, so mark device as
- // idle now, until preview is restarted
- sCameraService->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
- mCameraIdStr, mCameraFacing, mClientPackageName,
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-
- mLock.unlock();
-}
-
-// preview callback - frame buffer update
-void CameraClient::handlePreviewData(int32_t msgType,
- const sp<IMemory>& mem,
- camera_frame_metadata_t *metadata) {
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
- // local copy of the callback flags
- int flags = mPreviewCallbackFlag;
-
- // is callback enabled?
- if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
- // If the enable bit is off, the copy-out and one-shot bits are ignored
- LOG2("frame callback is disabled");
- mLock.unlock();
- return;
- }
-
- // hold a strong pointer to the client
- sp<hardware::ICameraClient> c = mRemoteCallback;
-
- // clear callback flags if no client or one-shot mode
- if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
- LOG2("Disable preview callback");
- mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
- CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
- CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
-
- if (c != 0) {
- // Is the received frame copied out or not?
- if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
- LOG2("frame is copied");
- copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
- } else {
- LOG2("frame is forwarded");
- mLock.unlock();
- c->dataCallback(msgType, mem, metadata);
- }
- } else {
- mLock.unlock();
- }
-}
-
-// picture callback - postview image ready
-void CameraClient::handlePostview(const sp<IMemory>& mem) {
- disableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
-
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0) {
- c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL);
- }
-}
-
-// picture callback - raw image ready
-void CameraClient::handleRawPicture(const sp<IMemory>& mem) {
- disableMsgType(CAMERA_MSG_RAW_IMAGE);
-
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0) {
- c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL);
- }
-}
-
-// picture callback - compressed picture ready
-void CameraClient::handleCompressedPicture(const sp<IMemory>& mem) {
- disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
-
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0) {
- c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL);
- }
-}
-
-
-void CameraClient::handleGenericNotify(int32_t msgType,
- int32_t ext1, int32_t ext2) {
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0) {
- c->notifyCallback(msgType, ext1, ext2);
- }
-}
-
-void CameraClient::handleGenericData(int32_t msgType,
- const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) {
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0) {
- c->dataCallback(msgType, dataPtr, metadata);
- }
-}
-
-void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp,
- int32_t msgType, const sp<IMemory>& dataPtr) {
- sp<hardware::ICameraClient> c = mRemoteCallback;
- mLock.unlock();
- if (c != 0 && dataPtr != nullptr) {
- native_handle_t* handle = nullptr;
-
- // Check if dataPtr contains a VideoNativeHandleMetadata.
- if (dataPtr->size() == sizeof(VideoNativeHandleMetadata)) {
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
- if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
- handle = metadata->pHandle;
- }
- }
-
- // If dataPtr contains a native handle, send it via recordingFrameHandleCallbackTimestamp.
- if (handle != nullptr) {
- {
- Mutex::Autolock l(mAvailableCallbackBuffersLock);
- mAvailableCallbackBuffers.push_back(dataPtr);
- }
- c->recordingFrameHandleCallbackTimestamp(timestamp, handle);
- } else {
- c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
- }
- }
-}
-
-void CameraClient::copyFrameAndPostCopiedFrame(
- int32_t msgType, const sp<hardware::ICameraClient>& client,
- const sp<IMemoryHeap>& heap, size_t offset, size_t size,
- camera_frame_metadata_t *metadata) {
- LOG2("copyFrameAndPostCopiedFrame");
- // It is necessary to copy out of pmem before sending this to
- // the callback. For efficiency, reuse the same MemoryHeapBase
- // provided it's big enough. Don't allocate the memory or
- // perform the copy if there's no callback.
- // hold the preview lock while we grab a reference to the preview buffer
- sp<MemoryHeapBase> previewBuffer;
-
- if (mPreviewBuffer == 0) {
- mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
- } else if (size > mPreviewBuffer->virtualSize()) {
- mPreviewBuffer.clear();
- mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
- }
- if (mPreviewBuffer == 0) {
- ALOGE("failed to allocate space for preview buffer");
- mLock.unlock();
- return;
- }
- previewBuffer = mPreviewBuffer;
-
- void* previewBufferBase = previewBuffer->base();
- void* heapBase = heap->base();
-
- if (heapBase == MAP_FAILED) {
- ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
- mLock.unlock();
- return;
- } else if (previewBufferBase == MAP_FAILED) {
- ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
- mLock.unlock();
- return;
- }
-
- memcpy(previewBufferBase, (uint8_t *) heapBase + offset, size);
-
- sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size);
- if (frame == 0) {
- ALOGE("failed to allocate space for frame callback");
- mLock.unlock();
- return;
- }
-
- mLock.unlock();
- client->dataCallback(msgType, frame, metadata);
-}
-
-int CameraClient::getOrientation(int degrees, bool mirror) {
- if (!mirror) {
- if (degrees == 0) return 0;
- else if (degrees == 90) return HAL_TRANSFORM_ROT_90;
- else if (degrees == 180) return HAL_TRANSFORM_ROT_180;
- else if (degrees == 270) return HAL_TRANSFORM_ROT_270;
- } else { // Do mirror (horizontal flip)
- if (degrees == 0) { // FLIP_H and ROT_0
- return HAL_TRANSFORM_FLIP_H;
- } else if (degrees == 90) { // FLIP_H and ROT_90
- return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
- } else if (degrees == 180) { // FLIP_H and ROT_180
- return HAL_TRANSFORM_FLIP_V;
- } else if (degrees == 270) { // FLIP_H and ROT_270
- return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
- }
- }
- ALOGE("Invalid setDisplayOrientation degrees=%d", degrees);
- return -1;
-}
-
-status_t CameraClient::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
- (void)bufferProducer;
- ALOGE("%s: %d: CameraClient doesn't support setting a video target.", __FUNCTION__, __LINE__);
- return INVALID_OPERATION;
-}
-
-status_t CameraClient::setAudioRestriction(int mode) {
- if (!isValidAudioRestriction(mode)) {
- ALOGE("%s: invalid audio restriction mode %d", __FUNCTION__, mode);
- return BAD_VALUE;
- }
-
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) {
- return INVALID_OPERATION;
- }
- return BasicClient::setAudioRestriction(mode);
-}
-
-int32_t CameraClient::getGlobalAudioRestriction() {
- Mutex::Autolock lock(mLock);
- if (checkPidAndHardware() != NO_ERROR) {
- return INVALID_OPERATION;
- }
- return BasicClient::getServiceAudioRestriction();
-}
-
-// API1->Device1 does not support this feature
-status_t CameraClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
- return OK;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
deleted file mode 100644
index aacb00e..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-
-#include "CameraService.h"
-
-namespace android {
-
-class MemoryHeapBase;
-class CameraHardwareInterface;
-
-/**
- * Interface between android.hardware.Camera API and Camera HAL device for version
- * CAMERA_DEVICE_API_VERSION_1_0.
- */
-
-class CameraClient : public CameraService::Client
-{
-public:
- // ICamera interface (see ICamera for details)
- virtual binder::Status disconnect();
- virtual status_t connect(const sp<hardware::ICameraClient>& client);
- virtual status_t lock();
- virtual status_t unlock();
- virtual status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
- virtual void setPreviewCallbackFlag(int flag);
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer);
- virtual status_t startPreview();
- virtual void stopPreview();
- virtual bool previewEnabled();
- virtual status_t setVideoBufferMode(int32_t videoBufferMode);
- virtual status_t startRecording();
- virtual void stopRecording();
- virtual bool recordingEnabled();
- virtual void releaseRecordingFrame(const sp<IMemory>& mem);
- virtual void releaseRecordingFrameHandle(native_handle_t *handle);
- virtual void releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles);
- virtual status_t autoFocus();
- virtual status_t cancelAutoFocus();
- virtual status_t takePicture(int msgType);
- virtual status_t setParameters(const String8& params);
- virtual String8 getParameters() const;
- virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
- virtual status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
- virtual status_t setAudioRestriction(int mode);
- virtual int32_t getGlobalAudioRestriction();
-
- virtual status_t setRotateAndCropOverride(uint8_t override);
-
- // Interface used by CameraService
- CameraClient(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- const String16& clientPackageName,
- const std::optional<String16>& clientFeatureId,
- int cameraId,
- int cameraFacing,
- int clientPid,
- int clientUid,
- int servicePid);
- ~CameraClient();
-
- virtual status_t initialize(sp<CameraProviderManager> manager,
- const String8& monitorTags) override;
-
- virtual status_t dump(int fd, const Vector<String16>& args);
-
- virtual status_t dumpClient(int fd, const Vector<String16>& args);
-
-private:
-
- // check whether the calling process matches mClientPid.
- status_t checkPid() const;
- status_t checkPidAndHardware() const; // also check mHardware != 0
-
- // these are internal functions used to set up preview buffers
- status_t registerPreviewBuffers();
-
- // camera operation mode
- enum camera_mode {
- CAMERA_PREVIEW_MODE = 0, // frame automatically released
- CAMERA_RECORDING_MODE = 1, // frame has to be explicitly released by releaseRecordingFrame()
- };
- // these are internal functions used for preview/recording
- status_t startCameraMode(camera_mode mode);
- status_t startPreviewMode();
- status_t startRecordingMode();
-
- // internal function used by sendCommand to enable/disable shutter sound.
- status_t enableShutterSound(bool enable);
-
- static sp<CameraClient> getClientFromCookie(void* user);
-
- // these are static callback functions
- static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
- static void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
- camera_frame_metadata_t *metadata, void* user);
- static void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
- static void handleCallbackTimestampBatch(
- int32_t msgType, const std::vector<HandleTimestampMessage>&, void* user);
- // handlers for messages
- void handleShutter(void);
- void handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
- camera_frame_metadata_t *metadata);
- void handlePostview(const sp<IMemory>& mem);
- void handleRawPicture(const sp<IMemory>& mem);
- void handleCompressedPicture(const sp<IMemory>& mem);
- void handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2);
- void handleGenericData(int32_t msgType, const sp<IMemory>& dataPtr,
- camera_frame_metadata_t *metadata);
- void handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
- void copyFrameAndPostCopiedFrame(
- int32_t msgType,
- const sp<hardware::ICameraClient>& client,
- const sp<IMemoryHeap>& heap,
- size_t offset, size_t size,
- camera_frame_metadata_t *metadata);
-
- int getOrientation(int orientation, bool mirror);
-
- status_t setPreviewWindow(
- const sp<IBinder>& binder,
- const sp<ANativeWindow>& window);
-
-
- // these are initialized in the constructor.
- sp<CameraHardwareInterface> mHardware; // cleared after disconnect()
- int mPreviewCallbackFlag;
- int mOrientation; // Current display orientation
- bool mPlayShutterSound;
- bool mLegacyMode; // camera2 api legacy mode?
-
- // Ensures atomicity among the public methods
- mutable Mutex mLock;
- // This is a binder of Surface or Surface.
- sp<IBinder> mSurface;
- sp<ANativeWindow> mPreviewWindow;
-
- // If the user want us to return a copy of the preview frame (instead
- // of the original one), we allocate mPreviewBuffer and reuse it if possible.
- sp<MemoryHeapBase> mPreviewBuffer;
-
- // Debugging information
- CameraParameters mLatestSetParameters;
-
- // mAvailableCallbackBuffers stores sp<IMemory> that HAL uses to send VideoNativeHandleMetadata.
- // It will be used to send VideoNativeHandleMetadata back to HAL when camera receives the
- // native handle from releaseRecordingFrameHandle.
- Mutex mAvailableCallbackBuffersLock;
- std::vector<sp<IMemory>> mAvailableCallbackBuffers;
-
- // We need to avoid the deadlock when the incoming command thread and
- // the CameraHardwareInterface callback thread both want to grab mLock.
- // An extra flag is used to tell the callback thread that it should stop
- // trying to deliver the callback messages if the client is not
- // interested in it anymore. For example, if the client is calling
- // stopPreview(), the preview frame messages do not need to be delivered
- // anymore.
-
- // This function takes the same parameter as the enableMsgType() and
- // disableMsgType() functions in CameraHardwareInterface.
- void enableMsgType(int32_t msgType);
- void disableMsgType(int32_t msgType);
- volatile int32_t mMsgEnabled;
-
- // This function keeps trying to grab mLock, or give up if the message
- // is found to be disabled. It returns true if mLock is grabbed.
- bool lockIfMessageWanted(int32_t msgType);
-};
-
-}
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 20333d1..d543cab 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -230,7 +230,7 @@
previewFpsRange[1] = fastInfo.bestStillCaptureFpsRange[1];
// PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
- // still have to do something sane for them
+ // still have to do something reasonable for them
// NOTE: Not scaled like FPS range values are.
int previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]);
@@ -3253,6 +3253,8 @@
status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
const {
+ // For external camera, use FOVs = (-1.0, -1.0) as default values. Calculate
+ // FOVs only if there is sufficient information.
if (fastInfo.isExternalCamera) {
if (horizFov != NULL) {
*horizFov = -1.0;
@@ -3260,16 +3262,29 @@
if (vertFov != NULL) {
*vertFov = -1.0;
}
- return OK;
}
camera_metadata_ro_entry_t sensorSize =
staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
- if (!sensorSize.count) return NO_INIT;
+ if (!sensorSize.count) {
+ // It is non-fatal for external cameras since it has default values.
+ if (fastInfo.isExternalCamera) {
+ return OK;
+ } else {
+ return NO_INIT;
+ }
+ }
camera_metadata_ro_entry_t pixelArraySize =
staticInfo(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 2, 2);
- if (!pixelArraySize.count) return NO_INIT;
+ if (!pixelArraySize.count) {
+ // It is non-fatal for external cameras since it has default values.
+ if (fastInfo.isExternalCamera) {
+ return OK;
+ } else {
+ return NO_INIT;
+ }
+ }
float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
fastInfo.arrayHeight;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8dc9863..8753dcf 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -172,7 +172,7 @@
mBufferQueueDepth = mFrameListDepth + 1;
mZslQueue.insertAt(0, mBufferQueueDepth);
- mFrameList.insertAt(0, mFrameListDepth);
+ mFrameList.resize(mFrameListDepth);
sp<CaptureSequencer> captureSequencer = mSequencer.promote();
if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
@@ -208,7 +208,7 @@
// Corresponding buffer has been cleared. No need to push into mFrameList
if (timestamp <= mLatestClearedBufferTimestamp) return;
- mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
+ mFrameList[mFrameListHead] = result.mMetadata;
mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}
@@ -671,7 +671,7 @@
void ZslProcessor::clearZslResultQueueLocked() {
mFrameList.clear();
mFrameListHead = 0;
- mFrameList.insertAt(0, mFrameListDepth);
+ mFrameList.resize(mFrameListDepth);
}
void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 1db2403..3186233 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -125,7 +125,7 @@
static const int32_t kDefaultMaxPipelineDepth = 4;
size_t mBufferQueueDepth;
size_t mFrameListDepth;
- Vector<CameraMetadata> mFrameList;
+ std::vector<CameraMetadata> mFrameList;
size_t mFrameListHead;
ZslPair mNextPair;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 022d686..66eda5d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -21,6 +21,7 @@
#include <cutils/properties.h>
#include <utils/CameraThreadState.h>
#include <utils/Log.h>
+#include <utils/SessionConfigurationUtils.h>
#include <utils/Trace.h>
#include <gui/Surface.h>
#include <camera/camera2/CaptureRequest.h>
@@ -30,6 +31,7 @@
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "api2/CameraDeviceClient.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
@@ -470,7 +472,7 @@
}
binder::Status CameraDeviceClient::endConfigure(int operatingMode,
- const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ const hardware::camera2::impl::CameraMetadataNative& sessionParams, int64_t startTimeMs,
std::vector<int>* offlineStreamIds /*out*/) {
ATRACE_CALL();
ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
@@ -492,7 +494,8 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+ res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+ mCameraIdStr);
if (!res.isOk()) {
return res;
}
@@ -545,252 +548,16 @@
for (const auto& offlineStreamId : *offlineStreamIds) {
mStreamInfoMap[offlineStreamId].supportsOffline = true;
}
+
+ nsecs_t configureEnd = systemTime();
+ int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
+ CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+ false /*internalReconfig*/, configureDurationMs);
}
return res;
}
-binder::Status CameraDeviceClient::checkSurfaceType(size_t numBufferProducers,
- bool deferredConsumer, int surfaceType) {
- if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
- ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
- __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
- } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
- ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
- }
-
- bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
- (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
- if (deferredConsumer && !validSurfaceType) {
- ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
- }
-
- return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkPhysicalCameraId(
- const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
- const String8 &logicalCameraId) {
- if (physicalCameraId.size() == 0) {
- return binder::Status::ok();
- }
- if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
- physicalCameraId.string()) == physicalCameraIds.end()) {
- String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
- logicalCameraId.string(), physicalCameraId.string());
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkOperatingMode(int operatingMode,
- const CameraMetadata &staticInfo, const String8 &cameraId) {
- if (operatingMode < 0) {
- String8 msg = String8::format(
- "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
- msg.string());
- }
-
- bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
- if (isConstrainedHighSpeed) {
- camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
- bool isConstrainedHighSpeedSupported = false;
- for(size_t i = 0; i < entry.count; ++i) {
- uint8_t capability = entry.data.u8[i];
- if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
- isConstrainedHighSpeedSupported = true;
- break;
- }
- }
- if (!isConstrainedHighSpeedSupported) {
- String8 msg = String8::format(
- "Camera %s: Try to create a constrained high speed configuration on a device"
- " that doesn't support it.", cameraId.string());
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
- msg.string());
- }
- }
-
- return binder::Status::ok();
-}
-
-void CameraDeviceClient::mapStreamInfo(const OutputStreamInfo &streamInfo,
- camera3_stream_rotation_t rotation, String8 physicalId,
- hardware::camera::device::V3_4::Stream *stream /*out*/) {
- if (stream == nullptr) {
- return;
- }
-
- stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
- stream->v3_2.width = streamInfo.width;
- stream->v3_2.height = streamInfo.height;
- stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
- auto u = streamInfo.consumerUsage;
- camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
- stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
- stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
- stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
- stream->v3_2.id = -1; // Invalid stream id
- stream->physicalCameraId = std::string(physicalId.string());
- stream->bufferSize = 0;
-}
-
-binder::Status
-CameraDeviceClient::convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
- const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
- metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
- hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
- bool *unsupported) {
- auto operatingMode = sessionConfiguration.getOperatingMode();
- binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
- if (!res.isOk()) {
- return res;
- }
-
- if (unsupported == nullptr) {
- String8 msg("unsupported nullptr");
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- *unsupported = false;
- auto ret = Camera3Device::mapToStreamConfigurationMode(
- static_cast<camera3_stream_configuration_mode_t> (operatingMode),
- /*out*/ &streamConfiguration.operationMode);
- if (ret != OK) {
- String8 msg = String8::format(
- "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
- logicalCameraId.string(), operatingMode, strerror(-ret), ret);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
- msg.string());
- }
-
- bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
- (sessionConfiguration.getInputHeight() > 0) &&
- (sessionConfiguration.getInputFormat() > 0);
- auto outputConfigs = sessionConfiguration.getOutputConfigurations();
- size_t streamCount = outputConfigs.size();
- streamCount = isInputValid ? streamCount + 1 : streamCount;
- streamConfiguration.streams.resize(streamCount);
- size_t streamIdx = 0;
- if (isInputValid) {
- streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
- hardware::camera::device::V3_2::StreamType::INPUT,
- static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
- static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
- Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
- /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
- hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
- /*physicalId*/ nullptr, /*bufferSize*/0};
- }
-
- for (const auto &it : outputConfigs) {
- const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
- it.getGraphicBufferProducers();
- bool deferredConsumer = it.isDeferred();
- String8 physicalCameraId = String8(it.getPhysicalCameraId());
- size_t numBufferProducers = bufferProducers.size();
- bool isStreamInfoValid = false;
- OutputStreamInfo streamInfo;
-
- res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
- if (!res.isOk()) {
- return res;
- }
- res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
- logicalCameraId);
- if (!res.isOk()) {
- return res;
- }
-
- if (deferredConsumer) {
- streamInfo.width = it.getWidth();
- streamInfo.height = it.getHeight();
- streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
- auto surfaceType = it.getSurfaceType();
- streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
- if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
- streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
- }
- mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
- &streamConfiguration.streams[streamIdx++]);
- isStreamInfoValid = true;
-
- if (numBufferProducers == 0) {
- continue;
- }
- }
-
- for (auto& bufferProducer : bufferProducers) {
- sp<Surface> surface;
- const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
- res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
- logicalCameraId,
- physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
-
- if (!res.isOk())
- return res;
-
- if (!isStreamInfoValid) {
- bool isDepthCompositeStream =
- camera3::DepthCompositeStream::isDepthCompositeStream(surface);
- bool isHeicCompositeStream =
- camera3::HeicCompositeStream::isHeicCompositeStream(surface);
- if (isDepthCompositeStream || isHeicCompositeStream) {
- // We need to take in to account that composite streams can have
- // additional internal camera streams.
- std::vector<OutputStreamInfo> compositeStreams;
- if (isDepthCompositeStream) {
- ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
- deviceInfo, &compositeStreams);
- } else {
- ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
- deviceInfo, &compositeStreams);
- }
- if (ret != OK) {
- String8 msg = String8::format(
- "Camera %s: Failed adding composite streams: %s (%d)",
- logicalCameraId.string(), strerror(-ret), ret);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
-
- if (compositeStreams.size() == 0) {
- // No internal streams means composite stream not
- // supported.
- *unsupported = true;
- return binder::Status::ok();
- } else if (compositeStreams.size() > 1) {
- streamCount += compositeStreams.size() - 1;
- streamConfiguration.streams.resize(streamCount);
- }
-
- for (const auto& compositeStream : compositeStreams) {
- mapStreamInfo(compositeStream,
- static_cast<camera3_stream_rotation_t> (it.getRotation()),
- physicalCameraId, &streamConfiguration.streams[streamIdx++]);
- }
- } else {
- mapStreamInfo(streamInfo,
- static_cast<camera3_stream_rotation_t> (it.getRotation()),
- physicalCameraId, &streamConfiguration.streams[streamIdx++]);
- }
- isStreamInfoValid = true;
- }
- }
- }
- return binder::Status::ok();
-}
-
binder::Status CameraDeviceClient::isSessionConfigurationSupported(
const SessionConfiguration& sessionConfiguration, bool *status /*out*/) {
ATRACE_CALL();
@@ -806,7 +573,8 @@
}
auto operatingMode = sessionConfiguration.getOperatingMode();
- res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+ res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+ mCameraIdStr);
if (!res.isOk()) {
return res;
}
@@ -821,8 +589,9 @@
metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
std::vector<std::string> physicalCameraIds;
mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
- res = convertToHALStreamCombination(sessionConfiguration, mCameraIdStr,
- mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration, &earlyExit);
+ res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
+ mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
+ &earlyExit);
if (!res.isOk()) {
return res;
}
@@ -970,7 +739,7 @@
String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
- res = checkSurfaceType(numBufferProducers, deferredConsumer,
+ res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
if (!res.isOk()) {
return res;
@@ -981,7 +750,8 @@
}
std::vector<std::string> physicalCameraIds;
mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
- res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId, mCameraIdStr);
+ res = SessionConfigurationUtils::checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
+ mCameraIdStr);
if (!res.isOk()) {
return res;
}
@@ -1009,8 +779,8 @@
}
sp<Surface> surface;
- res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
- mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+ res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo, isStreamInfoValid,
+ surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
if (!res.isOk())
return res;
@@ -1313,8 +1083,9 @@
for (size_t i = 0; i < newOutputsMap.size(); i++) {
OutputStreamInfo outInfo;
sp<Surface> surface;
- res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
- newOutputsMap.valueAt(i), mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+ res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false,
+ surface, newOutputsMap.valueAt(i), mCameraIdStr,
+ mDevice->infoPhysical(physicalCameraId));
if (!res.isOk())
return res;
@@ -1364,226 +1135,6 @@
return res;
}
-bool CameraDeviceClient::isPublicFormat(int32_t format)
-{
- switch(format) {
- case HAL_PIXEL_FORMAT_RGBA_8888:
- case HAL_PIXEL_FORMAT_RGBX_8888:
- case HAL_PIXEL_FORMAT_RGB_888:
- case HAL_PIXEL_FORMAT_RGB_565:
- case HAL_PIXEL_FORMAT_BGRA_8888:
- case HAL_PIXEL_FORMAT_YV12:
- case HAL_PIXEL_FORMAT_Y8:
- case HAL_PIXEL_FORMAT_Y16:
- case HAL_PIXEL_FORMAT_RAW16:
- case HAL_PIXEL_FORMAT_RAW10:
- case HAL_PIXEL_FORMAT_RAW12:
- case HAL_PIXEL_FORMAT_RAW_OPAQUE:
- case HAL_PIXEL_FORMAT_BLOB:
- case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
- case HAL_PIXEL_FORMAT_YCbCr_420_888:
- case HAL_PIXEL_FORMAT_YCbCr_422_SP:
- case HAL_PIXEL_FORMAT_YCrCb_420_SP:
- case HAL_PIXEL_FORMAT_YCbCr_422_I:
- return true;
- default:
- return false;
- }
-}
-
-binder::Status CameraDeviceClient::createSurfaceFromGbp(
- OutputStreamInfo& streamInfo, bool isStreamInfoValid,
- sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
- const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
-
- // bufferProducer must be non-null
- if (gbp == nullptr) {
- String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
- ALOGW("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- // HACK b/10949105
- // Query consumer usage bits to set async operation mode for
- // GLConsumer using controlledByApp parameter.
- bool useAsync = false;
- uint64_t consumerUsage = 0;
- status_t err;
- if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
- String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
- cameraId.string(), strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
- ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for stream",
- __FUNCTION__, cameraId.string(), consumerUsage);
- useAsync = true;
- }
-
- uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
- GRALLOC_USAGE_RENDERSCRIPT;
- uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
- GraphicBuffer::USAGE_HW_TEXTURE |
- GraphicBuffer::USAGE_HW_COMPOSER;
- bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
- (consumerUsage & allowedFlags) != 0;
-
- surface = new Surface(gbp, useAsync);
- ANativeWindow *anw = surface.get();
-
- int width, height, format;
- android_dataspace dataSpace;
- if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
- String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
- cameraId.string(), strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
- String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
- cameraId.string(), strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
- String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
- cameraId.string(), strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
- reinterpret_cast<int*>(&dataSpace))) != OK) {
- String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
- cameraId.string(), strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
-
- // FIXME: remove this override since the default format should be
- // IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
- if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
- ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
- ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
- ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
- __FUNCTION__, cameraId.string(), format);
- format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- }
- // Round dimensions to the nearest dimensions available for this format
- if (flexibleConsumer && isPublicFormat(format) &&
- !CameraDeviceClient::roundBufferDimensionNearest(width, height,
- format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
- String8 msg = String8::format("Camera %s: No supported stream configurations with "
- "format %#x defined, failed to create output stream",
- cameraId.string(), format);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
-
- if (!isStreamInfoValid) {
- streamInfo.width = width;
- streamInfo.height = height;
- streamInfo.format = format;
- streamInfo.dataSpace = dataSpace;
- streamInfo.consumerUsage = consumerUsage;
- return binder::Status::ok();
- }
- if (width != streamInfo.width) {
- String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
- cameraId.string(), width, streamInfo.width);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- if (height != streamInfo.height) {
- String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
- cameraId.string(), height, streamInfo.height);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- if (format != streamInfo.format) {
- String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
- cameraId.string(), format, streamInfo.format);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
- if (dataSpace != streamInfo.dataSpace) {
- String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
- cameraId.string(), dataSpace, streamInfo.dataSpace);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- //At the native side, there isn't a way to check whether 2 surfaces come from the same
- //surface class type. Use usage flag to approximate the comparison.
- if (consumerUsage != streamInfo.consumerUsage) {
- String8 msg = String8::format(
- "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
- cameraId.string(), consumerUsage, streamInfo.consumerUsage);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
- }
- return binder::Status::ok();
-}
-
-bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
- int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
- /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
-
- camera_metadata_ro_entry streamConfigs =
- (dataSpace == HAL_DATASPACE_DEPTH) ?
- info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
- (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
- info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
- info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-
- int32_t bestWidth = -1;
- int32_t bestHeight = -1;
-
- // Iterate through listed stream configurations and find the one with the smallest euclidean
- // distance from the given dimensions for the given format.
- for (size_t i = 0; i < streamConfigs.count; i += 4) {
- int32_t fmt = streamConfigs.data.i32[i];
- int32_t w = streamConfigs.data.i32[i + 1];
- int32_t h = streamConfigs.data.i32[i + 2];
-
- // Ignore input/output type for now
- if (fmt == format) {
- if (w == width && h == height) {
- bestWidth = width;
- bestHeight = height;
- break;
- } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
- CameraDeviceClient::euclidDistSquare(w, h, width, height) <
- CameraDeviceClient::euclidDistSquare(bestWidth, bestHeight, width, height))) {
- bestWidth = w;
- bestHeight = h;
- }
- }
- }
-
- if (bestWidth == -1) {
- // Return false if no configurations for this format were listed
- return false;
- }
-
- // Set the outputs to the closet width/height
- if (outWidth != NULL) {
- *outWidth = bestWidth;
- }
- if (outHeight != NULL) {
- *outHeight = bestHeight;
- }
-
- // Return true if at least one configuration for this format was listed
- return true;
-}
-
-int64_t CameraDeviceClient::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
- int64_t d0 = x0 - x1;
- int64_t d1 = y0 - y1;
- return d0 * d0 + d1 * d1;
-}
-
// Create a request object from a template.
binder::Status CameraDeviceClient::createDefaultRequest(int templateId,
/*out*/
@@ -1896,8 +1447,9 @@
}
sp<Surface> surface;
- res = createSurfaceFromGbp(mStreamInfoMap[streamId], true /*isStreamInfoValid*/,
- surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalId));
+ res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
+ true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
+ mDevice->infoPhysical(physicalId));
if (!res.isOk())
return res;
@@ -2162,14 +1714,16 @@
mStreamingRequestId = REQUEST_ID_NONE;
}
-void CameraDeviceClient::notifyIdle() {
+void CameraDeviceClient::notifyIdle(
+ int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats) {
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
- Camera2ClientBase::notifyIdle();
+ Camera2ClientBase::notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2205,6 +1759,7 @@
void CameraDeviceClient::detachDevice() {
if (mDevice == 0) return;
+ nsecs_t startTime = systemTime();
ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
@@ -2239,6 +1794,9 @@
mCompositeStreamMap.clear();
Camera2ClientBase::detachDevice();
+
+ int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
+ CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
}
/** Device-related methods */
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index e7e26da..3f72eca 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -95,6 +95,7 @@
virtual binder::Status endConfigure(int operatingMode,
const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+ int64_t startTimeMs,
/*out*/
std::vector<int>* offlineStreamIds) override;
@@ -196,7 +197,8 @@
* Device listener interface
*/
- virtual void notifyIdle();
+ virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats);
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
virtual void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp);
@@ -204,16 +206,7 @@
virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
- // utility function to convert AIDL SessionConfiguration to HIDL
- // streamConfiguration. Also checks for sanity of SessionConfiguration and
- // returns a non-ok binder::Status if the passed in session configuration
- // isn't valid.
- static binder::Status
- convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
- const String8 &cameraId, const CameraMetadata &deviceInfo,
- metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
- hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
- bool *earlyExit);
+ void setImageDumpMask(int mask) { if (mDevice != nullptr) mDevice->setImageDumpMask(mask); }
/**
* Interface used by independent components of CameraDeviceClient.
*/
@@ -266,18 +259,8 @@
/** Utility members */
binder::Status checkPidStatus(const char* checkLocation);
- static binder::Status checkOperatingMode(int operatingMode, const CameraMetadata &staticInfo,
- const String8 &cameraId);
- static binder::Status checkSurfaceType(size_t numBufferProducers, bool deferredConsumer,
- int surfaceType);
- static void mapStreamInfo(const OutputStreamInfo &streamInfo,
- camera3_stream_rotation_t rotation, String8 physicalId,
- hardware::camera::device::V3_4::Stream *stream /*out*/);
bool enforceRequestPermissions(CameraMetadata& metadata);
- // Find the square of the euclidean distance between two points
- static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
-
// Create an output stream with surface deferred for future.
binder::Status createDeferredSurfaceStreamLocked(
const hardware::camera2::params::OutputConfiguration &outputConfiguration,
@@ -288,33 +271,11 @@
// cases.
binder::Status setStreamTransformLocked(int streamId);
- // Find the closest dimensions for a given format in available stream configurations with
- // a width <= ROUNDING_WIDTH_CAP
- static const int32_t ROUNDING_WIDTH_CAP = 1920;
- static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
- android_dataspace dataSpace, const CameraMetadata& info,
- /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
-
- //check if format is not custom format
- static bool isPublicFormat(int32_t format);
-
- // Create a Surface from an IGraphicBufferProducer. Returns error if
- // IGraphicBufferProducer's property doesn't match with streamInfo
- static binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
- sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp, const String8 &cameraId,
- const CameraMetadata &physicalCameraMetadata);
-
-
// Utility method to insert the surface into SurfaceMap
binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
/*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
/*out*/int32_t* currentStreamId);
- // Check that the physicalCameraId passed in is spported by the camera
- // device.
- static binder::Status checkPhysicalCameraId(const std::vector<std::string> &physicalCameraIds,
- const String8 &physicalCameraId, const String8 &logicalCameraId);
-
// IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
@@ -346,7 +307,6 @@
KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
- static const int32_t MAX_SURFACES_PER_STREAM = 4;
sp<CameraProviderManager> mProviderManager;
};
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 237c24b..62b5479 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -265,7 +265,9 @@
}
}
-void CameraOfflineSessionClient::notifyIdle() {
+void CameraOfflineSessionClient::notifyIdle(
+ int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
+ const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
if (mRemoteCallback.get() != nullptr) {
mRemoteCallback->onDeviceIdle();
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 03621c8..839c435 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -86,7 +86,8 @@
// NotificationListener API
void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
- void notifyIdle() override;
+ void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats) override;
void notifyAutoFocus(uint8_t newState, int triggerId) override;
void notifyAutoExposure(uint8_t newState, int triggerId) override;
void notifyAutoWhitebalance(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 1a0881f..a7173d1 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -65,7 +65,6 @@
mYuvBufferAcquired(false),
mProducerListener(new ProducerListener()),
mDequeuedOutputBufferCnt(0),
- mLockedAppSegmentBufferCnt(0),
mCodecOutputCounter(0),
mQuality(-1),
mGridTimestampUs(0),
@@ -510,7 +509,8 @@
sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
- mStatusId = statusTracker->addComponent();
+ std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
+ mStatusId = statusTracker->addComponent(name);
}
run("HeicCompositeStreamProc");
@@ -620,7 +620,8 @@
if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mAppSegmentFrameNumbers.front());
- mInputYuvBuffers.erase(it);
+ mInputAppSegmentBuffers.erase(it);
+ mAppSegmentFrameNumbers.pop();
continue;
}
@@ -633,7 +634,6 @@
mAppSegmentConsumer->unlockBuffer(imgBuffer);
} else {
mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
- mLockedAppSegmentBufferCnt++;
}
mInputAppSegmentBuffers.erase(it);
mAppSegmentFrameNumbers.pop();
@@ -664,6 +664,7 @@
ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mMainImageFrameNumbers.front());
mInputYuvBuffers.erase(it);
+ mMainImageFrameNumbers.pop();
continue;
}
@@ -895,10 +896,6 @@
strerror(-res), res);
return res;
}
- } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
- ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
- kMaxAcquiredAppSegment);
- return INVALID_OPERATION;
}
}
@@ -1036,7 +1033,6 @@
mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
inputFrame.appSegmentBuffer.data = nullptr;
inputFrame.exifError = false;
- mLockedAppSegmentBufferCnt--;
return OK;
}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 33ca69a..a373127 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -253,7 +253,6 @@
// Keep all incoming APP segment Blob buffer pending further processing.
std::vector<int64_t> mInputAppSegmentBuffers;
- int32_t mLockedAppSegmentBufferCnt;
// Keep all incoming HEIC blob buffer pending further processing.
std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 609698c..6fd8d45 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,12 +27,15 @@
#include <gui/Surface.h>
#include <gui/Surface.h>
+#include <camera/CameraSessionStats.h>
+
#include "common/Camera2ClientBase.h"
#include "api2/CameraDeviceClient.h"
#include "device3/Camera3Device.h"
#include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
namespace android {
using namespace camera2;
@@ -194,7 +197,7 @@
CameraService::BasicClient::disconnect();
- ALOGV("Camera %s: Shut down complete complete", TClientBase::mCameraIdStr.string());
+ ALOGV("Camera %s: Shut down complete", TClientBase::mCameraIdStr.string());
return res;
}
@@ -245,13 +248,12 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyIdle() {
+void Camera2ClientBase<TClientBase>::notifyIdle(
+ int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats) {
if (mDeviceActive) {
- getCameraService()->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_IDLE, TClientBase::mCameraIdStr,
- TClientBase::mCameraFacing, TClientBase::mClientPackageName,
- ((mApi1CameraId < 0) ? hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2 :
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1));
+ CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+ requestCount, resultErrorCount, deviceError, streamStats);
}
mDeviceActive = false;
@@ -265,11 +267,7 @@
(void)timestamp;
if (!mDeviceActive) {
- getCameraService()->updateProxyDeviceState(
- hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE, TClientBase::mCameraIdStr,
- TClientBase::mCameraFacing, TClientBase::mClientPackageName,
- ((mApi1CameraId < 0) ? hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2 :
- hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1));
+ CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
}
mDeviceActive = true;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d7506af..1ce4393 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -66,7 +66,9 @@
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
- virtual void notifyIdle();
+ virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount,
+ bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats);
virtual void notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp);
virtual void notifyAutoFocus(uint8_t newState, int triggerId);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index a537ef5..77e660f 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -367,6 +367,14 @@
* Get the status tracker of the camera device
*/
virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
+
+ /**
+ * Set bitmask for image dump flag
+ */
+ void setImageDumpMask(int mask) { mImageDumpMask = mask; }
+
+protected:
+ int mImageDumpMask = 0;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 1f835a9..e02e146 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -17,11 +17,14 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERAOFFLINESESSIONBASE_H
#define ANDROID_SERVERS_CAMERA_CAMERAOFFLINESESSIONBASE_H
+#include <vector>
+
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/Timers.h>
#include "camera/CaptureResult.h"
+#include "camera/CameraSessionStats.h"
#include "FrameProducer.h"
namespace android {
@@ -39,7 +42,8 @@
const CaptureResultExtras &resultExtras) = 0;
// Required only for API2
- virtual void notifyIdle() = 0;
+ virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
virtual void notifyShutter(const CaptureResultExtras &resultExtras,
nsecs_t timestamp) = 0;
virtual void notifyPrepared(int streamId) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 32d118d..e9dcb01 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -417,46 +417,6 @@
return mapToStatusT(status);
}
-status_t CameraProviderManager::openSession(const std::string &id,
- const sp<device::V1_0::ICameraDeviceCallback>& callback,
- /*out*/
- sp<device::V1_0::ICameraDevice> *session) {
-
- std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
- auto deviceInfo = findDeviceInfoLocked(id,
- /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
- if (deviceInfo == nullptr) return NAME_NOT_FOUND;
-
- auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
- sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
- if (parentProvider == nullptr) {
- return DEAD_OBJECT;
- }
- const sp<provider::V2_4::ICameraProvider> provider = parentProvider->startProviderInterface();
- if (provider == nullptr) {
- return DEAD_OBJECT;
- }
- saveRef(DeviceMode::CAMERA, id, provider);
-
- auto interface = deviceInfo1->startDeviceInterface<
- CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT>();
- if (interface == nullptr) {
- return DEAD_OBJECT;
- }
- hardware::Return<Status> status = interface->open(callback);
- if (!status.isOk()) {
- removeRef(DeviceMode::CAMERA, id);
- ALOGE("%s: Transaction error opening a session for camera device %s: %s",
- __FUNCTION__, id.c_str(), status.description().c_str());
- return DEAD_OBJECT;
- }
- if (status == Status::OK) {
- *session = interface;
- }
- return mapToStatusT(status);
-}
-
void CameraProviderManager::saveRef(DeviceMode usageType, const std::string &cameraId,
sp<provider::V2_4::ICameraProvider> provider) {
if (!kEnableLazyHal) {
@@ -1344,6 +1304,20 @@
}
}
+ // cameraDeviceStatusChange callbacks may be called (and cause new devices to be
+ // added) before setCallback returns
+ hardware::Return<Status> status = interface->setCallback(this);
+ if (!status.isOk()) {
+ ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+ __FUNCTION__, mProviderName.c_str(), status.description().c_str());
+ return DEAD_OBJECT;
+ }
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+ __FUNCTION__, mProviderName.c_str());
+ return mapToStatusT(status);
+ }
+
hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
if (!linked.isOk()) {
ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
@@ -1372,7 +1346,6 @@
return res;
}
- Status status;
// Get initial list of camera devices, if any
std::vector<std::string> devices;
hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
@@ -1437,26 +1410,43 @@
}
}
- // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
- // before setCallback returns. setCallback must be called after addDevice so that
- // the physical camera status callback can look up available regular
- // cameras.
- hardware::Return<Status> st = interface->setCallback(this);
- if (!st.isOk()) {
- ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
- __FUNCTION__, mProviderName.c_str(), st.description().c_str());
- return DEAD_OBJECT;
- }
- if (st != Status::OK) {
- ALOGE("%s: Unable to register callbacks with camera provider '%s'",
- __FUNCTION__, mProviderName.c_str());
- return mapToStatusT(st);
- }
-
ALOGI("Camera provider %s ready with %zu camera devices",
mProviderName.c_str(), mDevices.size());
- mInitialized = true;
+ // Process cached status callbacks
+ std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
+ std::make_unique<std::vector<CameraStatusInfoT>>();
+ {
+ std::lock_guard<std::mutex> lock(mInitLock);
+
+ for (auto& statusInfo : mCachedStatus) {
+ std::string id, physicalId;
+ status_t res = OK;
+ if (statusInfo.isPhysicalCameraStatus) {
+ res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+ statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
+ } else {
+ res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
+ }
+ if (res == OK) {
+ cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
+ id.c_str(), physicalId.c_str(), statusInfo.status);
+ }
+ }
+ mCachedStatus.clear();
+
+ mInitialized = true;
+ }
+
+ // The cached status change callbacks cannot be fired directly from this
+ // function, due to same-thread deadlock trying to acquire mInterfaceMutex
+ // twice.
+ if (listener != nullptr) {
+ mInitialStatusCallbackFuture = std::async(std::launch::async,
+ &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
+ listener, std::move(cachedStatus));
+ }
+
return OK;
}
@@ -1537,9 +1527,9 @@
std::unique_ptr<DeviceInfo> deviceInfo;
switch (major) {
case 1:
- deviceInfo = initializeDeviceInfo<DeviceInfo1>(name, mProviderTagid,
- id, minor);
- break;
+ ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:", __FUNCTION__,
+ name.c_str(), major);
+ return BAD_VALUE;
case 3:
deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
id, minor);
@@ -1734,104 +1724,139 @@
CameraDeviceStatus newStatus) {
sp<StatusListener> listener;
std::string id;
- bool initialized = false;
+ std::lock_guard<std::mutex> lock(mInitLock);
+
+ if (!mInitialized) {
+ mCachedStatus.emplace_back(false /*isPhysicalCameraStatus*/,
+ cameraDeviceName.c_str(), std::string().c_str(), newStatus);
+ return hardware::Void();
+ }
+
{
std::lock_guard<std::mutex> lock(mLock);
- bool known = false;
- for (auto& deviceInfo : mDevices) {
- if (deviceInfo->mName == cameraDeviceName) {
- ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
- deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
- deviceInfo->mStatus = newStatus;
- // TODO: Handle device removal (NOT_PRESENT)
- id = deviceInfo->mId;
- known = true;
- break;
- }
- }
- // Previously unseen device; status must not be NOT_PRESENT
- if (!known) {
- if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
- ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
- mProviderName.c_str(), cameraDeviceName.c_str());
- return hardware::Void();
- }
- addDevice(cameraDeviceName, newStatus, &id);
- } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
- removeDevice(id);
+ if (OK != cameraDeviceStatusChangeLocked(&id, cameraDeviceName, newStatus)) {
+ return hardware::Void();
}
listener = mManager->getStatusListener();
- initialized = mInitialized;
- if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
- ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
- __FUNCTION__, mProviderName.c_str());
- }
}
+
// Call without lock held to allow reentrancy into provider manager
- // Don't send the callback if providerInfo hasn't been initialized.
- // CameraService will initialize device status after provider is
- // initialized
- if (listener != nullptr && initialized) {
+ if (listener != nullptr) {
listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
}
+
return hardware::Void();
}
+status_t CameraProviderManager::ProviderInfo::cameraDeviceStatusChangeLocked(
+ std::string* id, const hardware::hidl_string& cameraDeviceName,
+ CameraDeviceStatus newStatus) {
+ bool known = false;
+ std::string cameraId;
+ for (auto& deviceInfo : mDevices) {
+ if (deviceInfo->mName == cameraDeviceName) {
+ ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
+ deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
+ deviceInfo->mStatus = newStatus;
+ // TODO: Handle device removal (NOT_PRESENT)
+ cameraId = deviceInfo->mId;
+ known = true;
+ break;
+ }
+ }
+ // Previously unseen device; status must not be NOT_PRESENT
+ if (!known) {
+ if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+ ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
+ mProviderName.c_str(), cameraDeviceName.c_str());
+ return BAD_VALUE;
+ }
+ addDevice(cameraDeviceName, newStatus, &cameraId);
+ } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+ removeDevice(cameraId);
+ }
+ if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
+ ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
+ __FUNCTION__, mProviderName.c_str());
+ }
+ *id = cameraId;
+ return OK;
+}
+
hardware::Return<void> CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChange(
const hardware::hidl_string& cameraDeviceName,
const hardware::hidl_string& physicalCameraDeviceName,
CameraDeviceStatus newStatus) {
sp<StatusListener> listener;
std::string id;
- bool initialized = false;
+ std::string physicalId;
+ std::lock_guard<std::mutex> lock(mInitLock);
+
+ if (!mInitialized) {
+ mCachedStatus.emplace_back(true /*isPhysicalCameraStatus*/, cameraDeviceName,
+ physicalCameraDeviceName, newStatus);
+ return hardware::Void();
+ }
+
{
std::lock_guard<std::mutex> lock(mLock);
- bool known = false;
- for (auto& deviceInfo : mDevices) {
- if (deviceInfo->mName == cameraDeviceName) {
- id = deviceInfo->mId;
- if (!deviceInfo->mIsLogicalCamera) {
- ALOGE("%s: Invalid combination of camera id %s, physical id %s",
- __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
- return hardware::Void();
- }
- if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
- physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
- ALOGE("%s: Invalid combination of camera id %s, physical id %s",
- __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
- return hardware::Void();
- }
- ALOGI("Camera device %s physical device %s status is now %s, was %s",
- cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
- deviceStatusToString(newStatus), deviceStatusToString(
- deviceInfo->mPhysicalStatus[physicalCameraDeviceName]));
- known = true;
- break;
- }
- }
- // Previously unseen device; status must not be NOT_PRESENT
- if (!known) {
- ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
- mProviderName.c_str(), cameraDeviceName.c_str(),
- physicalCameraDeviceName.c_str());
+ if (OK != physicalCameraDeviceStatusChangeLocked(&id, &physicalId, cameraDeviceName,
+ physicalCameraDeviceName, newStatus)) {
return hardware::Void();
}
+
listener = mManager->getStatusListener();
- initialized = mInitialized;
}
// Call without lock held to allow reentrancy into provider manager
- // Don't send the callback if providerInfo hasn't been initialized.
- // CameraService will initialize device status after provider is
- // initialized
- if (listener != nullptr && initialized) {
- String8 physicalId(physicalCameraDeviceName.c_str());
+ if (listener != nullptr) {
listener->onDeviceStatusChanged(String8(id.c_str()),
- physicalId, newStatus);
+ String8(physicalId.c_str()), newStatus);
}
return hardware::Void();
}
+status_t CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChangeLocked(
+ std::string* id, std::string* physicalId,
+ const hardware::hidl_string& cameraDeviceName,
+ const hardware::hidl_string& physicalCameraDeviceName,
+ CameraDeviceStatus newStatus) {
+ bool known = false;
+ std::string cameraId;
+ for (auto& deviceInfo : mDevices) {
+ if (deviceInfo->mName == cameraDeviceName) {
+ cameraId = deviceInfo->mId;
+ if (!deviceInfo->mIsLogicalCamera) {
+ ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+ __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+ return BAD_VALUE;
+ }
+ if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
+ physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
+ ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+ __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+ return BAD_VALUE;
+ }
+ ALOGI("Camera device %s physical device %s status is now %s",
+ cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
+ deviceStatusToString(newStatus));
+ known = true;
+ break;
+ }
+ }
+ // Previously unseen device; status must not be NOT_PRESENT
+ if (!known) {
+ ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
+ mProviderName.c_str(), cameraDeviceName.c_str(),
+ physicalCameraDeviceName.c_str());
+ return BAD_VALUE;
+ }
+
+ *id = cameraId;
+ *physicalId = physicalCameraDeviceName.c_str();
+ return OK;
+}
+
hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
const hardware::hidl_string& cameraDeviceName,
TorchModeStatus newStatus) {
@@ -1986,6 +2011,20 @@
return INVALID_OPERATION;
}
+void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
+ sp<StatusListener> listener,
+ std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
+ for (auto& statusInfo : *cachedStatus) {
+ if (statusInfo.isPhysicalCameraStatus) {
+ listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
+ String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
+ } else {
+ listener->onDeviceStatusChanged(
+ String8(statusInfo.cameraId.c_str()), statusInfo.status);
+ }
+ }
+}
+
template<class DeviceInfoT>
std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
CameraProviderManager::ProviderInfo::initializeDeviceInfo(
@@ -2034,35 +2073,6 @@
}
template<>
-sp<device::V1_0::ICameraDevice>
-CameraProviderManager::ProviderInfo::startDeviceInterface
- <device::V1_0::ICameraDevice>(const std::string &name) {
- Status status;
- sp<device::V1_0::ICameraDevice> cameraInterface;
- hardware::Return<void> ret;
- const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
- if (interface == nullptr) {
- return nullptr;
- }
- ret = interface->getCameraDeviceInterface_V1_x(name, [&status, &cameraInterface](
- Status s, sp<device::V1_0::ICameraDevice> interface) {
- status = s;
- cameraInterface = interface;
- });
- if (!ret.isOk()) {
- ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
- __FUNCTION__, name.c_str(), ret.description().c_str());
- return nullptr;
- }
- if (status != Status::OK) {
- ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
- name.c_str(), statusToString(status));
- return nullptr;
- }
- return cameraInterface;
-}
-
-template<>
sp<device::V3_2::ICameraDevice>
CameraProviderManager::ProviderInfo::startDeviceInterface
<device::V3_2::ICameraDevice>(const std::string &name) {
@@ -2115,126 +2125,6 @@
return mapToStatusT(s);
}
-CameraProviderManager::ProviderInfo::DeviceInfo1::DeviceInfo1(const std::string& name,
- const metadata_vendor_id_t tagId, const std::string &id,
- uint16_t minorVersion,
- const CameraResourceCost& resourceCost,
- sp<ProviderInfo> parentProvider,
- const std::vector<std::string>& publicCameraIds,
- sp<InterfaceT> interface) :
- DeviceInfo(name, tagId, id, hardware::hidl_version{1, minorVersion},
- publicCameraIds, resourceCost, parentProvider) {
- // Get default parameters and initialize flash unit availability
- // Requires powering on the camera device
- hardware::Return<Status> status = interface->open(nullptr);
- if (!status.isOk()) {
- ALOGE("%s: Transaction error opening camera device %s to check for a flash unit: %s",
- __FUNCTION__, id.c_str(), status.description().c_str());
- return;
- }
- if (status != Status::OK) {
- ALOGE("%s: Unable to open camera device %s to check for a flash unit: %s", __FUNCTION__,
- id.c_str(), CameraProviderManager::statusToString(status));
- return;
- }
- hardware::Return<void> ret;
- ret = interface->getParameters([this](const hardware::hidl_string& parms) {
- mDefaultParameters.unflatten(String8(parms.c_str()));
- });
- if (!ret.isOk()) {
- ALOGE("%s: Transaction error reading camera device %s params to check for a flash unit: %s",
- __FUNCTION__, id.c_str(), status.description().c_str());
- return;
- }
- const char *flashMode =
- mDefaultParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
- if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
- mHasFlashUnit = true;
- }
-
- status_t res = cacheCameraInfo(interface);
- if (res != OK) {
- ALOGE("%s: Could not cache CameraInfo", __FUNCTION__);
- return;
- }
-
- ret = interface->close();
- if (!ret.isOk()) {
- ALOGE("%s: Transaction error closing camera device %s after check for a flash unit: %s",
- __FUNCTION__, id.c_str(), status.description().c_str());
- }
-
- if (!kEnableLazyHal) {
- // Save HAL reference indefinitely
- mSavedInterface = interface;
- }
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo1::~DeviceInfo1() {}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::setTorchMode(bool enabled) {
- return setTorchModeForDevice<InterfaceT>(enabled);
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::getCameraInfo(
- hardware::CameraInfo *info) const {
- if (info == nullptr) return BAD_VALUE;
- *info = mInfo;
- return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::cacheCameraInfo(
- sp<CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT> interface) {
- Status status;
- device::V1_0::CameraInfo cInfo;
- hardware::Return<void> ret;
- ret = interface->getCameraInfo([&status, &cInfo](Status s, device::V1_0::CameraInfo camInfo) {
- status = s;
- cInfo = camInfo;
- });
- if (!ret.isOk()) {
- ALOGE("%s: Transaction error reading camera info from device %s: %s",
- __FUNCTION__, mId.c_str(), ret.description().c_str());
- return DEAD_OBJECT;
- }
- if (status != Status::OK) {
- return mapToStatusT(status);
- }
-
- switch(cInfo.facing) {
- case device::V1_0::CameraFacing::BACK:
- mInfo.facing = hardware::CAMERA_FACING_BACK;
- break;
- case device::V1_0::CameraFacing::EXTERNAL:
- // Map external to front for legacy API
- case device::V1_0::CameraFacing::FRONT:
- mInfo.facing = hardware::CAMERA_FACING_FRONT;
- break;
- default:
- ALOGW("%s: Device %s: Unknown camera facing: %d",
- __FUNCTION__, mId.c_str(), cInfo.facing);
- mInfo.facing = hardware::CAMERA_FACING_BACK;
- }
- mInfo.orientation = cInfo.orientation;
-
- return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) {
- native_handle_t* handle = native_handle_create(1,0);
- handle->data[0] = fd;
- const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
- if (interface == nullptr) {
- return DEAD_OBJECT;
- }
- hardware::Return<Status> s = interface->dumpState(handle);
- native_handle_delete(handle);
- if (!s.isOk()) {
- return INVALID_OPERATION;
- }
- return mapToStatusT(s);
-}
-
CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
const metadata_vendor_id_t tagId, const std::string &id,
uint16_t minorVersion,
@@ -2689,9 +2579,11 @@
CameraProviderManager::ProviderInfo::~ProviderInfo() {
+ if (mInitialStatusCallbackFuture.valid()) {
+ mInitialStatusCallbackFuture.wait();
+ }
// Destruction of ProviderInfo is only supposed to happen when the respective
// CameraProvider interface dies, so do not unregister callbacks.
-
}
status_t CameraProviderManager::mapToStatusT(const Status& s) {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 25d3639..8727e7f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -22,6 +22,7 @@
#include <unordered_set>
#include <string>
#include <mutex>
+#include <future>
#include <camera/camera2/ConcurrentCamera.h>
#include <camera/CameraParameters2.h>
@@ -269,11 +270,6 @@
/*out*/
sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
- status_t openSession(const std::string &id,
- const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
- /*out*/
- sp<hardware::camera::device::V1_0::ICameraDevice> *session);
-
/**
* Save the ICameraProvider while it is being used by a camera or torch client
*/
@@ -403,6 +399,15 @@
const hardware::hidl_string& physicalCameraDeviceName,
hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
+ status_t cameraDeviceStatusChangeLocked(
+ std::string* id, const hardware::hidl_string& cameraDeviceName,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+ status_t physicalCameraDeviceStatusChangeLocked(
+ std::string* id, std::string* physicalId,
+ const hardware::hidl_string& cameraDeviceName,
+ const hardware::hidl_string& physicalCameraDeviceName,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+
// hidl_death_recipient interface - this locks the parent mInterfaceMutex
virtual void serviceDied(uint64_t cookie, const wp<hidl::base::V1_0::IBase>& who) override;
@@ -444,8 +449,6 @@
const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
- std::map<std::string, hardware::camera::common::V1_0::CameraDeviceStatus>
- mPhysicalStatus;
wp<ProviderInfo> mParentProvider;
@@ -513,27 +516,6 @@
// physical camera IDs.
std::vector<std::string> mProviderPublicCameraIds;
- // HALv1-specific camera fields, including the actual device interface
- struct DeviceInfo1 : public DeviceInfo {
- typedef hardware::camera::device::V1_0::ICameraDevice InterfaceT;
-
- virtual status_t setTorchMode(bool enabled) override;
- virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
- //In case of Device1Info assume that we are always API1 compatible
- virtual bool isAPI1Compatible() const override { return true; }
- virtual status_t dumpState(int fd) override;
- DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
- const std::string &id, uint16_t minorVersion,
- const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
- sp<ProviderInfo> parentProvider,
- const std::vector<std::string>& publicCameraIds,
- sp<InterfaceT> interface);
- virtual ~DeviceInfo1();
- private:
- CameraParameters2 mDefaultParameters;
- status_t cacheCameraInfo(sp<InterfaceT> interface);
- };
-
// HALv3-specific camera fields, including the actual device interface
struct DeviceInfo3 : public DeviceInfo {
typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
@@ -600,7 +582,27 @@
CameraProviderManager *mManager;
+ struct CameraStatusInfoT {
+ bool isPhysicalCameraStatus = false;
+ hardware::hidl_string cameraId;
+ hardware::hidl_string physicalCameraId;
+ hardware::camera::common::V1_0::CameraDeviceStatus status;
+ CameraStatusInfoT(bool isForPhysicalCamera, const hardware::hidl_string& id,
+ const hardware::hidl_string& physicalId,
+ hardware::camera::common::V1_0::CameraDeviceStatus s) :
+ isPhysicalCameraStatus(isForPhysicalCamera), cameraId(id),
+ physicalCameraId(physicalId), status(s) {}
+ };
+
+ // Lock to synchronize between initialize() and camera status callbacks
+ std::mutex mInitLock;
bool mInitialized = false;
+ std::vector<CameraStatusInfoT> mCachedStatus;
+ // End of fields guarded by mInitLock
+
+ std::future<void> mInitialStatusCallbackFuture;
+ void notifyInitialStatusChange(sp<StatusListener> listener,
+ std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
deleted file mode 100644
index 62ef681..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ /dev/null
@@ -1,818 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "CameraHardwareInterface"
-//#define LOG_NDEBUG 0
-
-#include <inttypes.h>
-#include <media/hardware/HardwareAPI.h> // For VideoNativeHandleMetadata
-#include "CameraHardwareInterface.h"
-
-namespace android {
-
-using namespace hardware::camera::device::V1_0;
-using namespace hardware::camera::common::V1_0;
-using hardware::hidl_handle;
-
-CameraHardwareInterface::~CameraHardwareInterface()
-{
- ALOGI("Destroying camera %s", mName.string());
- if (mHidlDevice != nullptr) {
- mHidlDevice->close();
- mHidlDevice.clear();
- cleanupCirculatingBuffers();
- }
-}
-
-status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
- ALOGI("Opening camera %s", mName.string());
-
- status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
- if (ret != OK) {
- ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
- }
- return ret;
-}
-
-status_t CameraHardwareInterface::setPreviewScalingMode(int scalingMode)
-{
- int rc = OK;
- mPreviewScalingMode = scalingMode;
- if (mPreviewWindow != nullptr) {
- rc = native_window_set_scaling_mode(mPreviewWindow.get(),
- scalingMode);
- }
- return rc;
-}
-
-status_t CameraHardwareInterface::setPreviewTransform(int transform) {
- int rc = OK;
- mPreviewTransform = transform;
- if (mPreviewWindow != nullptr) {
- rc = native_window_set_buffers_transform(mPreviewWindow.get(),
- mPreviewTransform);
- }
- return rc;
-}
-
-/**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
- */
-hardware::Return<void> CameraHardwareInterface::notifyCallback(
- NotifyCallbackMsg msgType, int32_t ext1, int32_t ext2) {
- sNotifyCb((int32_t) msgType, ext1, ext2, (void*) this);
- return hardware::Void();
-}
-
-hardware::Return<uint32_t> CameraHardwareInterface::registerMemory(
- const hardware::hidl_handle& descriptor,
- uint32_t bufferSize, uint32_t bufferCount) {
- if (descriptor->numFds != 1) {
- ALOGE("%s: camera memory descriptor has numFds %d (expect 1)",
- __FUNCTION__, descriptor->numFds);
- return 0;
- }
- if (descriptor->data[0] < 0) {
- ALOGE("%s: camera memory descriptor has FD %d (expect >= 0)",
- __FUNCTION__, descriptor->data[0]);
- return 0;
- }
-
- camera_memory_t* mem = sGetMemory(descriptor->data[0], bufferSize, bufferCount, this);
- sp<CameraHeapMemory> camMem(static_cast<CameraHeapMemory *>(mem->handle));
- int memPoolId = camMem->mHeap->getHeapID();
- if (memPoolId < 0) {
- ALOGE("%s: CameraHeapMemory has FD %d (expect >= 0)", __FUNCTION__, memPoolId);
- return 0;
- }
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- mHidlMemPoolMap.insert(std::make_pair(memPoolId, mem));
- return memPoolId;
-}
-
-hardware::Return<void> CameraHardwareInterface::unregisterMemory(uint32_t memId) {
- camera_memory_t* mem = nullptr;
- {
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- if (mHidlMemPoolMap.count(memId) == 0) {
- ALOGE("%s: memory pool ID %d not found", __FUNCTION__, memId);
- return hardware::Void();
- }
- mem = mHidlMemPoolMap.at(memId);
- mHidlMemPoolMap.erase(memId);
- }
- sPutMemory(mem);
- return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallback(
- DataCallbackMsg msgType, uint32_t data, uint32_t bufferIndex,
- const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) {
- camera_memory_t* mem = nullptr;
- {
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- if (mHidlMemPoolMap.count(data) == 0) {
- ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
- return hardware::Void();
- }
- mem = mHidlMemPoolMap.at(data);
- }
- camera_frame_metadata_t md;
- md.number_of_faces = metadata.faces.size();
- md.faces = (camera_face_t*) metadata.faces.data();
- sDataCb((int32_t) msgType, mem, bufferIndex, &md, this);
- return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallbackTimestamp(
- DataCallbackMsg msgType, uint32_t data,
- uint32_t bufferIndex, int64_t timestamp) {
- camera_memory_t* mem = nullptr;
- {
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- if (mHidlMemPoolMap.count(data) == 0) {
- ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
- return hardware::Void();
- }
- mem = mHidlMemPoolMap.at(data);
- }
- sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
- return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestamp(
- DataCallbackMsg msgType, const hidl_handle& frameData, uint32_t data,
- uint32_t bufferIndex, int64_t timestamp) {
- camera_memory_t* mem = nullptr;
- {
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- if (mHidlMemPoolMap.count(data) == 0) {
- ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
- return hardware::Void();
- }
- mem = mHidlMemPoolMap.at(data);
- }
- sp<CameraHeapMemory> heapMem(static_cast<CameraHeapMemory *>(mem->handle));
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
- heapMem->mBuffers[bufferIndex]->unsecurePointer();
- md->pHandle = const_cast<native_handle_t*>(frameData.getNativeHandle());
- sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
- return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestampBatch(
- DataCallbackMsg msgType,
- const hardware::hidl_vec<hardware::camera::device::V1_0::HandleTimestampMessage>& messages) {
- std::vector<android::HandleTimestampMessage> msgs;
- msgs.reserve(messages.size());
- {
- std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
- for (const auto& hidl_msg : messages) {
- if (mHidlMemPoolMap.count(hidl_msg.data) == 0) {
- ALOGE("%s: memory pool ID %d not found", __FUNCTION__, hidl_msg.data);
- return hardware::Void();
- }
- sp<CameraHeapMemory> mem(
- static_cast<CameraHeapMemory *>(mHidlMemPoolMap.at(hidl_msg.data)->handle));
-
- if (hidl_msg.bufferIndex >= mem->mNumBufs) {
- ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
- hidl_msg.bufferIndex, mem->mNumBufs);
- return hardware::Void();
- }
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
- mem->mBuffers[hidl_msg.bufferIndex]->unsecurePointer();
- md->pHandle = const_cast<native_handle_t*>(hidl_msg.frameData.getNativeHandle());
-
- msgs.push_back({hidl_msg.timestamp, mem->mBuffers[hidl_msg.bufferIndex]});
- }
- }
- mDataCbTimestampBatch((int32_t) msgType, msgs, mCbUser);
- return hardware::Void();
-}
-
-std::pair<bool, uint64_t> CameraHardwareInterface::getBufferId(
- ANativeWindowBuffer* anb) {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
- buffer_handle_t& buf = anb->handle;
- auto it = mBufferIdMap.find(buf);
- if (it == mBufferIdMap.end()) {
- uint64_t bufId = mNextBufferId++;
- mBufferIdMap[buf] = bufId;
- mReversedBufMap[bufId] = anb;
- return std::make_pair(true, bufId);
- } else {
- return std::make_pair(false, it->second);
- }
-}
-
-void CameraHardwareInterface::cleanupCirculatingBuffers() {
- std::lock_guard<std::mutex> lock(mBufferIdMapLock);
- mBufferIdMap.clear();
- mReversedBufMap.clear();
-}
-
-hardware::Return<void>
-CameraHardwareInterface::dequeueBuffer(dequeueBuffer_cb _hidl_cb) {
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return hardware::Void();
- }
- ANativeWindowBuffer* anb;
- int rc = native_window_dequeue_buffer_and_wait(a, &anb);
- Status s = Status::INTERNAL_ERROR;
- uint64_t bufferId = 0;
- uint32_t stride = 0;
- hidl_handle buf = nullptr;
- if (rc == OK) {
- s = Status::OK;
- auto pair = getBufferId(anb);
- buf = (pair.first) ? anb->handle : nullptr;
- bufferId = pair.second;
- stride = anb->stride;
- }
-
- _hidl_cb(s, bufferId, buf, stride);
- return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::enqueueBuffer(uint64_t bufferId) {
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return Status::INTERNAL_ERROR;
- }
- if (mReversedBufMap.count(bufferId) == 0) {
- ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
- return Status::ILLEGAL_ARGUMENT;
- }
- int rc = a->queueBuffer(a, mReversedBufMap.at(bufferId), -1);
- if (rc == 0) {
- return Status::OK;
- }
- return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::cancelBuffer(uint64_t bufferId) {
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return Status::INTERNAL_ERROR;
- }
- if (mReversedBufMap.count(bufferId) == 0) {
- ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
- return Status::ILLEGAL_ARGUMENT;
- }
- int rc = a->cancelBuffer(a, mReversedBufMap.at(bufferId), -1);
- if (rc == 0) {
- return Status::OK;
- }
- return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBufferCount(uint32_t count) {
- ANativeWindow *a = mPreviewWindow.get();
- if (a != nullptr) {
- // Workaround for b/27039775
- // Previously, setting the buffer count would reset the buffer
- // queue's flag that allows for all buffers to be dequeued on the
- // producer side, instead of just the producer's declared max count,
- // if no filled buffers have yet been queued by the producer. This
- // reset no longer happens, but some HALs depend on this behavior,
- // so it needs to be maintained for HAL backwards compatibility.
- // Simulate the prior behavior by disconnecting/reconnecting to the
- // window and setting the values again. This has the drawback of
- // actually causing memory reallocation, which may not have happened
- // in the past.
- native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
- native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
- if (mPreviewScalingMode != NOT_SET) {
- native_window_set_scaling_mode(a, mPreviewScalingMode);
- }
- if (mPreviewTransform != NOT_SET) {
- native_window_set_buffers_transform(a, mPreviewTransform);
- }
- if (mPreviewWidth != NOT_SET) {
- native_window_set_buffers_dimensions(a,
- mPreviewWidth, mPreviewHeight);
- native_window_set_buffers_format(a, mPreviewFormat);
- }
- if (mPreviewUsage != 0) {
- native_window_set_usage(a, mPreviewUsage);
- }
- if (mPreviewSwapInterval != NOT_SET) {
- a->setSwapInterval(a, mPreviewSwapInterval);
- }
- if (mPreviewCrop.left != NOT_SET) {
- native_window_set_crop(a, &(mPreviewCrop));
- }
- }
- int rc = native_window_set_buffer_count(a, count);
- if (rc == OK) {
- cleanupCirculatingBuffers();
- return Status::OK;
- }
- return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBuffersGeometry(
- uint32_t w, uint32_t h, hardware::graphics::common::V1_0::PixelFormat format) {
- Status s = Status::INTERNAL_ERROR;
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return s;
- }
- mPreviewWidth = w;
- mPreviewHeight = h;
- mPreviewFormat = (int) format;
- int rc = native_window_set_buffers_dimensions(a, w, h);
- if (rc == OK) {
- rc = native_window_set_buffers_format(a, mPreviewFormat);
- }
- if (rc == OK) {
- cleanupCirculatingBuffers();
- s = Status::OK;
- }
- return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) {
- Status s = Status::INTERNAL_ERROR;
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return s;
- }
- mPreviewCrop.left = left;
- mPreviewCrop.top = top;
- mPreviewCrop.right = right;
- mPreviewCrop.bottom = bottom;
- int rc = native_window_set_crop(a, &mPreviewCrop);
- if (rc == OK) {
- s = Status::OK;
- }
- return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setUsage(hardware::graphics::common::V1_0::BufferUsage usage) {
- Status s = Status::INTERNAL_ERROR;
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return s;
- }
- mPreviewUsage = static_cast<uint64_t> (usage);
- int rc = native_window_set_usage(a, mPreviewUsage);
- if (rc == OK) {
- cleanupCirculatingBuffers();
- s = Status::OK;
- }
- return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setSwapInterval(int32_t interval) {
- Status s = Status::INTERNAL_ERROR;
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return s;
- }
- mPreviewSwapInterval = interval;
- int rc = a->setSwapInterval(a, interval);
- if (rc == OK) {
- s = Status::OK;
- }
- return s;
-}
-
-hardware::Return<void>
-CameraHardwareInterface::getMinUndequeuedBufferCount(getMinUndequeuedBufferCount_cb _hidl_cb) {
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return hardware::Void();
- }
- int count = 0;
- int rc = a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &count);
- Status s = Status::INTERNAL_ERROR;
- if (rc == OK) {
- s = Status::OK;
- }
- _hidl_cb(s, count);
- return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setTimestamp(int64_t timestamp) {
- Status s = Status::INTERNAL_ERROR;
- ANativeWindow *a = mPreviewWindow.get();
- if (a == nullptr) {
- ALOGE("%s: preview window is null", __FUNCTION__);
- return s;
- }
- int rc = native_window_set_buffers_timestamp(a, timestamp);
- if (rc == OK) {
- s = Status::OK;
- }
- return s;
-}
-
-status_t CameraHardwareInterface::setPreviewWindow(const sp<ANativeWindow>& buf)
-{
- ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mPreviewWindow = buf;
- if (buf != nullptr) {
- if (mPreviewScalingMode != NOT_SET) {
- setPreviewScalingMode(mPreviewScalingMode);
- }
- if (mPreviewTransform != NOT_SET) {
- setPreviewTransform(mPreviewTransform);
- }
- }
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
- }
- return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- data_callback_timestamp_batch data_cb_timestamp_batch,
- void* user)
-{
- mNotifyCb = notify_cb;
- mDataCb = data_cb;
- mDataCbTimestamp = data_cb_timestamp;
- mDataCbTimestampBatch = data_cb_timestamp_batch;
- mCbUser = user;
-
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
-}
-
-void CameraHardwareInterface::enableMsgType(int32_t msgType)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mHidlDevice->enableMsgType(msgType);
- }
-}
-
-void CameraHardwareInterface::disableMsgType(int32_t msgType)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mHidlDevice->disableMsgType(msgType);
- }
-}
-
-int CameraHardwareInterface::msgTypeEnabled(int32_t msgType)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return mHidlDevice->msgTypeEnabled(msgType);
- }
- return false;
-}
-
-status_t CameraHardwareInterface::startPreview()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->startPreview());
- }
- return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::stopPreview()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mHidlDevice->stopPreview();
- }
-}
-
-int CameraHardwareInterface::previewEnabled()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return mHidlDevice->previewEnabled();
- }
- return false;
-}
-
-status_t CameraHardwareInterface::storeMetaDataInBuffers(int enable)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->storeMetaDataInBuffers(enable));
- }
- return enable ? INVALID_OPERATION: OK;
-}
-
-status_t CameraHardwareInterface::startRecording()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->startRecording());
- }
- return INVALID_OPERATION;
-}
-
-/**
- * Stop a previously started recording.
- */
-void CameraHardwareInterface::stopRecording()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mHidlDevice->stopRecording();
- }
-}
-
-/**
- * Returns true if recording is enabled.
- */
-int CameraHardwareInterface::recordingEnabled()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return mHidlDevice->recordingEnabled();
- }
- return false;
-}
-
-void CameraHardwareInterface::releaseRecordingFrame(const sp<IMemory>& mem)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- int heapId = heap->getHeapID();
- int bufferIndex = offset / size;
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- if (size == sizeof(VideoNativeHandleMetadata)) {
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
- // Caching the handle here because md->pHandle will be subject to HAL's edit
- native_handle_t* nh = md->pHandle;
- hidl_handle frame = nh;
- mHidlDevice->releaseRecordingFrameHandle(heapId, bufferIndex, frame);
- native_handle_close(nh);
- native_handle_delete(nh);
- } else {
- mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
- }
- }
-}
-
-void CameraHardwareInterface::releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- size_t n = frames.size();
- std::vector<VideoFrameMessage> msgs;
- msgs.reserve(n);
- for (auto& mem : frames) {
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- if (size == sizeof(VideoNativeHandleMetadata)) {
- uint32_t heapId = heap->getHeapID();
- uint32_t bufferIndex = offset / size;
- // TODO: Using unsecurePointer() has some associated security pitfalls
- // (see declaration for details).
- // Either document why it is safe in this case or address the
- // issue (e.g. by copying).
- VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
- // Caching the handle here because md->pHandle will be subject to HAL's edit
- native_handle_t* nh = md->pHandle;
- VideoFrameMessage msg;
- msgs.push_back({nh, heapId, bufferIndex});
- } else {
- ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
- return;
- }
- }
- }
-
- mHidlDevice->releaseRecordingFrameHandleBatch(msgs);
-
- for (auto& msg : msgs) {
- native_handle_t* nh = const_cast<native_handle_t*>(msg.frameData.getNativeHandle());
- native_handle_close(nh);
- native_handle_delete(nh);
- }
-}
-
-status_t CameraHardwareInterface::autoFocus()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->autoFocus());
- }
- return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelAutoFocus()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->cancelAutoFocus());
- }
- return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::takePicture()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->takePicture());
- }
- return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelPicture()
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->cancelPicture());
- }
- return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::setParameters(const CameraParameters ¶ms)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->setParameters(params.flatten().string()));
- }
- return INVALID_OPERATION;
-}
-
-CameraParameters CameraHardwareInterface::getParameters() const
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- CameraParameters parms;
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- hardware::hidl_string outParam;
- mHidlDevice->getParameters(
- [&outParam](const auto& outStr) {
- outParam = outStr;
- });
- String8 tmp(outParam.c_str());
- parms.unflatten(tmp);
- }
- return parms;
-}
-
-status_t CameraHardwareInterface::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- return CameraProviderManager::mapToStatusT(
- mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
- }
- return INVALID_OPERATION;
-}
-
-/**
- * Release the hardware resources owned by this object. Note that this is
- * *not* done in the destructor.
- */
-void CameraHardwareInterface::release() {
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- mHidlDevice->close();
- mHidlDevice.clear();
- }
-}
-
-/**
- * Dump state of the camera hardware
- */
-status_t CameraHardwareInterface::dump(int fd, const Vector<String16>& /*args*/) const
-{
- ALOGV("%s(%s)", __FUNCTION__, mName.string());
- if (CC_LIKELY(mHidlDevice != nullptr)) {
- native_handle_t* handle = native_handle_create(1,0);
- handle->data[0] = fd;
- Status s = mHidlDevice->dumpState(handle);
- native_handle_delete(handle);
- return CameraProviderManager::mapToStatusT(s);
- }
- return OK; // It's fine if the HAL doesn't implement dump()
-}
-
-void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
- int32_t ext2, void *user)
-{
- ALOGV("%s", __FUNCTION__);
- CameraHardwareInterface *object =
- static_cast<CameraHardwareInterface *>(user);
- object->mNotifyCb(msg_type, ext1, ext2, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCb(int32_t msg_type,
- const camera_memory_t *data, unsigned int index,
- camera_frame_metadata_t *metadata,
- void *user)
-{
- ALOGV("%s", __FUNCTION__);
- CameraHardwareInterface *object =
- static_cast<CameraHardwareInterface *>(user);
- sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
- if (index >= mem->mNumBufs) {
- ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
- index, mem->mNumBufs);
- return;
- }
- object->mDataCb(msg_type, mem->mBuffers[index], metadata, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
- const camera_memory_t *data, unsigned index,
- void *user)
-{
- ALOGV("%s", __FUNCTION__);
- CameraHardwareInterface *object =
- static_cast<CameraHardwareInterface *>(user);
- // Start refcounting the heap object from here on. When the clients
- // drop all references, it will be destroyed (as well as the enclosed
- // MemoryHeapBase.
- sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
- if (index >= mem->mNumBufs) {
- ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
- index, mem->mNumBufs);
- return;
- }
- object->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], object->mCbUser);
-}
-
-camera_memory_t* CameraHardwareInterface::sGetMemory(
- int fd, size_t buf_size, uint_t num_bufs,
- void *user __attribute__((unused)))
-{
- CameraHeapMemory *mem;
- if (fd < 0) {
- mem = new CameraHeapMemory(buf_size, num_bufs);
- } else {
- mem = new CameraHeapMemory(fd, buf_size, num_bufs);
- }
- mem->incStrong(mem);
- return &mem->handle;
-}
-
-void CameraHardwareInterface::sPutMemory(camera_memory_t *data)
-{
- if (!data) {
- return;
- }
-
- CameraHeapMemory *mem = static_cast<CameraHeapMemory *>(data->handle);
- mem->decStrong(mem);
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
deleted file mode 100644
index e519b04..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-
-#include <unordered_map>
-#include <binder/IMemory.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-#include <utils/RefBase.h>
-#include <ui/GraphicBuffer.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-#include <system/window.h>
-#include <hardware/camera.h>
-
-#include <common/CameraProviderManager.h>
-
-namespace android {
-
-typedef void (*notify_callback)(int32_t msgType,
- int32_t ext1,
- int32_t ext2,
- void* user);
-
-typedef void (*data_callback)(int32_t msgType,
- const sp<IMemory> &dataPtr,
- camera_frame_metadata_t *metadata,
- void* user);
-
-typedef void (*data_callback_timestamp)(nsecs_t timestamp,
- int32_t msgType,
- const sp<IMemory> &dataPtr,
- void *user);
-
-struct HandleTimestampMessage {
- nsecs_t timestamp;
- const sp<IMemory> dataPtr;
-};
-
-typedef void (*data_callback_timestamp_batch)(
- int32_t msgType,
- const std::vector<HandleTimestampMessage>&, void* user);
-
-/**
- * CameraHardwareInterface.h defines the interface to the
- * camera hardware abstraction layer, used for setting and getting
- * parameters, live previewing, and taking pictures. It is used for
- * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only.
- *
- * It is a referenced counted interface with RefBase as its base class.
- * CameraService calls openCameraHardware() to retrieve a strong pointer to the
- * instance of this interface and may be called multiple times. The
- * following steps describe a typical sequence:
- *
- * -# After CameraService calls openCameraHardware(), getParameters() and
- * setParameters() are used to initialize the camera instance.
- * -# startPreview() is called.
- *
- * Prior to taking a picture, CameraService often calls autofocus(). When auto
- * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
- * which informs the application whether focusing was successful. The camera instance
- * only sends this message once and it is up to the application to call autoFocus()
- * again if refocusing is desired.
- *
- * CameraService calls takePicture() to request the camera instance take a
- * picture. At this point, if a shutter, postview, raw, and/or compressed
- * callback is desired, the corresponding message must be enabled. Any memory
- * provided in a data callback must be copied if it's needed after returning.
- */
-
-class CameraHardwareInterface :
- public virtual RefBase,
- public virtual hardware::camera::device::V1_0::ICameraDeviceCallback,
- public virtual hardware::camera::device::V1_0::ICameraDevicePreviewCallback {
-
-public:
- explicit CameraHardwareInterface(const char *name):
- mHidlDevice(nullptr),
- mName(name),
- mPreviewScalingMode(NOT_SET),
- mPreviewTransform(NOT_SET),
- mPreviewWidth(NOT_SET),
- mPreviewHeight(NOT_SET),
- mPreviewFormat(NOT_SET),
- mPreviewUsage(0),
- mPreviewSwapInterval(NOT_SET),
- mPreviewCrop{NOT_SET,NOT_SET,NOT_SET,NOT_SET}
- {
- }
-
- ~CameraHardwareInterface();
-
- status_t initialize(sp<CameraProviderManager> manager);
-
- /** Set the ANativeWindow to which preview frames are sent */
- status_t setPreviewWindow(const sp<ANativeWindow>& buf);
-
- status_t setPreviewScalingMode(int scalingMode);
-
- status_t setPreviewTransform(int transform);
-
- /** Set the notification and data callbacks */
- void setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- data_callback_timestamp_batch data_cb_timestamp_batch,
- void* user);
-
- /**
- * The following three functions all take a msgtype,
- * which is a bitmask of the messages defined in
- * include/ui/Camera.h
- */
-
- /**
- * Enable a message, or set of messages.
- */
- void enableMsgType(int32_t msgType);
-
- /**
- * Disable a message, or a set of messages.
- *
- * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal
- * should not rely on its client to call releaseRecordingFrame() to release
- * video recording frames sent out by the cameral hal before and after the
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
- * modify/access any video recording frame after calling
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
- */
- void disableMsgType(int32_t msgType);
-
- /**
- * Query whether a message, or a set of messages, is enabled.
- * Note that this is operates as an AND, if any of the messages
- * queried are off, this will return false.
- */
- int msgTypeEnabled(int32_t msgType);
-
- /**
- * Start preview mode.
- */
- status_t startPreview();
-
- /**
- * Stop a previously started preview.
- */
- void stopPreview();
-
- /**
- * Returns true if preview is enabled.
- */
- int previewEnabled();
-
- /**
- * Request the camera hal to store meta data or real YUV data in
- * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a
- * recording session. If it is not called, the default camera
- * hal behavior is to store real YUV data in the video buffers.
- *
- * This method should be called before startRecording() in order
- * to be effective.
- *
- * If meta data is stored in the video buffers, it is up to the
- * receiver of the video buffers to interpret the contents and
- * to find the actual frame data with the help of the meta data
- * in the buffer. How this is done is outside of the scope of
- * this method.
- *
- * Some camera hal may not support storing meta data in the video
- * buffers, but all camera hal should support storing real YUV data
- * in the video buffers. If the camera hal does not support storing
- * the meta data in the video buffers when it is requested to do
- * do, INVALID_OPERATION must be returned. It is very useful for
- * the camera hal to pass meta data rather than the actual frame
- * data directly to the video encoder, since the amount of the
- * uncompressed frame data can be very large if video size is large.
- *
- * @param enable if true to instruct the camera hal to store
- * meta data in the video buffers; false to instruct
- * the camera hal to store real YUV data in the video
- * buffers.
- *
- * @return OK on success.
- */
-
- status_t storeMetaDataInBuffers(int enable);
-
- /**
- * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
- * message is sent with the corresponding frame. Every record frame must be released
- * by a cameral hal client via releaseRecordingFrame() before the client calls
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
- * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
- * to manage the life-cycle of the video recording frames, and the client must
- * not modify/access any video recording frames.
- */
- status_t startRecording();
-
- /**
- * Stop a previously started recording.
- */
- void stopRecording();
-
- /**
- * Returns true if recording is enabled.
- */
- int recordingEnabled();
-
- /**
- * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
- *
- * It is camera hal client's responsibility to release video recording
- * frames sent out by the camera hal before the camera hal receives
- * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
- * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
- * responsibility of managing the life-cycle of the video recording
- * frames.
- */
- void releaseRecordingFrame(const sp<IMemory>& mem);
-
- /**
- * Release a batch of recording frames previously returned by
- * CAMERA_MSG_VIDEO_FRAME. This method only supports frames that are
- * stored as VideoNativeHandleMetadata.
- *
- * It is camera hal client's responsibility to release video recording
- * frames sent out by the camera hal before the camera hal receives
- * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
- * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
- * responsibility of managing the life-cycle of the video recording
- * frames.
- */
- void releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames);
-
- /**
- * Start auto focus, the notification callback routine is called
- * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
- * will be called again if another auto focus is needed.
- */
- status_t autoFocus();
-
- /**
- * Cancels auto-focus function. If the auto-focus is still in progress,
- * this function will cancel it. Whether the auto-focus is in progress
- * or not, this function will return the focus position to the default.
- * If the camera does not support auto-focus, this is a no-op.
- */
- status_t cancelAutoFocus();
-
- /**
- * Take a picture.
- */
- status_t takePicture();
-
- /**
- * Cancel a picture that was started with takePicture. Calling this
- * method when no picture is being taken is a no-op.
- */
- status_t cancelPicture();
-
- /**
- * Set the camera parameters. This returns BAD_VALUE if any parameter is
- * invalid or not supported. */
- status_t setParameters(const CameraParameters ¶ms);
-
- /** Return the camera parameters. */
- CameraParameters getParameters() const;
-
- /**
- * Send command to camera driver.
- */
- status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-
- /**
- * Release the hardware resources owned by this object. Note that this is
- * *not* done in the destructor.
- */
- void release();
-
- /**
- * Dump state of the camera hardware
- */
- status_t dump(int fd, const Vector<String16>& /*args*/) const;
-
-private:
- sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
- String8 mName;
-
- static void sNotifyCb(int32_t msg_type, int32_t ext1,
- int32_t ext2, void *user);
-
- static void sDataCb(int32_t msg_type,
- const camera_memory_t *data, unsigned int index,
- camera_frame_metadata_t *metadata,
- void *user);
-
- static void sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
- const camera_memory_t *data, unsigned index,
- void *user);
-
- // This is a utility class that combines a MemoryHeapBase and a MemoryBase
- // in one. Since we tend to use them in a one-to-one relationship, this is
- // handy.
- class CameraHeapMemory : public RefBase {
- public:
- CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) :
- mBufSize(buf_size),
- mNumBufs(num_buffers)
- {
- mHeap = new MemoryHeapBase(fd, buf_size * num_buffers);
- commonInitialization();
- }
-
- explicit CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) :
- mBufSize(buf_size),
- mNumBufs(num_buffers)
- {
- mHeap = new MemoryHeapBase(buf_size * num_buffers);
- commonInitialization();
- }
-
- void commonInitialization()
- {
- handle.data = mHeap->base();
- handle.size = mBufSize * mNumBufs;
- handle.handle = this;
-
- mBuffers = new sp<MemoryBase>[mNumBufs];
- for (uint_t i = 0; i < mNumBufs; i++)
- mBuffers[i] = new MemoryBase(mHeap,
- i * mBufSize,
- mBufSize);
-
- handle.release = sPutMemory;
- }
-
- virtual ~CameraHeapMemory()
- {
- delete [] mBuffers;
- }
-
- size_t mBufSize;
- uint_t mNumBufs;
- sp<MemoryHeapBase> mHeap;
- sp<MemoryBase> *mBuffers;
-
- camera_memory_t handle;
- };
-
- static camera_memory_t* sGetMemory(int fd, size_t buf_size, uint_t num_bufs,
- void *user __attribute__((unused)));
-
- static void sPutMemory(camera_memory_t *data);
-
- std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
- void cleanupCirculatingBuffers();
-
- /**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
- */
- hardware::Return<void> notifyCallback(
- hardware::camera::device::V1_0::NotifyCallbackMsg msgType,
- int32_t ext1, int32_t ext2) override;
- hardware::Return<uint32_t> registerMemory(
- const hardware::hidl_handle& descriptor,
- uint32_t bufferSize, uint32_t bufferCount) override;
- hardware::Return<void> unregisterMemory(uint32_t memId) override;
- hardware::Return<void> dataCallback(
- hardware::camera::device::V1_0::DataCallbackMsg msgType,
- uint32_t data, uint32_t bufferIndex,
- const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) override;
- hardware::Return<void> dataCallbackTimestamp(
- hardware::camera::device::V1_0::DataCallbackMsg msgType,
- uint32_t data, uint32_t bufferIndex, int64_t timestamp) override;
- hardware::Return<void> handleCallbackTimestamp(
- hardware::camera::device::V1_0::DataCallbackMsg msgType,
- const hardware::hidl_handle& frameData, uint32_t data,
- uint32_t bufferIndex, int64_t timestamp) override;
- hardware::Return<void> handleCallbackTimestampBatch(
- hardware::camera::device::V1_0::DataCallbackMsg msgType,
- const hardware::hidl_vec<
- hardware::camera::device::V1_0::HandleTimestampMessage>&) override;
-
- /**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback
- */
- hardware::Return<void> dequeueBuffer(dequeueBuffer_cb _hidl_cb) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- enqueueBuffer(uint64_t bufferId) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- cancelBuffer(uint64_t bufferId) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setBufferCount(uint32_t count) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setBuffersGeometry(uint32_t w, uint32_t h,
- hardware::graphics::common::V1_0::PixelFormat format) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setUsage(hardware::graphics::common::V1_0::BufferUsage usage) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setSwapInterval(int32_t interval) override;
- hardware::Return<void> getMinUndequeuedBufferCount(
- getMinUndequeuedBufferCount_cb _hidl_cb) override;
- hardware::Return<hardware::camera::common::V1_0::Status>
- setTimestamp(int64_t timestamp) override;
-
- sp<ANativeWindow> mPreviewWindow;
-
- notify_callback mNotifyCb;
- data_callback mDataCb;
- data_callback_timestamp mDataCbTimestamp;
- data_callback_timestamp_batch mDataCbTimestampBatch;
- void *mCbUser;
-
- // Cached values for preview stream parameters
- static const int NOT_SET = -1;
- int mPreviewScalingMode;
- int mPreviewTransform;
- int mPreviewWidth;
- int mPreviewHeight;
- int mPreviewFormat;
- uint64_t mPreviewUsage;
- int mPreviewSwapInterval;
- android_native_rect_t mPreviewCrop;
-
- struct BufferHasher {
- size_t operator()(const buffer_handle_t& buf) const {
- if (buf == nullptr)
- return 0;
-
- size_t result = 1;
- result = 31 * result + buf->numFds;
- result = 31 * result + buf->numInts;
- int length = buf->numFds + buf->numInts;
- for (int i = 0; i < length; i++) {
- result = 31 * result + buf->data[i];
- }
- return result;
- }
- };
-
- struct BufferComparator {
- bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
- if (buf1->numFds == buf2->numFds && buf1->numInts == buf2->numInts) {
- int length = buf1->numFds + buf1->numInts;
- for (int i = 0; i < length; i++) {
- if (buf1->data[i] != buf2->data[i]) {
- return false;
- }
- }
- return true;
- }
- return false;
- }
- };
-
- std::mutex mBufferIdMapLock; // protecting mBufferIdMap and mNextBufferId
- typedef std::unordered_map<const buffer_handle_t, uint64_t,
- BufferHasher, BufferComparator> BufferIdMap;
- // stream ID -> per stream buffer ID map
- BufferIdMap mBufferIdMap;
- std::unordered_map<uint64_t, ANativeWindowBuffer*> mReversedBufMap;
- uint64_t mNextBufferId = 1;
- static const uint64_t BUFFER_ID_NO_BUFFER = 0;
-
- std::mutex mHidlMemPoolMapLock; // protecting mHidlMemPoolMap
- std::unordered_map<int, camera_memory_t*> mHidlMemPoolMap;
-};
-
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b00a2d9..8754ad3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -56,10 +56,12 @@
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
-#include "device3/Camera3DummyStream.h"
+#include "device3/Camera3FakeStream.h"
#include "device3/Camera3SharedOutputStream.h"
#include "CameraService.h"
#include "utils/CameraThreadState.h"
+#include "utils/TraceHFR.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <algorithm>
#include <tuple>
@@ -269,7 +271,7 @@
}
/** Register in-flight map to the status tracker */
- mInFlightStatusId = mStatusTracker->addComponent();
+ mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
if (mUseHalBufManager) {
res = mRequestBufferSM.initialize(mStatusTracker);
@@ -308,7 +310,7 @@
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNextStreamId = 0;
- mDummyStreamId = NO_STREAM;
+ mFakeStreamId = NO_STREAM;
mNeedConfig = true;
mPauseStateNotify = false;
@@ -866,7 +868,7 @@
status_t Camera3Device::convertMetadataListToRequestListLocked(
const List<const PhysicalCameraSettingsList> &metadataList,
const std::list<const SurfaceMap> &surfaceMaps,
- bool repeating,
+ bool repeating, nsecs_t requestTimeNs,
RequestList *requestList) {
if (requestList == NULL) {
CLOGE("requestList cannot be NULL.");
@@ -885,6 +887,7 @@
}
newRequest->mRepeating = repeating;
+ newRequest->mRequestTimeNs = requestTimeNs;
// Setup burst Id and request Id
newRequest->mResultExtras.burstId = burstId++;
@@ -952,6 +955,8 @@
/*out*/
int64_t *lastFrameNumber) {
ATRACE_CALL();
+ nsecs_t requestTimeNs = systemTime();
+
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -964,7 +969,7 @@
RequestList requestList;
res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
- repeating, /*out*/&requestList);
+ repeating, requestTimeNs, /*out*/&requestList);
if (res != OK) {
// error logged by previous call
return res;
@@ -996,7 +1001,7 @@
const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
requestStreamBuffers_cb _hidl_cb) {
RequestBufferStates states {
- mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
*this, *mInterface, *this};
camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
@@ -1005,7 +1010,7 @@
hardware::Return<void> Camera3Device::returnStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
ReturnBufferStates states {
- mId, mUseHalBufManager, mOutputStreams, *mInterface};
+ mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
@@ -1043,8 +1048,9 @@
}
CaptureOutputStates states {
mId,
- mInFlightLock, mInFlightMap,
- mOutputLock, mResultQueue, mResultSignal,
+ mInFlightLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -1052,7 +1058,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ *mInterface
};
for (const auto& result : results) {
@@ -1100,8 +1107,9 @@
CaptureOutputStates states {
mId,
- mInFlightLock, mInFlightMap,
- mOutputLock, mResultQueue, mResultSignal,
+ mInFlightLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -1109,7 +1117,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ *mInterface
};
for (const auto& result : results) {
@@ -1139,8 +1148,9 @@
CaptureOutputStates states {
mId,
- mInFlightLock, mInFlightMap,
- mOutputLock, mResultQueue, mResultSignal,
+ mInFlightLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -1148,7 +1158,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ *mInterface
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -1445,12 +1456,16 @@
newStream->setBufferManager(mBufferManager);
+ newStream->setImageDumpMask(mImageDumpMask);
+
res = mOutputStreams.add(mNextStreamId, newStream);
if (res < 0) {
SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
return res;
}
+ mSessionStatsBuilder.addStream(mNextStreamId);
+
*id = mNextStreamId++;
mNeedConfig = true;
@@ -1574,6 +1589,7 @@
CLOGE("Stream %d does not exist", id);
return BAD_VALUE;
}
+ mSessionStatsBuilder.removeStream(id);
}
// Delete output stream or the output part of a bi-directional stream.
@@ -1764,6 +1780,7 @@
maxExpectedDuration);
status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
if (res != OK) {
+ mStatusTracker->dumpActiveComponents();
SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
res);
}
@@ -1777,13 +1794,6 @@
mStatusChanged.broadcast();
}
-void Camera3Device::pauseStateNotify(bool enable) {
- Mutex::Autolock il(mInterfaceLock);
- Mutex::Autolock l(mLock);
-
- mPauseStateNotify = enable;
-}
-
// Pause to reconfigure
status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
if (mRequestThread.get() != nullptr) {
@@ -1836,10 +1846,12 @@
mStatusWaiters++;
+ bool signalPipelineDrain = false;
if (!active && mUseHalBufManager) {
auto streamIds = mOutputStreams.getStreamIds();
if (mStatus == STATUS_ACTIVE) {
mRequestThread->signalPipelineDrain(streamIds);
+ signalPipelineDrain = true;
}
mRequestBufferSM.onWaitUntilIdle();
}
@@ -1869,6 +1881,10 @@
}
} while (!stateSeen);
+ if (signalPipelineDrain) {
+ mRequestThread->resetPipelineDrain();
+ }
+
mStatusWaiters--;
return res;
@@ -2006,6 +2022,9 @@
}
mRequestThread->clear(/*out*/frameNumber);
+
+ // Stop session and stream counter
+ mSessionStatsBuilder.stopCounter();
}
return mRequestThread->flush();
@@ -2083,6 +2102,9 @@
void Camera3Device::notifyStatus(bool idle) {
ATRACE_CALL();
+ std::vector<int> streamIds;
+ std::vector<hardware::CameraStreamStats> streamStats;
+
{
// Need mLock to safely update state and synchronize to current
// state of methods in flight.
@@ -2100,6 +2122,24 @@
// Skip notifying listener if we're doing some user-transparent
// state changes
if (mPauseStateNotify) return;
+
+ // Populate stream statistics in case of Idle
+ if (idle) {
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ auto stream = mOutputStreams[i];
+ if (stream.get() == nullptr) continue;
+ streamIds.push_back(stream->getId());
+ Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
+ int64_t usage = 0LL;
+ if (camera3Stream != nullptr) {
+ usage = camera3Stream->getUsage();
+ }
+ streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
+ stream->getFormat(), stream->getDataSpace(), usage,
+ stream->getMaxHalBuffers(),
+ stream->getMaxTotalBuffers() - stream->getMaxHalBuffers());
+ }
+ }
}
sp<NotificationListener> listener;
@@ -2108,7 +2148,22 @@
listener = mListener.promote();
}
if (idle && listener != NULL) {
- listener->notifyIdle();
+ // Get session stats from the builder, and notify the listener.
+ int64_t requestCount, resultErrorCount;
+ bool deviceError;
+ std::map<int, StreamStats> streamStatsMap;
+ mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
+ &deviceError, &streamStatsMap);
+ for (size_t i = 0; i < streamIds.size(); i++) {
+ int streamId = streamIds[i];
+ auto stats = streamStatsMap.find(streamId);
+ if (stats != streamStatsMap.end()) {
+ streamStats[i].mRequestCount = stats->second.mRequestedFrameCount;
+ streamStats[i].mErrorCount = stats->second.mDroppedFrameCount;
+ streamStats[i].mStartLatencyMs = stats->second.mStartLatencyMs;
+ }
+ }
+ listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
}
}
@@ -2218,6 +2273,12 @@
ALOGE("%s: Stream %d is not found.", __FUNCTION__, streamId);
return BAD_VALUE;
}
+
+ if (dropping) {
+ mSessionStatsBuilder.stopCounter(streamId);
+ } else {
+ mSessionStatsBuilder.startCounter(streamId);
+ }
return stream->dropBuffers(dropping);
}
@@ -2309,6 +2370,15 @@
newRequest->mRotateAndCropAuto = false;
}
+ auto zoomRatioEntry =
+ newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
+ if (zoomRatioEntry.count > 0 &&
+ zoomRatioEntry.data.f[0] == 1.0f) {
+ newRequest->mZoomRatioIs1x = true;
+ } else {
+ newRequest->mZoomRatioIs1x = false;
+ }
+
return newRequest;
}
@@ -2359,10 +2429,12 @@
return false;
}
-bool Camera3Device::reconfigureCamera(const CameraMetadata& sessionParams) {
+bool Camera3Device::reconfigureCamera(const CameraMetadata& sessionParams, int clientStatusId) {
ATRACE_CALL();
bool ret = false;
+ nsecs_t startTime = systemTime();
+
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
@@ -2373,7 +2445,16 @@
return true;
}
- auto rc = internalPauseAndWaitLocked(maxExpectedDuration);
+ status_t rc = NO_ERROR;
+ bool markClientActive = false;
+ if (mStatus == STATUS_ACTIVE) {
+ markClientActive = true;
+ mPauseStateNotify = true;
+ mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
+
+ rc = internalPauseAndWaitLocked(maxExpectedDuration);
+ }
+
if (rc == NO_ERROR) {
mNeedConfig = true;
rc = configureStreamsLocked(mOperatingMode, sessionParams, /*notifyRequestThread*/ false);
@@ -2401,6 +2482,13 @@
ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
}
+ CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
+ ns2ms(systemTime() - startTime));
+
+ if (markClientActive) {
+ mStatusTracker->markComponentActive(clientStatusId);
+ }
+
return ret;
}
@@ -2456,12 +2544,12 @@
}
// Workaround for device HALv3.2 or older spec bug - zero streams requires
- // adding a dummy stream instead.
+ // adding a fake stream instead.
// TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround.
if (mOutputStreams.size() == 0) {
- addDummyStreamLocked();
+ addFakeStreamLocked();
} else {
- tryRemoveDummyStreamLocked();
+ tryRemoveFakeStreamLocked();
}
// Start configuring the streams
@@ -2623,7 +2711,7 @@
mNeedConfig = false;
- internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
+ internalUpdateStatusLocked((mFakeStreamId == NO_STREAM) ?
STATUS_CONFIGURED : STATUS_UNCONFIGURED);
ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
@@ -2637,69 +2725,69 @@
return rc;
}
- if (mDummyStreamId == NO_STREAM) {
+ if (mFakeStreamId == NO_STREAM) {
mRequestBufferSM.onStreamsConfigured();
}
return OK;
}
-status_t Camera3Device::addDummyStreamLocked() {
+status_t Camera3Device::addFakeStreamLocked() {
ATRACE_CALL();
status_t res;
- if (mDummyStreamId != NO_STREAM) {
- // Should never be adding a second dummy stream when one is already
+ if (mFakeStreamId != NO_STREAM) {
+ // Should never be adding a second fake stream when one is already
// active
- SET_ERR_L("%s: Camera %s: A dummy stream already exists!",
+ SET_ERR_L("%s: Camera %s: A fake stream already exists!",
__FUNCTION__, mId.string());
return INVALID_OPERATION;
}
- ALOGV("%s: Camera %s: Adding a dummy stream", __FUNCTION__, mId.string());
+ ALOGV("%s: Camera %s: Adding a fake stream", __FUNCTION__, mId.string());
- sp<Camera3OutputStreamInterface> dummyStream =
- new Camera3DummyStream(mNextStreamId);
+ sp<Camera3OutputStreamInterface> fakeStream =
+ new Camera3FakeStream(mNextStreamId);
- res = mOutputStreams.add(mNextStreamId, dummyStream);
+ res = mOutputStreams.add(mNextStreamId, fakeStream);
if (res < 0) {
- SET_ERR_L("Can't add dummy stream to set: %s (%d)", strerror(-res), res);
+ SET_ERR_L("Can't add fake stream to set: %s (%d)", strerror(-res), res);
return res;
}
- mDummyStreamId = mNextStreamId;
+ mFakeStreamId = mNextStreamId;
mNextStreamId++;
return OK;
}
-status_t Camera3Device::tryRemoveDummyStreamLocked() {
+status_t Camera3Device::tryRemoveFakeStreamLocked() {
ATRACE_CALL();
status_t res;
- if (mDummyStreamId == NO_STREAM) return OK;
+ if (mFakeStreamId == NO_STREAM) return OK;
if (mOutputStreams.size() == 1) return OK;
- ALOGV("%s: Camera %s: Removing the dummy stream", __FUNCTION__, mId.string());
+ ALOGV("%s: Camera %s: Removing the fake stream", __FUNCTION__, mId.string());
- // Ok, have a dummy stream and there's at least one other output stream,
- // so remove the dummy
+ // Ok, have a fake stream and there's at least one other output stream,
+ // so remove the fake
- sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mDummyStreamId);
+ sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mFakeStreamId);
if (deletedStream == nullptr) {
- SET_ERR_L("Dummy stream %d does not appear to exist", mDummyStreamId);
+ SET_ERR_L("Fake stream %d does not appear to exist", mFakeStreamId);
return INVALID_OPERATION;
}
- mOutputStreams.remove(mDummyStreamId);
+ mOutputStreams.remove(mFakeStreamId);
// Free up the stream endpoint so that it can be used by some other stream
res = deletedStream->disconnect();
if (res != OK) {
- SET_ERR_L("Can't disconnect deleted dummy stream %d", mDummyStreamId);
+ SET_ERR_L("Can't disconnect deleted fake stream %d", mFakeStreamId);
// fall through since we want to still list the stream as deleted.
}
mDeletedStreams.add(deletedStream);
- mDummyStreamId = NO_STREAM;
+ mFakeStreamId = NO_STREAM;
return res;
}
@@ -2750,6 +2838,7 @@
if (listener != NULL) {
listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
CaptureResultExtras());
+ mSessionStatsBuilder.onDeviceError();
}
// Save stack trace. View by dumping it later.
@@ -2766,14 +2855,14 @@
bool hasAppCallback, nsecs_t maxExpectedDuration,
std::set<String8>& physicalCameraIds, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& cameraIdsWithZoom,
- const SurfaceMap& outputSurfaces) {
+ const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
- rotateAndCropAuto, cameraIdsWithZoom, outputSurfaces));
+ rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2804,7 +2893,7 @@
}
void Camera3Device::checkInflightMapLengthLocked() {
- // Sanity check - if we have too many in-flight frames with long total inflight duration,
+ // Validation check - if we have too many in-flight frames with long total inflight duration,
// something has likely gone wrong. This might still be legit only if application send in
// a long burst of long exposure requests.
if (mExpectedInflightDuration > kMinWarnInflightDuration) {
@@ -2825,7 +2914,7 @@
}
void Camera3Device::removeInFlightMapEntryLocked(int idx) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
mInFlightMap.removeItemsAt(idx, 1);
@@ -2843,7 +2932,7 @@
FlushInflightReqStates states {
mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
- listener, *this, *mInterface, *this};
+ listener, *this, *mInterface, *this, mSessionStatsBuilder};
camera3::flushInflightRequests(states);
}
@@ -3752,6 +3841,7 @@
mInterface(interface),
mListener(nullptr),
mId(getId(parent)),
+ mFirstRepeating(false),
mReconfigured(false),
mDoPause(false),
mPaused(true),
@@ -3769,7 +3859,7 @@
mSessionParamKeys(sessionParamKeys),
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager) {
- mStatusId = statusTracker->addComponent();
+ mStatusId = statusTracker->addComponent("RequestThread");
}
Camera3Device::RequestThread::~RequestThread() {}
@@ -3882,6 +3972,7 @@
*lastFrameNumber = mRepeatingLastFrameNumber;
}
mRepeatingRequests.clear();
+ mFirstRepeating = true;
mRepeatingRequests.insert(mRepeatingRequests.begin(),
requests.begin(), requests.end());
@@ -4277,22 +4368,11 @@
}
if (res == OK) {
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- sp<Camera3Device> parent = mParent.promote();
- if (parent != nullptr) {
- parent->pauseStateNotify(true);
- }
-
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
-
- if (parent != nullptr) {
- mReconfigured |= parent->reconfigureCamera(mLatestSessionParams);
- }
-
- statusTracker->markComponentActive(mStatusId);
- setPaused(false);
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent != nullptr) {
+ mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
}
+ setPaused(false);
if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
@@ -4406,11 +4486,11 @@
std::set<std::string> cameraIdsWithZoom;
/**
* HAL workaround:
- * Insert a dummy trigger ID if a trigger is set but no trigger ID is
+ * Insert a fake trigger ID if a trigger is set but no trigger ID is
*/
- res = addDummyTriggerIds(captureRequest);
+ res = addFakeTriggerIds(captureRequest);
if (res != OK) {
- SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+ SET_ERR("RequestThread: Unable to insert fake trigger IDs "
"(capture request %d, HAL device: %s (%d)",
halRequest->frame_number, strerror(-res), res);
return INVALID_OPERATION;
@@ -4427,13 +4507,17 @@
parent->mDistortionMappers.end()) {
continue;
}
- res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
- &(it->metadata));
- if (res != OK) {
- SET_ERR("RequestThread: Unable to correct capture requests "
- "for lens distortion for request %d: %s (%d)",
- halRequest->frame_number, strerror(-res), res);
- return INVALID_OPERATION;
+
+ if (!captureRequest->mDistortionCorrectionUpdated) {
+ res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
+ &(it->metadata));
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to correct capture requests "
+ "for lens distortion for request %d: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ captureRequest->mDistortionCorrectionUpdated = true;
}
}
@@ -4444,21 +4528,24 @@
continue;
}
- camera_metadata_entry_t e = it->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
- if (e.count > 0 && e.data.f[0] != 1.0f) {
+ if (!captureRequest->mZoomRatioIs1x) {
cameraIdsWithZoom.insert(it->cameraId);
}
- res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
- &(it->metadata));
- if (res != OK) {
- SET_ERR("RequestThread: Unable to correct capture requests "
- "for zoom ratio for request %d: %s (%d)",
- halRequest->frame_number, strerror(-res), res);
- return INVALID_OPERATION;
+ if (!captureRequest->mZoomRatioUpdated) {
+ res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
+ &(it->metadata));
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to correct capture requests "
+ "for zoom ratio for request %d: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ captureRequest->mZoomRatioUpdated = true;
}
}
- if (captureRequest->mRotateAndCropAuto) {
+ if (captureRequest->mRotateAndCropAuto &&
+ !captureRequest->mRotationAndCropUpdated) {
for (it = captureRequest->mSettingsList.begin();
it != captureRequest->mSettingsList.end(); it++) {
auto mapper = parent->mRotateAndCropMappers.find(it->cameraId);
@@ -4472,6 +4559,7 @@
}
}
}
+ captureRequest->mRotationAndCropUpdated = true;
}
}
}
@@ -4674,7 +4762,7 @@
requestedPhysicalCameras, isStillCapture, isZslCapture,
captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
(mUseHalBufManager) ? uniqueSurfaceIdMap :
- SurfaceMap{});
+ SurfaceMap{}, captureRequest->mRequestTimeNs);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
", burstId = %" PRId32 ".",
__FUNCTION__,
@@ -4786,6 +4874,12 @@
mStreamIdsToBeDrained = streamIds;
}
+void Camera3Device::RequestThread::resetPipelineDrain() {
+ Mutex::Autolock pl(mPauseLock);
+ mNotifyPipelineDrain = false;
+ mStreamIdsToBeDrained.clear();
+}
+
void Camera3Device::RequestThread::clearPreviousRequest() {
Mutex::Autolock l(mRequestLock);
mPrevRequest.clear();
@@ -4990,6 +5084,17 @@
// list. Guarantees a complete in-sequence set of captures to
// application.
const RequestList &requests = mRepeatingRequests;
+ if (mFirstRepeating) {
+ mFirstRepeating = false;
+ } else {
+ for (auto& request : requests) {
+ // For repeating requests, override timestamp request using
+ // the time a request is inserted into the request queue,
+ // because the original repeating request will have an old
+ // fixed timestamp.
+ request->mRequestTimeNs = systemTime();
+ }
+ }
RequestList::const_iterator firstRequest =
requests.begin();
nextRequest = *firstRequest;
@@ -5314,26 +5419,26 @@
return OK;
}
-status_t Camera3Device::RequestThread::addDummyTriggerIds(
+status_t Camera3Device::RequestThread::addFakeTriggerIds(
const sp<CaptureRequest> &request) {
// Trigger ID 0 had special meaning in the HAL2 spec, so avoid it here
- static const int32_t dummyTriggerId = 1;
+ static const int32_t fakeTriggerId = 1;
status_t res;
CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
- // If AF trigger is active, insert a dummy AF trigger ID if none already
+ // If AF trigger is active, insert a fake AF trigger ID if none already
// exists
camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER);
camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID);
if (afTrigger.count > 0 &&
afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE &&
afId.count == 0) {
- res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1);
+ res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &fakeTriggerId, 1);
if (res != OK) return res;
}
- // If AE precapture trigger is active, insert a dummy precapture trigger ID
+ // If AE precapture trigger is active, insert a fake precapture trigger ID
// if none already exists
camera_metadata_entry pcTrigger =
metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
@@ -5342,7 +5447,7 @@
pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE &&
pcId.count == 0) {
res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
- &dummyTriggerId, 1);
+ &fakeTriggerId, 1);
if (res != OK) return res;
}
@@ -5608,7 +5713,7 @@
std::lock_guard<std::mutex> lock(mLock);
mStatusTracker = statusTracker;
- mRequestBufferStatusId = statusTracker->addComponent();
+ mRequestBufferStatusId = statusTracker->addComponent("BufferRequestSM");
return OK;
}
@@ -5906,11 +6011,13 @@
// though technically no other thread should be talking to Camera3Device at this point
Camera3OfflineStates offlineStates(
mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
- mUsePartialResult, mNumPartialResults, mNextResultFrameNumber,
- mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
- mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
- mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
- mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers);
+ mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mNextResultFrameNumber, mNextReprocessResultFrameNumber,
+ mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
+ mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+ mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
+ mZoomRatioMappers, mRotateAndCropMappers);
*session = new Camera3OfflineSession(mId, inputStream, offlineStreamSet,
std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index c059f55..de7df81 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -471,10 +471,12 @@
camera3::StreamSet mOutputStreams;
sp<camera3::Camera3Stream> mInputStream;
+ SessionStatsBuilder mSessionStatsBuilder;
+
int mNextStreamId;
bool mNeedConfig;
- int mDummyStreamId;
+ int mFakeStreamId;
// Whether to send state updates upstream
// Pause when doing transparent reconfiguration
@@ -517,6 +519,21 @@
// overriding of ROTATE_AND_CROP value and adjustment of coordinates
// in several other controls in both the request and the result
bool mRotateAndCropAuto;
+ // Whether this capture request has its zoom ratio set to 1.0x before
+ // the framework overrides it for camera HAL consumption.
+ bool mZoomRatioIs1x;
+ // The systemTime timestamp when the request is created.
+ nsecs_t mRequestTimeNs;
+
+
+ // Whether this capture request's distortion correction update has
+ // been done.
+ bool mDistortionCorrectionUpdated = false;
+ // Whether this capture request's rotation and crop update has been
+ // done.
+ bool mRotationAndCropUpdated = false;
+ // Whether this capture request's zoom ratio update has been done.
+ bool mZoomRatioUpdated = false;
};
typedef List<sp<CaptureRequest> > RequestList;
@@ -525,7 +542,7 @@
status_t convertMetadataListToRequestListLocked(
const List<const PhysicalCameraSettingsList> &metadataList,
const std::list<const SurfaceMap> &surfaceMaps,
- bool repeating,
+ bool repeating, nsecs_t requestTimeNs,
/*out*/
RequestList *requestList);
@@ -638,17 +655,10 @@
const SurfaceMap &surfaceMap);
/**
- * Pause state updates to the client application. Needed to mask out idle/active
- * transitions during internal reconfigure
- */
- void pauseStateNotify(bool enable);
-
- /**
* Internally re-configure camera device using new session parameters.
- * This will get triggered by the request thread. Be sure to call
- * pauseStateNotify(true) before going idle in the requesting location.
+ * This will get triggered by the request thread.
*/
- bool reconfigureCamera(const CameraMetadata& sessionParams);
+ bool reconfigureCamera(const CameraMetadata& sessionParams, int clientStatusId);
/**
* Return true in case of any output or input abandoned streams,
@@ -675,15 +685,15 @@
void cancelStreamsConfigurationLocked();
/**
- * Add a dummy stream to the current stream set as a workaround for
+ * Add a fake stream to the current stream set as a workaround for
* not allowing 0 streams in the camera HAL spec.
*/
- status_t addDummyStreamLocked();
+ status_t addFakeStreamLocked();
/**
- * Remove a dummy stream if the current config includes real streams.
+ * Remove a fake stream if the current config includes real streams.
*/
- status_t tryRemoveDummyStreamLocked();
+ status_t tryRemoveFakeStreamLocked();
/**
* Set device into an error state due to some fatal failure, and set an
@@ -839,6 +849,7 @@
}
void signalPipelineDrain(const std::vector<int>& streamIds);
+ void resetPipelineDrain();
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -867,7 +878,7 @@
// HAL workaround: Make sure a trigger ID always exists if
// a trigger does
- status_t addDummyTriggerIds(const sp<CaptureRequest> &request);
+ status_t addFakeTriggerIds(const sp<CaptureRequest> &request);
// Override rotate_and_crop control if needed; returns true if the current value was changed
bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
@@ -954,6 +965,7 @@
Condition mRequestSubmittedSignal;
RequestList mRequestQueue;
RequestList mRepeatingRequests;
+ bool mFirstRepeating;
// The next batch of requests being prepped for submission to the HAL, no longer
// on the request queue. Read-only even with mRequestLock held, outside
// of threadLoop
@@ -1017,6 +1029,9 @@
std::mutex mInFlightLock;
camera3::InFlightRequestMap mInFlightMap;
nsecs_t mExpectedInflightDuration = 0;
+ int64_t mLastCompletedRegularFrameNumber = -1;
+ int64_t mLastCompletedReprocessFrameNumber = -1;
+ int64_t mLastCompletedZslFrameNumber = -1;
// End of mInFlightLock protection scope
int mInFlightStatusId; // const after initialize
@@ -1025,7 +1040,8 @@
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
- const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces);
+ const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
+ nsecs_t requestTimeNs);
/**
* Tracking for idle detection
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
deleted file mode 100644
index b637160..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera3-DummyStream"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-#include "Camera3DummyStream.h"
-
-namespace android {
-
-namespace camera3 {
-
-const String8 Camera3DummyStream::DUMMY_ID;
-
-Camera3DummyStream::Camera3DummyStream(int id) :
- Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, DUMMY_WIDTH, DUMMY_HEIGHT,
- /*maxSize*/0, DUMMY_FORMAT, DUMMY_DATASPACE, DUMMY_ROTATION,
- DUMMY_ID) {
-
-}
-
-Camera3DummyStream::~Camera3DummyStream() {
-
-}
-
-status_t Camera3DummyStream::getBufferLocked(camera3_stream_buffer *,
- const std::vector<size_t>&) {
- ATRACE_CALL();
- ALOGE("%s: Stream %d: Dummy stream cannot produce buffers!", __FUNCTION__, mId);
- return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferLocked(
- const camera3_stream_buffer &,
- nsecs_t, const std::vector<size_t>&) {
- ATRACE_CALL();
- ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
- return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferCheckedLocked(
- const camera3_stream_buffer &,
- nsecs_t,
- bool,
- const std::vector<size_t>&,
- /*out*/
- sp<Fence>*) {
- ATRACE_CALL();
- ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
- return INVALID_OPERATION;
-}
-
-void Camera3DummyStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
- String8 lines;
- lines.appendFormat(" Stream[%d]: Dummy\n", mId);
- write(fd, lines.string(), lines.size());
-
- Camera3IOStreamBase::dump(fd, args);
-}
-
-status_t Camera3DummyStream::setTransform(int) {
- ATRACE_CALL();
- // Do nothing
- return OK;
-}
-
-status_t Camera3DummyStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
- (void) buffer;
- (void) fenceFd;
- // Do nothing
- return OK;
-}
-
-status_t Camera3DummyStream::configureQueueLocked() {
- // Do nothing
- return OK;
-}
-
-status_t Camera3DummyStream::disconnectLocked() {
- mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
- : STATE_CONSTRUCTED;
- return OK;
-}
-
-status_t Camera3DummyStream::getEndpointUsage(uint64_t *usage) const {
- *usage = DUMMY_USAGE;
- return OK;
-}
-
-bool Camera3DummyStream::isVideoStream() const {
- return false;
-}
-
-bool Camera3DummyStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
- return false;
-}
-
-status_t Camera3DummyStream::dropBuffers(bool /*dropping*/) {
- return OK;
-}
-
-const String8& Camera3DummyStream::getPhysicalCameraId() const {
- return DUMMY_ID;
-}
-
-status_t Camera3DummyStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
- ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface!",
- __FUNCTION__, mId);
- return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
- const std::vector<OutputStreamInfo> &/*outputInfo*/,
- const std::vector<size_t> &/*removedSurfaceIds*/,
- KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
- ALOGE("%s: this method is not supported!", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-}; // namespace camera3
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
deleted file mode 100644
index 4b67ea5..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-#define ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-
-#include <utils/RefBase.h>
-#include <gui/Surface.h>
-
-#include "Camera3Stream.h"
-#include "Camera3IOStreamBase.h"
-#include "Camera3OutputStreamInterface.h"
-
-namespace android {
-namespace camera3 {
-
-/**
- * A dummy output stream class, to be used as a placeholder when no valid
- * streams are configured by the client.
- * This is necessary because camera HAL v3.2 or older disallow configuring
- * 0 output streams, while the public camera2 API allows for it.
- */
-class Camera3DummyStream :
- public Camera3IOStreamBase,
- public Camera3OutputStreamInterface {
-
- public:
- /**
- * Set up a dummy stream; doesn't actually connect to anything, and uses
- * a default dummy format and size.
- */
- explicit Camera3DummyStream(int id);
-
- virtual ~Camera3DummyStream();
-
- /**
- * Camera3Stream interface
- */
-
- virtual void dump(int fd, const Vector<String16> &args) const;
-
- status_t setTransform(int transform);
-
- virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
-
- /**
- * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
- * drop buffers for stream of streamId.
- */
- virtual status_t dropBuffers(bool /*dropping*/) override;
-
- /**
- * Query the physical camera id for the output stream.
- */
- virtual const String8& getPhysicalCameraId() const override;
-
- /**
- * Return if this output stream is for video encoding.
- */
- bool isVideoStream() const;
-
- /**
- * Return if the consumer configuration of this stream is deferred.
- */
- virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
-
- /**
- * Set the consumer surfaces to the output stream.
- */
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
-
- /**
- * Query the output surface id.
- */
- virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
-
- virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
- /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
-
- /**
- * Update the stream output surfaces.
- */
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
- const std::vector<OutputStreamInfo> &outputInfo,
- const std::vector<size_t> &removedSurfaceIds,
- KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
-
- protected:
-
- /**
- * Note that we release the lock briefly in this function
- */
- virtual status_t returnBufferCheckedLocked(
- const camera3_stream_buffer &buffer,
- nsecs_t timestamp,
- bool output,
- const std::vector<size_t>& surface_ids,
- /*out*/
- sp<Fence> *releaseFenceOut);
-
- virtual status_t disconnectLocked();
-
- private:
-
- // Default dummy parameters; 320x240 is a required size for all devices,
- // otherwise act like a SurfaceView would.
- static const int DUMMY_WIDTH = 320;
- static const int DUMMY_HEIGHT = 240;
- static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN;
- static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0;
- static const uint64_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
- static const String8 DUMMY_ID;
-
- /**
- * Internal Camera3Stream interface
- */
- virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
- const std::vector<size_t>& surface_ids = std::vector<size_t>());
- virtual status_t returnBufferLocked(
- const camera3_stream_buffer &buffer,
- nsecs_t timestamp, const std::vector<size_t>& surface_ids);
-
- virtual status_t configureQueueLocked();
-
- virtual status_t getEndpointUsage(uint64_t *usage) const;
-
-}; // class Camera3DummyStream
-
-} // namespace camera3
-
-} // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
new file mode 100644
index 0000000..230512a
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-FakeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include "Camera3FakeStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+const String8 Camera3FakeStream::FAKE_ID;
+
+Camera3FakeStream::Camera3FakeStream(int id) :
+ Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
+ /*maxSize*/0, FAKE_FORMAT, FAKE_DATASPACE, FAKE_ROTATION,
+ FAKE_ID) {
+
+}
+
+Camera3FakeStream::~Camera3FakeStream() {
+
+}
+
+status_t Camera3FakeStream::getBufferLocked(camera3_stream_buffer *,
+ const std::vector<size_t>&) {
+ ATRACE_CALL();
+ ALOGE("%s: Stream %d: Fake stream cannot produce buffers!", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferLocked(
+ const camera3_stream_buffer &,
+ nsecs_t, const std::vector<size_t>&) {
+ ATRACE_CALL();
+ ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferCheckedLocked(
+ const camera3_stream_buffer &,
+ nsecs_t,
+ bool,
+ const std::vector<size_t>&,
+ /*out*/
+ sp<Fence>*) {
+ ATRACE_CALL();
+ ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+}
+
+void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
+ (void) args;
+ String8 lines;
+ lines.appendFormat(" Stream[%d]: Fake\n", mId);
+ write(fd, lines.string(), lines.size());
+
+ Camera3IOStreamBase::dump(fd, args);
+}
+
+status_t Camera3FakeStream::setTransform(int) {
+ ATRACE_CALL();
+ // Do nothing
+ return OK;
+}
+
+status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
+ (void) buffer;
+ (void) fenceFd;
+ // Do nothing
+ return OK;
+}
+
+status_t Camera3FakeStream::configureQueueLocked() {
+ // Do nothing
+ return OK;
+}
+
+status_t Camera3FakeStream::disconnectLocked() {
+ mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
+ : STATE_CONSTRUCTED;
+ return OK;
+}
+
+status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) const {
+ *usage = FAKE_USAGE;
+ return OK;
+}
+
+bool Camera3FakeStream::isVideoStream() const {
+ return false;
+}
+
+bool Camera3FakeStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
+ return false;
+}
+
+status_t Camera3FakeStream::dropBuffers(bool /*dropping*/) {
+ return OK;
+}
+
+const String8& Camera3FakeStream::getPhysicalCameraId() const {
+ return FAKE_ID;
+}
+
+status_t Camera3FakeStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
+ ALOGE("%s: Stream %d: Fake stream doesn't support set consumer surface!",
+ __FUNCTION__, mId);
+ return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+ const std::vector<OutputStreamInfo> &/*outputInfo*/,
+ const std::vector<size_t> &/*removedSurfaceIds*/,
+ KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
+ ALOGE("%s: this method is not supported!", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+
+}; // namespace camera3
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
new file mode 100644
index 0000000..fbf37e6
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+#define ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+
+#include <utils/RefBase.h>
+#include <gui/Surface.h>
+
+#include "Camera3Stream.h"
+#include "Camera3IOStreamBase.h"
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+namespace camera3 {
+
+/**
+ * A fake output stream class, to be used as a placeholder when no valid
+ * streams are configured by the client.
+ * This is necessary because camera HAL v3.2 or older disallow configuring
+ * 0 output streams, while the public camera2 API allows for it.
+ */
+class Camera3FakeStream :
+ public Camera3IOStreamBase,
+ public Camera3OutputStreamInterface {
+
+ public:
+ /**
+ * Set up a fake stream; doesn't actually connect to anything, and uses
+ * a default fake format and size.
+ */
+ explicit Camera3FakeStream(int id);
+
+ virtual ~Camera3FakeStream();
+
+ /**
+ * Camera3Stream interface
+ */
+
+ virtual void dump(int fd, const Vector<String16> &args) const;
+
+ status_t setTransform(int transform);
+
+ virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+
+ /**
+ * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
+ * drop buffers for stream of streamId.
+ */
+ virtual status_t dropBuffers(bool /*dropping*/) override;
+
+ /**
+ * Query the physical camera id for the output stream.
+ */
+ virtual const String8& getPhysicalCameraId() const override;
+
+ /**
+ * Return if this output stream is for video encoding.
+ */
+ bool isVideoStream() const;
+
+ /**
+ * Return if the consumer configuration of this stream is deferred.
+ */
+ virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
+
+ /**
+ * Set the consumer surfaces to the output stream.
+ */
+ virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+
+ /**
+ * Query the output surface id.
+ */
+ virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
+
+ virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
+ /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
+
+ /**
+ * Update the stream output surfaces.
+ */
+ virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ const std::vector<OutputStreamInfo> &outputInfo,
+ const std::vector<size_t> &removedSurfaceIds,
+ KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
+ protected:
+
+ /**
+ * Note that we release the lock briefly in this function
+ */
+ virtual status_t returnBufferCheckedLocked(
+ const camera3_stream_buffer &buffer,
+ nsecs_t timestamp,
+ bool output,
+ const std::vector<size_t>& surface_ids,
+ /*out*/
+ sp<Fence> *releaseFenceOut);
+
+ virtual status_t disconnectLocked();
+
+ private:
+
+ // Default fake parameters; 320x240 is a required size for all devices,
+ // otherwise act like a SurfaceView would.
+ static const int FAKE_WIDTH = 320;
+ static const int FAKE_HEIGHT = 240;
+ static const int FAKE_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ static const android_dataspace FAKE_DATASPACE = HAL_DATASPACE_UNKNOWN;
+ static const camera3_stream_rotation_t FAKE_ROTATION = CAMERA3_STREAM_ROTATION_0;
+ static const uint64_t FAKE_USAGE = GRALLOC_USAGE_HW_COMPOSER;
+ static const String8 FAKE_ID;
+
+ /**
+ * Internal Camera3Stream interface
+ */
+ virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
+ const std::vector<size_t>& surface_ids = std::vector<size_t>());
+ virtual status_t returnBufferLocked(
+ const camera3_stream_buffer &buffer,
+ nsecs_t timestamp, const std::vector<size_t>& surface_ids);
+
+ virtual status_t configureQueueLocked();
+
+ virtual status_t getEndpointUsage(uint64_t *usage) const;
+
+}; // class Camera3FakeStream
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 448379c..ca62239 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -48,6 +48,7 @@
virtual void dump(int fd, const Vector<String16> &args) const;
+ int getMaxTotalBuffers() const { return mTotalBufferCount; }
protected:
size_t mTotalBufferCount;
// sum of input and output buffers that are currently acquired by HAL
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index cb59a76..ebd33e9 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -27,13 +27,13 @@
namespace camera3 {
-const String8 Camera3InputStream::DUMMY_ID;
+const String8 Camera3InputStream::FAKE_ID;
Camera3InputStream::Camera3InputStream(int id,
uint32_t width, uint32_t height, int format) :
Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, /*maxSize*/0,
format, HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0,
- DUMMY_ID) {
+ FAKE_ID) {
if (format == HAL_PIXEL_FORMAT_BLOB) {
ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 97a627a..22697b7 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -53,7 +53,7 @@
sp<IGraphicBufferProducer> mProducer;
Vector<BufferItem> mBuffersInFlight;
- static const String8 DUMMY_ID;
+ static const String8 FAKE_ID;
/**
* Camera3IOStreamBase
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 5942868..a7e64ce 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -61,6 +61,9 @@
mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
mUsePartialResult(offlineStates.mUsePartialResult),
mNumPartialResults(offlineStates.mNumPartialResults),
+ mLastCompletedRegularFrameNumber(offlineStates.mLastCompletedRegularFrameNumber),
+ mLastCompletedReprocessFrameNumber(offlineStates.mLastCompletedReprocessFrameNumber),
+ mLastCompletedZslFrameNumber(offlineStates.mLastCompletedZslFrameNumber),
mNextResultFrameNumber(offlineStates.mNextResultFrameNumber),
mNextReprocessResultFrameNumber(offlineStates.mNextReprocessResultFrameNumber),
mNextZslStillResultFrameNumber(offlineStates.mNextZslStillResultFrameNumber),
@@ -173,7 +176,7 @@
FlushInflightReqStates states {
mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
- listener, *this, mBufferRecords, *this};
+ listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
camera3::flushInflightRequests(states);
@@ -247,8 +250,9 @@
CaptureOutputStates states {
mId,
- mOfflineReqsLock, mOfflineReqs,
- mOutputLock, mResultQueue, mResultSignal,
+ mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -256,7 +260,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ mBufferRecords
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -285,8 +290,9 @@
CaptureOutputStates states {
mId,
- mOfflineReqsLock, mOfflineReqs,
- mOutputLock, mResultQueue, mResultSignal,
+ mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -294,7 +300,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ mBufferRecords
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -318,8 +325,9 @@
CaptureOutputStates states {
mId,
- mOfflineReqsLock, mOfflineReqs,
- mOutputLock, mResultQueue, mResultSignal,
+ mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+ mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+ mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
@@ -327,7 +335,8 @@
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
- mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+ mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+ mBufferRecords
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -347,7 +356,7 @@
}
RequestBufferStates states {
- mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+ mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
*this, mBufferRecords, *this};
camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
@@ -364,7 +373,7 @@
}
ReturnBufferStates states {
- mId, mUseHalBufManager, mOutputStreams, mBufferRecords};
+ mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 208f70d..5581964 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -36,7 +36,6 @@
#include "device3/RotateAndCropMapper.h"
#include "device3/ZoomRatioMapper.h"
#include "utils/TagMonitor.h"
-#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
namespace android {
@@ -57,10 +56,11 @@
const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
const bool useHalBufManager, const bool needFixupMonochromeTags,
const bool usePartialResult, const uint32_t numPartialResults,
- const uint32_t nextResultFN, const uint32_t nextReprocResultFN,
- const uint32_t nextZslResultFN, const uint32_t nextShutterFN,
- const uint32_t nextReprocShutterFN, const uint32_t nextZslShutterFN,
- const CameraMetadata& deviceInfo,
+ const int64_t lastCompletedRegularFN, const int64_t lastCompletedReprocessFN,
+ const int64_t lastCompletedZslFN, const uint32_t nextResultFN,
+ const uint32_t nextReprocResultFN, const uint32_t nextZslResultFN,
+ const uint32_t nextShutterFN, const uint32_t nextReprocShutterFN,
+ const uint32_t nextZslShutterFN, const CameraMetadata& deviceInfo,
const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap,
const std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers,
const std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers,
@@ -69,6 +69,9 @@
mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
+ mLastCompletedRegularFrameNumber(lastCompletedRegularFN),
+ mLastCompletedReprocessFrameNumber(lastCompletedReprocessFN),
+ mLastCompletedZslFrameNumber(lastCompletedZslFN),
mNextResultFrameNumber(nextResultFN),
mNextReprocessResultFrameNumber(nextReprocResultFN),
mNextZslStillResultFrameNumber(nextZslResultFN),
@@ -90,6 +93,15 @@
const bool mUsePartialResult;
const uint32_t mNumPartialResults;
+ // The last completed (buffers, result metadata, and error notify) regular
+ // request frame number
+ const int64_t mLastCompletedRegularFrameNumber;
+ // The last completed (buffers, result metadata, and error notify) reprocess
+ // request frame number
+ const int64_t mLastCompletedReprocessFrameNumber;
+ // The last completed (buffers, result metadata, and error notify) zsl
+ // request frame number
+ const int64_t mLastCompletedZslFrameNumber;
// the minimal frame number of the next non-reprocess result
const uint32_t mNextResultFrameNumber;
// the minimal frame number of the next reprocess result
@@ -196,6 +208,7 @@
sp<camera3::Camera3Stream> mInputStream;
camera3::StreamSet mOutputStreams;
camera3::BufferRecords mBufferRecords;
+ SessionStatsBuilder mSessionStatsBuilder;
std::mutex mOfflineReqsLock;
camera3::InFlightRequestMap mOfflineReqs;
@@ -214,6 +227,12 @@
std::mutex mOutputLock;
std::list<CaptureResult> mResultQueue;
std::condition_variable mResultSignal;
+ // the last completed frame number of regular requests
+ int64_t mLastCompletedRegularFrameNumber;
+ // the last completed frame number of reprocess requests
+ int64_t mLastCompletedReprocessFrameNumber;
+ // the last completed frame number of ZSL still capture requests
+ int64_t mLastCompletedZslFrameNumber;
// the minimal frame number of the next non-reprocess result
uint32_t mNextResultFrameNumber;
// the minimal frame number of the next reprocess result
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index e1d35e8..6dfc838 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -18,9 +18,17 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <ctime>
+#include <fstream>
+
+#include <android-base/unique_fd.h>
+#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
+
+#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
+#include "utils/TraceHFR.h"
#ifndef container_of
#define container_of(ptr, type, member) \
@@ -113,7 +121,7 @@
mState = STATE_ERROR;
}
- // Sanity check for the consumer usage flag.
+ // Validation check for the consumer usage flag.
if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
(consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
@@ -160,7 +168,7 @@
status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
const std::vector<size_t>&) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
ANativeWindowBuffer* anb;
int fenceFd = -1;
@@ -190,7 +198,7 @@
status_t Camera3OutputStream::returnBufferLocked(
const camera3_stream_buffer &buffer,
nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);
@@ -278,6 +286,12 @@
__FUNCTION__, mId, strerror(-res), res);
return res;
}
+ // If this is a JPEG output, and image dump mask is set, save image to
+ // disk.
+ if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
+ mImageDumpMask) {
+ dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
+ }
res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
if (shouldLogError(res, state)) {
@@ -516,7 +530,7 @@
}
status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
status_t res;
if ((res = getBufferPreconditionCheckLocked()) != OK) {
@@ -956,6 +970,49 @@
return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}
+void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
+ ANativeWindowBuffer* anwBuffer, int fence) {
+ // Derive output file name
+ std::string fileExtension = "jpg";
+ char imageFileName[64];
+ time_t now = time(0);
+ tm *localTime = localtime(&now);
+ snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
+ 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
+ localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
+ timestamp, fileExtension.c_str());
+
+ // Lock the image for CPU read
+ sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
+ void* mapped = nullptr;
+ base::unique_fd fenceFd(dup(fence));
+ status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
+ fenceFd.get());
+ if (res != OK) {
+ ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return;
+ }
+
+ // Figure out actual file size
+ auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
+ if (actualJpegSize == 0) {
+ actualJpegSize = mMaxSize;
+ }
+
+ // Output image data to file
+ std::string filePath = "/data/misc/cameraserver/";
+ filePath += imageFileName;
+ std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
+ if (!imageFile.is_open()) {
+ ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
+ graphicBuffer->unlock();
+ return;
+ }
+ imageFile.write((const char*)mapped, actualJpegSize);
+
+ graphicBuffer->unlock();
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index b4e49f9..55f0d41 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -210,6 +210,8 @@
*/
static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
+ void setImageDumpMask(int mask) { mImageDumpMask = mask; }
+
protected:
Camera3OutputStream(int id, camera3_stream_type_t type,
uint32_t width, uint32_t height, int format,
@@ -325,9 +327,14 @@
// STATE_ABANDONED
static bool shouldLogError(status_t res, StreamState state);
+ // Dump images to disk before returning to consumer
+ void dumpImageToDisk(nsecs_t timestamp, ANativeWindowBuffer* anwBuffer, int fence);
+
static const int32_t kDequeueLatencyBinSize = 5; // in ms
CameraLatencyHistogram mDequeueBufferLatency;
+ int mImageDumpMask = 0;
+
}; // class Camera3OutputStream
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 603f516..f88b062 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -182,7 +182,33 @@
return;
}
- insertResultLocked(states, &captureResult, frameNumber);
+ // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
+ // and RotationAndCrop mappers.
+ std::set<uint32_t> keysToRemove;
+
+ auto iter = states.distortionMappers.find(states.cameraId.c_str());
+ if (iter != states.distortionMappers.end()) {
+ const auto& remappedKeys = iter->second.getRemappedKeys();
+ keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+ }
+
+ const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
+ keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+
+ auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
+ if (mapper != states.rotateAndCropMappers.end()) {
+ const auto& remappedKeys = mapper->second.getRemappedKeys();
+ keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+ }
+
+ for (uint32_t key : keysToRemove) {
+ captureResult.mMetadata.erase(key);
+ }
+
+ // Send partial result
+ if (captureResult.mMetadata.entryCount() > 0) {
+ insertResultLocked(states, &captureResult, frameNumber);
+ }
}
void sendCaptureResult(
@@ -397,6 +423,7 @@
InFlightRequestMap& inflightMap = states.inflightMap;
const InFlightRequest &request = inflightMap.valueAt(idx);
const uint32_t frameNumber = inflightMap.keyAt(idx);
+ SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;
nsecs_t sensorTimestamp = request.sensorTimestamp;
nsecs_t shutterTimestamp = request.shutterTimestamp;
@@ -405,8 +432,8 @@
// In the case of a successful request:
// all input and output buffers, all result metadata, shutter callback
// arrived.
- // In the case of a unsuccessful request:
- // all input and output buffers arrived.
+ // In the case of an unsuccessful request:
+ // all input and output buffers, as well as request/result error notifications, arrived.
if (request.numBuffersLeft == 0 &&
(request.skipResultMetadata ||
(request.haveResultMetadata && shutterTimestamp != 0))) {
@@ -416,7 +443,7 @@
ATRACE_ASYNC_END("frame capture", frameNumber);
- // Sanity check - if sensor timestamp matches shutter timestamp in the
+ // Validation check - if sensor timestamp matches shutter timestamp in the
// case of request having callback.
if (request.hasCallback && request.requestStatus == OK &&
sensorTimestamp != shutterTimestamp) {
@@ -433,8 +460,22 @@
returnOutputBuffers(
states.useHalBufManager, states.listener,
request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
- request.outputSurfaces, request.resultExtras);
+ request.pendingOutputBuffers.size(), 0,
+ /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+ /*timestampIncreasing*/true,
+ request.outputSurfaces, request.resultExtras,
+ request.errorBufStrategy);
+
+ // Note down the just completed frame number
+ if (request.hasInputBuffer) {
+ states.lastCompletedReprocessFrameNumber = frameNumber;
+ } else if (request.zslCapture) {
+ states.lastCompletedZslFrameNumber = frameNumber;
+ } else {
+ states.lastCompletedRegularFrameNumber = frameNumber;
+ }
+
+ sessionStatsBuilder.incResultCounter(request.skipResultMetadata);
removeInFlightMapEntryLocked(states, idx);
ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
@@ -487,10 +528,13 @@
InFlightRequest &request = states.inflightMap.editValueAt(idx);
ALOGVV("%s: got InFlightRequest requestId = %" PRId32
", frameNumber = %" PRId64 ", burstId = %" PRId32
- ", partialResultCount = %d, hasCallback = %d",
+ ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
+ ", usePartialResult = %d",
__FUNCTION__, request.resultExtras.requestId,
request.resultExtras.frameNumber, request.resultExtras.burstId,
- result->partial_result, request.hasCallback);
+ result->partial_result, states.numPartialResults,
+ request.hasCallback, result->num_output_buffers,
+ states.usePartialResult);
// Always update the partial count to the latest one if it's not 0
// (buffers only). When framework aggregates adjacent partial results
// into one, the latest partial count will be used.
@@ -555,6 +599,7 @@
request.collectedPartialResult);
}
request.haveResultMetadata = true;
+ request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
}
uint32_t numBuffersReturned = result->num_output_buffers;
@@ -581,18 +626,14 @@
request.sensorTimestamp = entry.data.i64[0];
}
- // If shutter event isn't received yet, append the output buffers to
- // the in-flight request. Otherwise, return the output buffers to
- // streams.
- if (shutterTimestamp == 0) {
- request.pendingOutputBuffers.appendArray(result->output_buffers,
+ // If shutter event isn't received yet, do not return the pending output
+ // buffers.
+ request.pendingOutputBuffers.appendArray(result->output_buffers,
result->num_output_buffers);
- } else {
- bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
- returnOutputBuffers(states.useHalBufManager, states.listener,
- result->output_buffers, result->num_output_buffers,
- shutterTimestamp, timestampIncreasing,
- request.outputSurfaces, request.resultExtras);
+ if (shutterTimestamp != 0) {
+ returnAndRemovePendingOutputBuffers(
+ states.useHalBufManager, states.listener,
+ request, states.sessionStatsBuilder);
}
if (result->result != NULL && !isPartialResult) {
@@ -789,42 +830,78 @@
bool useHalBufManager,
sp<NotificationListener> listener,
const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
- nsecs_t timestamp, bool timestampIncreasing,
+ nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
+ SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing,
const SurfaceMap& outputSurfaces,
- const CaptureResultExtras &inResultExtras) {
+ const CaptureResultExtras &inResultExtras,
+ ERROR_BUF_STRATEGY errorBufStrategy) {
for (size_t i = 0; i < numBuffers; i++)
{
+ Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
+ int streamId = stream->getId();
+
+ // Call notify(ERROR_BUFFER) if necessary.
+ if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR &&
+ errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
+ if (listener != nullptr) {
+ CaptureResultExtras extras = inResultExtras;
+ extras.errorStreamId = streamId;
+ listener->notifyError(
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
+ extras);
+ }
+ }
+
if (outputBuffers[i].buffer == nullptr) {
if (!useHalBufManager) {
// With HAL buffer management API, HAL sometimes will have to return buffers that
// has not got a output buffer handle filled yet. This is though illegal if HAL
// buffer management API is not being used.
ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
+ } else {
+ if (requested) {
+ sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
+ }
}
continue;
}
- Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
- int streamId = stream->getId();
const auto& it = outputSurfaces.find(streamId);
status_t res = OK;
- if (it != outputSurfaces.end()) {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, it->second,
- inResultExtras.frameNumber);
- } else {
- res = stream->returnBuffer(
- outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber);
- }
+ // Do not return the buffer if the buffer status is error, and the error
+ // buffer strategy is CACHE.
+ if (outputBuffers[i].status != CAMERA3_BUFFER_STATUS_ERROR ||
+ errorBufStrategy != ERROR_BUF_CACHE) {
+ if (it != outputSurfaces.end()) {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, it->second,
+ inResultExtras.frameNumber);
+ } else {
+ res = stream->returnBuffer(
+ outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
+ inResultExtras.frameNumber);
+ }
+ }
// Note: stream may be deallocated at this point, if this buffer was
// the last reference to it.
+ bool dropped = false;
if (res == NO_INIT || res == DEAD_OBJECT) {
ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ sessionStatsBuilder.stopCounter(streamId);
} else if (res != OK) {
ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+ dropped = true;
+ } else {
+ if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR || timestamp == 0) {
+ dropped = true;
+ }
+ }
+ if (requested) {
+ nsecs_t bufferTimeNs = systemTime();
+ int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+ sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
}
// Long processing consumers can cause returnBuffer timeout for shared stream
@@ -834,7 +911,8 @@
// cancel the buffer
camera3_stream_buffer_t sb = outputBuffers[i];
sb.status = CAMERA3_BUFFER_STATUS_ERROR;
- stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
+ stream->returnBuffer(sb, /*timestamp*/0,
+ timestampIncreasing, std::vector<size_t> (),
inResultExtras.frameNumber);
if (listener != nullptr) {
@@ -848,6 +926,30 @@
}
}
+void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+ sp<NotificationListener> listener, InFlightRequest& request,
+ SessionStatsBuilder& sessionStatsBuilder) {
+ bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+ returnOutputBuffers(useHalBufManager, listener,
+ request.pendingOutputBuffers.array(),
+ request.pendingOutputBuffers.size(),
+ request.shutterTimestamp, /*requested*/true,
+ request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+ request.outputSurfaces, request.resultExtras,
+ request.errorBufStrategy);
+
+ // Remove error buffers that are not cached.
+ for (auto iter = request.pendingOutputBuffers.begin();
+ iter != request.pendingOutputBuffers.end(); ) {
+ if (request.errorBufStrategy != ERROR_BUF_CACHE ||
+ iter->status != CAMERA3_BUFFER_STATUS_ERROR) {
+ iter = request.pendingOutputBuffers.erase(iter);
+ } else {
+ iter++;
+ }
+ }
+}
+
void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
ATRACE_CALL();
ssize_t idx;
@@ -899,6 +1001,12 @@
msg.frame_number, r.resultExtras.requestId, msg.timestamp);
// Call listener, if any
if (states.listener != nullptr) {
+ r.resultExtras.lastCompletedRegularFrameNumber =
+ states.lastCompletedRegularFrameNumber;
+ r.resultExtras.lastCompletedReprocessFrameNumber =
+ states.lastCompletedReprocessFrameNumber;
+ r.resultExtras.lastCompletedZslFrameNumber =
+ states.lastCompletedZslFrameNumber;
states.listener->notifyShutter(r.resultExtras, msg.timestamp);
}
// send pending result and buffers
@@ -908,13 +1016,8 @@
r.hasInputBuffer, r.zslCapture && r.stillCapture,
r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
}
- bool timestampIncreasing = !(r.zslCapture || r.hasInputBuffer);
- returnOutputBuffers(
- states.useHalBufManager, states.listener,
- r.pendingOutputBuffers.array(),
- r.pendingOutputBuffers.size(), r.shutterTimestamp, timestampIncreasing,
- r.outputSurfaces, r.resultExtras);
- r.pendingOutputBuffers.clear();
+ returnAndRemovePendingOutputBuffers(
+ states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
removeInFlightRequestIfReadyLocked(states, idx);
}
@@ -968,7 +1071,6 @@
break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
- case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
{
std::lock_guard<std::mutex> l(states.inflightLock);
ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -976,7 +1078,7 @@
InFlightRequest &r = states.inflightMap.editValueAt(idx);
r.requestStatus = msg.error_code;
resultExtras = r.resultExtras;
- bool logicalDeviceResultError = false;
+ bool physicalDeviceResultError = false;
if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
errorCode) {
if (physicalCameraId.size() > 0) {
@@ -990,23 +1092,22 @@
}
r.physicalCameraIds.erase(iter);
resultExtras.errorPhysicalCameraId = physicalCameraId;
- } else {
- logicalDeviceResultError = true;
+ physicalDeviceResultError = true;
}
}
- if (logicalDeviceResultError
- || hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST ==
- errorCode) {
+ if (!physicalDeviceResultError) {
r.skipResultMetadata = true;
- }
- if (logicalDeviceResultError) {
- // In case of missing result check whether the buffers
- // returned. If they returned, then remove inflight
- // request.
- // TODO: should we call this for ERROR_CAMERA_REQUEST as well?
- // otherwise we are depending on HAL to send the buffers back after
- // calling notifyError. Not sure if that's in the spec.
+ if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
+ == errorCode) {
+ r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
+ } else {
+ // errorCode is ERROR_CAMERA_REQUEST
+ r.errorBufStrategy = ERROR_BUF_RETURN;
+ }
+
+ // Check whether the buffers returned. If they returned,
+ // remove inflight request.
removeInFlightRequestIfReadyLocked(states, idx);
}
} else {
@@ -1024,6 +1125,10 @@
states.cameraId.string(), __FUNCTION__);
}
break;
+ case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+ // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
+ // callback to the app. Rather, use STATUS_ERROR of image buffers.
+ break;
default:
// SET_ERR calls notifyError
SET_ERR("Unknown error message from HAL: %d", msg.error_code);
@@ -1164,13 +1269,13 @@
return;
}
+ bufRet.streamId = streamId;
if (outputStream->isAbandoned()) {
bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
allReqsSucceeds = false;
continue;
}
- bufRet.streamId = streamId;
size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
uint32_t numBuffersRequested = bufReq.numBuffersRequested;
size_t totalHandout = handOutBufferCount + numBuffersRequested;
@@ -1201,6 +1306,7 @@
ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
__FUNCTION__, streamId, strerror(-res), res);
bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ states.sessionStatsBuilder.stopCounter(streamId);
} else {
ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
__FUNCTION__, streamId, strerror(-res), res);
@@ -1266,7 +1372,8 @@
sb.status = CAMERA3_BUFFER_STATUS_ERROR;
}
returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
- streamBuffers.data(), numAllocatedBuffers, 0);
+ streamBuffers.data(), numAllocatedBuffers, 0, /*requested*/false,
+ /*requestTimeNs*/0, states.sessionStatsBuilder);
}
}
@@ -1323,23 +1430,30 @@
}
streamBuffer.stream = stream->asHalStream();
returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
- &streamBuffer, /*size*/1, /*timestamp*/ 0);
+ &streamBuffer, /*size*/1, /*timestamp*/ 0, /*requested*/false,
+ /*requestTimeNs*/0, states.sessionStatsBuilder);
}
}
void flushInflightRequests(FlushInflightReqStates& states) {
ATRACE_CALL();
- { // First return buffers cached in mInFlightMap
+ { // First return buffers cached in inFlightMap
std::lock_guard<std::mutex> l(states.inflightLock);
for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
const InFlightRequest &request = states.inflightMap.valueAt(idx);
returnOutputBuffers(
states.useHalBufManager, states.listener,
request.pendingOutputBuffers.array(),
- request.pendingOutputBuffers.size(), 0,
- /*timestampIncreasing*/true, request.outputSurfaces,
- request.resultExtras);
+ request.pendingOutputBuffers.size(), 0, /*requested*/true,
+ request.requestTimeNs, states.sessionStatsBuilder, /*timestampIncreasing*/true,
+ request.outputSurfaces, request.resultExtras, request.errorBufStrategy);
+ ALOGW("%s: Frame %d | Timestamp: %" PRId64 ", metadata"
+ " arrived: %s, buffers left: %d.\n", __FUNCTION__,
+ states.inflightMap.keyAt(idx), request.shutterTimestamp,
+ request.haveResultMetadata ? "true" : "false",
+ request.numBuffersLeft);
}
+
states.inflightMap.clear();
states.inflightIntf.onInflightMapFlushedLocked();
}
@@ -1404,7 +1518,8 @@
switch (halStream->stream_type) {
case CAMERA3_STREAM_OUTPUT:
res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
- /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
+ /*timestampIncreasing*/true,
+ std::vector<size_t> (), frameNumber);
if (res != OK) {
ALOGE("%s: Can't return output buffer for frame %d to"
" stream %d: %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index fbb47f8..45c8a43 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -33,6 +33,7 @@
#include "device3/InFlightRequest.h"
#include "device3/Camera3Stream.h"
#include "device3/Camera3OutputStreamInterface.h"
+#include "utils/SessionStatsBuilder.h"
#include "utils/TagMonitor.h"
namespace android {
@@ -44,22 +45,37 @@
/**
* Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
*/
- // helper function to return the output buffers to output streams.
+
+ // helper function to return the output buffers to output streams. The
+ // function also optionally calls notify(ERROR_BUFFER).
void returnOutputBuffers(
bool useHalBufManager,
sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
const camera3_stream_buffer_t *outputBuffers,
- size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
+ size_t numBuffers, nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
+ SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
// The following arguments are only meant for surface sharing use case
const SurfaceMap& outputSurfaces = SurfaceMap{},
// Used to send buffer error callback when failing to return buffer
- const CaptureResultExtras &resultExtras = CaptureResultExtras{});
+ const CaptureResultExtras &resultExtras = CaptureResultExtras{},
+ ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN);
+
+ // helper function to return the output buffers to output streams, and
+ // remove the returned buffers from the inflight request's pending buffers
+ // vector.
+ void returnAndRemovePendingOutputBuffers(
+ bool useHalBufManager,
+ sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
+ InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
// Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
// callbacks
struct CaptureOutputStates {
const String8& cameraId;
std::mutex& inflightLock;
+ int64_t& lastCompletedRegularFrameNumber;
+ int64_t& lastCompletedReprocessFrameNumber;
+ int64_t& lastCompletedZslFrameNumber;
InFlightRequestMap& inflightMap; // end of inflightLock scope
std::mutex& outputLock;
std::list<CaptureResult>& resultQueue;
@@ -84,6 +100,7 @@
TagMonitor& tagMonitor;
sp<Camera3Stream> inputStream;
StreamSet& outputStreams;
+ SessionStatsBuilder& sessionStatsBuilder;
sp<NotificationListener> listener;
SetErrorInterface& setErrIntf;
InflightRequestUpdateInterface& inflightIntf;
@@ -107,6 +124,7 @@
std::mutex& reqBufferLock; // lock to serialize request buffer calls
const bool useHalBufManager;
StreamSet& outputStreams;
+ SessionStatsBuilder& sessionStatsBuilder;
SetErrorInterface& setErrIntf;
BufferRecordsInterface& bufferRecordsIntf;
RequestBufferInterface& reqBufferIntf;
@@ -120,6 +138,7 @@
const String8& cameraId;
const bool useHalBufManager;
StreamSet& outputStreams;
+ SessionStatsBuilder& sessionStatsBuilder;
BufferRecordsInterface& bufferRecordsIntf;
};
@@ -135,6 +154,7 @@
InflightRequestUpdateInterface& inflightIntf;
BufferRecordsInterface& bufferRecordsIntf;
FlushBufferInterface& flushBufferIntf;
+ SessionStatsBuilder& sessionStatsBuilder;
};
void flushInflightRequests(FlushInflightReqStates& states);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index e54a99b..9a8f6fe 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -22,6 +22,7 @@
#include <utils/Trace.h>
#include "device3/Camera3Stream.h"
#include "device3/StatusTracker.h"
+#include "utils/TraceHFR.h"
#include <cutils/properties.h>
@@ -151,6 +152,10 @@
return mPhysicalCameraId;
}
+int Camera3Stream::getMaxHalBuffers() const {
+ return camera3_stream::max_buffers;
+}
+
void Camera3Stream::setOfflineProcessingSupport(bool support) {
mSupportOfflineProcessing = support;
}
@@ -329,7 +334,8 @@
// Register for idle tracking
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != 0 && mStatusId == StatusTracker::NO_STATUS_ID) {
- mStatusId = statusTracker->addComponent();
+ std::string name = std::string("Stream ") + std::to_string(mId);
+ mStatusId = statusTracker->addComponent(name.c_str());
}
// Check if the stream configuration is unchanged, and skip reallocation if
@@ -601,7 +607,7 @@
status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer,
nsecs_t waitBufferTimeout,
const std::vector<size_t>& surface_ids) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
Mutex::Autolock l(mLock);
status_t res = OK;
@@ -682,7 +688,7 @@
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
nsecs_t timestamp, bool timestampIncreasing,
const std::vector<size_t>& surface_ids, uint64_t frameNumber) {
- ATRACE_CALL();
+ ATRACE_HFR_CALL();
Mutex::Autolock l(mLock);
// Check if this buffer is outstanding.
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d768d3d..3654f89 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -165,6 +165,7 @@
void setDataSpaceOverride(bool dataSpaceOverriden);
bool isDataSpaceOverridden() const;
android_dataspace getOriginalDataSpace() const;
+ int getMaxHalBuffers() const;
const String8& physicalCameraId() const;
void setOfflineProcessingSupport(bool) override;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 667e3bb..a053262 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -99,6 +99,8 @@
virtual void setDataSpaceOverride(bool dataSpaceOverriden) = 0;
virtual bool isDataSpaceOverridden() const = 0;
virtual android_dataspace getOriginalDataSpace() const = 0;
+ virtual int getMaxHalBuffers() const = 0;
+ virtual int getMaxTotalBuffers() const = 0;
/**
* Offline processing
diff --git a/services/camera/libcameraservice/device3/CoordinateMapper.h b/services/camera/libcameraservice/device3/CoordinateMapper.h
index 5164856..558f4c0 100644
--- a/services/camera/libcameraservice/device3/CoordinateMapper.h
+++ b/services/camera/libcameraservice/device3/CoordinateMapper.h
@@ -18,16 +18,23 @@
#define ANDROID_SERVERS_COORDINATEMAPPER_H
#include <array>
+#include <set>
namespace android {
namespace camera3 {
class CoordinateMapper {
- // Right now only stores metadata tags containing 2D coordinates
- // to be corrected.
+public:
+ // The result metadata tags that are to be re-mapped
+ const std::set<uint32_t>& getRemappedKeys() const {
+ return mRemappedKeys;
+ }
+
+ virtual ~CoordinateMapper() = default;
+
protected:
- // Metadata key lists to correct
+ // Metadata tags containing 2D coordinates to be corrected.
// Both capture request and result
static const std::array<uint32_t, 3> kMeteringRegionsToCorrect;
@@ -37,6 +44,10 @@
// Only for capture results; don't clamp
static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
+
+ virtual void initRemappedKeys() = 0;
+ std::set<uint32_t> mRemappedKeys;
+
}; // class CoordinateMapper
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 8132225..316303e 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -29,6 +29,20 @@
DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
+ initRemappedKeys();
+}
+
+void DistortionMapper::initRemappedKeys() {
+ mRemappedKeys.insert(
+ kMeteringRegionsToCorrect.begin(),
+ kMeteringRegionsToCorrect.end());
+ mRemappedKeys.insert(
+ kRectsToCorrect.begin(),
+ kRectsToCorrect.end());
+ mRemappedKeys.insert(
+ kResultPointsToCorrectNoClamp.begin(),
+ kResultPointsToCorrectNoClamp.end());
+ mRemappedKeys.insert(ANDROID_DISTORTION_CORRECTION_MODE);
}
bool DistortionMapper::isDistortionSupported(const CameraMetadata &deviceInfo) {
@@ -485,7 +499,7 @@
float det = b * b - 4 * a * c;
if (det < 0) {
- // Sanity check - should not happen if pt is within the quad
+ // Validation check - should not happen if pt is within the quad
ALOGE("Bad determinant! a: %f, b: %f, c: %f, det: %f", a,b,c,det);
return -1;
}
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 7dcb67b..5027bd0 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -32,7 +32,7 @@
* Utilities to transform between raw (distorted) and warped (corrected) coordinate systems
* for cameras that support geometric distortion
*/
-class DistortionMapper : private CoordinateMapper {
+class DistortionMapper : public CoordinateMapper {
public:
DistortionMapper();
@@ -43,7 +43,10 @@
mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
- mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+ mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {
+ initRemappedKeys(); }
+
+ void initRemappedKeys() override;
/**
* Check whether distortion correction is supported by the camera HAL
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 424043b..c7b7475 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -32,7 +32,18 @@
namespace camera3 {
+typedef enum {
+ // Cache the buffers with STATUS_ERROR within InFlightRequest
+ ERROR_BUF_CACHE,
+ // Return the buffers with STATUS_ERROR to the buffer queue
+ ERROR_BUF_RETURN,
+ // Return the buffers with STATUS_ERROR to the buffer queue, and call
+ // notify(ERROR_BUFFER) as well
+ ERROR_BUF_RETURN_NOTIFY
+} ERROR_BUF_STRATEGY;
+
struct InFlightRequest {
+
// Set by notify() SHUTTER call.
nsecs_t shutterTimestamp;
// Set by process_capture_result().
@@ -43,6 +54,9 @@
// Decremented by calls to process_capture_result with valid output
// and input buffers
int numBuffersLeft;
+
+ // The inflight request is considered complete if all buffers are returned
+
CaptureResultExtras resultExtras;
// If this request has any input buffer
bool hasInputBuffer;
@@ -79,6 +93,10 @@
// REQUEST/RESULT error.
bool skipResultMetadata;
+ // Whether the buffers with STATUS_ERROR should be cached as pending buffers,
+ // returned to the buffer queue, or returned to the buffer queue and notify with ERROR_BUFFER.
+ ERROR_BUF_STRATEGY errorBufStrategy;
+
// The physical camera ids being requested.
std::set<String8> physicalCameraIds;
@@ -97,6 +115,9 @@
// Requested camera ids (both logical and physical) with zoomRatio != 1.0f
std::set<std::string> cameraIdsWithZoom;
+ // Time of capture request (from systemTime) in Ns
+ nsecs_t requestTimeNs;
+
// What shared surfaces an output should go to
SurfaceMap outputSurfaces;
@@ -114,16 +135,18 @@
hasCallback(true),
maxExpectedDuration(kDefaultExpectedDuration),
skipResultMetadata(false),
+ errorBufStrategy(ERROR_BUF_CACHE),
stillCapture(false),
zslCapture(false),
- rotateAndCropAuto(false) {
+ rotateAndCropAuto(false),
+ requestTimeNs(0) {
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
bool hasAppCallback, nsecs_t maxDuration,
const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
- const SurfaceMap& outSurfaces = SurfaceMap{}) :
+ nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -134,11 +157,13 @@
hasCallback(hasAppCallback),
maxExpectedDuration(maxDuration),
skipResultMetadata(false),
+ errorBufStrategy(ERROR_BUF_CACHE),
physicalCameraIds(physicalCameraIdSet),
stillCapture(isStillCapture),
zslCapture(isZslCapture),
rotateAndCropAuto(rotateAndCropAuto),
cameraIdsWithZoom(idsWithZoom),
+ requestTimeNs(requestNs),
outputSurfaces(outSurfaces) {
}
};
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index 3718f54..a02e5f6 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -27,6 +27,18 @@
namespace camera3 {
+void RotateAndCropMapper::initRemappedKeys() {
+ mRemappedKeys.insert(
+ kMeteringRegionsToCorrect.begin(),
+ kMeteringRegionsToCorrect.end());
+ mRemappedKeys.insert(
+ kResultPointsToCorrectNoClamp.begin(),
+ kResultPointsToCorrectNoClamp.end());
+
+ mRemappedKeys.insert(ANDROID_SCALER_ROTATE_AND_CROP);
+ mRemappedKeys.insert(ANDROID_SCALER_CROP_REGION);
+}
+
bool RotateAndCropMapper::isNeeded(const CameraMetadata* deviceInfo) {
auto entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
for (size_t i = 0; i < entry.count; i++) {
@@ -36,6 +48,8 @@
}
RotateAndCropMapper::RotateAndCropMapper(const CameraMetadata* deviceInfo) {
+ initRemappedKeys();
+
auto entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (entry.count != 4) return;
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.h b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
index 459e27f..f9e2263 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.h
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
@@ -32,12 +32,14 @@
* Utilities to transform between unrotated and rotated-and-cropped coordinate systems
* for cameras that support SCALER_ROTATE_AND_CROP controls in AUTO mode.
*/
-class RotateAndCropMapper : private CoordinateMapper {
+class RotateAndCropMapper : public CoordinateMapper {
public:
static bool isNeeded(const CameraMetadata* deviceInfo);
RotateAndCropMapper(const CameraMetadata* deviceInfo);
+ void initRemappedKeys() override;
+
/**
* Adjust capture request assuming rotate and crop AUTO is enabled
*/
diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp
index 723b5c2..ea1f2c1 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.cpp
+++ b/services/camera/libcameraservice/device3/StatusTracker.cpp
@@ -40,7 +40,7 @@
StatusTracker::~StatusTracker() {
}
-int StatusTracker::addComponent() {
+int StatusTracker::addComponent(std::string componentName) {
int id;
ssize_t err;
{
@@ -49,8 +49,12 @@
ALOGV("%s: Adding new component %d", __FUNCTION__, id);
err = mStates.add(id, IDLE);
- ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%zd)",
- __FUNCTION__, id, strerror(-err), err);
+ if (componentName.empty()) {
+ componentName = std::to_string(id);
+ }
+ mComponentNames.add(id, componentName);
+ ALOGE_IF(err < 0, "%s: Can't add new component %d (%s): %s (%zd)",
+ __FUNCTION__, id, componentName.c_str(), strerror(-err), err);
}
if (err >= 0) {
@@ -68,6 +72,7 @@
Mutex::Autolock l(mLock);
ALOGV("%s: Removing component %d", __FUNCTION__, id);
idx = mStates.removeItem(id);
+ mComponentNames.removeItem(id);
}
if (idx >= 0) {
@@ -80,6 +85,20 @@
}
+void StatusTracker::dumpActiveComponents() {
+ Mutex::Autolock l(mLock);
+ if (mDeviceState == IDLE) {
+ ALOGI("%s: all components are IDLE", __FUNCTION__);
+ return;
+ }
+ for (size_t i = 0; i < mStates.size(); i++) {
+ if (mStates.valueAt(i) == ACTIVE) {
+ ALOGI("%s: component %d (%s) is active", __FUNCTION__, mStates.keyAt(i),
+ mComponentNames.valueAt(i).c_str());
+ }
+ }
+}
+
void StatusTracker::markComponentIdle(int id, const sp<Fence>& componentFence) {
markComponent(id, IDLE, componentFence);
}
diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h
index 3a1d85c..3741cce 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.h
+++ b/services/camera/libcameraservice/device3/StatusTracker.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
#define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
+#include <string>
#include <utils/Condition.h>
#include <utils/Errors.h>
#include <utils/List.h>
@@ -54,7 +55,7 @@
// Add a component to track; returns non-negative unique ID for the new
// component on success, negative error code on failure.
// New components start in the idle state.
- int addComponent();
+ int addComponent(std::string componentName);
// Remove existing component from idle tracking. Ignores unknown IDs
void removeComponent(int id);
@@ -68,6 +69,8 @@
// Set the state of a tracked component to be active. Ignores unknown IDs.
void markComponentActive(int id);
+ void dumpActiveComponents();
+
virtual void requestExit();
protected:
@@ -105,6 +108,7 @@
// Current component states
KeyedVector<int, ComponentState> mStates;
+ KeyedVector<int, std::string> mComponentNames;
// Merged fence for all processed state changes
sp<Fence> mIdleFence;
// Current overall device state
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index a87de77..1bc2081 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -25,6 +25,19 @@
namespace camera3 {
+void ZoomRatioMapper::initRemappedKeys() {
+ mRemappedKeys.insert(
+ kMeteringRegionsToCorrect.begin(),
+ kMeteringRegionsToCorrect.end());
+ mRemappedKeys.insert(
+ kRectsToCorrect.begin(),
+ kRectsToCorrect.end());
+ mRemappedKeys.insert(
+ kResultPointsToCorrectNoClamp.begin(),
+ kResultPointsToCorrectNoClamp.end());
+
+ mRemappedKeys.insert(ANDROID_CONTROL_ZOOM_RATIO);
+}
status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
camera_metadata_entry_t entry;
@@ -117,6 +130,8 @@
ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
bool supportNativeZoomRatio, bool usePrecorrectArray) {
+ initRemappedKeys();
+
camera_metadata_ro_entry_t entry;
entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
@@ -153,6 +168,19 @@
entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
if (entry.count == 1 && entry.data.f[0] != 1.0f) {
zoomRatioIs1 = false;
+
+ // If cropRegion is windowboxing, override it with activeArray
+ camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
+ if (cropRegionEntry.count == 4) {
+ int cropWidth = cropRegionEntry.data.i32[2];
+ int cropHeight = cropRegionEntry.data.i32[3];
+ if (cropWidth < mArrayWidth && cropHeight < mArrayHeight) {
+ cropRegionEntry.data.i32[0] = 0;
+ cropRegionEntry.data.i32[1] = 0;
+ cropRegionEntry.data.i32[2] = mArrayWidth;
+ cropRegionEntry.data.i32[3] = mArrayHeight;
+ }
+ }
}
if (mHalSupportsZoomRatio && zoomRatioIs1) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 698f87f..3769299 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -33,7 +33,7 @@
* - HAL supports zoomRatio and the application uses cropRegion, or
* - HAL doesn't support zoomRatio, but the application uses zoomRatio
*/
-class ZoomRatioMapper : private CoordinateMapper {
+class ZoomRatioMapper : public CoordinateMapper {
public:
ZoomRatioMapper() = default;
ZoomRatioMapper(const CameraMetadata *deviceInfo,
@@ -41,7 +41,9 @@
ZoomRatioMapper(const ZoomRatioMapper& other) :
mHalSupportsZoomRatio(other.mHalSupportsZoomRatio),
mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
- mIsValid(other.mIsValid) {}
+ mIsValid(other.mIsValid) { initRemappedKeys(); }
+
+ void initRemappedKeys() override;
/**
* Initialize request template with valid zoomRatio if necessary.
diff --git a/services/camera/libcameraservice/fuzzer/Android.bp b/services/camera/libcameraservice/fuzzer/Android.bp
new file mode 100644
index 0000000..c5b7f00
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/Android.bp
@@ -0,0 +1,44 @@
+// Copyright 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_defaults {
+ name: "libcameraservice_fuzz_defaults",
+ fuzz_config: {
+ componentid: 41727
+ },
+}
+
+cc_fuzz {
+ name: "libcameraservice_distortion_mapper_fuzzer",
+ defaults: ["libcameraservice_fuzz_defaults"],
+ srcs: [
+ "DistortionMapperFuzzer.cpp",
+ ],
+ shared_libs: [
+ "libcameraservice",
+ "libcamera_client",
+ ],
+}
+
+cc_fuzz {
+ name: "libcameraservice_depth_processor_fuzzer",
+ defaults: ["libcameraservice_fuzz_defaults"],
+ srcs: [
+ "DepthProcessorFuzzer.cpp",
+ ],
+ shared_libs: [
+ "libcameraservice",
+ ],
+ corpus: ["corpus/*.jpg"],
+}
diff --git a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
new file mode 100644
index 0000000..650ca91
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "common/DepthPhotoProcessor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+
+void generateDepth16Buffer(const uint8_t* data, size_t size, std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+ FuzzedDataProvider dataProvider(data, size);
+ for (size_t i = 0; i < depth16Buffer->size(); i++) {
+ (*depth16Buffer)[i] = dataProvider.ConsumeIntegral<uint16_t>();
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ DepthPhotoInputFrame inputFrame;
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(data, size, &depth16Buffer);
+
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (data);
+ inputFrame.mMainJpegSize = size;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapWidth = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ processDepthPhotoFrame(
+ inputFrame,
+ depthPhotoBuffer.size(),
+ depthPhotoBuffer.data(),
+ &actualDepthPhotoSize);
+
+ return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
new file mode 100644
index 0000000..96bab4e
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "device3/DistortionMapper.h"
+#include <camera/CameraMetadata.h>
+
+using namespace android;
+using namespace android::camera3;
+
+int32_t testActiveArray[] = {100, 100, 1000, 750};
+float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
+float identityDistortion[] = { 0.f, 0.f, 0.f, 0.f, 0.f};
+
+void setupTestMapper(DistortionMapper *m,
+ float distortion[5], float intrinsics[5],
+ int32_t activeArray[4], int32_t preCorrectionActiveArray[4]) {
+ CameraMetadata deviceInfo;
+
+ deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+ preCorrectionActiveArray, 4);
+
+ deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+ activeArray, 4);
+
+ deviceInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+ intrinsics, 5);
+
+ deviceInfo.update(ANDROID_LENS_DISTORTION,
+ distortion, 5);
+
+ m->setupStaticInfo(deviceInfo);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
+
+ DistortionMapper m;
+ setupTestMapper(&m, identityDistortion, testICal,
+ /*activeArray*/ testActiveArray,
+ /*preCorrectionActiveArray*/ testActiveArray);
+
+ bool clamp = fdp.ConsumeBool();
+ bool simple = fdp.ConsumeBool();
+ std::vector<int32_t> input;
+ for (int index = 0; fdp.remaining_bytes() > 0; index++) {
+ input.push_back(fdp.ConsumeIntegral<int32_t>());
+ }
+
+ // The size argument counts how many coordinate pairs there are, so
+ // it is expected to be 1/2 the size of the input.
+ m.mapCorrectedToRaw(input.data(), input.size()/2, clamp, simple);
+
+ return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..1eb37d0
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..75e0371
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..461d613
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..42498e2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..233ff78
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
new file mode 100644
index 0000000..f083f75
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..0ef0ef2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..d93b86f
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..297ea1c
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index bf89ca5..2509e6c 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -28,7 +28,7 @@
namespace frameworks {
namespace cameraservice {
namespace device {
-namespace V2_0 {
+namespace V2_1 {
namespace implementation {
using hardware::cameraservice::utils::conversion::convertToHidl;
@@ -115,7 +115,7 @@
// is guaranteed to be called serially by the client if it decides to
// use fmq.
if (e.settings.getDiscriminator() ==
- FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
+ V2_0::FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
/**
* Get settings from the fmq.
*/
@@ -196,6 +196,12 @@
Return<HStatus> HidlCameraDeviceUser::endConfigure(StreamConfigurationMode operatingMode,
const hidl_vec<uint8_t>& sessionParams) {
+ return endConfigure_2_1(operatingMode, sessionParams, systemTime());
+}
+
+Return<HStatus> HidlCameraDeviceUser::endConfigure_2_1(StreamConfigurationMode operatingMode,
+ const hidl_vec<uint8_t>& sessionParams,
+ nsecs_t startTimeNs) {
android::CameraMetadata cameraMetadata;
if (!convertFromHidl(sessionParams, &cameraMetadata)) {
return HStatus::ILLEGAL_ARGUMENT;
@@ -203,7 +209,8 @@
std::vector<int> offlineStreamIds;
binder::Status ret = mDeviceRemote->endConfigure(convertFromHidl(operatingMode),
- cameraMetadata, &offlineStreamIds);
+ cameraMetadata, ns2ms(startTimeNs),
+ &offlineStreamIds);
return B2HStatus(ret);
}
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index c3a80fe..0e2ab3d 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -24,6 +24,7 @@
#include <android/frameworks/cameraservice/common/2.0/types.h>
#include <android/frameworks/cameraservice/service/2.0/types.h>
#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
+#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
#include <android/frameworks/cameraservice/device/2.0/types.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <fmq/MessageQueue.h>
@@ -36,7 +37,7 @@
namespace frameworks {
namespace cameraservice {
namespace device {
-namespace V2_0 {
+namespace V2_1 {
namespace implementation {
using frameworks::cameraservice::device::V2_0::StreamConfigurationMode;
@@ -50,7 +51,7 @@
using CaptureRequestMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
using TemplateId = frameworks::cameraservice::device::V2_0::TemplateId;
-using HCameraDeviceUser = device::V2_0::ICameraDeviceUser;
+using HCameraDeviceUser = device::V2_1::ICameraDeviceUser;
using HCameraMetadata = cameraservice::service::V2_0::CameraMetadata;
using HCaptureRequest = device::V2_0::CaptureRequest;
using HSessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
@@ -83,6 +84,10 @@
virtual Return<HStatus> endConfigure(StreamConfigurationMode operatingMode,
const hidl_vec<uint8_t>& sessionParams);
+ virtual Return<HStatus> endConfigure_2_1(StreamConfigurationMode operatingMode,
+ const hidl_vec<uint8_t>& sessionParams,
+ nsecs_t startTimeNs);
+
virtual Return<HStatus> deleteStream(int32_t streamId) override;
virtual Return<void> createStream(const HOutputConfiguration& outputConfiguration,
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 9ea9526..aa1e95a 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -38,7 +38,7 @@
using hardware::Void;
using device::V2_0::implementation::H2BCameraDeviceCallbacks;
-using device::V2_0::implementation::HidlCameraDeviceUser;
+using device::V2_1::implementation::HidlCameraDeviceUser;
using service::V2_0::implementation::H2BCameraServiceListener;
using HCameraMetadataType = frameworks::cameraservice::common::V2_0::CameraMetadataType;
using HVendorTag = frameworks::cameraservice::common::V2_0::VendorTag;
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.h b/services/camera/libcameraservice/hidl/HidlCameraService.h
index 097f4c5..86a7cec 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.h
@@ -21,7 +21,7 @@
#include <thread>
#include <android/frameworks/cameraservice/common/2.0/types.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
+#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
#include <android/frameworks/cameraservice/service/2.0/types.h>
#include <android/frameworks/cameraservice/device/2.0/types.h>
@@ -42,7 +42,7 @@
using HCameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
using HCameraMetadata = frameworks::cameraservice::service::V2_0::CameraMetadata;
-using HCameraService = frameworks::cameraservice::service::V2_1::ICameraService;
+using HCameraService = frameworks::cameraservice::service::V2_2::ICameraService;
using HCameraServiceListener = frameworks::cameraservice::service::V2_0::ICameraServiceListener;
using HCameraServiceListener2_1 = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
using HStatus = frameworks::cameraservice::common::V2_0::Status;
diff --git a/services/camera/libcameraservice/tests/ClientManagerTest.cpp b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
new file mode 100644
index 0000000..6a38427
--- /dev/null
+++ b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "ClientManagerTest"
+
+#include "../utils/ClientManager.h"
+#include <gtest/gtest.h>
+
+using namespace android::resource_policy;
+
+struct TestClient {
+ TestClient(int id, int32_t cost, const std::set<int>& conflictingKeys, int32_t ownerId,
+ int32_t score, int32_t state, bool isVendorClient) :
+ mId(id), mCost(cost), mConflictingKeys(conflictingKeys),
+ mOwnerId(ownerId), mScore(score), mState(state), mIsVendorClient(isVendorClient) {};
+ int mId;
+ int32_t mCost; // Int 0..100
+ std::set<int> mConflictingKeys;
+ int32_t mOwnerId; // PID
+ int32_t mScore; // Priority
+ int32_t mState; // Foreground/background etc
+ bool mIsVendorClient;
+};
+
+using TestClientDescriptor = ClientDescriptor<int, TestClient>;
+using TestDescriptorPtr = std::shared_ptr<TestClientDescriptor>;
+
+TestDescriptorPtr makeDescFromTestClient(const TestClient& tc) {
+ return std::make_shared<TestClientDescriptor>(/*ID*/tc.mId, tc, tc.mCost, tc.mConflictingKeys,
+ tc.mScore, tc.mOwnerId, tc.mState, tc.mIsVendorClient);
+}
+
+class TestClientManager : public ClientManager<int, TestClient> {
+public:
+ TestClientManager() {}
+ virtual ~TestClientManager() {}
+};
+
+
+// Test ClientManager behavior when there is a single owner
+// The expected behavior is that if one owner (application or vendor) is trying
+// to open second camera, it may succeed or not, but the first opened camera
+// should never be evicted.
+TEST(ClientManagerTest, SingleOwnerMultipleCamera) {
+
+ TestClientManager cm;
+ TestClient cam0Client(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+ /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+ auto cam0Desc = makeDescFromTestClient(cam0Client);
+ auto evicted = cm.addAndEvict(cam0Desc);
+ ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+ TestClient cam1Client(/*ID*/1, /*cost*/100, /*conflicts*/{0},
+ /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+ auto cam1Desc = makeDescFromTestClient(cam1Client);
+
+ // 1. Check with conflicting devices, new client would be evicted
+ auto wouldBeEvicted = cm.wouldEvict(cam1Desc);
+ ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+ ASSERT_EQ(wouldBeEvicted[0]->getKey(), cam1Desc->getKey()) << "cam1 must be evicted";
+
+ cm.removeAll();
+
+ TestClient cam2Client(/*ID*/2, /*cost*/100, /*conflicts*/{},
+ /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+ auto cam2Desc = makeDescFromTestClient(cam2Client);
+ evicted = cm.addAndEvict(cam2Desc);
+ ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+ TestClient cam3Client(/*ID*/3, /*cost*/100, /*conflicts*/{},
+ /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+ auto cam3Desc = makeDescFromTestClient(cam3Client);
+
+ // 2. Check without conflicting devices, the pre-existing client won't be evicted
+ // In this case, the new client would be granted, but could later be rejected by HAL due to
+ // resource cost.
+ wouldBeEvicted = cm.wouldEvict(cam3Desc);
+ ASSERT_EQ(wouldBeEvicted.size(), 0u) << "Evicted list must be empty";
+
+ cm.removeAll();
+
+ evicted = cm.addAndEvict(cam0Desc);
+ ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+
+ TestClient cam0ClientNew(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+ /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+ auto cam0DescNew = makeDescFromTestClient(cam0ClientNew);
+ wouldBeEvicted = cm.wouldEvict(cam0DescNew);
+
+ // 3. Check opening the same camera twice will evict the older client
+ ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+ ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "cam0 (old) must be evicted";
+}
+
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
new file mode 100644
index 0000000..0557fcc
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -0,0 +1,244 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraServiceProxyWrapper"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <inttypes.h>
+#include <utils/Log.h>
+#include <binder/IServiceManager.h>
+
+#include "CameraServiceProxyWrapper.h"
+
+namespace android {
+
+using hardware::ICameraServiceProxy;
+using hardware::CameraSessionStats;
+
+Mutex CameraServiceProxyWrapper::sProxyMutex;
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
+
+Mutex CameraServiceProxyWrapper::mLock;
+std::map<String8, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
+ CameraServiceProxyWrapper::mSessionStatsMap;
+
+/**
+ * CameraSessionStatsWrapper functions
+ */
+
+// Push the freshly-opened session's initial stats snapshot to the proxy.
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
+    Mutex::Autolock lock(mLock);
+
+    updateProxyDeviceState(mSessionStats);
+}
+
+// Record the close latency, mark the session CLOSED, and push the final
+// stats snapshot to the proxy service.
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+    Mutex::Autolock lock(mLock);
+
+    mSessionStats.mLatencyMs = latencyMs;
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
+    updateProxyDeviceState(mSessionStats);
+}
+
+// Account for a stream configuration. An internal reconfiguration is only
+// counted; a client-visible configuration records its latency and the
+// requested session (operating) mode instead.
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
+        int operatingMode, bool internalReconfig, int32_t latencyMs) {
+    Mutex::Autolock lock(mLock);
+
+    if (!internalReconfig) {
+        mSessionStats.mLatencyMs = latencyMs;
+        mSessionStats.mSessionType = operatingMode;
+    } else {
+        mSessionStats.mInternalReconfigure++;
+    }
+}
+
+// Mark the session ACTIVE and report the transition to the proxy service.
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive() {
+    Mutex::Autolock l(mLock);
+
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
+    updateProxyDeviceState(mSessionStats);
+
+    // Reset mLatencyMs to -1 to distinguish between the 1st session
+    // after configuration, and all other sessions after configuration.
+    mSessionStats.mLatencyMs = -1;
+}
+
+// Record the ACTIVE -> IDLE transition along with the interval's request and
+// error counters plus the per-stream statistics, and push it to the proxy.
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
+    Mutex::Autolock l(mLock);
+
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
+    mSessionStats.mRequestCount = requestCount;
+    mSessionStats.mResultErrorCount = resultErrorCount;
+    mSessionStats.mDeviceError = deviceError;
+    mSessionStats.mStreamStats = streamStats;
+    updateProxyDeviceState(mSessionStats);
+
+    // Interval-scoped fields: clear them so the next active interval
+    // starts from a clean slate.
+    mSessionStats.mInternalReconfigure = 0;
+    mSessionStats.mStreamStats.clear();
+}
+
+/**
+ * CameraServiceProxyWrapper functions
+ */
+
+// Lazily look up and cache the ICameraServiceProxy binder, guarded by
+// sProxyMutex. May return nullptr when the proxy service is not (yet) up.
+sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
+#ifndef __BRILLO__
+    Mutex::Autolock al(sProxyMutex);
+    if (sCameraServiceProxy == nullptr) {
+        // Use checkService (non-blocking) rather than getService: the camera
+        // server normally starts before system_server and the proxy service,
+        // so getService's long retry timeout would be inappropriate here.
+        sp<IBinder> proxyBinder =
+                defaultServiceManager()->checkService(String16("media.camera.proxy"));
+        if (proxyBinder != nullptr) {
+            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(proxyBinder);
+        }
+    }
+#endif
+    return sCameraServiceProxy;
+}
+
+// Best-effort user-update ping; silently skipped when the proxy is absent.
+void CameraServiceProxyWrapper::pingCameraServiceProxy() {
+    sp<ICameraServiceProxy> proxy = getCameraServiceProxy();
+    if (proxy != nullptr) {
+        proxy->pingForUserUpdate();
+    }
+}
+
+// Forward a session-stats snapshot to the proxy; best-effort when the
+// proxy service is unavailable.
+void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+    sp<ICameraServiceProxy> proxy = getCameraServiceProxy();
+    if (proxy != nullptr) {
+        proxy->notifyCameraState(sessionStats);
+    }
+}
+
+// Record a stream configuration for camera `id`. The per-session wrapper is
+// looked up under mLock; the wrapper call itself happens outside the map lock
+// (the wrapper has its own lock).
+void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
+        int operatingMode, bool internalConfig, int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        // Use find() rather than operator[]: operator[] default-inserts a
+        // null entry into the map for an unknown camera id, leaking a stale
+        // key that later confuses logOpen/logClose.
+        auto it = mSessionStatsMap.find(id);
+        if (it == mSessionStatsMap.end() || it->second == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+        sessionStats = it->second;
+    }
+
+    ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
+            __FUNCTION__, id.c_str(), operatingMode, internalConfig, latencyMs);
+    sessionStats->onStreamConfigured(operatingMode, internalConfig, latencyMs);
+}
+
+// Record that camera `id`'s session became active.
+void CameraServiceProxyWrapper::logActive(const String8& id) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        // Use find() rather than operator[]: operator[] default-inserts a
+        // null entry into the map for an unknown camera id, leaking a stale
+        // key that later confuses logOpen/logClose.
+        auto it = mSessionStatsMap.find(id);
+        if (it == mSessionStatsMap.end() || it->second == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+        sessionStats = it->second;
+    }
+
+    ALOGV("%s: id %s", __FUNCTION__, id.c_str());
+    sessionStats->onActive();
+}
+
+// Record that camera `id`'s session became idle, along with the interval's
+// request/error counters and per-stream statistics.
+void CameraServiceProxyWrapper::logIdle(const String8& id,
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        // Use find() rather than operator[]: operator[] default-inserts a
+        // null entry into the map for an unknown camera id, leaking a stale
+        // key that later confuses logOpen/logClose.
+        auto it = mSessionStatsMap.find(id);
+        if (it != mSessionStatsMap.end()) {
+            sessionStats = it->second;
+        }
+    }
+
+    if (sessionStats == nullptr) {
+        ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+                __FUNCTION__, id.c_str());
+        return;
+    }
+
+    ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d",
+            __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError);
+    for (size_t i = 0; i < streamStats.size(); i++) {
+        ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
+                PRId64 ", startTimeMs %d" ,
+                __FUNCTION__, i, streamStats[i].mWidth, streamStats[i].mHeight,
+                streamStats[i].mRequestCount, streamStats[i].mErrorCount,
+                streamStats[i].mStartLatencyMs);
+    }
+
+    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, streamStats);
+}
+
+// Create and register a session-stats wrapper for a newly opened camera and
+// report the OPEN transition to the proxy service.
+void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
+        const String16& clientPackageName, int effectiveApiLevel, bool isNdk,
+        int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        if (mSessionStatsMap.count(id) > 0) {
+            ALOGE("%s: SessionStatsMap shouldn't contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+
+        // Collapse the effective API level down to the two buckets tracked
+        // by CameraSessionStats.
+        int apiLevel = (effectiveApiLevel == 2) ?
+                CameraSessionStats::CAMERA_API_LEVEL_2 :
+                CameraSessionStats::CAMERA_API_LEVEL_1;
+
+        sessionStats = std::make_shared<CameraSessionStatsWrapper>(String16(id), facing,
+                CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+                apiLevel, isNdk, latencyMs);
+        mSessionStatsMap.emplace(id, sessionStats);
+        ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
+    }
+
+    ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
+            __FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
+    sessionStats->onOpen();
+}
+
+// Unregister camera `id`'s session-stats wrapper and report the CLOSE
+// transition (with close latency) to the proxy service.
+void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        // Single find() instead of count() followed by operator[] — avoids a
+        // redundant second map lookup and any accidental insertion.
+        auto it = mSessionStatsMap.find(id);
+        if (it == mSessionStatsMap.end()) {
+            ALOGE("%s: SessionStatsMap should contain camera %s before it's closed",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+
+        sessionStats = it->second;
+        if (sessionStats == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+        mSessionStatsMap.erase(it);
+        ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+    }
+
+    ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
+    sessionStats->onClose(latencyMs);
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
new file mode 100644
index 0000000..9525935
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
+#define ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
+
+#include <android/hardware/ICameraServiceProxy.h>
+
+#include <utils/Mutex.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/StrongPointer.h>
+#include <utils/Timers.h>
+
+#include <camera/CameraSessionStats.h>
+
+namespace android {
+
+/**
+ * Static utility wrapper around hardware::ICameraServiceProxy.
+ *
+ * Forwards per-camera session statistics (open/close/configure/active/idle)
+ * to the camera service proxy in system_server, caching the proxy binder and
+ * keeping one CameraSessionStatsWrapper per open camera id.
+ */
+class CameraServiceProxyWrapper {
+private:
+    // Guard mCameraServiceProxy
+    static Mutex sProxyMutex;
+    // Cached interface to the camera service proxy in system service
+    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+
+    // Per-session statistics holder; serializes updates with its own lock
+    // and pushes state transitions to the proxy service.
+    struct CameraSessionStatsWrapper {
+        hardware::CameraSessionStats mSessionStats;
+        Mutex mLock; // lock for per camera session stats
+
+        CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
+                const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
+            mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
+            {}
+
+        void onOpen();
+        void onClose(int32_t latencyMs);
+        void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
+        void onActive();
+        void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                const std::vector<hardware::CameraStreamStats>& streamStats);
+    };
+
+    // Lock for camera session stats map
+    static Mutex mLock;
+    // Map from camera id to the camera's session statistics
+    static std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+
+    /**
+     * Update the session stats of a given camera device (open/close/active/idle) with
+     * the camera proxy service in the system service
+     */
+    static void updateProxyDeviceState(
+            const hardware::CameraSessionStats& sessionStats);
+
+    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+public:
+    // Open
+    static void logOpen(const String8& id, int facing,
+            const String16& clientPackageName, int apiLevel, bool isNdk,
+            int32_t latencyMs);
+
+    // Close
+    static void logClose(const String8& id, int32_t latencyMs);
+
+    // Stream configuration
+    static void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+            int32_t latencyMs);
+
+    // Session state becomes active
+    static void logActive(const String8& id);
+
+    // Session state becomes idle
+    static void logIdle(const String8& id,
+            int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            const std::vector<hardware::CameraStreamStats>& streamStats);
+
+    // Ping camera service proxy for user update
+    static void pingCameraServiceProxy();
+};
+
+} // android
+
+#endif // ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 35d25bf..64be6c5 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -496,6 +496,20 @@
evictList.clear();
evictList.push_back(client);
return evictList;
+ } else if (conflicting && owner == curOwner) {
+ // Pre-existing conflicting client with the same client owner exists
+ // Open the same device twice -> most recent open wins
+ // Otherwise let the existing client wins to avoid behaviors difference
+ // due to how HAL advertising conflicting devices (which is hidden from
+ // application)
+ if (curKey == key) {
+ evictList.push_back(i);
+ totalCost -= curCost;
+ } else {
+ evictList.clear();
+ evictList.push_back(client);
+ return evictList;
+ }
} else if (conflicting || ((totalCost > mMaxCost && curCost > 0) &&
(curPriority >= priority) &&
!(highestPriorityOwner == owner && owner == curOwner))) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 888671c..ba68a63 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -14,20 +14,493 @@
* limitations under the License.
*/
#include "SessionConfigurationUtils.h"
-#include "../api2/CameraDeviceClient.h"
+#include "../api2/DepthCompositeStream.h"
+#include "../api2/HeicCompositeStream.h"
+#include "common/CameraDeviceBase.h"
+#include "../CameraService.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3OutputStream.h"
+
+// Convenience methods for constructing binder::Status objects for error returns
+
+#define STATUS_ERROR(errorCode, errorString) \
+ binder::Status::fromServiceSpecificError(errorCode, \
+ String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+ binder::Status::fromServiceSpecificError(errorCode, \
+ String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+ __VA_ARGS__))
+
+// Convenience aliases for the helpers below.
+// (Removed a duplicated `using android::camera3::OutputStreamInfo;` line.)
+using android::camera3::OutputStreamInfo;
+using android::hardware::camera2::ICameraDeviceUser;
namespace android {
+// Squared Euclidean distance between (x0, y0) and (x1, y1).
+// Widen to 64-bit BEFORE subtracting so the deltas themselves cannot
+// overflow 32-bit arithmetic (undefined behavior) for extreme coordinates.
+int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+    int64_t d0 = static_cast<int64_t>(x0) - x1;
+    int64_t d1 = static_cast<int64_t>(y0) - y1;
+    return d0 * d0 + d1 * d1;
+}
+
+// Round (width, height) to the nearest supported dimensions for the given
+// format/dataspace, based on the device's static stream-configuration
+// metadata. Candidate sizes wider than ROUNDING_WIDTH_CAP are skipped unless
+// they match exactly. Returns false when the format has no listed
+// configurations; on success the rounded size is written to
+// *outWidth/*outHeight (either may be NULL).
+bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
+        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
+        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
+
+    // Depth and HEIC streams advertise their sizes under dedicated tags;
+    // everything else uses the scaler's stream configurations.
+    camera_metadata_ro_entry streamConfigs =
+            (dataSpace == HAL_DATASPACE_DEPTH) ?
+            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
+            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
+            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+
+    int32_t bestWidth = -1;
+    int32_t bestHeight = -1;
+
+    // Iterate through listed stream configurations and find the one with the smallest euclidean
+    // distance from the given dimensions for the given format.
+    // Entries are 4-int32 tuples: (format, width, height, direction); only
+    // the first three are consumed here.
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        int32_t fmt = streamConfigs.data.i32[i];
+        int32_t w = streamConfigs.data.i32[i + 1];
+        int32_t h = streamConfigs.data.i32[i + 2];
+
+        // Ignore input/output type for now
+        if (fmt == format) {
+            if (w == width && h == height) {
+                // Exact match; no rounding needed.
+                bestWidth = width;
+                bestHeight = height;
+                break;
+            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
+                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
+                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
+                            height))) {
+                bestWidth = w;
+                bestHeight = h;
+            }
+        }
+    }
+
+    if (bestWidth == -1) {
+        // Return false if no configurations for this format were listed
+        return false;
+    }
+
+    // Set the outputs to the closest width/height
+    if (outWidth != NULL) {
+        *outWidth = bestWidth;
+    }
+    if (outHeight != NULL) {
+        *outHeight = bestHeight;
+    }
+
+    // Return true if at least one configuration for this format was listed
+    return true;
+}
+
+// Returns true for pixel formats exposed through the public
+// ImageFormat/PixelFormat APIs; any other (vendor/custom) format yields
+// false. Used to decide whether dimension rounding may be applied.
+bool SessionConfigurationUtils::isPublicFormat(int32_t format)
+{
+    switch(format) {
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_RGBX_8888:
+        case HAL_PIXEL_FORMAT_RGB_888:
+        case HAL_PIXEL_FORMAT_RGB_565:
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_Y8:
+        case HAL_PIXEL_FORMAT_Y16:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_BLOB:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+        case HAL_PIXEL_FORMAT_YCbCr_422_I:
+            return true;
+        default:
+            return false;
+    }
+}
+
+/**
+ * Validate an IGraphicBufferProducer and wrap it in a Surface.
+ *
+ * Queries the producer's consumer usage and the resulting ANativeWindow's
+ * width/height/format/dataspace. When isStreamInfoValid is false, the queried
+ * values are written into streamInfo; otherwise each queried value must match
+ * the existing streamInfo (dataspace/usage are only compared for
+ * non-IMPLEMENTATION_DEFINED formats). Returns a service-specific error
+ * status on any mismatch or query failure.
+ */
+binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
+        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
+
+    // bufferProducer must be non-null
+    if (gbp == nullptr) {
+        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
+        ALOGW("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    // HACK b/10949105
+    // Query consumer usage bits to set async operation mode for
+    // GLConsumer using controlledByApp parameter.
+    bool useAsync = false;
+    uint64_t consumerUsage = 0;
+    status_t err;
+    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
+                "stream", __FUNCTION__, cameraId.string(), consumerUsage);
+        useAsync = true;
+    }
+
+    // A "flexible" consumer (eligible for dimension rounding below) must not
+    // carry encoder/renderscript usage and must have at least one of the
+    // SW-read / texture / composer bits set.
+    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+            GRALLOC_USAGE_RENDERSCRIPT;
+    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+            GraphicBuffer::USAGE_HW_TEXTURE |
+            GraphicBuffer::USAGE_HW_COMPOSER;
+    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+            (consumerUsage & allowedFlags) != 0;
+
+    surface = new Surface(gbp, useAsync);
+    ANativeWindow *anw = surface.get();
+
+    // Query the window's geometry and dataspace, bailing out with
+    // ERROR_INVALID_OPERATION on any query failure.
+    int width, height, format;
+    android_dataspace dataSpace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
+            reinterpret_cast<int*>(&dataSpace))) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
+                cameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+
+    // FIXME: remove this override since the default format should be
+    // IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
+    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
+            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
+            ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
+        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
+                __FUNCTION__, cameraId.string(), format);
+        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    }
+    // Round dimensions to the nearest dimensions available for this format
+    if (flexibleConsumer && isPublicFormat(format) &&
+            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
+            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
+        String8 msg = String8::format("Camera %s: No supported stream configurations with "
+                "format %#x defined, failed to create output stream",
+                cameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
+    // First surface of a stream: populate streamInfo from the queried values.
+    if (!isStreamInfoValid) {
+        streamInfo.width = width;
+        streamInfo.height = height;
+        streamInfo.format = format;
+        streamInfo.dataSpace = dataSpace;
+        streamInfo.consumerUsage = consumerUsage;
+        return binder::Status::ok();
+    }
+    // Subsequent surfaces: every property must match the stream's info.
+    if (width != streamInfo.width) {
+        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
+                cameraId.string(), width, streamInfo.width);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (height != streamInfo.height) {
+        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
+                cameraId.string(), height, streamInfo.height);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != streamInfo.format) {
+        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
+                cameraId.string(), format, streamInfo.format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        if (dataSpace != streamInfo.dataSpace) {
+            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
+                    cameraId.string(), dataSpace, streamInfo.dataSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+        //At the native side, there isn't a way to check whether 2 surfaces come from the same
+        //surface class type. Use usage flag to approximate the comparison.
+        if (consumerUsage != streamInfo.consumerUsage) {
+            String8 msg = String8::format(
+                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
+                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+    }
+    return binder::Status::ok();
+}
+
+
+// Translate an internal OutputStreamInfo into the HIDL V3_4::Stream
+// representation. No-op when the caller passes a null output pointer.
+void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
+        camera3_stream_rotation_t rotation, String8 physicalId,
+        hardware::camera::device::V3_4::Stream *stream /*out*/) {
+    if (stream == nullptr) {
+        return;
+    }
+
+    // Adjust usage bits for ZSL streams before mapping (quirk handling).
+    auto usage = streamInfo.consumerUsage;
+    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &usage);
+
+    stream->v3_2.id = -1; // Invalid stream id
+    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
+    stream->v3_2.width = streamInfo.width;
+    stream->v3_2.height = streamInfo.height;
+    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(usage);
+    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
+    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
+    stream->physicalCameraId = std::string(physicalId.string());
+    stream->bufferSize = 0;
+}
+
+// Verify that `physicalCameraId` (when non-empty) is one of the logical
+// camera's advertised physical sub-cameras.
+binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId) {
+    // An empty physical id means the output targets the logical camera itself.
+    if (physicalCameraId.size() == 0) {
+        return binder::Status::ok();
+    }
+
+    bool supported = std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
+            physicalCameraId.string()) != physicalCameraIds.end();
+    if (!supported) {
+        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
+                logicalCameraId.string(), physicalCameraId.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    return binder::Status::ok();
+}
+
+// Validate the producer count and (for deferred consumers) the surface type
+// of an output configuration.
+binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType) {
+    // A stream may carry at most MAX_SURFACES_PER_STREAM producers, and needs
+    // at least one unless the consumer is deferred.
+    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
+        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
+                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
+    }
+    if ((numBufferProducers == 0) && (!deferredConsumer)) {
+        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
+    }
+
+    // A deferred consumer may only be backed by SurfaceView or SurfaceTexture.
+    if (deferredConsumer &&
+            (surfaceType != OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) &&
+            (surfaceType != OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE)) {
+        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    }
+
+    return binder::Status::ok();
+}
+
+// Validate the requested session operating mode. Rejects negative modes, and
+// rejects constrained high-speed mode when the device's static metadata does
+// not advertise the corresponding capability.
+binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId) {
+    if (operatingMode < 0) {
+        String8 msg = String8::format(
+            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    if (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE) {
+        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        bool supported = false;
+        // Scan the advertised capabilities for CONSTRAINED_HIGH_SPEED_VIDEO.
+        for (size_t i = 0; i < entry.count && !supported; ++i) {
+            supported = (entry.data.u8[i] ==
+                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
+        }
+        if (!supported) {
+            String8 msg = String8::format(
+                    "Camera %s: Try to create a constrained high speed configuration on a device"
+                    " that doesn't support it.", cameraId.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    msg.string());
+        }
+    }
+
+    return binder::Status::ok();
+}
+
binder::Status
SessionConfigurationUtils::convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {
-    // TODO: http://b/148329298 Move the other dependencies from
-    // CameraDeviceClient into SessionConfigurationUtils.
-    return CameraDeviceClient::convertToHALStreamCombination(sessionConfiguration, logicalCameraId,
-            deviceInfo, getMetadata, physicalCameraIds, streamConfiguration, earlyExit);
+
+    // Convert an AIDL SessionConfiguration into the HIDL V3_4 stream
+    // combination, validating the operating mode, surface types, physical
+    // camera ids, and each output surface along the way. *earlyExit is set to
+    // true (with an OK status) when a composite stream reports no internal
+    // streams, i.e. the combination is not supported.
+    auto operatingMode = sessionConfiguration.getOperatingMode();
+    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
+    if (!res.isOk()) {
+        return res;
+    }
+
+    if (earlyExit == nullptr) {
+        String8 msg("earlyExit nullptr");
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    *earlyExit = false;
+    // Map the (validated) operating mode onto the HIDL operation mode.
+    auto ret = Camera3Device::mapToStreamConfigurationMode(
+            static_cast<camera3_stream_configuration_mode_t> (operatingMode),
+            /*out*/ &streamConfiguration.operationMode);
+    if (ret != OK) {
+        String8 msg = String8::format(
+            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
+            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    // An input (reprocessing) stream is present only when all of its
+    // dimensions/format are positive; it occupies slot 0 when present.
+    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
+            (sessionConfiguration.getInputHeight() > 0) &&
+            (sessionConfiguration.getInputFormat() > 0);
+    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
+    size_t streamCount = outputConfigs.size();
+    streamCount = isInputValid ? streamCount + 1 : streamCount;
+    streamConfiguration.streams.resize(streamCount);
+    size_t streamIdx = 0;
+    if (isInputValid) {
+        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
+                hardware::camera::device::V3_2::StreamType::INPUT,
+                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
+                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
+                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
+                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
+                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
+                /*physicalId*/ nullptr, /*bufferSize*/0};
+    }
+
+    for (const auto &it : outputConfigs) {
+        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
+            it.getGraphicBufferProducers();
+        bool deferredConsumer = it.isDeferred();
+        String8 physicalCameraId = String8(it.getPhysicalCameraId());
+        size_t numBufferProducers = bufferProducers.size();
+        bool isStreamInfoValid = false;
+        OutputStreamInfo streamInfo;
+
+        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+        if (!res.isOk()) {
+            return res;
+        }
+        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
+                logicalCameraId);
+        if (!res.isOk()) {
+            return res;
+        }
+
+        // Deferred consumers have no producer yet; synthesize the stream info
+        // from the configured size and surface type.
+        if (deferredConsumer) {
+            streamInfo.width = it.getWidth();
+            streamInfo.height = it.getHeight();
+            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+            auto surfaceType = it.getSurfaceType();
+            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
+                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+            }
+            mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
+                    &streamConfiguration.streams[streamIdx++]);
+            isStreamInfoValid = true;
+
+            if (numBufferProducers == 0) {
+                continue;
+            }
+        }
+
+        for (auto& bufferProducer : bufferProducers) {
+            sp<Surface> surface;
+            // Validate against the physical camera's metadata when the output
+            // targets a physical sub-camera, otherwise the logical camera's.
+            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
+            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
+                    logicalCameraId,
+                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
+
+            if (!res.isOk())
+                return res;
+
+            if (!isStreamInfoValid) {
+                bool isDepthCompositeStream =
+                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
+                bool isHeicCompositeStream =
+                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
+                if (isDepthCompositeStream || isHeicCompositeStream) {
+                    // We need to take in to account that composite streams can have
+                    // additional internal camera streams.
+                    std::vector<OutputStreamInfo> compositeStreams;
+                    if (isDepthCompositeStream) {
+                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                                deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                                deviceInfo, &compositeStreams);
+                    }
+                    if (ret != OK) {
+                        String8 msg = String8::format(
+                                "Camera %s: Failed adding composite streams: %s (%d)",
+                                logicalCameraId.string(), strerror(-ret), ret);
+                        ALOGE("%s: %s", __FUNCTION__, msg.string());
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                    }
+
+                    if (compositeStreams.size() == 0) {
+                        // No internal streams means composite stream not
+                        // supported.
+                        *earlyExit = true;
+                        return binder::Status::ok();
+                    } else if (compositeStreams.size() > 1) {
+                        // Grow the stream list to hold the extra internal
+                        // streams contributed by the composite stream.
+                        streamCount += compositeStreams.size() - 1;
+                        streamConfiguration.streams.resize(streamCount);
+                    }
+
+                    for (const auto& compositeStream : compositeStreams) {
+                        mapStreamInfo(compositeStream,
+                                static_cast<camera3_stream_rotation_t> (it.getRotation()),
+                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                    }
+                } else {
+                    mapStreamInfo(streamInfo,
+                            static_cast<camera3_stream_rotation_t> (it.getRotation()),
+                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+                }
+                isStreamInfoValid = true;
+            }
+        }
+    }
+    return binder::Status::ok();
+
}
}// namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index fb519d9..6ce2cd7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -23,6 +23,9 @@
#include <camera/camera2/SubmitInfo.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <hardware/camera3.h>
+#include <device3/Camera3StreamInterface.h>
+
#include <stdint.h>
namespace android {
@@ -31,8 +34,43 @@
class SessionConfigurationUtils {
public:
+
+ static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+
+ // Find the closest dimensions for a given format in available stream configurations with
+ // a width <= ROUNDING_WIDTH_CAP
+ static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+ android_dataspace dataSpace, const CameraMetadata& info,
+ /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+
+ // Check whether the format is a public (non-custom) format
+ static bool isPublicFormat(int32_t format);
+
+ // Create a Surface from an IGraphicBufferProducer. Returns error if
+ // IGraphicBufferProducer's property doesn't match with streamInfo
+ static binder::Status createSurfaceFromGbp(
+ camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+ sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+ const String8 &cameraId, const CameraMetadata &physicalCameraMetadata);
+
+ static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
+ camera3_stream_rotation_t rotation, String8 physicalId,
+ hardware::camera::device::V3_4::Stream *stream /*out*/);
+
+ // Check that the physicalCameraId passed in is supported by the camera
+ // device.
+ static binder::Status checkPhysicalCameraId(
+ const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+ const String8 &logicalCameraId);
+
+ static binder::Status checkSurfaceType(size_t numBufferProducers,
+ bool deferredConsumer, int surfaceType);
+
+ static binder::Status checkOperatingMode(int operatingMode,
+ const CameraMetadata &staticInfo, const String8 &cameraId);
+
// utility function to convert AIDL SessionConfiguration to HIDL
- // streamConfiguration. Also checks for sanity of SessionConfiguration and
+ // streamConfiguration. Also checks for validity of SessionConfiguration and
// returns a non-ok binder::Status if the passed in session configuration
// isn't valid.
static binder::Status
@@ -41,6 +79,10 @@
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
bool *earlyExit);
+
+ static const int32_t MAX_SURFACES_PER_STREAM = 4;
+
+ static const int32_t ROUNDING_WIDTH_CAP = 1920;
};
} // android
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
new file mode 100644
index 0000000..83965c4
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraSessionStatsBuilder"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include "SessionStatsBuilder.h"
+
+namespace android {
+
+status_t SessionStatsBuilder::addStream(int id) {
+ std::lock_guard<std::mutex> l(mLock);
+ StreamStats stats;
+ mStatsMap.emplace(id, stats);
+ return OK;
+}
+
+status_t SessionStatsBuilder::removeStream(int id) {
+ std::lock_guard<std::mutex> l(mLock);
+ mStatsMap.erase(id);
+ return OK;
+}
+
+void SessionStatsBuilder::buildAndReset(int64_t* requestCount,
+ int64_t* errorResultCount, bool* deviceError,
+ std::map<int, StreamStats> *statsMap) {
+ std::lock_guard<std::mutex> l(mLock);
+ *requestCount = mRequestCount;
+ *errorResultCount = mErrorResultCount;
+ *deviceError = mDeviceError;
+ *statsMap = mStatsMap;
+
+ // Reset internal states
+ mRequestCount = 0;
+ mErrorResultCount = 0;
+ mCounterStopped = false;
+ mDeviceError = false;
+ for (auto& streamStats : mStatsMap) {
+ streamStats.second.mRequestedFrameCount = 0;
+ streamStats.second.mDroppedFrameCount = 0;
+ streamStats.second.mCounterStopped = false;
+ streamStats.second.mStartLatencyMs = 0;
+ }
+}
+
+void SessionStatsBuilder::startCounter(int id) {
+ std::lock_guard<std::mutex> l(mLock);
+ mStatsMap[id].mCounterStopped = false;
+}
+
+void SessionStatsBuilder::stopCounter(int id) {
+ std::lock_guard<std::mutex> l(mLock);
+ mStatsMap[id].mCounterStopped = true;
+}
+
+void SessionStatsBuilder::incCounter(int id, bool dropped, int32_t captureLatencyMs) {
+ std::lock_guard<std::mutex> l(mLock);
+ auto it = mStatsMap.find(id);
+ if (it != mStatsMap.end()) {
+ if (!it->second.mCounterStopped) {
+ it->second.mRequestedFrameCount++;
+ if (dropped) {
+ it->second.mDroppedFrameCount++;
+ } else if (it->second.mRequestedFrameCount == 1) {
+ // The capture latency for the first request.
+ it->second.mStartLatencyMs = captureLatencyMs;
+ }
+ }
+ }
+}
+
+void SessionStatsBuilder::stopCounter() {
+ std::lock_guard<std::mutex> l(mLock);
+ mCounterStopped = true;
+ for (auto& streamStats : mStatsMap) {
+ streamStats.second.mCounterStopped = true;
+ }
+}
+
+void SessionStatsBuilder::incResultCounter(bool dropped) {
+ std::lock_guard<std::mutex> l(mLock);
+ if (!mCounterStopped) {
+ mRequestCount ++;
+ if (dropped) mErrorResultCount++;
+ }
+}
+
+void SessionStatsBuilder::onDeviceError() {
+ std::lock_guard<std::mutex> l(mLock);
+ mDeviceError = true;
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
new file mode 100644
index 0000000..7943637
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
+#define ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
+
+#include <utils/Errors.h>
+
+#include <mutex>
+#include <map>
+
+namespace android {
+
+// Helper class to build stream stats
+struct StreamStats {
+ int64_t mRequestedFrameCount;
+ int64_t mDroppedFrameCount;
+ bool mCounterStopped;
+ int32_t mStartLatencyMs;
+
+ StreamStats() : mRequestedFrameCount(0),
+ mDroppedFrameCount(0),
+ mCounterStopped(false),
+ mStartLatencyMs(0) {}
+};
+
+// Helper class to build session stats
+class SessionStatsBuilder {
+public:
+
+ status_t addStream(int streamId);
+ status_t removeStream(int streamId);
+
+ // Return the session statistics and reset the internal states.
+ void buildAndReset(/*out*/int64_t* requestCount,
+ /*out*/int64_t* errorResultCount,
+ /*out*/bool* deviceError,
+ /*out*/std::map<int, StreamStats> *statsMap);
+
+ // Stream specific counter
+ void startCounter(int streamId);
+ void stopCounter(int streamId);
+ void incCounter(int streamId, bool dropped, int32_t captureLatencyMs);
+
+ // Session specific counter
+ void stopCounter();
+ void incResultCounter(bool dropped);
+ void onDeviceError();
+
+ SessionStatsBuilder() : mRequestCount(0), mErrorResultCount(0),
+ mCounterStopped(false), mDeviceError(false) {}
+private:
+ std::mutex mLock;
+ int64_t mRequestCount;
+ int64_t mErrorResultCount;
+ bool mCounterStopped;
+ bool mDeviceError;
+ // Map from stream id to stream statistics
+ std::map<int, StreamStats> mStatsMap;
+};
+
+}; // namespace android
+
+#endif // ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
diff --git a/services/camera/libcameraservice/utils/TraceHFR.h b/services/camera/libcameraservice/utils/TraceHFR.h
new file mode 100644
index 0000000..3a1900f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/TraceHFR.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_ENABLE_HFR_TRACES_H_
+#define ANDROID_SERVERS_ENABLE_HFR_TRACES_H_
+
+#include <utils/Trace.h>
+
+#ifdef HFR_ENABLE_TRACING
+#define ATRACE_HFR_CALL() ATRACE_CALL()
+#else
+#define ATRACE_HFR_CALL()
+#endif
+
+#endif
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index 4bf103c..dc0773b 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -1,6 +1,7 @@
cc_binary {
name: "mediaswcodec",
vendor_available: true,
+ min_sdk_version: "29",
srcs: [
"main_swcodecservice.cpp",
@@ -14,26 +15,10 @@
"libmedia_codecserviceregistrant",
],
- target: {
- android: {
- product_variables: {
- malloc_not_svelte: {
- // Scudo increases memory footprint, so only enable on
- // non-svelte devices.
- shared_libs: ["libc_scudo"],
- },
- },
- },
- },
-
header_libs: [
"libmedia_headers",
],
- init_rc: ["mediaswcodec.rc"],
-
- required: ["mediaswcodec.policy"],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/services/mediacodec/mediaswcodec.rc b/services/mediacodec/mediaswcodec.rc
deleted file mode 100644
index 3549666..0000000
--- a/services/mediacodec/mediaswcodec.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service media.swcodec /system/bin/mediaswcodec
- class main
- user mediacodec
- group camera drmrpc mediadrm
- updatable
- ioprio rt 4
- writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 05b7d22..03e1e41 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -35,17 +35,6 @@
"liblog",
"libavservices_minijail",
],
- target: {
- android: {
- product_variables: {
- malloc_not_svelte: {
- // Scudo increases memory footprint, so only enable on
- // non-svelte devices.
- shared_libs: ["libc_scudo"],
- },
- },
- },
- },
init_rc: ["mediaextractor.rc"],
cflags: [
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 74b63d5..3a27a43 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -1,4 +1,4 @@
-cc_library_shared {
+cc_library {
name: "libmedialogservice",
srcs: [
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
new file mode 100644
index 0000000..2afaaae
--- /dev/null
+++ b/services/medialog/fuzzer/Android.bp
@@ -0,0 +1,33 @@
+cc_fuzz {
+ name: "media_log_fuzzer",
+ static_libs: [
+ "libmedialogservice",
+ ],
+ srcs: [
+ "media_log_fuzzer.cpp",
+ ],
+ header_libs: [
+ "libmedia_headers",
+ ],
+ shared_libs: [
+ "libaudioutils",
+ "libbinder",
+ "liblog",
+ "libmediautils",
+ "libnblog",
+ "libutils",
+ ],
+ include_dirs: [
+ "frameworks/av/services/medialog",
+ ],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/services/medialog/fuzzer/README.md b/services/medialog/fuzzer/README.md
new file mode 100644
index 0000000..b79e5c8
--- /dev/null
+++ b/services/medialog/fuzzer/README.md
@@ -0,0 +1,50 @@
+# Fuzzer for libmedialogservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedialogservice is designed based on the understanding of the
+service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+medialogservice supports the following parameters:
+1. Writer name (parameter name: `writerNameIdx`)
+2. Log size (parameter name: `logSize`)
+3. Enable dump before unregister API (parameter name: `shouldDumpBeforeUnregister`)
+4. Size of string for log dump (parameter name: `numberOfLines`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `writerNameIdx` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
+| `logSize` | In the range `256 to 65536` | Value obtained from FuzzedDataProvider |
+| `shouldDumpBeforeUnregister` | 0. `0` 1. `1` | Value obtained from FuzzedDataProvider |
+| `numberOfLines` | In the range `0 to 65535` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+## Build
+
+This describes steps to build media_log_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) media_log_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some files to that folder
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/media_log_fuzzer/media_log_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/medialog/fuzzer/media_log_fuzzer.cpp b/services/medialog/fuzzer/media_log_fuzzer.cpp
new file mode 100644
index 0000000..bd50d0f
--- /dev/null
+++ b/services/medialog/fuzzer/media_log_fuzzer.cpp
@@ -0,0 +1,76 @@
+/**
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <private/android_filesystem_config.h>
+#include "MediaLogService.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr const char* kWriterNames[2] = {"FastMixer", "FastCapture"};
+constexpr size_t kMinSize = 0x100;
+constexpr size_t kMaxSize = 0x10000;
+constexpr size_t kLogMemorySize = 400 * 1024;
+constexpr size_t kMaxNumLines = USHRT_MAX;
+
+using namespace android;
+
+class MediaLogFuzzer {
+ public:
+ void init();
+ void process(const uint8_t* data, size_t size);
+
+ private:
+ sp<MemoryDealer> mMemoryDealer = nullptr;
+ sp<MediaLogService> mService = nullptr;
+};
+
+void MediaLogFuzzer::init() {
+ setuid(AID_MEDIA);
+ mService = new MediaLogService();
+ mMemoryDealer = new MemoryDealer(kLogMemorySize, "MediaLogFuzzer", MemoryHeapBase::READ_ONLY);
+}
+
+void MediaLogFuzzer::process(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fuzzedDataProvider(data, size);
+ size_t writerNameIdx =
+ fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, std::size(kWriterNames) - 1);
+ bool shouldDumpBeforeUnregister = fuzzedDataProvider.ConsumeBool();
+ size_t logSize = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+ sp<IMemory> logBuffer = mMemoryDealer->allocate(NBLog::Timeline::sharedSize(logSize));
+ Vector<String16> args;
+ size_t numberOfLines = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+ for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+ args.add(static_cast<String16>(fuzzedDataProvider.ConsumeRandomLengthString().c_str()));
+ }
+ const char* fileName = "logDumpFile";
+ int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+ fuzzedDataProvider.ConsumeData(logBuffer->unsecurePointer(), logBuffer->size());
+ mService->registerWriter(logBuffer, logSize, kWriterNames[writerNameIdx]);
+ if (shouldDumpBeforeUnregister) {
+ mService->dump(fd, args);
+ mService->unregisterWriter(logBuffer);
+ } else {
+ mService->unregisterWriter(logBuffer);
+ mService->dump(fd, args);
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ MediaLogFuzzer mediaLogFuzzer = MediaLogFuzzer();
+ mediaLogFuzzer.init();
+ mediaLogFuzzer.process(data, size);
+ return 0;
+}
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/AnalyticsState.h
index b648947..09c0b4c 100644
--- a/services/mediametrics/AnalyticsState.h
+++ b/services/mediametrics/AnalyticsState.h
@@ -93,7 +93,7 @@
int32_t ll = lines;
if (ll > 0) {
- ss << "TransactionLog:\n";
+ ss << "TransactionLog: gc(" << mTransactionLog.getGarbageCollectionCount() << ")\n";
--ll;
}
if (ll > 0) {
@@ -102,7 +102,7 @@
ll -= l;
}
if (ll > 0) {
- ss << "TimeMachine:\n";
+ ss << "TimeMachine: gc(" << mTimeMachine.getGarbageCollectionCount() << ")\n";
--ll;
}
if (ll > 0) {
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index c87fbd9..3bb70f1 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -1,8 +1,94 @@
// Media Statistics service
//
+tidy_errors = [
+ // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+ // For many categories, the checks are too many to specify individually.
+ // Feel free to disable as needed - as warnings are generally ignored,
+ // we treat warnings as errors.
+ "android-*",
+ "bugprone-*",
+ "cert-*",
+ "clang-analyzer-security*",
+ "google-*",
+ "misc-*",
+ //"modernize-*", // explicitly list the modernize as they can be subjective.
+ "modernize-avoid-bind",
+ //"modernize-avoid-c-arrays", // std::array<> can be verbose
+ "modernize-concat-nested-namespaces",
+ //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+ "modernize-deprecated-ios-base-aliases",
+ "modernize-loop-convert",
+ "modernize-make-shared",
+ "modernize-make-unique",
+ "modernize-pass-by-value",
+ "modernize-raw-string-literal",
+ "modernize-redundant-void-arg",
+ "modernize-replace-auto-ptr",
+ "modernize-replace-random-shuffle",
+ "modernize-return-braced-init-list",
+ "modernize-shrink-to-fit",
+ "modernize-unary-static-assert",
+ "modernize-use-auto", // debatable - auto can obscure type
+ "modernize-use-bool-literals",
+ "modernize-use-default-member-init",
+ "modernize-use-emplace",
+ "modernize-use-equals-default",
+ "modernize-use-equals-delete",
+ "modernize-use-nodiscard",
+ "modernize-use-noexcept",
+ "modernize-use-nullptr",
+ "modernize-use-override",
+ //"modernize-use-trailing-return-type", // not necessarily more readable
+ "modernize-use-transparent-functors",
+ "modernize-use-uncaught-exceptions",
+ "modernize-use-using",
+ "performance-*",
+
+ // Remove some pedantic stylistic requirements.
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
+]
+
+cc_defaults {
+ name: "mediametrics_flags_defaults",
+ // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+ // https://clang.llvm.org/docs/DiagnosticsReference.html
+ cflags: [
+ "-Wall",
+ "-Wdeprecated",
+ "-Werror",
+ "-Werror=implicit-fallthrough",
+ "-Werror=sometimes-uninitialized",
+ "-Werror=conditional-uninitialized",
+ "-Wextra",
+ "-Wredundant-decls",
+ "-Wshadow",
+ "-Wstrict-aliasing",
+ "-fstrict-aliasing",
+ "-Wthread-safety",
+ //"-Wthread-safety-negative", // experimental - looks broken in R.
+ "-Wunreachable-code",
+ "-Wunreachable-code-break",
+ "-Wunreachable-code-return",
+ "-Wunused",
+ "-Wused-but-marked-unused",
+ ],
+ // https://clang.llvm.org/extra/clang-tidy/
+ tidy: true,
+ tidy_checks: tidy_errors,
+ tidy_checks_as_errors: tidy_errors,
+ tidy_flags: [
+ "-format-style='file'",
+ "--header-filter='frameworks/av/services/mediametrics/'",
+ ],
+}
+
cc_binary {
name: "mediametrics",
+ defaults: [
+ "mediametrics_flags_defaults",
+ ],
srcs: [
"main_mediametrics.cpp",
@@ -16,27 +102,26 @@
"libutils",
],
header_libs: [
+ "libaudioutils_headers",
"libmediametrics_headers",
],
init_rc: [
"mediametrics.rc",
],
-
- cflags: [
- "-Wall",
- "-Werror",
- "-Wextra",
- "-Wthread-safety",
- ],
}
-cc_library_shared {
+cc_library {
name: "libmediametricsservice",
+ defaults: [
+ "mediametrics_flags_defaults",
+ ],
srcs: [
"AudioAnalytics.cpp",
"AudioPowerUsage.cpp",
+ "AudioTypes.cpp",
+ "cleaner.cpp",
"iface_statsd.cpp",
"MediaMetricsService.cpp",
"statsd_audiopolicy.cpp",
@@ -46,8 +131,10 @@
"statsd_codec.cpp",
"statsd_drm.cpp",
"statsd_extractor.cpp",
+ "statsd_mediaparser.cpp",
"statsd_nuplayer.cpp",
"statsd_recorder.cpp",
+ "StringUtils.cpp"
],
proto: {
@@ -55,9 +142,11 @@
},
shared_libs: [
+ "libbase", // android logging
"libbinder",
"libcutils",
"liblog",
+ "libmedia_helper",
"libmediametrics",
"libmediautils",
"libmemunreachable",
@@ -73,11 +162,4 @@
include_dirs: [
"system/media/audio_utils/include",
],
-
- cflags: [
- "-Wall",
- "-Werror",
- "-Wextra",
- "-Wthread-safety",
- ],
}
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 6138d32..d78d1e3 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -16,20 +16,150 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioAnalytics"
+#include <android-base/logging.h>
#include <utils/Log.h>
#include "AudioAnalytics.h"
-#include "MediaMetricsService.h" // package info
+
#include <audio_utils/clock.h> // clock conversions
+#include <cutils/properties.h>
#include <statslog.h> // statsd
-// Enable for testing of delivery to statsd
-// #define STATSD
+#include "AudioTypes.h" // string to int conversions
+#include "MediaMetricsService.h" // package info
+#include "StringUtils.h"
+
+#define PROP_AUDIO_ANALYTICS_CLOUD_ENABLED "persist.audio.analytics.cloud.enabled"
namespace android::mediametrics {
-AudioAnalytics::AudioAnalytics()
+// Enable for testing of delivery to statsd. Caution if this is enabled, all protos MUST exist.
+#define STATSD_ENABLE
+
+#ifdef STATSD_ENABLE
+#define CONDITION(INT_VALUE) (INT_VALUE) // allow value
+#else
+#define CONDITION(INT_VALUE) (int(0)) // mask value since the proto may not be defined yet.
+#endif
+
+// Maximum length of a device name.
+// static constexpr size_t STATSD_DEVICE_NAME_MAX_LENGTH = 32; // unused since we suppress
+
+// Transmit Enums to statsd in integer or strings (this must match the atoms.proto)
+static constexpr bool STATSD_USE_INT_FOR_ENUM = false;
+
+// derive types based on integer or strings.
+using short_enum_type_t = std::conditional_t<STATSD_USE_INT_FOR_ENUM, int32_t, std::string>;
+using long_enum_type_t = std::conditional_t<STATSD_USE_INT_FOR_ENUM, int64_t, std::string>;
+
+// Convert std::string to char *
+template <typename T>
+auto ENUM_EXTRACT(const T& x) {
+ if constexpr (std::is_same_v<std::decay_t<T>, std::string>) {
+ return x.c_str();
+ } else {
+ return x;
+ }
+}
+
+static constexpr const auto LOG_LEVEL = android::base::VERBOSE;
+
+static constexpr int PREVIOUS_STATE_EXPIRE_SEC = 60 * 60; // 1 hour.
+
+static constexpr const char * SUPPRESSED = "SUPPRESSED";
+
+/*
+ * For logging purposes, we list all of the MediaMetrics atom fields,
+ * which can then be associated with consecutive arguments to the statsd write.
+ */
+
+static constexpr const char * const AudioRecordDeviceUsageFields[] = {
+ "mediametrics_audiorecorddeviceusage_reported", // proto number
+ "devices",
+ "device_names",
+ "device_time_nanos",
+ "encoding",
+ "frame_count",
+ "interval_count",
+ "sample_rate",
+ "flags",
+ "package_name",
+ "selected_device_id",
+ "caller",
+ "source",
+};
+
+static constexpr const char * const AudioThreadDeviceUsageFields[] = {
+ "mediametrics_audiothreaddeviceusage_reported",
+ "devices",
+ "device_names",
+ "device_time_nanos",
+ "encoding",
+ "frame_count",
+ "interval_count",
+ "sample_rate",
+ "flags",
+ "xruns",
+ "type",
+};
+
+static constexpr const char * const AudioTrackDeviceUsageFields[] = {
+ "mediametrics_audiotrackdeviceusage_reported",
+ "devices",
+ "device_names",
+ "device_time_nanos",
+ "encoding",
+ "frame_count",
+ "interval_count",
+ "sample_rate",
+ "flags",
+ "xruns",
+ "package_name",
+ "device_latency_millis",
+ "device_startup_millis",
+ "device_volume",
+ "selected_device_id",
+ "stream_type",
+ "usage",
+ "content_type",
+ "caller",
+ "traits",
+};
+
+static constexpr const char * const AudioDeviceConnectionFields[] = {
+ "mediametrics_audiodeviceconnection_reported",
+ "input_devices",
+ "output_devices",
+ "device_names",
+ "result",
+ "time_to_connect_millis",
+ "connection_count",
+};
+
+/**
+ * sendToStatsd is a helper method that sends the arguments to statsd
+ * and returns a pair { result, summary_string }.
+ */
+template <size_t N, typename ...Types>
+std::pair<int, std::string> sendToStatsd(const char * const (& fields)[N], Types ... args)
{
+ int result = 0;
+ std::stringstream ss;
+
+#ifdef STATSD_ENABLE
+ result = android::util::stats_write(args...);
+ ss << "result:" << result;
+#endif
+ ss << " { ";
+ stringutils::fieldPrint(ss, fields, args...);
+ ss << "}";
+ return { result, ss.str() };
+}
+
+AudioAnalytics::AudioAnalytics()
+ : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
+{
+ SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
ALOGD("%s", __func__);
// Add action to save AnalyticsState if audioserver is restarted.
@@ -47,6 +177,19 @@
// to end of full expression.
mAnalyticsState->clear(); // TODO: filter the analytics state.
// Perhaps report this.
+
+ // Set up a timer to expire the previous audio state to save space.
+ // Use the transaction log size as a cookie to see if it is the
+ // same as before. A benign race is possible where a state is cleared early.
+ const size_t size = mPreviousAnalyticsState->transactionLog().size();
+ mTimedAction.postIn(
+ std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
+ if (mPreviousAnalyticsState->transactionLog().size() == size) {
+ ALOGD("expiring previous audio state after %d seconds.",
+ PREVIOUS_STATE_EXPIRE_SEC);
+ mPreviousAnalyticsState->clear(); // removes data from the state.
+ }
+ });
}));
// Handle device use record statistics
@@ -195,11 +338,26 @@
ll -= l;
}
+ if (ll > 0) {
+ // Print the statsd atoms we sent out.
+ const std::string statsd = mStatsdLog.dumpToString(" " /* prefix */, ll - 1);
+ const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
+ if ((size_t)ll >= n) {
+ if (n == 1) {
+ ss << "Statsd atoms: empty or truncated\n";
+ } else {
+ ss << "Statsd atoms:\n" << statsd;
+ }
+ ll -= n;
+ }
+ }
+
if (ll > 0 && prefix == nullptr) {
auto [s, l] = mAudioPowerUsage.dump(ll);
ss << s;
ll -= l;
}
+
return { ss.str(), lines - ll };
}
@@ -243,50 +401,72 @@
int32_t frameCount = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount);
- std::string inputDevices;
+ std::string inputDevicePairs;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevices);
+ key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevicePairs);
int32_t intervalCount = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_INTERVALCOUNT, &intervalCount);
- std::string outputDevices;
+ std::string outputDevicePairs;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+ key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevicePairs);
int32_t sampleRate = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate);
std::string flags;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_FLAGS, &flags);
+
// We may have several devices.
- // Strings allow us to mix input and output devices together.
- // TODO: review if we want to separate them.
- std::stringstream ss;
- for (const auto& devicePairs : { outputDevices, inputDevices }) {
- const auto devaddrvec = MediaMetricsService::getDeviceAddressPairs(devicePairs);
+ // Accumulate the bit flags for input and output devices.
+ std::stringstream oss;
+ long_enum_type_t outputDeviceBits{};
+ { // compute outputDevices
+ const auto devaddrvec = stringutils::getDeviceAddressPairs(outputDevicePairs);
for (const auto& [device, addr] : devaddrvec) {
- if (ss.tellp() > 0) ss << "|"; // delimit devices with '|'.
- ss << device;
+ if (oss.tellp() > 0) oss << "|"; // delimit devices with '|'.
+ oss << device;
+ outputDeviceBits += types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(device);
}
}
- std::string devices = ss.str();
+ const std::string outputDevices = oss.str();
+
+ std::stringstream iss;
+ long_enum_type_t inputDeviceBits{};
+ { // compute inputDevices
+ const auto devaddrvec = stringutils::getDeviceAddressPairs(inputDevicePairs);
+ for (const auto& [device, addr] : devaddrvec) {
+ if (iss.tellp() > 0) iss << "|"; // delimit devices with '|'.
+ iss << device;
+ inputDeviceBits += types::lookup<types::INPUT_DEVICE, long_enum_type_t>(device);
+ }
+ }
+ const std::string inputDevices = iss.str();
// Get connected device name if from bluetooth.
bool isBluetooth = false;
- std::string deviceNames; // we only have one device name at this time.
+
+ std::string inputDeviceNames; // not filled currently.
+ std::string outputDeviceNames;
if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
isBluetooth = true;
+ outputDeviceNames = SUPPRESSED;
+#if 0 // TODO(b/161554630) sanitize name
mAudioAnalytics.mAnalyticsState->timeMachine().get(
- "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &deviceNames);
- // We don't check if deviceName is sanitized.
- // TODO: remove reserved chars such as '|' and replace with a char like '_'.
+ "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &outputDeviceNames);
+ // Remove | if present
+ stringutils::replace(outputDeviceNames, "|", '?');
+ if (outputDeviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
+ outputDeviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
+ }
+#endif
}
switch (itemType) {
case RECORD: {
std::string callerName;
- mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName);
+ const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
std::string packageName;
int64_t versionCode = 0;
@@ -305,40 +485,47 @@
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_SOURCE, &source);
- ALOGD("(key=%s) id:%s endAudioIntervalGroup devices:%s deviceNames:%s "
- "deviceTimeNs:%lld encoding:%s frameCount:%d intervalCount:%d "
- "sampleRate:%d "
- "packageName:%s "
- "selectedDeviceId:%d "
- "callerName:%s source:%s",
- key.c_str(), id.c_str(), devices.c_str(), deviceNames.c_str(),
- (long long)deviceTimeNs, encoding.c_str(), frameCount, intervalCount,
- sampleRate,
- packageName.c_str(), selectedDeviceId,
- callerName.c_str(), source.c_str());
+ const auto callerNameForStats =
+ types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
+ const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
+ const auto flagsForStats = types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags);
+ const auto sourceForStats = types::lookup<types::SOURCE_TYPE, short_enum_type_t>(source);
-#ifdef STATSD
- if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , devices.c_str()
- , deviceNames.c_str()
+ LOG(LOG_LEVEL) << "key:" << key
+ << " id:" << id
+ << " inputDevices:" << inputDevices << "(" << inputDeviceBits
+ << ") inputDeviceNames:" << inputDeviceNames
+ << " deviceTimeNs:" << deviceTimeNs
+ << " encoding:" << encoding << "(" << encodingForStats
+ << ") frameCount:" << frameCount
+ << " intervalCount:" << intervalCount
+ << " sampleRate:" << sampleRate
+ << " flags:" << flags << "(" << flagsForStats
+ << ") packageName:" << packageName
+ << " selectedDeviceId:" << selectedDeviceId
+ << " callerName:" << callerName << "(" << callerNameForStats
+ << ") source:" << source << "(" << sourceForStats << ")";
+ if (clientCalled // only log if client app called AudioRecord.
+ && mAudioAnalytics.mDeliverStatistics) {
+ const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
+ , ENUM_EXTRACT(inputDeviceBits)
+ , inputDeviceNames.c_str()
, deviceTimeNs
- , encoding.c_str()
+ , ENUM_EXTRACT(encodingForStats)
, frameCount
, intervalCount
, sampleRate
- , flags.c_str()
+ , ENUM_EXTRACT(flagsForStats)
, packageName.c_str()
, selectedDeviceId
- , callerName.c_str()
- , source.c_str()
+ , ENUM_EXTRACT(callerNameForStats)
+ , ENUM_EXTRACT(sourceForStats)
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
} break;
case THREAD: {
std::string type;
@@ -347,39 +534,52 @@
int32_t underrun = 0; // zero for record types
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_UNDERRUN, &underrun);
- ALOGD("(key=%s) id:%s endAudioIntervalGroup devices:%s deviceNames:%s "
- "deviceTimeNs:%lld encoding:%s frameCount:%d intervalCount:%d "
- "sampleRate:%d underrun:%d "
- "flags:%s type:%s",
- key.c_str(), id.c_str(), devices.c_str(), deviceNames.c_str(),
- (long long)deviceTimeNs, encoding.c_str(), frameCount, intervalCount,
- sampleRate, underrun,
- flags.c_str(), type.c_str());
-#ifdef STATSD
+
+ const bool isInput = types::isInputThreadType(type);
+ const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
+ const auto flagsForStats =
+ (isInput ? types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags)
+ : types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags));
+ const auto typeForStats = types::lookup<types::THREAD_TYPE, short_enum_type_t>(type);
+
+ LOG(LOG_LEVEL) << "key:" << key
+ << " id:" << id
+ << " inputDevices:" << inputDevices << "(" << inputDeviceBits
+ << ") outputDevices:" << outputDevices << "(" << outputDeviceBits
+ << ") inputDeviceNames:" << inputDeviceNames
+ << " outputDeviceNames:" << outputDeviceNames
+ << " deviceTimeNs:" << deviceTimeNs
+ << " encoding:" << encoding << "(" << encodingForStats
+ << ") frameCount:" << frameCount
+ << " intervalCount:" << intervalCount
+ << " sampleRate:" << sampleRate
+ << " underrun:" << underrun
+ << " flags:" << flags << "(" << flagsForStats
+ << ") type:" << type << "(" << typeForStats
+ << ")";
if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , devices.c_str()
- , deviceNames.c_str()
+ const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+ , isInput ? ENUM_EXTRACT(inputDeviceBits) : ENUM_EXTRACT(outputDeviceBits)
+ , isInput ? inputDeviceNames.c_str() : outputDeviceNames.c_str()
, deviceTimeNs
- , encoding.c_str()
+ , ENUM_EXTRACT(encodingForStats)
, frameCount
, intervalCount
, sampleRate
- , flags.c_str()
-
+ , ENUM_EXTRACT(flagsForStats)
, underrun
- , type.c_str()
+ , ENUM_EXTRACT(typeForStats)
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
} break;
case TRACK: {
std::string callerName;
- mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName);
+ const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
+
std::string contentType;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_CONTENTTYPE, &contentType);
@@ -413,6 +613,9 @@
std::string streamType;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_STREAMTYPE, &streamType);
+ std::string traits;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_TRAITS, &traits);
int32_t underrun = 0;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_UNDERRUN, &underrun);
@@ -420,48 +623,69 @@
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_USAGE, &usage);
- ALOGD("(key=%s) id:%s endAudioIntervalGroup devices:%s deviceNames:%s "
- "deviceTimeNs:%lld encoding:%s frameCount:%d intervalCount:%d "
- "sampleRate:%d underrun:%d "
- "callerName:%s contentType:%s "
- "deviceLatencyMs:%lf deviceStartupMs:%lf deviceVolume:%lf "
- "packageName:%s playbackPitch:%lf playbackSpeed:%lf "
- "selectedDeviceId:%d streamType:%s usage:%s",
- key.c_str(), id.c_str(), devices.c_str(), deviceNames.c_str(),
- (long long)deviceTimeNs, encoding.c_str(), frameCount, intervalCount,
- sampleRate, underrun,
- callerName.c_str(), contentType.c_str(),
- deviceLatencyMs, deviceStartupMs, deviceVolume,
- packageName.c_str(), playbackPitch, playbackSpeed,
- selectedDeviceId, streamType.c_str(), usage.c_str());
-#ifdef STATSD
- if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , devices.c_str()
- , deviceNames.c_str()
+ const auto callerNameForStats =
+ types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
+ const auto contentTypeForStats =
+ types::lookup<types::CONTENT_TYPE, short_enum_type_t>(contentType);
+ const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
+ const auto flagsForStats = types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags);
+ const auto streamTypeForStats =
+ types::lookup<types::STREAM_TYPE, short_enum_type_t>(streamType);
+ const auto traitsForStats =
+ types::lookup<types::TRACK_TRAITS, short_enum_type_t>(traits);
+ const auto usageForStats = types::lookup<types::USAGE, short_enum_type_t>(usage);
+
+ LOG(LOG_LEVEL) << "key:" << key
+ << " id:" << id
+ << " outputDevices:" << outputDevices << "(" << outputDeviceBits
+ << ") outputDeviceNames:" << outputDeviceNames
+ << " deviceTimeNs:" << deviceTimeNs
+ << " encoding:" << encoding << "(" << encodingForStats
+ << ") frameCount:" << frameCount
+ << " intervalCount:" << intervalCount
+ << " sampleRate:" << sampleRate
+ << " underrun:" << underrun
+ << " flags:" << flags << "(" << flagsForStats
+ << ") callerName:" << callerName << "(" << callerNameForStats
+ << ") contentType:" << contentType << "(" << contentTypeForStats
+ << ") deviceLatencyMs:" << deviceLatencyMs
+ << " deviceStartupMs:" << deviceStartupMs
+ << " deviceVolume:" << deviceVolume
+ << " packageName:" << packageName
+ << " playbackPitch:" << playbackPitch
+ << " playbackSpeed:" << playbackSpeed
+ << " selectedDeviceId:" << selectedDeviceId
+ << " streamType:" << streamType << "(" << streamTypeForStats
+ << ") traits:" << traits << "(" << traitsForStats
+ << ") usage:" << usage << "(" << usageForStats
+ << ")";
+ if (clientCalled // only log if client app called AudioTracks
+ && mAudioAnalytics.mDeliverStatistics) {
+ const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
+ , ENUM_EXTRACT(outputDeviceBits)
+ , outputDeviceNames.c_str()
, deviceTimeNs
- , encoding.c_str()
+ , ENUM_EXTRACT(encodingForStats)
, frameCount
, intervalCount
, sampleRate
- , flags.c_str()
+ , ENUM_EXTRACT(flagsForStats)
, underrun
-
, packageName.c_str()
, (float)deviceLatencyMs
, (float)deviceStartupMs
, (float)deviceVolume
, selectedDeviceId
- , streamType.c_str()
- , usage.c_str()
- , contentType.c_str()
- , callerName.c_str()
+ , ENUM_EXTRACT(streamTypeForStats)
+ , ENUM_EXTRACT(usageForStats)
+ , ENUM_EXTRACT(contentTypeForStats)
+ , ENUM_EXTRACT(callerNameForStats)
+ , ENUM_EXTRACT(traitsForStats)
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
} break;
}
@@ -490,7 +714,6 @@
item->get(AMEDIAMETRICS_PROP_NAME, &name);
ALOGD("(key=%s) a2dp connected device:%s atNs:%lld",
key.c_str(), name.c_str(), (long long)atNs);
-
}
void AudioAnalytics::DeviceConnection::createPatch(
@@ -502,31 +725,43 @@
item->get(AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH_A2DP") != std::string::npos) {
// TODO compare address
- int64_t timeDiff = item->getTimestamp();
+ int64_t timeDiffNs = item->getTimestamp();
if (mA2dpConnectionRequestNs == 0) {
ALOGD("%s: A2DP create patch didn't see a connection request", __func__);
- timeDiff -= mA2dpConnectionServiceNs;
+ timeDiffNs -= mA2dpConnectionServiceNs;
} else {
- timeDiff -= mA2dpConnectionRequestNs;
+ timeDiffNs -= mA2dpConnectionRequestNs;
}
- ALOGD("(key=%s) A2DP device connection time: %lld", key.c_str(), (long long)timeDiff);
+
mA2dpConnectionRequestNs = 0;
mA2dpConnectionServiceNs = 0;
++mA2dpConnectionSuccesses;
-#ifdef STATSD
+ const auto connectionTimeMs = float(timeDiffNs * 1e-6);
+
+ const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
+ "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
+
+ LOG(LOG_LEVEL) << "key:" << key
+ << " A2DP SUCCESS"
+ << " outputDevices:" << outputDeviceBits
+ << " deviceName:" << mA2dpDeviceName
+ << " connectionTimeMs:" << connectionTimeMs;
if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"
- , android::util::MEDIAMETRICS_AUDIO_DEVICE_CONNECTION_REPORTED__RESULT__SUCCESS
- , /* connection_time_ms */ timeDiff * 1e-6 /* NS to MS */
+ const long_enum_type_t inputDeviceBits{};
+
+ const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ , ENUM_EXTRACT(inputDeviceBits)
+ , ENUM_EXTRACT(outputDeviceBits)
+ , mA2dpDeviceName.c_str()
+ , types::DEVICE_CONNECTION_RESULT_SUCCESS
+ , connectionTimeMs
, /* connection_count */ 1
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
}
}
@@ -538,13 +773,17 @@
std::string state;
item->get(AMEDIAMETRICS_PROP_STATE, &state);
if (state != "connected") return;
+
+ std::string name;
+ item->get(AMEDIAMETRICS_PROP_NAME, &name);
{
std::lock_guard l(mLock);
mA2dpConnectionRequestNs = atNs;
++mA2dpConnectionRequests;
+ mA2dpDeviceName = SUPPRESSED; // TODO(b/161554630) sanitize name
}
- ALOGD("(key=%s) a2dp connection request atNs:%lld",
- key.c_str(), (long long)atNs);
+ ALOGD("(key=%s) a2dp connection name:%s request atNs:%lld",
+ key.c_str(), name.c_str(), (long long)atNs);
// TODO: attempt to cancel a timed event, rather than let it expire.
mAudioAnalytics.mTimedAction.postIn(std::chrono::seconds(5), [this](){ expire(); });
}
@@ -552,46 +791,56 @@
void AudioAnalytics::DeviceConnection::expire() {
std::lock_guard l(mLock);
if (mA2dpConnectionRequestNs == 0) return; // ignore (this was an internal connection).
+
+ const long_enum_type_t inputDeviceBits{};
+ const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
+ "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
+
if (mA2dpConnectionServiceNs == 0) {
- ALOGD("A2DP device connection service cancels");
++mA2dpConnectionJavaServiceCancels; // service did not connect to A2DP
-#ifdef STATSD
+ LOG(LOG_LEVEL) << "A2DP CANCEL"
+ << " outputDevices:" << outputDeviceBits
+ << " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"
- , android::util::MEDIAMETRICS_AUDIO_DEVICE_CONNECTION_REPORTED__RESULT__JAVA_SERVICE_CANCEL
+ const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ , ENUM_EXTRACT(inputDeviceBits)
+ , ENUM_EXTRACT(outputDeviceBits)
+ , mA2dpDeviceName.c_str()
+ , types::DEVICE_CONNECTION_RESULT_JAVA_SERVICE_CANCEL
, /* connection_time_ms */ 0.f
, /* connection_count */ 1
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
return;
}
// AudioFlinger didn't play - an expiration may occur because there is no audio playing.
// Should we check elsewhere?
// TODO: disambiguate this case.
- ALOGD("A2DP device connection expired, state unknown");
mA2dpConnectionRequestNs = 0;
mA2dpConnectionServiceNs = 0;
++mA2dpConnectionUnknowns; // connection result unknown
-#ifdef STATSD
+
+ LOG(LOG_LEVEL) << "A2DP UNKNOWN"
+ << " outputDevices:" << outputDeviceBits
+ << " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
- (void)android::util::stats_write(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED
- /* timestamp, */
- /* mediaApexVersion, */
- , "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"
- , android::util::MEDIAMETRICS_AUDIO_DEVICE_CONNECTION_REPORTED__RESULT__UNKNOWN
+ const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ , ENUM_EXTRACT(inputDeviceBits)
+ , ENUM_EXTRACT(outputDeviceBits)
+ , mA2dpDeviceName.c_str()
+ , types::DEVICE_CONNECTION_RESULT_UNKNOWN
, /* connection_time_ms */ 0.f
, /* connection_count */ 1
);
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
}
-#endif
}
-} // namespace android
+} // namespace android::mediametrics
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index 9089d6f..df097b1 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,6 +17,7 @@
#pragma once
#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
#include "AnalyticsActions.h"
#include "AnalyticsState.h"
#include "AudioPowerUsage.h"
@@ -109,19 +110,21 @@
*/
std::string getThreadFromTrack(const std::string& track) const;
- const bool mDeliverStatistics __unused = true;
+ const bool mDeliverStatistics;
// Actions is individually locked
AnalyticsActions mActions;
// AnalyticsState is individually locked, and we use SharedPtrWrap
// to allow safe access even if the shared pointer changes underneath.
-
+ // These wrap pointers always point to a valid state object.
SharedPtrWrap<AnalyticsState> mAnalyticsState;
SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
TimedAction mTimedAction; // locked internally
+ SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+
// DeviceUse is a nested class which handles audio device usage accounting.
// We define this class at the end to ensure prior variables all properly constructed.
// TODO: Track / Thread interaction
@@ -173,6 +176,7 @@
AudioAnalytics &mAudioAnalytics;
mutable std::mutex mLock;
+ std::string mA2dpDeviceName;
int64_t mA2dpConnectionRequestNs GUARDED_BY(mLock) = 0; // Time for BT service request.
int64_t mA2dpConnectionServiceNs GUARDED_BY(mLock) = 0; // Time audio service agrees.
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index e311bc8..34be0b9 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -20,6 +20,7 @@
#include "AudioAnalytics.h"
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include <map>
#include <sstream>
#include <string>
@@ -27,13 +28,13 @@
#include <cutils/properties.h>
#include <statslog.h>
#include <sys/timerfd.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
// property to disable audio power use metrics feature, default is enabled
#define PROP_AUDIO_METRICS_DISABLED "persist.media.audio_metrics.power_usage_disabled"
#define AUDIO_METRICS_DISABLED_DEFAULT (false)
-// property to set how long to send audio power use metrics data to westworld, default is 24hrs
+// property to set how long to send audio power use metrics data to statsd, default is 24hrs
#define PROP_AUDIO_METRICS_INTERVAL_HR "persist.media.audio_metrics.interval_hr"
#define INTERVAL_HR_DEFAULT (24)
@@ -117,7 +118,7 @@
int32_t AudioPowerUsage::deviceFromStringPairs(const std::string& device_strings) {
int32_t deviceMask = 0;
- const auto devaddrvec = MediaMetricsService::getDeviceAddressPairs(device_strings);
+ const auto devaddrvec = stringutils::getDeviceAddressPairs(device_strings);
for (const auto &[device, addr] : devaddrvec) {
int32_t combo_device = 0;
deviceFromString(device, combo_device);
@@ -141,13 +142,11 @@
double volume;
if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
-#ifdef STATSD
(void)android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
device,
(int32_t)(duration_ns / NANOS_PER_SECOND),
(float)volume,
type);
-#endif
}
bool AudioPowerUsage::saveAsItem_l(
@@ -162,7 +161,7 @@
return true; //ignore unknown device
}
- for (auto item : mItems) {
+ for (const auto& item : mItems) {
int32_t item_type = 0, item_device = 0;
double item_volume = 0.;
int64_t item_duration_ns = 0;
@@ -201,6 +200,34 @@
return true;
}
+// Splits a (possibly multi-bit) device mask into individual devices and
+// records each one via saveAsItem_l(). The INPUT_DEVICE_BIT marker is
+// preserved on every per-device mask. Caller must hold mLock (declared
+// REQUIRES(mLock) in the header; '_l' suffix convention).
+// NOTE(review): 'ret' reflects only the LAST saveAsItem_l() call; earlier
+// failures in the loop are not propagated — confirm this is intended.
+bool AudioPowerUsage::saveAsItems_l(
+ int32_t device, int64_t duration_ns, int32_t type, double average_vol)
+{
+ ALOGV("%s: (%#x, %d, %lld, %f)", __func__, device, type,
+ (long long)duration_ns, average_vol );
+ if (duration_ns == 0) {
+ return true; // skip duration 0 usage
+ }
+ if (device == 0) {
+ return true; //ignore unknown device
+ }
+
+ bool ret = false;
+ const int32_t input_bit = device & INPUT_DEVICE_BIT;
+ int32_t device_bits = device ^ input_bit;
+
+ while (device_bits != 0) {
+ int32_t tmp_device = device_bits & -device_bits; // get lowest bit
+ device_bits ^= tmp_device; // clear lowest bit
+ tmp_device |= input_bit; // restore input bit
+ ret = saveAsItem_l(tmp_device, duration_ns, type, average_vol);
+
+ ALOGV("%s: device %#x recorded, remaining device_bits = %#x", __func__,
+ tmp_device, device_bits);
+ }
+ return ret;
+}
+
void AudioPowerUsage::checkTrackRecord(
const std::shared_ptr<const mediametrics::Item>& item, bool isTrack)
{
@@ -246,7 +273,7 @@
ALOGV("device = %s => %d", device_strings.c_str(), device);
}
std::lock_guard l(mLock);
- saveAsItem_l(device, deviceTimeNs, type, deviceVolume);
+ saveAsItems_l(device, deviceTimeNs, type, deviceVolume);
}
void AudioPowerUsage::checkMode(const std::shared_ptr<const mediametrics::Item>& item)
@@ -261,9 +288,9 @@
const int64_t endCallNs = item->getTimestamp();
const int64_t durationNs = endCallNs - mDeviceTimeNs;
if (durationNs > 0) {
- mDeviceVolume = (mDeviceVolume * (mVolumeTimeNs - mDeviceTimeNs) +
- mVoiceVolume * (endCallNs - mVolumeTimeNs)) / durationNs;
- saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+ mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
+ mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / durationNs;
+ saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
}
} else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
mStartCallNs = item->getTimestamp(); // advisory only
@@ -289,8 +316,8 @@
const int64_t timeNs = item->getTimestamp();
const int64_t durationNs = timeNs - mDeviceTimeNs;
if (durationNs > 0) {
- mDeviceVolume = (mDeviceVolume * (mVolumeTimeNs - mDeviceTimeNs) +
- mVoiceVolume * (timeNs - mVolumeTimeNs)) / durationNs;
+ mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
+ mVoiceVolume * double(timeNs - mVolumeTimeNs)) / durationNs;
mVolumeTimeNs = timeNs;
}
}
@@ -320,9 +347,9 @@
const int64_t endDeviceNs = item->getTimestamp();
const int64_t durationNs = endDeviceNs - mDeviceTimeNs;
if (durationNs > 0) {
- mDeviceVolume = (mDeviceVolume * (mVolumeTimeNs - mDeviceTimeNs) +
- mVoiceVolume * (endDeviceNs - mVolumeTimeNs)) / durationNs;
- saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+ mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
+ mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / durationNs;
+ saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
}
// reset statistics
mDeviceVolume = 0;
@@ -393,4 +420,4 @@
return { ss.str(), slot };
}
-} // namespace android
+} // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index 446ff4f..b705a6a 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -85,6 +85,8 @@
REQUIRES(mLock);
static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
void collect();
+ bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
+ REQUIRES(mLock);
AudioAnalytics * const mAudioAnalytics;
const bool mDisabled;
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
new file mode 100644
index 0000000..5d044bb
--- /dev/null
+++ b/services/mediametrics/AudioTypes.cpp
@@ -0,0 +1,436 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AudioTypes.h"
+#include "StringUtils.h"
+#include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
+
+namespace android::mediametrics::types {
+
+// Maps AMEDIAMETRICS_PROP_CALLERNAME strings to stable statsd enum values.
+// Function-local static: built once on first use, read-only afterwards.
+const std::unordered_map<std::string, int32_t>& getAudioCallerNameMap() {
+ // DO NOT MODIFY VALUES (OK to add new ones).
+ // This may be found in frameworks/av/media/libmediametrics/include/MediaMetricsConstants.h
+ static std::unordered_map<std::string, int32_t> map{
+ {"unknown", 0}, // callerName not set
+ {"aaudio", 1}, // Native AAudio
+ {"java", 2}, // Java API layer
+ {"media", 3}, // libmedia (mediaplayer)
+ {"opensles", 4}, // Open SLES
+ {"rtp", 5}, // RTP communication
+ {"soundpool", 6}, // SoundPool
+ {"tonegenerator", 7}, // dial tones
+ // R values above.
+ };
+ return map;
+}
+
+// A map in case we need to return a flag for input devices.
+// This is 64 bits (and hence not the same as audio_device_t) because we need extra
+// bits to represent new devices.
+// NOT USED FOR R. We do not use int64 flags.
+// This can be out of date for now, as it is unused even for string validation
+// (instead TypeConverter<InputDeviceTraits> is used).
+// Maps input device name strings to 64-bit metrics flag values (see the
+// block comment above: 64 bits so new devices get unique bits beyond
+// audio_device_t). Aliased names (HDMI/AUX_DIGITAL, TELEPHONY_RX/VOICE_CALL)
+// intentionally share a bit, with the adjacent bit number left reserved.
+const std::unordered_map<std::string, int64_t>& getAudioDeviceInMap() {
+ // DO NOT MODIFY VALUES (OK to add new ones). This does NOT match audio_device_t.
+ static std::unordered_map<std::string, int64_t> map{
+ {"AUDIO_DEVICE_IN_COMMUNICATION", 1LL << 0},
+ {"AUDIO_DEVICE_IN_AMBIENT", 1LL << 1},
+ {"AUDIO_DEVICE_IN_BUILTIN_MIC", 1LL << 2},
+ {"AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET", 1LL << 3},
+ {"AUDIO_DEVICE_IN_WIRED_HEADSET", 1LL << 4},
+ {"AUDIO_DEVICE_IN_AUX_DIGITAL", 1LL << 5},
+ {"AUDIO_DEVICE_IN_HDMI", 1LL << 5}, // HDMI == AUX_DIGITAL (6 reserved)
+ {"AUDIO_DEVICE_IN_VOICE_CALL", 1LL << 7},
+ {"AUDIO_DEVICE_IN_TELEPHONY_RX", 1LL << 7}, // TELEPHONY_RX == VOICE_CALL (8 reserved)
+ {"AUDIO_DEVICE_IN_BACK_MIC", 1LL << 9},
+ {"AUDIO_DEVICE_IN_REMOTE_SUBMIX", 1LL << 10},
+ {"AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET", 1LL << 11},
+ {"AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET", 1LL << 12},
+ {"AUDIO_DEVICE_IN_USB_ACCESSORY", 1LL << 13},
+ {"AUDIO_DEVICE_IN_USB_DEVICE", 1LL << 14},
+ {"AUDIO_DEVICE_IN_FM_TUNER", 1LL << 15},
+ {"AUDIO_DEVICE_IN_TV_TUNER", 1LL << 16},
+ {"AUDIO_DEVICE_IN_LINE", 1LL << 17},
+ {"AUDIO_DEVICE_IN_SPDIF", 1LL << 18},
+ {"AUDIO_DEVICE_IN_BLUETOOTH_A2DP", 1LL << 19},
+ {"AUDIO_DEVICE_IN_LOOPBACK", 1LL << 20},
+ {"AUDIO_DEVICE_IN_IP", 1LL << 21},
+ {"AUDIO_DEVICE_IN_BUS", 1LL << 22},
+ {"AUDIO_DEVICE_IN_PROXY", 1LL << 23},
+ {"AUDIO_DEVICE_IN_USB_HEADSET", 1LL << 24},
+ {"AUDIO_DEVICE_IN_BLUETOOTH_BLE", 1LL << 25},
+ {"AUDIO_DEVICE_IN_HDMI_ARC", 1LL << 26},
+ {"AUDIO_DEVICE_IN_ECHO_REFERENCE", 1LL << 27},
+ {"AUDIO_DEVICE_IN_DEFAULT", 1LL << 28},
+ // R values above.
+ {"AUDIO_DEVICE_IN_BLE_HEADSET", 1LL << 29},
+ };
+ return map;
+}
+
+// A map in case we need to return a flag for output devices.
+// This is 64 bits (and hence not the same as audio_device_t) because we need extra
+// bits to represent new devices.
+// NOT USED FOR R. We do not use int64 flags.
+// This can be out of date for now, as it is unused even for string validation
+// (instead TypeConverter<OutputDeviceTraits> is used).
+// Maps output device name strings to 64-bit metrics flag values.
+// 64 bits (not audio_device_t) so devices past bit 31 (e.g. BLE_SPEAKER)
+// still get unique bits.
+const std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap() {
+ // DO NOT MODIFY VALUES (OK to add new ones). This does NOT match audio_device_t.
+ static std::unordered_map<std::string, int64_t> map{
+ {"AUDIO_DEVICE_OUT_EARPIECE", 1LL << 0},
+ {"AUDIO_DEVICE_OUT_SPEAKER", 1LL << 1},
+ {"AUDIO_DEVICE_OUT_WIRED_HEADSET", 1LL << 2},
+ {"AUDIO_DEVICE_OUT_WIRED_HEADPHONE", 1LL << 3},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_SCO", 1LL << 4},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET", 1LL << 5},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT", 1LL << 6},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_A2DP", 1LL << 7},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES", 1LL << 8},
+ {"AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER", 1LL << 9},
+ {"AUDIO_DEVICE_OUT_AUX_DIGITAL", 1LL << 10},
+ {"AUDIO_DEVICE_OUT_HDMI", 1LL << 10}, // HDMI == AUX_DIGITAL (11 reserved)
+ {"AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET", 1LL << 12},
+ {"AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET", 1LL << 13},
+ {"AUDIO_DEVICE_OUT_USB_ACCESSORY", 1LL << 14},
+ {"AUDIO_DEVICE_OUT_USB_DEVICE", 1LL << 15},
+ {"AUDIO_DEVICE_OUT_REMOTE_SUBMIX", 1LL << 16},
+ {"AUDIO_DEVICE_OUT_TELEPHONY_TX", 1LL << 17},
+ {"AUDIO_DEVICE_OUT_LINE", 1LL << 18},
+ {"AUDIO_DEVICE_OUT_HDMI_ARC", 1LL << 19},
+ {"AUDIO_DEVICE_OUT_SPDIF", 1LL << 20},
+ {"AUDIO_DEVICE_OUT_FM", 1LL << 21},
+ {"AUDIO_DEVICE_OUT_AUX_LINE", 1LL << 22},
+ {"AUDIO_DEVICE_OUT_SPEAKER_SAFE", 1LL << 23},
+ {"AUDIO_DEVICE_OUT_IP", 1LL << 24},
+ {"AUDIO_DEVICE_OUT_BUS", 1LL << 25},
+ {"AUDIO_DEVICE_OUT_PROXY", 1LL << 26},
+ {"AUDIO_DEVICE_OUT_USB_HEADSET", 1LL << 27},
+ {"AUDIO_DEVICE_OUT_HEARING_AID", 1LL << 28},
+ {"AUDIO_DEVICE_OUT_ECHO_CANCELLER", 1LL << 29},
+ {"AUDIO_DEVICE_OUT_DEFAULT", 1LL << 30},
+ // R values above.
+ {"AUDIO_DEVICE_OUT_BLE_HEADSET", 1LL << 31},
+ {"AUDIO_DEVICE_OUT_BLE_SPEAKER", 1LL << 32}, // fixed typo (was "BLE_SPAEKER"); bit value unchanged
+ };
+ return map;
+}
+
+// Maps AudioFlinger thread-type strings to stable statsd enum values.
+const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap() {
+ // DO NOT MODIFY VALUES (OK to add new ones).
+ // This may be found in frameworks/av/services/audioflinger/Threads.h
+ static std::unordered_map<std::string, int32_t> map{
+ // UNKNOWN is -1
+ {"MIXER", 0}, // Thread class is MixerThread
+ {"DIRECT", 1}, // Thread class is DirectOutputThread
+ {"DUPLICATING", 2}, // Thread class is DuplicatingThread
+ {"RECORD", 3}, // Thread class is RecordThread
+ {"OFFLOAD", 4}, // Thread class is OffloadThread
+ {"MMAP_PLAYBACK", 5}, // Thread class for MMAP playback stream
+ {"MMAP_CAPTURE", 6}, // Thread class for MMAP capture stream
+ // R values above.
+ };
+ return map;
+}
+
+// Maps track trait strings (AMEDIAMETRICS_PROP_TRAITS) to stable bit flags.
+const std::unordered_map<std::string, int32_t>& getAudioTrackTraitsMap() {
+ // DO NOT MODIFY VALUES (OK to add new ones).
+ static std::unordered_map<std::string, int32_t> map{
+ {"static", (1 << 0)}, // A static track
+ // R values above.
+ };
+ return map;
+}
+
+// Helper: Create the corresponding int32 from string flags split with '|'.
+// Each token is converted through TypeConverter<Traits> and OR'ed in.
+// NOTE(review): conversion stops at the FIRST unrecognized token ('break'),
+// so valid flags after a bad one are silently dropped — confirm intended
+// (flagsFromMap() below skips unknown tokens instead).
+template <typename Traits>
+int32_t int32FromFlags(const std::string &flags)
+{
+ const auto result = stringutils::split(flags, "|");
+ int32_t intFlags = 0;
+ for (const auto& flag : result) {
+ typename Traits::Type value;
+ if (!TypeConverter<Traits>::fromString(flag, value)) {
+ break;
+ }
+ intFlags |= value;
+ }
+ return intFlags;
+}
+
+// Helper: Re-join '|'-separated flags, keeping only tokens recognized by
+// TypeConverter<Traits>, with the first 'len' characters stripped from each.
+// Callers pass sizeof("PREFIX"): sizeof counts the terminating NUL, so the
+// '_' separator after the prefix is stripped too. Tokens no longer than
+// 'len' are skipped; an unrecognized token stops processing ('break'),
+// matching int32FromFlags() above.
+template <typename Traits>
+std::string stringFromFlags(const std::string &flags, size_t len)
+{
+ const auto result = stringutils::split(flags, "|");
+ std::string sFlags;
+ for (const auto& flag : result) {
+ typename Traits::Type value;
+ if (!TypeConverter<Traits>::fromString(flag, value)) {
+ break;
+ }
+ if (len >= flag.size()) continue;
+ if (!sFlags.empty()) sFlags += "|";
+ sFlags += flag.c_str() + len;
+ }
+ return sFlags;
+}
+
+// Helper: filter a '|'-separated string, keeping only tokens that exist as
+// keys in 'map'; the surviving tokens are re-joined with '|'.
+// Returns the empty string for empty input or when nothing matches.
+template <typename M>
+std::string validateStringFromMap(const std::string &str, const M& map)
+{
+ if (str.empty()) return {};
+
+ const auto result = stringutils::split(str, "|");
+ std::stringstream ss;
+ for (const auto &s : result) {
+ if (map.count(s) > 0) {
+ if (ss.tellp() > 0) ss << "|";
+ ss << s;
+ }
+ }
+ return ss.str();
+}
+
+// Helper: OR together the map values of every '|'-separated token found in
+// 'map'. Unknown tokens are skipped ('continue'); contrast with
+// int32FromFlags() above, which stops at the first unknown token.
+// Returns a zero-initialized value for empty input.
+template <typename M>
+typename M::mapped_type flagsFromMap(const std::string &str, const M& map)
+{
+ if (str.empty()) return {};
+
+ const auto result = stringutils::split(str, "|");
+ typename M::mapped_type value{};
+ for (const auto &s : result) {
+ auto it = map.find(s);
+ if (it == map.end()) continue;
+ value |= it->second;
+ }
+ return value;
+}
+
+// CONTENT_TYPE (int32): audio_content_type_t value for the string,
+// or AUDIO_CONTENT_TYPE_UNKNOWN if it does not parse.
+template <>
+int32_t lookup<CONTENT_TYPE>(const std::string &contentType)
+{
+ AudioContentTraits::Type value;
+ if (!TypeConverter<AudioContentTraits>::fromString(contentType, value)) {
+ value = AUDIO_CONTENT_TYPE_UNKNOWN;
+ }
+ return (int32_t)value;
+}
+
+// CONTENT_TYPE (string): validated suffix after "AUDIO_CONTENT_TYPE_",
+// or "" if unrecognized. sizeof counts the NUL, so the '_' is skipped too.
+template <>
+std::string lookup<CONTENT_TYPE>(const std::string &contentType)
+{
+ AudioContentTraits::Type value;
+ if (!TypeConverter<AudioContentTraits>::fromString(contentType, value)) {
+ return "";
+ }
+ return contentType.c_str() + sizeof("AUDIO_CONTENT_TYPE");
+}
+
+// ENCODING (int32): audio_format_t value for the string,
+// or AUDIO_FORMAT_INVALID if it does not parse.
+template <>
+int32_t lookup<ENCODING>(const std::string &encoding)
+{
+ FormatTraits::Type value;
+ if (!TypeConverter<FormatTraits>::fromString(encoding, value)) {
+ value = AUDIO_FORMAT_INVALID;
+ }
+ return (int32_t)value;
+}
+
+// ENCODING (string): validated suffix after "AUDIO_FORMAT_",
+// or "" if unrecognized. sizeof counts the NUL, so the '_' is skipped too.
+template <>
+std::string lookup<ENCODING>(const std::string &encoding)
+{
+ FormatTraits::Type value;
+ if (!TypeConverter<FormatTraits>::fromString(encoding, value)) {
+ return "";
+ }
+ return encoding.c_str() + sizeof("AUDIO_FORMAT");
+}
+
+// INPUT_FLAG (int32): OR of audio_input_flags_t bits for a '|'-separated
+// flag string (see int32FromFlags for unknown-token behavior).
+template <>
+int32_t lookup<INPUT_FLAG>(const std::string &inputFlag)
+{
+ return int32FromFlags<InputFlagTraits>(inputFlag);
+}
+
+// INPUT_FLAG (string): validated flags re-joined with '|', each stripped of
+// the "AUDIO_INPUT_FLAG_" prefix (sizeof includes the NUL, covering the '_').
+template <>
+std::string lookup<INPUT_FLAG>(const std::string &inputFlag)
+{
+ return stringFromFlags<InputFlagTraits>(inputFlag, sizeof("AUDIO_INPUT_FLAG"));
+}
+
+template <>
+int32_t lookup<OUTPUT_FLAG>(const std::string &outputFlag)
+{
+ return int32FromFlags<OutputFlagTraits>(outputFlag);
+}
+
+template <>
+std::string lookup<OUTPUT_FLAG>(const std::string &outputFlag)
+{
+ return stringFromFlags<OutputFlagTraits>(outputFlag, sizeof("AUDIO_OUTPUT_FLAG"));
+}
+
+template <>
+int32_t lookup<SOURCE_TYPE>(const std::string &sourceType)
+{
+ SourceTraits::Type value;
+ if (!TypeConverter<SourceTraits>::fromString(sourceType, value)) {
+ value = AUDIO_SOURCE_DEFAULT;
+ }
+ return (int32_t)value;
+}
+
+template <>
+std::string lookup<SOURCE_TYPE>(const std::string &sourceType)
+{
+ SourceTraits::Type value;
+ if (!TypeConverter<SourceTraits>::fromString(sourceType, value)) {
+ return "";
+ }
+ return sourceType.c_str() + sizeof("AUDIO_SOURCE");
+}
+
+template <>
+int32_t lookup<STREAM_TYPE>(const std::string &streamType)
+{
+ StreamTraits::Type value;
+ if (!TypeConverter<StreamTraits>::fromString(streamType, value)) {
+ value = AUDIO_STREAM_DEFAULT;
+ }
+ return (int32_t)value;
+}
+
+template <>
+std::string lookup<STREAM_TYPE>(const std::string &streamType)
+{
+ StreamTraits::Type value;
+ if (!TypeConverter<StreamTraits>::fromString(streamType, value)) {
+ return "";
+ }
+ return streamType.c_str() + sizeof("AUDIO_STREAM");
+}
+
+template <>
+int32_t lookup<USAGE>(const std::string &usage)
+{
+ UsageTraits::Type value;
+ if (!TypeConverter<UsageTraits>::fromString(usage, value)) {
+ value = AUDIO_USAGE_UNKNOWN;
+ }
+ return (int32_t)value;
+}
+
+template <>
+std::string lookup<USAGE>(const std::string &usage)
+{
+ UsageTraits::Type value;
+ if (!TypeConverter<UsageTraits>::fromString(usage, value)) {
+ return "";
+ }
+ return usage.c_str() + sizeof("AUDIO_USAGE");
+}
+
+template <>
+int64_t lookup<INPUT_DEVICE>(const std::string &inputDevice)
+{
+ // NOT USED FOR R.
+ // Returns a set of bits, each one representing a device in inputDevice.
+ // This is a 64 bit integer, not the same as audio_device_t.
+ return flagsFromMap(inputDevice, getAudioDeviceInMap());
+}
+
+template <>
+std::string lookup<INPUT_DEVICE>(const std::string &inputDevice)
+{
+ return stringFromFlags<InputDeviceTraits>(inputDevice, sizeof("AUDIO_DEVICE_IN"));
+}
+
+template <>
+int64_t lookup<OUTPUT_DEVICE>(const std::string &outputDevice)
+{
+ // NOT USED FOR R.
+ // Returns a set of bits, each one representing a device in outputDevice.
+ // This is a 64 bit integer, not the same as audio_device_t.
+ return flagsFromMap(outputDevice, getAudioDeviceOutMap());
+}
+
+template <>
+std::string lookup<OUTPUT_DEVICE>(const std::string &outputDevice)
+{
+ return stringFromFlags<OutputDeviceTraits>(outputDevice, sizeof("AUDIO_DEVICE_OUT"));
+}
+
+template <>
+int32_t lookup<CALLER_NAME>(const std::string &callerName)
+{
+ auto& map = getAudioCallerNameMap();
+ auto it = map.find(callerName);
+ if (it == map.end()) {
+ return 0; // return unknown
+ }
+ return it->second;
+}
+
+template <>
+std::string lookup<CALLER_NAME>(const std::string &callerName)
+{
+ auto& map = getAudioCallerNameMap();
+ auto it = map.find(callerName);
+ if (it == map.end()) {
+ return "";
+ }
+ return callerName;
+}
+
+template <>
+int32_t lookup<THREAD_TYPE>(const std::string &threadType)
+{
+ auto& map = getAudioThreadTypeMap();
+ auto it = map.find(threadType);
+ if (it == map.end()) {
+ return -1; // note this as an illegal thread value as we don't have unknown here.
+ }
+ return it->second;
+}
+
+template <>
+std::string lookup<THREAD_TYPE>(const std::string &threadType)
+{
+ auto& map = getAudioThreadTypeMap();
+ auto it = map.find(threadType);
+ if (it == map.end()) {
+ return "";
+ }
+ return threadType;
+}
+
+bool isInputThreadType(const std::string &threadType)
+{
+ return threadType == "RECORD" || threadType == "MMAP_CAPTURE";
+}
+
+template <>
+std::string lookup<TRACK_TRAITS>(const std::string &traits)
+{
+ return validateStringFromMap(traits, getAudioTrackTraitsMap());
+}
+
+template <>
+int32_t lookup<TRACK_TRAITS>(const std::string &traits)
+{
+ return flagsFromMap(traits, getAudioTrackTraitsMap());
+}
+
+} // namespace android::mediametrics::types
diff --git a/services/mediametrics/AudioTypes.h b/services/mediametrics/AudioTypes.h
new file mode 100644
index 0000000..e1deeb1
--- /dev/null
+++ b/services/mediametrics/AudioTypes.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unordered_map>
+
+namespace android::mediametrics::types {
+
+// Helper methods that map mediametrics logged strings to integer codes.
+// In R we do not use the integer codes, but rather we can use these maps
+// to validate correct strings.
+const std::unordered_map<std::string, int32_t>& getAudioCallerNameMap();
+const std::unordered_map<std::string, int64_t>& getAudioDeviceInMap();
+const std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap();
+const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap();
+const std::unordered_map<std::string, int32_t>& getAudioTrackTraitsMap();
+
+// Enumeration for the device connection results.
+enum DeviceConnectionResult : int32_t {
+ DEVICE_CONNECTION_RESULT_SUCCESS = 0, // Audio delivered
+ DEVICE_CONNECTION_RESULT_UNKNOWN = 1, // Success is unknown.
+ DEVICE_CONNECTION_RESULT_JAVA_SERVICE_CANCEL = 2, // Canceled in Java service
+ // Do not modify the constants above after R. Adding new constants is fine.
+};
+
+// Enumeration for all the string translations to integers (generally int32_t) unless noted.
+enum AudioEnumCategory {
+ CALLER_NAME,
+ CONTENT_TYPE,
+ ENCODING,
+ INPUT_DEVICE, // int64_t
+ INPUT_FLAG,
+ OUTPUT_DEVICE, // int64_t
+ OUTPUT_FLAG,
+ SOURCE_TYPE,
+ STREAM_TYPE,
+ THREAD_TYPE,
+ TRACK_TRAITS,
+ USAGE,
+};
+
+// Convert a string (or arbitrary S) from an AudioEnumCategory to a particular type.
+// This is used to convert log std::strings back to the original type (int32_t or int64_t).
+//
+// For a string, generally there is a prefix "AUDIO_INPUT_FLAG" or some such that could
+// actually indicate the category so the AudioEnumCategory could be superfluous, but
+// we use it to find the proper default value in case of an unknown string.
+//
+// lookup<ENCODING, int32_t>("AUDIO_FORMAT_PCM_16_BIT") -> 1
+//
+template <AudioEnumCategory C, typename T, typename S>
+T lookup(const S &str);
+
+// Helper: Allow using a const char * in lieu of std::string.
+template <AudioEnumCategory C, typename T>
+T lookup(const char *str) {
+ return lookup<C, T, std::string>(str);
+}
+
+bool isInputThreadType(const std::string &threadType);
+
+} // namespace android::mediametrics::types
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 3b3dc3e..bf6e428 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -32,7 +32,8 @@
namespace android {
-using namespace mediametrics;
+using mediametrics::Item;
+using mediametrics::startsWith;
// individual records kept in memory: age or count
// age: <= 28 hours (1 1/6 days)
@@ -63,7 +64,7 @@
bool MediaMetricsService::useUidForPackage(
const std::string& package, const std::string& installer)
{
- if (strchr(package.c_str(), '.') == NULL) {
+ if (strchr(package.c_str(), '.') == nullptr) {
return false; // not of form 'com.whatever...'; assume internal and ok
} else if (strncmp(package.c_str(), "android.", 8) == 0) {
return false; // android.* packages are assumed fine
@@ -94,58 +95,6 @@
}
}
-/* static */
-std::string MediaMetricsService::tokenizer(std::string::const_iterator& it,
- const std::string::const_iterator& end, const char *reserved) {
- // consume leading white space
- for (; it != end && std::isspace(*it); ++it);
- if (it == end) return {};
-
- auto start = it;
- // parse until we hit a reserved keyword or space
- if (strchr(reserved, *it)) return {start, ++it};
- for (;;) {
- ++it;
- if (it == end || std::isspace(*it) || strchr(reserved, *it)) return {start, it};
- }
-}
-
-/* static */
-std::vector<std::pair<std::string, std::string>>
-MediaMetricsService::getDeviceAddressPairs(const std::string& devices) {
- std::vector<std::pair<std::string, std::string>> result;
-
- // Currently, the device format is EXACTLY
- // (device1, addr1)|(device2, addr2)|...
-
- static constexpr char delim[] = "()|,";
- for (auto it = devices.begin(); ; ) {
- auto token = tokenizer(it, devices.end(), delim);
- if (token != "(") return result;
-
- auto device = tokenizer(it, devices.end(), delim);
- if (device.empty() || !std::isalnum(device[0])) return result;
-
- token = tokenizer(it, devices.end(), delim);
- if (token != ",") return result;
-
- // special handling here for empty addresses
- auto address = tokenizer(it, devices.end(), delim);
- if (address.empty() || !std::isalnum(device[0])) return result;
- if (address == ")") { // no address, just the ")"
- address.clear();
- } else {
- token = tokenizer(it, devices.end(), delim);
- if (token != ")") return result;
- }
-
- result.emplace_back(std::move(device), std::move(address));
-
- token = tokenizer(it, devices.end(), delim);
- if (token != "|") return result; // this includes end of string detection
- }
-}
-
MediaMetricsService::MediaMetricsService()
: mMaxRecords(kMaxRecords),
mMaxRecordAgeNs(kMaxRecordAgeNs),
@@ -203,9 +152,9 @@
// Overwrite package name and version if the caller was untrusted or empty
if (!isTrusted || item->getPkgName().empty()) {
- const uid_t uid = item->getUid();
+ const uid_t uidItem = item->getUid();
const auto [ pkgName, version ] =
- MediaMetricsService::getSanitizedPackageNameAndVersionCode(uid);
+ MediaMetricsService::getSanitizedPackageNameAndVersionCode(uidItem);
item->setPkgName(pkgName);
item->setPkgVersionCode(version);
}
@@ -236,7 +185,7 @@
}
if (!isTrusted || item->getTimestamp() == 0) {
- // Westworld logs two times for events: ElapsedRealTimeNs (BOOTTIME) and
+ // Statsd logs two times for events: ElapsedRealTimeNs (BOOTTIME) and
// WallClockTimeNs (REALTIME), but currently logs REALTIME to cloud.
//
// For consistency and correlation with other logging mechanisms
@@ -320,7 +269,7 @@
String8 value(args[i]);
char *endp;
const char *p = value.string();
- long long sec = strtoll(p, &endp, 10);
+ const auto sec = (int64_t)strtoll(p, &endp, 10);
if (endp == p || *endp != '\0' || sec == 0) {
sinceNs = 0;
} else if (sec < 0) {
@@ -519,6 +468,7 @@
"codec",
"extractor",
"mediadrm",
+ "mediaparser",
"nuplayer",
}) {
if (key == allowedKey) {
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index b8eb267..792b7f0 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -65,7 +65,7 @@
static nsecs_t roundTime(nsecs_t timeNs);
/**
- * Returns true if we should use uid for package name when uploading to WestWorld.
+ * Returns true if we should use uid for package name when uploading to statsd.
*/
static bool useUidForPackage(const std::string& package, const std::string& installer);
@@ -77,20 +77,6 @@
*/
static std::pair<std::string, int64_t> getSanitizedPackageNameAndVersionCode(uid_t uid);
- /**
- * Return string tokens from iterator, separated by spaces and reserved chars.
- */
- static std::string tokenizer(std::string::const_iterator& it,
- const std::string::const_iterator& end, const char *reserved);
-
- /**
- * Parse the devices string and return a vector of device address pairs.
- *
- * A failure to parse returns early with the contents that were able to be parsed.
- */
- static std::vector<std::pair<std::string, std::string>>
- getDeviceAddressPairs(const std::string &devices);
-
protected:
// Internal call where release is true if ownership of item is transferred
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
new file mode 100644
index 0000000..50525bc
--- /dev/null
+++ b/services/mediametrics/StringUtils.cpp
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaMetricsService::stringutils"
+#include <utils/Log.h>
+
+#include "StringUtils.h"
+
+namespace android::mediametrics::stringutils {
+
+std::string tokenizer(std::string::const_iterator& it,
+ const std::string::const_iterator& end, const char *reserved)
+{
+ // consume leading white space
+ for (; it != end && std::isspace(*it); ++it);
+ if (it == end) return {};
+
+ auto start = it;
+ // parse until we hit a reserved keyword or space
+ if (strchr(reserved, *it)) return {start, ++it};
+ for (;;) {
+ ++it;
+ if (it == end || std::isspace(*it) || strchr(reserved, *it)) return {start, it};
+ }
+}
+
+std::vector<std::string> split(const std::string& flags, const char *delim)
+{
+ std::vector<std::string> result;
+ for (auto it = flags.begin(); ; ) {
+ auto flag = tokenizer(it, flags.end(), delim);
+ if (flag.empty() || !std::isalnum(flag[0])) return result;
+ result.emplace_back(std::move(flag));
+
+ // look for the delimiter and discard
+ auto token = tokenizer(it, flags.end(), delim);
+ if (token.size() != 1 || strchr(delim, token[0]) == nullptr) return result;
+ }
+}
+
+std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
+{
+ std::vector<std::pair<std::string, std::string>> result;
+
+ // Currently, the device format is EXACTLY
+ // (device1, addr1)|(device2, addr2)|...
+
+ static constexpr char delim[] = "()|,";
+ for (auto it = devices.begin(); ; ) {
+ auto token = tokenizer(it, devices.end(), delim);
+ if (token != "(") return result;
+
+ auto device = tokenizer(it, devices.end(), delim);
+ if (device.empty() || !std::isalnum(device[0])) return result;
+
+ token = tokenizer(it, devices.end(), delim);
+ if (token != ",") return result;
+
+ // special handling here for empty addresses
+ auto address = tokenizer(it, devices.end(), delim);
+ if (address.empty() || !std::isalnum(device[0])) return result;
+ if (address == ")") { // no address, just the ")"
+ address.clear();
+ } else {
+ token = tokenizer(it, devices.end(), delim);
+ if (token != ")") return result;
+ }
+
+ result.emplace_back(std::move(device), std::move(address));
+
+ token = tokenizer(it, devices.end(), delim);
+ if (token != "|") return result; // this includes end of string detection
+ }
+}
+
+size_t replace(std::string &str, const char *targetChars, const char replaceChar)
+{
+ size_t replaced = 0;
+ for (char &c : str) {
+ if (strchr(targetChars, c) != nullptr) {
+ c = replaceChar;
+ ++replaced;
+ }
+ }
+ return replaced;
+}
+
+} // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
new file mode 100644
index 0000000..7a8bbee
--- /dev/null
+++ b/services/mediametrics/StringUtils.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+namespace android::mediametrics::stringutils {
+
+/**
+ * fieldPrint is a helper method that logs to a stringstream a sequence of
+ * field names (in a fixed size array) together with a variable number of arg parameters.
+ *
+ * stringstream << field[0] << ":" << arg0 << " ";
+ * stringstream << field[1] << ":" << arg1 << " ";
+ * ...
+ * stringstream << field[N-1] << ":" << arg{N-1} << " ";
+ *
+ * The number of fields must exactly match the (variable) arguments.
+ *
+ * Example:
+ *
+ * const char * const fields[] = { "integer" };
+ * std::stringstream ss;
+ * fieldPrint(ss, fields, int(10));
+ */
+template <size_t N, typename... Targs>
+void fieldPrint(std::stringstream& ss, const char * const (& fields)[N], Targs... args) {
+ static_assert(N == sizeof...(args)); // guarantee #fields == #args
+ auto fptr = fields; // get a pointer to the base of fields array
+ ((ss << *fptr++ << ":" << args << " "), ...); // (fold expression), send to stringstream.
+}
+
+/**
+ * Return string tokens from iterator, separated by spaces and reserved chars.
+ */
+std::string tokenizer(std::string::const_iterator& it,
+ const std::string::const_iterator& end, const char *reserved);
+
+/**
+ * Splits flags string based on delimiters (or, whitespace which is removed).
+ */
+std::vector<std::string> split(const std::string& flags, const char *delim);
+
+/**
+ * Parse the devices string and return a vector of device address pairs.
+ *
+ * A failure to parse returns early with the contents that were able to be parsed.
+ */
+std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string &devices);
+
+/**
+ * Replaces targetChars with replaceChar in string, returns number of chars replaced.
+ */
+size_t replace(std::string &str, const char *targetChars, const char replaceChar);
+
+} // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/TimeMachine.h b/services/mediametrics/TimeMachine.h
index c82778b..ce579b3 100644
--- a/services/mediametrics/TimeMachine.h
+++ b/services/mediametrics/TimeMachine.h
@@ -18,6 +18,7 @@
#include <any>
#include <map>
+#include <mutex>
#include <sstream>
#include <string>
#include <variant>
@@ -81,6 +82,8 @@
, mCreationTime(time)
, mLastModificationTime(time)
{
+ (void)mCreationTime; // suppress unused warning.
+
// allowUid allows an untrusted client with a matching uid to set properties
// in this key.
// If allowUid == (uid_t)-1, no untrusted client may set properties in the key.
@@ -209,7 +212,7 @@
const std::string mKey;
const uid_t mAllowUid;
- const int64_t mCreationTime __unused;
+ const int64_t mCreationTime;
int64_t mLastModificationTime;
std::map<std::string /* property */, PropertyHistory> mPropertyMap;
@@ -217,10 +220,10 @@
using History = std::map<std::string /* key */, std::shared_ptr<KeyHistory>>;
- static inline constexpr size_t kTimeSequenceMaxElements = 100;
- static inline constexpr size_t kKeyMaxProperties = 100;
- static inline constexpr size_t kKeyLowWaterMark = 500;
- static inline constexpr size_t kKeyHighWaterMark = 1000;
+ static inline constexpr size_t kTimeSequenceMaxElements = 50;
+ static inline constexpr size_t kKeyMaxProperties = 50;
+ static inline constexpr size_t kKeyLowWaterMark = 400;
+ static inline constexpr size_t kKeyHighWaterMark = 500;
// Estimated max data space usage is 3KB * kKeyHighWaterMark.
@@ -252,6 +255,7 @@
{
std::lock_guard lock2(other.mLock);
mHistory = other.mHistory;
+ mGarbageCollectionCount = other.mGarbageCollectionCount.load();
}
// Now that we safely have our own shared pointers, let's dup them
@@ -417,6 +421,7 @@
void clear() {
std::lock_guard lock(mLock);
mHistory.clear();
+ mGarbageCollectionCount = 0;
}
/**
@@ -442,7 +447,7 @@
++it) {
if (ll <= 0) break;
if (prefix != nullptr && !startsWith(it->first, prefix)) break;
- std::lock_guard lock(getLockForKey(it->first));
+ std::lock_guard lock2(getLockForKey(it->first));
auto [s, l] = it->second->dump(ll, sinceNs);
ss << s;
ll -= l;
@@ -450,6 +455,10 @@
return { ss.str(), lines - ll };
}
+ size_t getGarbageCollectionCount() const {
+ return mGarbageCollectionCount;
+ }
+
private:
// Obtains the lock for a KeyHistory.
@@ -493,8 +502,6 @@
// TODO: something better than this for garbage collection.
if (mHistory.size() < mKeyHighWaterMark) return false;
- ALOGD("%s: garbage collection", __func__);
-
// erase everything explicitly expired.
std::multimap<int64_t, std::string> accessList;
// use a stale vector with precise type to avoid type erasure overhead in garbage
@@ -531,12 +538,16 @@
ALOGD("%s(%zu, %zu): key size:%zu",
__func__, mKeyLowWaterMark, mKeyHighWaterMark,
mHistory.size());
+
+ ++mGarbageCollectionCount;
return true;
}
const size_t mKeyLowWaterMark = kKeyLowWaterMark;
const size_t mKeyHighWaterMark = kKeyHighWaterMark;
+ std::atomic<size_t> mGarbageCollectionCount{};
+
/**
* Locking Strategy
*
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 8a22826..0ca4639 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -43,9 +43,9 @@
// Transaction Log between the Low Water Mark and the High Water Mark.
// low water mark
- static inline constexpr size_t kLogItemsLowWater = 5000;
+ static inline constexpr size_t kLogItemsLowWater = 1700;
// high water mark
- static inline constexpr size_t kLogItemsHighWater = 10000;
+ static inline constexpr size_t kLogItemsHighWater = 2000;
// Estimated max data usage is 1KB * kLogItemsHighWater.
@@ -79,6 +79,7 @@
std::lock_guard lock2(other.mLock);
mLog = other.mLog;
mItemMap = other.mItemMap;
+ mGarbageCollectionCount = other.mGarbageCollectionCount.load();
return *this;
}
@@ -181,6 +182,11 @@
std::lock_guard lock(mLock);
mLog.clear();
mItemMap.clear();
+ mGarbageCollectionCount = 0;
+ }
+
+ size_t getGarbageCollectionCount() const {
+ return mGarbageCollectionCount;
}
private:
@@ -216,8 +222,6 @@
bool gc(std::vector<std::any>& garbage) REQUIRES(mLock) {
if (mLog.size() < mHighWaterMark) return false;
- ALOGD("%s: garbage collection", __func__);
-
auto eraseEnd = mLog.begin();
size_t toRemove = mLog.size() - mLowWaterMark;
// remove at least those elements.
@@ -265,6 +269,7 @@
ALOGD("%s(%zu, %zu): log size:%zu item map size:%zu, item map items:%zu",
__func__, mLowWaterMark, mHighWaterMark,
mLog.size(), mItemMap.size(), itemMapCount);
+ ++mGarbageCollectionCount;
return true;
}
@@ -287,6 +292,8 @@
const size_t mLowWaterMark = kLogItemsLowWater;
const size_t mHighWaterMark = kLogItemsHighWater;
+ std::atomic<size_t> mGarbageCollectionCount{};
+
mutable std::mutex mLock;
MapTimeItem mLog GUARDED_BY(mLock);
diff --git a/services/mediametrics/cleaner.cpp b/services/mediametrics/cleaner.cpp
new file mode 100644
index 0000000..e746842
--- /dev/null
+++ b/services/mediametrics/cleaner.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MetricsCleaner"
+#include <utils/Log.h>
+
+#include "cleaner.h"
+
+namespace android::mediametrics {
+
+// place time into buckets at 0,1,2,4,8,16,32 seconds and then at minute boundaries.
+// time is rounded up to the next boundary.
+//
+int64_t bucket_time_minutes(int64_t in_millis) {
+
+ const int64_t SEC_TO_MS = 1000;
+ const int64_t MIN_TO_MS = (60 * SEC_TO_MS);
+
+ if (in_millis <= 0) {
+ return 0;
+ }
+ if (in_millis <= 32 * SEC_TO_MS) {
+ for (int sec = 1; sec <= 32; sec *= 2) {
+ if (in_millis <= sec * SEC_TO_MS) {
+ return sec * SEC_TO_MS;
+ }
+ }
+ }
+ /* up to next 1 minute boundary */
+ int64_t minutes = (in_millis + MIN_TO_MS - 1) / MIN_TO_MS;
+ in_millis = minutes * MIN_TO_MS;
+ return in_millis;
+}
+
+} // namespace android::mediametrics
diff --git a/services/mediametrics/cleaner.h b/services/mediametrics/cleaner.h
new file mode 100644
index 0000000..72e24f9
--- /dev/null
+++ b/services/mediametrics/cleaner.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIAMETRICS_CLEANER_H
+#define MEDIAMETRICS_CLEANER_H
+
+namespace android::mediametrics {
+
+// break time into buckets at 1,2,4,8,16,32 seconds
+// and then at minute boundaries
+//
+extern int64_t bucket_time_minutes(int64_t incomingMs);
+
+} // namespace android::mediametrics
+
+#endif // MEDIAMETRICS_CLEANER_H
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
new file mode 100644
index 0000000..df4c867
--- /dev/null
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -0,0 +1,59 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "mediametrics_service_fuzzer",
+
+ srcs: [
+ "mediametrics_service_fuzzer.cpp",
+ ],
+
+ static_libs: [
+ "libmediametrics",
+ "libmediametricsservice",
+ "libplatformprotos",
+ ],
+
+ shared_libs: [
+ "libbase",
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libmedia_helper",
+ "libmediautils",
+ "libmemunreachable",
+ "libprotobuf-cpp-lite",
+ "libstagefright",
+ "libstatslog",
+ "libutils",
+ ],
+
+ include_dirs: [
+ "frameworks/av/services/mediametrics",
+ "system/media/audio_utils/include",
+ ],
+
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/services/mediametrics/fuzzer/README.md b/services/mediametrics/fuzzer/README.md
new file mode 100644
index 0000000..a13830e
--- /dev/null
+++ b/services/mediametrics/fuzzer/README.md
@@ -0,0 +1,54 @@
+# Fuzzer for libmediametricsservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libmediametricsservice is designed based on the
+understanding of the service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+Media Metrics Service contains the following modules:
+1. Media Metrics Item Manipulation (module name: `Item`)
+2. Media Metrics Time Machine Storage (module name: `TimeMachineStorage`)
+3. Media Metrics Transaction Log (module name: `TransactionLog`)
+4. Media Metrics Analytics Action (module name: `AnalyticsAction`)
+5. Media Metrics Audio Analytics (module name: `AudioAnalytics`)
+6. Media Metrics Timed Action (module name: `TimedAction`)
+
+| Module| Valid Input Values| Configured Value|
+|------------- |-------------| ----- |
+| `Item` | Key: `std::string`. Values: `INT32_MIN` to `INT32_MAX`, `INT64_MIN` to `INT64_MAX`, `std::string`, `double`, `pair<INT32_MIN to INT32_MAX, INT32_MIN to INT32_MAX>` | Value obtained from FuzzedDataProvider |
+| `TimeMachineStorage` | Key: `std::string`. Values: `INT32_MIN` to `INT32_MAX`, `INT64_MIN` to `INT64_MAX`, `std::string`, `double`, `pair<INT32_MIN to INT32_MAX, INT32_MIN to INT32_MAX>` | Value obtained from FuzzedDataProvider |
+| `TransactionLog` | `mediametrics::Item` | `mediametrics::Item` created by obtaining values from FuzzedDataProvider|
+| `AnalyticsAction` | URL: `std::string` ending with .event, Value: `std::string`, action: A function | URL and Values obtained from FuzzedDataProvider, a placeholder function was passed as action|
+| `AudioAnalytics` | `mediametrics::Item` | `mediametrics::Item` created by obtaining values from FuzzedDataProvider|
+| `TimedAction` | time: `std::chrono::seconds`, function: `std::function` | `std::chrono::seconds` : value obtained from FuzzedDataProvider, `std::function`: a placeholder function was used. |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+## Build
+
+This describes steps to build mediametrics_service_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) mediametrics_service_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some files to that folder
+Push this directory to device.
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/mediametrics_service_fuzzer/mediametrics_service_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
new file mode 100644
index 0000000..0cb2594
--- /dev/null
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -0,0 +1,372 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaMetricsItem.h>
+#include <stdio.h>
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+
+#include "AudioTypes.h"
+#include "MediaMetricsService.h"
+#include "StringUtils.h"
+
+using namespace android;
+
+// low water mark
+constexpr size_t kLogItemsLowWater = 1;
+// high water mark
+constexpr size_t kLogItemsHighWater = 2;
+
+// Harness that drives the mediametrics service surfaces from fuzzed input:
+// item manipulation and (de)serialization, TimeMachine/TransactionLog
+// storage, analytics actions and timed actions. Each invoke* method
+// re-reads the same (data, size) buffer through its own FuzzedDataProvider,
+// so the methods are independent of one another.
+class MediaMetricsServiceFuzzer {
+ public:
+ void invokeStartsWith(const uint8_t *data, size_t size);
+ void invokeInstantiate(const uint8_t *data, size_t size);
+ void invokePackageInstallerCheck(const uint8_t *data, size_t size);
+ void invokeItemManipulation(const uint8_t *data, size_t size);
+ void invokeItemExpansion(const uint8_t *data, size_t size);
+ void invokeTimeMachineStorage(const uint8_t *data, size_t size);
+ void invokeTransactionLog(const uint8_t *data, size_t size);
+ void invokeAnalyticsAction(const uint8_t *data, size_t size);
+ void invokeAudioAnalytics(const uint8_t *data, size_t size);
+ void invokeTimedAction(const uint8_t *data, size_t size);
+ // Runs every invoke* method above, in order, on the same input buffer.
+ void process(const uint8_t *data, size_t size);
+};
+
+// Fuzzes android::mediametrics::startsWith() with random
+// (string, prefix) pairs until the fuzz input is exhausted.
+void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ while (fdp.remaining_bytes()) {
+ android::mediametrics::startsWith(fdp.ConsumeRandomLengthString(),
+ fdp.ConsumeRandomLengthString());
+ }
+}
+
+// Submits randomly-keyed items and "audiotrack"-keyed items to a fresh
+// MediaMetricsService, each both before and after attaching an int32
+// property, until the fuzz input is exhausted. Items are submitted by
+// pointer; the service copies what it needs, so unique_ptr ownership
+// stays with this function.
+void MediaMetricsServiceFuzzer::invokeInstantiate(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ // sp<> strong pointer keeps the binder service object alive for this scope.
+ sp<MediaMetricsService> mediaMetricsService = new MediaMetricsService();
+
+ while (fdp.remaining_bytes()) {
+ std::unique_ptr<mediametrics::Item> random_key(
+ mediametrics::Item::create(fdp.ConsumeRandomLengthString()));
+ mediaMetricsService->submit(random_key.get());
+ random_key->setInt32(fdp.ConsumeRandomLengthString().c_str(),
+ fdp.ConsumeIntegral<int32_t>());
+ mediaMetricsService->submit(random_key.get());
+
+ // "audiotrack" is a well-known key; exercises the trusted-key path.
+ std::unique_ptr<mediametrics::Item> audiotrack_key(
+ mediametrics::Item::create("audiotrack"));
+ mediaMetricsService->submit(audiotrack_key.get());
+ audiotrack_key->addInt32(fdp.ConsumeRandomLengthString().c_str(),
+ fdp.ConsumeIntegral<int32_t>());
+ mediaMetricsService->submit(audiotrack_key.get());
+ }
+}
+
+// Fuzzes MediaMetricsService::useUidForPackage with random
+// (package name, installer) string pairs.
+void MediaMetricsServiceFuzzer::invokePackageInstallerCheck(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ while (fdp.remaining_bytes()) {
+ MediaMetricsService::useUidForPackage(fdp.ConsumeRandomLengthString().c_str(),
+ fdp.ConsumeRandomLengthString().c_str());
+ }
+}
+
+// Builds one mediametrics::Item by applying a fuzz-chosen sequence of
+// setters/getters/filters (actions 0-16), then round-trips it through
+// both Parcel and byte-string serialization and submits the result to a
+// fresh MediaMetricsService.
+void MediaMetricsServiceFuzzer::invokeItemManipulation(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
+ while (fdp.remaining_bytes()) {
+ const uint8_t action = fdp.ConsumeIntegralInRange<uint8_t>(0, 16);
+ const std::string key = fdp.ConsumeRandomLengthString();
+ // Stop once the provider can no longer yield a non-empty key.
+ if (fdp.remaining_bytes() < 1 || key.length() < 1) {
+ break;
+ }
+ switch (action) {
+ case 0: {
+ item.setInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
+ break;
+ }
+ case 1: {
+ item.addInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
+ break;
+ }
+ case 2: {
+ int32_t i32 = 0;
+ item.getInt32(key.c_str(), &i32);
+ break;
+ }
+ case 3: {
+ item.setInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
+ break;
+ }
+ case 4: {
+ item.addInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
+ break;
+ }
+ case 5: {
+ int64_t i64 = 0;
+ item.getInt64(key.c_str(), &i64);
+ break;
+ }
+ case 6: {
+ item.setDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
+ break;
+ }
+ case 7: {
+ item.addDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
+ break;
+ }
+ case 8: {
+ double d = 0;
+ item.getDouble(key.c_str(), &d);
+ break;
+ }
+ case 9: {
+ item.setCString(key.c_str(), fdp.ConsumeRandomLengthString().c_str());
+ break;
+ }
+ case 10: {
+ // getCString allocates; caller owns and must free.
+ char *s = nullptr;
+ item.getCString(key.c_str(), &s);
+ if (s) free(s);
+ break;
+ }
+ case 11: {
+ std::string s;
+ item.getString(key.c_str(), &s);
+ break;
+ }
+ case 12: {
+ item.setRate(key.c_str(), fdp.ConsumeIntegral<int64_t>(),
+ fdp.ConsumeIntegral<int64_t>());
+ break;
+ }
+ case 13: {
+ int64_t b = 0, h = 0;
+ double d = 0;
+ item.getRate(key.c_str(), &b, &h, &d);
+ break;
+ }
+ case 14: {
+ (void)item.filter(key.c_str());
+ break;
+ }
+ case 15: {
+ // filterNot takes an array of attribute names to drop; no cast
+ // needed since the array holds const char *.
+ const char *arr[1] = {key.c_str()};
+ (void)item.filterNot(1, arr);
+ break;
+ }
+ case 16: {
+ (void)item.toString().c_str();
+ break;
+ }
+ }
+ }
+
+ // Round-trip 1: Parcel serialization into a second item.
+ Parcel p;
+ mediametrics::Item item2;
+
+ (void)item.writeToParcel(&p);
+ p.setDataPosition(0); // rewind for reading
+ (void)item2.readFromParcel(p);
+
+ // Round-trip 2: byte-string serialization into the same second item.
+ char *byteData = nullptr;
+ size_t length = 0;
+ (void)item.writeToByteString(&byteData, &length);
+ (void)item2.readFromByteString(byteData, length);
+ if (byteData) {
+ free(byteData);
+ }
+
+ sp<MediaMetricsService> mediaMetricsService = new MediaMetricsService();
+ mediaMetricsService->submit(&item2);
+}
+
+// Grows a LogItem past its initial 1-byte static capacity by setting
+// many int32 properties, then parses the resulting byte buffer back
+// into a plain Item and submits it to a fresh MediaMetricsService.
+void MediaMetricsServiceFuzzer::invokeItemExpansion(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ // Template parameter 1 forces immediate buffer expansion on first set().
+ mediametrics::LogItem<1> item("FuzzItem");
+ item.setPid(fdp.ConsumeIntegral<int16_t>()).setUid(fdp.ConsumeIntegral<int16_t>());
+
+ while (fdp.remaining_bytes()) {
+ int32_t i = fdp.ConsumeIntegral<int32_t>();
+ item.set(std::to_string(i).c_str(), (int32_t)i);
+ }
+ item.updateHeader();
+
+ mediametrics::Item item2;
+ (void)item2.readFromByteString(item.getBuffer(), item.getLength());
+
+ sp<MediaMetricsService> mediaMetricsService = new MediaMetricsService();
+ mediaMetricsService->submit(&item2);
+}
+
+// Stores one item carrying every supported value type in a TimeMachine,
+// then exercises both the (key, property) and dotted "key.property"
+// lookup forms.
+// NOTE(review): the item is stored under "FuzzKey" but the get() calls
+// below query "Key" — presumably intentional to exercise the lookup-miss
+// path, but confirm; as written the reads can never hit.
+void MediaMetricsServiceFuzzer::invokeTimeMachineStorage(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ auto item = std::make_shared<mediametrics::Item>("FuzzKey");
+ int32_t i32 = fdp.ConsumeIntegral<int32_t>();
+ int64_t i64 = fdp.ConsumeIntegral<int64_t>();
+ double d = fdp.ConsumeFloatingPoint<double>();
+ std::string str = fdp.ConsumeRandomLengthString();
+ std::pair<int64_t, int64_t> pair(fdp.ConsumeIntegral<int64_t>(),
+ fdp.ConsumeIntegral<int64_t>());
+ (*item).set("i32", i32).set("i64", i64).set("double", d).set("string", str).set("rate", pair);
+
+ android::mediametrics::TimeMachine timeMachine;
+ timeMachine.put(item, true);
+
+ // Two-argument form: (key, property).
+ timeMachine.get("Key", "i32", &i32, -1);
+
+ timeMachine.get("Key", "i64", &i64, -1);
+
+ timeMachine.get("Key", "double", &d, -1);
+
+ timeMachine.get("Key", "string", &str, -1);
+
+ // Dotted form: "key.property".
+ timeMachine.get("Key.i32", &i32, -1);
+
+ timeMachine.get("Key.i64", &i64, -1);
+
+ timeMachine.get("Key.double", &d, -1);
+
+ str.clear();
+ timeMachine.get("Key.string", &str, -1);
+}
+
+// Pushes three items through a TransactionLog configured with low/high
+// water marks of 1/2, forcing garbage collection on overflow. The
+// "[Key1]prop" property names exercise cross-key property attribution.
+void MediaMetricsServiceFuzzer::invokeTransactionLog(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ auto item = std::make_shared<mediametrics::Item>("Key1");
+ (*item)
+ .set("one", fdp.ConsumeIntegral<int32_t>())
+ .set("two", fdp.ConsumeIntegral<int32_t>())
+ .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+ android::mediametrics::TransactionLog transactionLog(
+ kLogItemsLowWater, kLogItemsHighWater); // keep at most 2 items
+ transactionLog.size();
+
+ transactionLog.put(item);
+ transactionLog.size();
+
+ auto item2 = std::make_shared<mediametrics::Item>("Key2");
+ (*item2)
+ .set("three", fdp.ConsumeIntegral<int32_t>())
+ .set("[Key1]three", fdp.ConsumeIntegral<int32_t>())
+ .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+ transactionLog.put(item2);
+ transactionLog.size();
+
+ // Third put exceeds the high water mark and triggers eviction.
+ auto item3 = std::make_shared<mediametrics::Item>("Key3");
+ (*item3)
+ .set("six", fdp.ConsumeIntegral<int32_t>())
+ .set("[Key1]four", fdp.ConsumeIntegral<int32_t>()) // affects Key1
+ .set("[Key1]five", fdp.ConsumeIntegral<int32_t>()) // affects key1
+ .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+ transactionLog.put(item3);
+ transactionLog.size();
+}
+
+// Registers fuzz-named ".event" actions on an AnalyticsActions registry,
+// then replays the same input to build matching items and execute any
+// actions they trigger.
+void MediaMetricsServiceFuzzer::invokeAnalyticsAction(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+ mediametrics::AnalyticsActions analyticsActions;
+ bool action = false;
+
+ while (fdp.remaining_bytes()) {
+ analyticsActions.addAction(
+ (fdp.ConsumeRandomLengthString() + std::string(".event")).c_str(),
+ fdp.ConsumeRandomLengthString(),
+ std::make_shared<mediametrics::AnalyticsActions::Function>(
+ [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+ // Side-effect sink so the action body is not optimized away.
+ action = true;
+ }));
+ }
+
+ // Second provider over the same bytes reproduces the strings used above.
+ FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
+
+ while (fdp2.remaining_bytes()) {
+ // make a test item
+ auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
+ (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
+
+ // get the actions and execute them
+ // NOTE(review): this loop variable shadows the outer bool `action`
+ // captured by the lambdas above; harmless, but renaming would help.
+ auto actions = analyticsActions.getActionsForItem(item);
+ for (const auto &action : actions) {
+ action->operator()(item);
+ }
+ }
+}
+
+// Submits fuzz-built items to AudioAnalytics with the allowed-uid
+// property matching the item uid, alternating trusted/untrusted
+// submission, then dumps up to 1000 lines to exercise the dump path.
+void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ android::mediametrics::AudioAnalytics audioAnalytics;
+
+ while (fdp.remaining_bytes()) {
+ auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
+ int32_t transactionUid = fdp.ConsumeIntegral<int32_t>(); // arbitrary
+ (*item)
+ .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
+ .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
+ .set(AMEDIAMETRICS_PROP_ALLOWUID, transactionUid)
+ .setUid(transactionUid)
+ .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+ audioAnalytics.submit(item, fdp.ConsumeBool());
+ }
+
+ audioAnalytics.dump(1000);
+}
+
+// Posts fuzz-delayed increment actions onto a TimedAction queue.
+// Delays may be far in the future (or negative seconds), so most
+// actions are only queued, not executed, before the object is destroyed.
+void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ android::mediametrics::TimedAction timedAction;
+ std::atomic_int value = 0;
+
+ while (fdp.remaining_bytes()) {
+ timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
+ [&value] { ++value; });
+ timedAction.size();
+ }
+}
+
+// Entry point for one fuzz iteration: runs every target in sequence.
+// Each invoke* method consumes (data, size) with its own provider, so
+// ordering here does not affect what any individual target sees.
+void MediaMetricsServiceFuzzer::process(const uint8_t *data, size_t size) {
+ invokeStartsWith(data, size);
+ invokeInstantiate(data, size);
+ invokePackageInstallerCheck(data, size);
+ invokeItemManipulation(data, size);
+ invokeItemExpansion(data, size);
+ invokeTimeMachineStorage(data, size);
+ invokeTransactionLog(data, size);
+ invokeAnalyticsAction(data, size);
+ invokeAudioAnalytics(data, size);
+ invokeTimedAction(data, size);
+}
+
+// libFuzzer entry point; empty inputs are rejected up front.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ if (size < 1) {
+ return 0;
+ }
+ MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer;
+ mediaMetricsServiceFuzzer.process(data, size);
+ return 0;
+}
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 3a1eea7..16204de 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -64,6 +64,7 @@
{ "drmmanager", statsd_drmmanager },
{ "extractor", statsd_extractor },
{ "mediadrm", statsd_mediadrm },
+ { "mediaparser", statsd_mediaparser },
{ "nuplayer", statsd_nuplayer },
{ "nuplayer2", statsd_nuplayer },
{ "recorder", statsd_recorder },
@@ -71,7 +72,7 @@
// give me a record, i'll look at the type and upload appropriately
bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item) {
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// get the key
std::string key = item->getKey();
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 19505a4..9b49556 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -25,6 +25,7 @@
extern bool statsd_audiotrack(const mediametrics::Item *);
extern bool statsd_codec(const mediametrics::Item *);
extern bool statsd_extractor(const mediametrics::Item *);
+extern bool statsd_mediaparser(const mediametrics::Item *);
extern bool statsd_nuplayer(const mediametrics::Item *);
extern bool statsd_recorder(const mediametrics::Item *);
diff --git a/services/mediametrics/main_mediametrics.cpp b/services/mediametrics/main_mediametrics.cpp
index 6992c32..3a66538 100644
--- a/services/mediametrics/main_mediametrics.cpp
+++ b/services/mediametrics/main_mediametrics.cpp
@@ -25,9 +25,9 @@
#include <binder/ProcessState.h>
#include <mediautils/LimitProcessMemory.h>
-int main(int argc __unused, char **argv __unused)
+int main(int argc __unused, char **argv)
{
- using namespace android;
+ using namespace android; // NOLINT (clang-tidy)
limitProcessMemory(
"media.metrics.maxmem", /* property that defines limit */
@@ -39,7 +39,8 @@
// to match the service name
// we're replacing "/system/bin/mediametrics" with "media.metrics"
// we add a ".", but discard the path components: we finish with a shorter string
- strcpy(argv[0], MediaMetricsService::kServiceName);
+ const size_t origSize = strlen(argv[0]) + 1; // include null termination.
+ strlcpy(argv[0], MediaMetricsService::kServiceName, origSize);
defaultServiceManager()->addService(
String16(MediaMetricsService::kServiceName), new MediaMetricsService());
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 634c801..6ef2f2c 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_audiopolicy(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -122,4 +122,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 69d1661..76f4b59 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_audiorecord(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -155,4 +155,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 300151b..2ad2562 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_audiothread(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -204,4 +204,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 397cdf3..6b08a78 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_audiotrack(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -146,4 +146,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index f5fa57e..d502b30 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -31,15 +31,16 @@
#include <statslog.h>
+#include "cleaner.h"
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_codec(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -168,6 +169,23 @@
}
// android.media.mediacodec.latency.hist NOT EMITTED
+ // android.media.mediacodec.bitrate_mode string
+ std::string bitrate_mode;
+ if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
+ metrics_proto.set_bitrate_mode(std::move(bitrate_mode));
+ }
+ // android.media.mediacodec.bitrate int32
+ int32_t bitrate = -1;
+ if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
+ metrics_proto.set_bitrate(bitrate);
+ }
+ // android.media.mediacodec.lifetimeMs int64
+ int64_t lifetimeMs = -1;
+ if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
+ lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
+ metrics_proto.set_lifetime_millis(lifetimeMs);
+ }
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize codec metrics");
@@ -188,4 +206,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 4f2e861..ac58929 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -43,67 +43,60 @@
// mediadrm
bool statsd_mediadrm(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
std::string pkgName = item->getPkgName();
int64_t pkgVersionCode = item->getPkgVersionCode();
int64_t mediaApexVersion = 0;
- char *vendor = NULL;
- (void) item->getCString("vendor", &vendor);
- char *description = NULL;
- (void) item->getCString("description", &description);
- char *serialized_metrics = NULL;
- (void) item->getCString("serialized_metrics", &serialized_metrics);
+ std::string vendor;
+ (void) item->getString("vendor", &vendor);
+ std::string description;
+ (void) item->getString("description", &description);
+ std::string serialized_metrics;
+ (void) item->getString("serialized_metrics", &serialized_metrics);
if (enabled_statsd) {
- android::util::BytesField bf_serialized(serialized_metrics ? serialized_metrics : NULL,
- serialized_metrics ? strlen(serialized_metrics)
- : 0);
+ android::util::BytesField bf_serialized(serialized_metrics.c_str(),
+ serialized_metrics.size());
android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
timestamp, pkgName.c_str(), pkgVersionCode,
mediaApexVersion,
- vendor, description,
+ vendor.c_str(),
+ description.c_str(),
bf_serialized);
} else {
- ALOGV("NOT sending: mediadrm private data (len=%zu)",
- serialized_metrics ? strlen(serialized_metrics) : 0);
+ ALOGV("NOT sending: mediadrm private data (len=%zu)", serialized_metrics.size());
}
- free(vendor);
- free(description);
- free(serialized_metrics);
return true;
}
// widevineCDM
bool statsd_widevineCDM(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
std::string pkgName = item->getPkgName();
int64_t pkgVersionCode = item->getPkgVersionCode();
int64_t mediaApexVersion = 0;
- char *serialized_metrics = NULL;
- (void) item->getCString("serialized_metrics", &serialized_metrics);
+ std::string serialized_metrics;
+ (void) item->getString("serialized_metrics", &serialized_metrics);
if (enabled_statsd) {
- android::util::BytesField bf_serialized(serialized_metrics ? serialized_metrics : NULL,
- serialized_metrics ? strlen(serialized_metrics)
- : 0);
+ android::util::BytesField bf_serialized(serialized_metrics.c_str(),
+ serialized_metrics.size());
android::util::stats_write(android::util::MEDIAMETRICS_DRM_WIDEVINE_REPORTED,
timestamp, pkgName.c_str(), pkgVersionCode,
mediaApexVersion,
bf_serialized);
} else {
- ALOGV("NOT sending: widevine private data (len=%zu)",
- serialized_metrics ? strlen(serialized_metrics) : 0);
+ ALOGV("NOT sending: widevine private data (len=%zu)", serialized_metrics.size());
}
- free(serialized_metrics);
return true;
}
@@ -111,7 +104,7 @@
bool statsd_drmmanager(const mediametrics::Item *item)
{
using namespace std::string_literals;
- if (item == NULL) return false;
+ if (item == nullptr) return false;
if (!enabled_statsd) {
ALOGV("NOT sending: drmmanager data");
@@ -123,14 +116,14 @@
int64_t pkgVersionCode = item->getPkgVersionCode();
int64_t mediaApexVersion = 0;
- char *plugin_id = NULL;
- (void) item->getCString("plugin_id", &plugin_id);
- char *description = NULL;
- (void) item->getCString("description", &description);
+ std::string plugin_id;
+ (void) item->getString("plugin_id", &plugin_id);
+ std::string description;
+ (void) item->getString("description", &description);
int32_t method_id = -1;
(void) item->getInt32("method_id", &method_id);
- char *mime_types = NULL;
- (void) item->getCString("mime_types", &mime_types);
+ std::string mime_types;
+ (void) item->getString("mime_types", &mime_types);
// Corresponds to the 13 APIs tracked in the MediametricsDrmManagerReported statsd proto
// Please see also DrmManager::kMethodIdMap
@@ -141,16 +134,15 @@
android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
timestamp, pkgName.c_str(), pkgVersionCode, mediaApexVersion,
- plugin_id, description, method_id, mime_types,
+ plugin_id.c_str(), description.c_str(),
+ method_id, mime_types.c_str(),
methodCounts[0], methodCounts[1], methodCounts[2],
methodCounts[3], methodCounts[4], methodCounts[5],
methodCounts[6], methodCounts[7], methodCounts[8],
methodCounts[9], methodCounts[10], methodCounts[11],
methodCounts[12]);
- free(plugin_id);
- free(description);
- free(mime_types);
return true;
}
+
} // namespace android
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 8574358..4180e0c 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_extractor(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -71,6 +71,22 @@
metrics_proto.set_tracks(ntrk);
}
+ // android.media.mediaextractor.entry string
+ std::string entry_point_string;
+ if (item->getString("android.media.mediaextractor.entry", &entry_point_string)) {
+ stats::mediametrics::ExtractorData::EntryPoint entry_point;
+ if (entry_point_string == "sdk") {
+ entry_point = stats::mediametrics::ExtractorData_EntryPoint_SDK;
+ } else if (entry_point_string == "ndk-with-jvm") {
+ entry_point = stats::mediametrics::ExtractorData_EntryPoint_NDK_WITH_JVM;
+ } else if (entry_point_string == "ndk-no-jvm") {
+ entry_point = stats::mediametrics::ExtractorData_EntryPoint_NDK_NO_JVM;
+ } else {
+ entry_point = stats::mediametrics::ExtractorData_EntryPoint_OTHER;
+ }
+ metrics_proto.set_entry_point(entry_point);
+ }
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize extractor metrics");
@@ -91,4 +107,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
new file mode 100644
index 0000000..262b2ae
--- /dev/null
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "statsd_mediaparser"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaMetricsService.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+// Uploads a "mediaparser" mediametrics item to statsd as a
+// MEDIAMETRICS_MEDIAPARSER_REPORTED atom. Attribute reads deliberately
+// ignore their return values: absent attributes keep sentinel defaults
+// (-1 / empty string). Returns false only for a null item; returns true
+// whether or not statsd logging is enabled.
+bool statsd_mediaparser(const mediametrics::Item *item)
+{
+ if (item == nullptr) {
+ return false;
+ }
+
+ // statsd wrapper data.
+ const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
+ std::string pkgName = item->getPkgName();
+ int64_t pkgVersionCode = item->getPkgVersionCode();
+
+ std::string parserName;
+ item->getString("android.media.mediaparser.parserName", &parserName);
+
+ int32_t createdByName = -1;
+ item->getInt32("android.media.mediaparser.createdByName", &createdByName);
+
+ std::string parserPool;
+ item->getString("android.media.mediaparser.parserPool", &parserPool);
+
+ std::string lastException;
+ item->getString("android.media.mediaparser.lastException", &lastException);
+
+ int64_t resourceByteCount = -1;
+ item->getInt64("android.media.mediaparser.resourceByteCount", &resourceByteCount);
+
+ int64_t durationMillis = -1;
+ item->getInt64("android.media.mediaparser.durationMillis", &durationMillis);
+
+ std::string trackMimeTypes;
+ item->getString("android.media.mediaparser.trackMimeTypes", &trackMimeTypes);
+
+ std::string trackCodecs;
+ item->getString("android.media.mediaparser.trackCodecs", &trackCodecs);
+
+ std::string alteredParameters;
+ item->getString("android.media.mediaparser.alteredParameters", &alteredParameters);
+
+ int32_t videoWidth = -1;
+ item->getInt32("android.media.mediaparser.videoWidth", &videoWidth);
+
+ int32_t videoHeight = -1;
+ item->getInt32("android.media.mediaparser.videoHeight", &videoHeight);
+
+ // Argument order must match the MediametricsMediaParserReported proto.
+ if (enabled_statsd) {
+ (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+ timestamp,
+ pkgName.c_str(),
+ pkgVersionCode,
+ parserName.c_str(),
+ createdByName,
+ parserPool.c_str(),
+ lastException.c_str(),
+ resourceByteCount,
+ durationMillis,
+ trackMimeTypes.c_str(),
+ trackCodecs.c_str(),
+ alteredParameters.c_str(),
+ videoWidth,
+ videoHeight);
+ } else {
+ ALOGV("NOT sending MediaParser media metrics.");
+ }
+
+ return true;
+}
+
+} // namespace android
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index df7e59f..a8d0f55 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -32,7 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
@@ -43,7 +43,7 @@
*/
bool statsd_nuplayer(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -167,4 +167,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 4de1746..2e5ada4 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -32,14 +32,14 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
namespace android {
bool statsd_recorder(const mediametrics::Item *item)
{
- if (item == NULL) return false;
+ if (item == nullptr) return false;
// these go into the statsd wrapper
const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
@@ -186,4 +186,4 @@
return true;
}
-};
+} // namespace android
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index bdeda30..c2e0759 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -21,6 +21,10 @@
"libutils",
],
+ header_libs: [
+ "libaudioutils_headers",
+ ],
+
srcs: [
"mediametrics_tests.cpp",
],
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index f7988f1..478355b 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -20,9 +20,14 @@
#include "MediaMetricsService.h"
#include <stdio.h>
+#include <unordered_set>
#include <gtest/gtest.h>
#include <media/MediaMetricsItem.h>
+#include <system/audio.h>
+
+#include "AudioTypes.h"
+#include "StringUtils.h"
using namespace android;
@@ -35,6 +40,15 @@
return count;
}
+template <typename M>
+ssize_t countDuplicates(const M& map) {
+ std::unordered_set<typename M::mapped_type> s;
+ for (const auto &m : map) {
+ s.emplace(m.second);
+ }
+ return map.size() - s.size();
+}
+
TEST(mediametrics_tests, startsWith) {
std::string s("test");
ASSERT_EQ(true, android::mediametrics::startsWith(s, "te"));
@@ -803,7 +817,7 @@
// TODO: Verify contents of AudioAnalytics.
// Currently there is no getter API in AudioAnalytics besides dump.
- ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+ ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
// untrusted entities can add to an existing key
@@ -839,7 +853,7 @@
// TODO: Verify contents of AudioAnalytics.
// Currently there is no getter API in AudioAnalytics besides dump.
- ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+ ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
// untrusted entities can add to an existing key
@@ -884,12 +898,12 @@
}
TEST(mediametrics_tests, device_parsing) {
- auto devaddr = android::MediaMetricsService::getDeviceAddressPairs("(DEVICE, )");
+ auto devaddr = android::mediametrics::stringutils::getDeviceAddressPairs("(DEVICE, )");
ASSERT_EQ((size_t)1, devaddr.size());
ASSERT_EQ("DEVICE", devaddr[0].first);
ASSERT_EQ("", devaddr[0].second);
- devaddr = android::MediaMetricsService::getDeviceAddressPairs(
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
"(DEVICE1, A)|(D, ADDRB)");
ASSERT_EQ((size_t)2, devaddr.size());
ASSERT_EQ("DEVICE1", devaddr[0].first);
@@ -897,7 +911,7 @@
ASSERT_EQ("D", devaddr[1].first);
ASSERT_EQ("ADDRB", devaddr[1].second);
- devaddr = android::MediaMetricsService::getDeviceAddressPairs(
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
"(A,B)|(C,D)");
ASSERT_EQ((size_t)2, devaddr.size());
ASSERT_EQ("A", devaddr[0].first);
@@ -905,7 +919,7 @@
ASSERT_EQ("C", devaddr[1].first);
ASSERT_EQ("D", devaddr[1].second);
- devaddr = android::MediaMetricsService::getDeviceAddressPairs(
+ devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
" ( A1 , B ) | ( C , D2 ) ");
ASSERT_EQ((size_t)2, devaddr.size());
ASSERT_EQ("A1", devaddr[0].first);
@@ -925,6 +939,132 @@
ASSERT_EQ((size_t)1, timedAction.size());
}
+// Ensure we don't introduce unexpected duplicates into our maps.
+TEST(mediametrics_tests, audio_types_tables) {
+ using namespace android::mediametrics::types;
+
+ ASSERT_EQ(0, countDuplicates(getAudioCallerNameMap()));
+ ASSERT_EQ(2, countDuplicates(getAudioDeviceInMap())); // has dups
+ ASSERT_EQ(1, countDuplicates(getAudioDeviceOutMap())); // has dups
+ ASSERT_EQ(0, countDuplicates(getAudioThreadTypeMap()));
+ ASSERT_EQ(0, countDuplicates(getAudioTrackTraitsMap()));
+}
+
+// Check our string validation (before logging to statsd).
+// This variant checks the logged, possibly shortened string.
+TEST(mediametrics_tests, audio_types_string) {
+ using namespace android::mediametrics::types;
+
+ ASSERT_EQ("java", (lookup<CALLER_NAME, std::string>)("java"));
+ ASSERT_EQ("", (lookup<CALLER_NAME, std::string>)("random"));
+
+ ASSERT_EQ("SPEECH", (lookup<CONTENT_TYPE, std::string>)("AUDIO_CONTENT_TYPE_SPEECH"));
+ ASSERT_EQ("", (lookup<CONTENT_TYPE, std::string>)("random"));
+
+ ASSERT_EQ("FLAC", (lookup<ENCODING, std::string>)("AUDIO_FORMAT_FLAC"));
+ ASSERT_EQ("", (lookup<ENCODING, std::string>)("random"));
+
+ ASSERT_EQ("USB_DEVICE", (lookup<INPUT_DEVICE, std::string>)("AUDIO_DEVICE_IN_USB_DEVICE"));
+ ASSERT_EQ("BUILTIN_MIC|WIRED_HEADSET", (lookup<INPUT_DEVICE, std::string>)(
+ "AUDIO_DEVICE_IN_BUILTIN_MIC|AUDIO_DEVICE_IN_WIRED_HEADSET"));
+ ASSERT_EQ("", (lookup<INPUT_DEVICE, std::string>)("random"));
+
+ ASSERT_EQ("RAW", (lookup<INPUT_FLAG, std::string>)("AUDIO_INPUT_FLAG_RAW"));
+ ASSERT_EQ("HW_AV_SYNC|VOIP_TX", (lookup<INPUT_FLAG, std::string>)(
+ "AUDIO_INPUT_FLAG_HW_AV_SYNC|AUDIO_INPUT_FLAG_VOIP_TX"));
+ ASSERT_EQ("", (lookup<INPUT_FLAG, std::string>)("random"));
+
+ ASSERT_EQ("BLUETOOTH_SCO_CARKIT",
+ (lookup<OUTPUT_DEVICE, std::string>)("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"));
+ ASSERT_EQ("SPEAKER|HDMI", (lookup<OUTPUT_DEVICE, std::string>)(
+ "AUDIO_DEVICE_OUT_SPEAKER|AUDIO_DEVICE_OUT_HDMI"));
+ ASSERT_EQ("", (lookup<OUTPUT_DEVICE, std::string>)("random"));
+
+ ASSERT_EQ("PRIMARY", (lookup<OUTPUT_FLAG, std::string>)("AUDIO_OUTPUT_FLAG_PRIMARY"));
+ ASSERT_EQ("DEEP_BUFFER|NON_BLOCKING", (lookup<OUTPUT_FLAG, std::string>)(
+ "AUDIO_OUTPUT_FLAG_DEEP_BUFFER|AUDIO_OUTPUT_FLAG_NON_BLOCKING"));
+ ASSERT_EQ("", (lookup<OUTPUT_FLAG, std::string>)("random"));
+
+ ASSERT_EQ("MIC", (lookup<SOURCE_TYPE, std::string>)("AUDIO_SOURCE_MIC"));
+ ASSERT_EQ("", (lookup<SOURCE_TYPE, std::string>)("random"));
+
+ ASSERT_EQ("TTS", (lookup<STREAM_TYPE, std::string>)("AUDIO_STREAM_TTS"));
+ ASSERT_EQ("", (lookup<STREAM_TYPE, std::string>)("random"));
+
+ ASSERT_EQ("DIRECT", (lookup<THREAD_TYPE, std::string>)("DIRECT"));
+ ASSERT_EQ("", (lookup<THREAD_TYPE, std::string>)("random"));
+
+ ASSERT_EQ("static", (lookup<TRACK_TRAITS, std::string>)("static"));
+ ASSERT_EQ("", (lookup<TRACK_TRAITS, std::string>)("random"));
+
+ ASSERT_EQ("VOICE_COMMUNICATION",
+ (lookup<USAGE, std::string>)("AUDIO_USAGE_VOICE_COMMUNICATION"));
+ ASSERT_EQ("", (lookup<USAGE, std::string>)("random"));
+}
+
+// Check our string validation (before logging to statsd).
+// This variant checks integral value logging.
+TEST(mediametrics_tests, audio_types_integer) {
+ using namespace android::mediametrics::types;
+
+ ASSERT_EQ(2, (lookup<CALLER_NAME, int32_t>)("java"));
+ ASSERT_EQ(0, (lookup<CALLER_NAME, int32_t>)("random")); // 0 == unknown
+
+ ASSERT_EQ((int32_t)AUDIO_CONTENT_TYPE_SPEECH,
+ (lookup<CONTENT_TYPE, int32_t>)("AUDIO_CONTENT_TYPE_SPEECH"));
+ ASSERT_EQ((int32_t)AUDIO_CONTENT_TYPE_UNKNOWN, (lookup<CONTENT_TYPE, int32_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_FORMAT_FLAC, (lookup<ENCODING, int32_t>)("AUDIO_FORMAT_FLAC"));
+ ASSERT_EQ((int32_t)AUDIO_FORMAT_INVALID, (lookup<ENCODING, int32_t>)("random"));
+
+ ASSERT_EQ(getAudioDeviceInMap().at("AUDIO_DEVICE_IN_USB_DEVICE"),
+ (lookup<INPUT_DEVICE, int64_t>)("AUDIO_DEVICE_IN_USB_DEVICE"));
+ ASSERT_EQ(getAudioDeviceInMap().at("AUDIO_DEVICE_IN_BUILTIN_MIC")
+ | getAudioDeviceInMap().at("AUDIO_DEVICE_IN_WIRED_HEADSET"),
+ (lookup<INPUT_DEVICE, int64_t>)(
+ "AUDIO_DEVICE_IN_BUILTIN_MIC|AUDIO_DEVICE_IN_WIRED_HEADSET"));
+ ASSERT_EQ(0, (lookup<INPUT_DEVICE, int64_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_INPUT_FLAG_RAW,
+ (lookup<INPUT_FLAG, int32_t>)("AUDIO_INPUT_FLAG_RAW"));
+ ASSERT_EQ((int32_t)AUDIO_INPUT_FLAG_HW_AV_SYNC
+ | (int32_t)AUDIO_INPUT_FLAG_VOIP_TX,
+ (lookup<INPUT_FLAG, int32_t>)(
+ "AUDIO_INPUT_FLAG_HW_AV_SYNC|AUDIO_INPUT_FLAG_VOIP_TX"));
+ ASSERT_EQ(0, (lookup<INPUT_FLAG, int32_t>)("random"));
+
+ ASSERT_EQ(getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"),
+ (lookup<OUTPUT_DEVICE, int64_t>)("AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"));
+ ASSERT_EQ(getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_SPEAKER")
+ | getAudioDeviceOutMap().at("AUDIO_DEVICE_OUT_HDMI"),
+ (lookup<OUTPUT_DEVICE, int64_t>)(
+ "AUDIO_DEVICE_OUT_SPEAKER|AUDIO_DEVICE_OUT_HDMI"));
+ ASSERT_EQ(0, (lookup<OUTPUT_DEVICE, int64_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_OUTPUT_FLAG_PRIMARY,
+ (lookup<OUTPUT_FLAG, int32_t>)("AUDIO_OUTPUT_FLAG_PRIMARY"));
+ ASSERT_EQ((int32_t)AUDIO_OUTPUT_FLAG_DEEP_BUFFER | (int32_t)AUDIO_OUTPUT_FLAG_NON_BLOCKING,
+ (lookup<OUTPUT_FLAG, int32_t>)(
+ "AUDIO_OUTPUT_FLAG_DEEP_BUFFER|AUDIO_OUTPUT_FLAG_NON_BLOCKING"));
+ ASSERT_EQ(0, (lookup<OUTPUT_FLAG, int32_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_SOURCE_MIC, (lookup<SOURCE_TYPE, int32_t>)("AUDIO_SOURCE_MIC"));
+ ASSERT_EQ((int32_t)AUDIO_SOURCE_DEFAULT, (lookup<SOURCE_TYPE, int32_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_STREAM_TTS, (lookup<STREAM_TYPE, int32_t>)("AUDIO_STREAM_TTS"));
+ ASSERT_EQ((int32_t)AUDIO_STREAM_DEFAULT, (lookup<STREAM_TYPE, int32_t>)("random"));
+
+ ASSERT_EQ(1, (lookup<THREAD_TYPE, int32_t>)("DIRECT"));
+ ASSERT_EQ(-1, (lookup<THREAD_TYPE, int32_t>)("random"));
+
+ ASSERT_EQ(getAudioTrackTraitsMap().at("static"), (lookup<TRACK_TRAITS, int32_t>)("static"));
+ ASSERT_EQ(0, (lookup<TRACK_TRAITS, int32_t>)("random"));
+
+ ASSERT_EQ((int32_t)AUDIO_USAGE_VOICE_COMMUNICATION,
+ (lookup<USAGE, int32_t>)("AUDIO_USAGE_VOICE_COMMUNICATION"));
+ ASSERT_EQ((int32_t)AUDIO_USAGE_UNKNOWN, (lookup<USAGE, int32_t>)("random"));
+}
+
#if 0
// Stress test code for garbage collection, you need to enable AID_SHELL as trusted to run
// in MediaMetricsService.cpp.
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index a3519d5..cdf5a4e 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -1,10 +1,53 @@
+filegroup {
+ name: "resourcemanager_aidl",
+ srcs: [
+ "aidl/android/media/IResourceManagerClient.aidl",
+ "aidl/android/media/IResourceManagerService.aidl",
+ "aidl/android/media/MediaResourceType.aidl",
+ "aidl/android/media/MediaResourceSubType.aidl",
+ "aidl/android/media/MediaResourceParcel.aidl",
+ "aidl/android/media/MediaResourcePolicyParcel.aidl",
+ ],
+ path: "aidl",
+}
+filegroup {
+ name: "resourceobserver_aidl",
+ srcs: [
+ "aidl/android/media/IResourceObserver.aidl",
+ "aidl/android/media/IResourceObserverService.aidl",
+ "aidl/android/media/MediaObservableEvent.aidl",
+ "aidl/android/media/MediaObservableFilter.aidl",
+ "aidl/android/media/MediaObservableType.aidl",
+ "aidl/android/media/MediaObservableParcel.aidl",
+ ],
+ path: "aidl",
+}
-cc_library_shared {
+aidl_interface {
+ name: "resourcemanager_aidl_interface",
+ unstable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ ":resourcemanager_aidl",
+ ],
+}
+
+aidl_interface {
+ name: "resourceobserver_aidl_interface",
+ unstable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ ":resourceobserver_aidl",
+ ],
+}
+
+cc_library {
name: "libresourcemanagerservice",
srcs: [
"ResourceManagerService.cpp",
+ "ResourceObserverService.cpp",
"ServiceLog.cpp",
],
@@ -17,6 +60,10 @@
"liblog",
],
+ static_libs: [
+ "resourceobserver_aidl_interface-ndk_platform",
+ ],
+
include_dirs: ["frameworks/av/include"],
cflags: [
@@ -25,5 +72,4 @@
],
export_include_dirs: ["."],
-
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index ff45c87..289cffd 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -22,6 +22,7 @@
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <binder/IMediaResourceMonitor.h>
+#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <cutils/sched_policy.h>
#include <dirent.h>
@@ -36,18 +37,54 @@
#include <unistd.h>
#include "ResourceManagerService.h"
+#include "ResourceObserverService.h"
#include "ServiceLog.h"
namespace android {
+//static
+std::mutex ResourceManagerService::sCookieLock;
+//static
+uintptr_t ResourceManagerService::sCookieCounter = 0;
+//static
+std::map<uintptr_t, sp<DeathNotifier> > ResourceManagerService::sCookieToDeathNotifierMap;
+
+class DeathNotifier : public RefBase {
+public:
+ DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+ int pid, int64_t clientId);
+
+ virtual ~DeathNotifier() {}
+
+ // Implement death recipient
+ static void BinderDiedCallback(void* cookie);
+ virtual void binderDied();
+
+protected:
+ std::weak_ptr<ResourceManagerService> mService;
+ int mPid;
+ int64_t mClientId;
+};
+
DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
int pid, int64_t clientId)
: mService(service), mPid(pid), mClientId(clientId) {}
//static
void DeathNotifier::BinderDiedCallback(void* cookie) {
- auto thiz = static_cast<DeathNotifier*>(cookie);
- thiz->binderDied();
+ sp<DeathNotifier> notifier;
+ {
+ std::scoped_lock lock{ResourceManagerService::sCookieLock};
+ auto it = ResourceManagerService::sCookieToDeathNotifierMap.find(
+ reinterpret_cast<uintptr_t>(cookie));
+ if (it == ResourceManagerService::sCookieToDeathNotifierMap.end()) {
+ return;
+ }
+ notifier = it->second;
+ }
+ if (notifier.get() != nullptr) {
+ notifier->binderDied();
+ }
}
void DeathNotifier::binderDied() {
@@ -57,9 +94,31 @@
ALOGW("ResourceManagerService is dead as well.");
return;
}
- service->removeResource(mPid, mClientId, false);
service->overridePid(mPid, -1);
+ // thiz is freed in the call below, so it must be last call referring thiz
+ service->removeResource(mPid, mClientId, false /*checkValid*/);
+}
+
+class OverrideProcessInfoDeathNotifier : public DeathNotifier {
+public:
+ OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+ int pid) : DeathNotifier(service, pid, 0) {}
+
+ virtual ~OverrideProcessInfoDeathNotifier() {}
+
+ virtual void binderDied();
+};
+
+void OverrideProcessInfoDeathNotifier::binderDied() {
+ // Don't check for pid validity since we know it's already dead.
+ std::shared_ptr<ResourceManagerService> service = mService.lock();
+ if (service == nullptr) {
+ ALOGW("ResourceManagerService is dead as well.");
+ return;
+ }
+
+ service->removeProcessInfoOverride(mPid);
}
template <typename T>
@@ -114,6 +173,7 @@
info.uid = uid;
info.clientId = clientId;
info.client = client;
+ info.cookie = 0;
info.pendingRemoval = false;
index = infos.add(clientId, info);
@@ -265,6 +325,13 @@
if (status != STATUS_OK) {
return;
}
+
+ std::shared_ptr<ResourceObserverService> observerService =
+ ResourceObserverService::instantiate();
+
+ if (observerService != nullptr) {
+ service->setObserverService(observerService);
+ }
// TODO: mediaserver main() is already starting the thread pool,
// move this to mediaserver main() when other services in mediaserver
// are converted to ndk-platform aidl.
@@ -273,6 +340,11 @@
ResourceManagerService::~ResourceManagerService() {}
+void ResourceManagerService::setObserverService(
+ const std::shared_ptr<ResourceObserverService>& observerService) {
+ mObserverService = observerService;
+}
+
Status ResourceManagerService::config(const std::vector<MediaResourcePolicyParcel>& policies) {
String8 log = String8::format("config(%s)", getString(policies).string());
mServiceLog->add(log);
@@ -351,11 +423,16 @@
Mutex::Autolock lock(mLock);
if (!mProcessInfo->isValidPid(pid)) {
- ALOGE("Rejected addResource call with invalid pid.");
- return Status::fromServiceSpecificError(BAD_VALUE);
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ uid_t callingUid = IPCThreadState::self()->getCallingUid();
+ ALOGW("%s called with untrusted pid %d, using calling pid %d, uid %d", __FUNCTION__,
+ pid, callingPid, callingUid);
+ pid = callingPid;
+ uid = callingUid;
}
ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
+ ResourceList resourceAdded;
for (size_t i = 0; i < resources.size(); ++i) {
const auto &res = resources[i];
@@ -377,11 +454,20 @@
} else {
mergeResources(info.resources[resType], res);
}
+ // Add it to the list of added resources for observers.
+ auto it = resourceAdded.find(resType);
+ if (it == resourceAdded.end()) {
+ resourceAdded[resType] = res;
+ } else {
+ mergeResources(it->second, res);
+ }
}
- if (info.deathNotifier == nullptr && client != nullptr) {
- info.deathNotifier = new DeathNotifier(ref<ResourceManagerService>(), pid, clientId);
- AIBinder_linkToDeath(client->asBinder().get(),
- mDeathRecipient.get(), info.deathNotifier.get());
+ if (info.cookie == 0 && client != nullptr) {
+ info.cookie = addCookieAndLink_l(client->asBinder(),
+ new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+ }
+ if (mObserverService != nullptr && !resourceAdded.empty()) {
+ mObserverService->onResourceAdded(uid, pid, resourceAdded);
}
notifyResourceGranted(pid, resources);
return Status::ok();
@@ -396,8 +482,10 @@
Mutex::Autolock lock(mLock);
if (!mProcessInfo->isValidPid(pid)) {
- ALOGE("Rejected removeResource call with invalid pid.");
- return Status::fromServiceSpecificError(BAD_VALUE);
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+ pid, callingPid);
+ pid = callingPid;
}
ssize_t index = mMap.indexOfKey(pid);
if (index < 0) {
@@ -413,7 +501,7 @@
}
ResourceInfo &info = infos.editValueAt(index);
-
+ ResourceList resourceRemoved;
for (size_t i = 0; i < resources.size(); ++i) {
const auto &res = resources[i];
const auto resType = std::tuple(res.type, res.subType, res.id);
@@ -425,19 +513,32 @@
// ignore if we don't have it
if (info.resources.find(resType) != info.resources.end()) {
MediaResourceParcel &resource = info.resources[resType];
+ MediaResourceParcel actualRemoved = res;
if (resource.value > res.value) {
resource.value -= res.value;
} else {
onLastRemoved(res, info);
+ actualRemoved.value = resource.value;
info.resources.erase(resType);
}
+
+ // Add it to the list of removed resources for observers.
+ auto it = resourceRemoved.find(resType);
+ if (it == resourceRemoved.end()) {
+ resourceRemoved[resType] = actualRemoved;
+ } else {
+ mergeResources(it->second, actualRemoved);
+ }
}
}
+ if (mObserverService != nullptr && !resourceRemoved.empty()) {
+ mObserverService->onResourceRemoved(info.uid, pid, resourceRemoved);
+ }
return Status::ok();
}
Status ResourceManagerService::removeClient(int32_t pid, int64_t clientId) {
- removeResource(pid, clientId, true);
+ removeResource(pid, clientId, true /*checkValid*/);
return Status::ok();
}
@@ -449,8 +550,10 @@
Mutex::Autolock lock(mLock);
if (checkValid && !mProcessInfo->isValidPid(pid)) {
- ALOGE("Rejected removeResource call with invalid pid.");
- return Status::fromServiceSpecificError(BAD_VALUE);
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+ pid, callingPid);
+ pid = callingPid;
}
ssize_t index = mMap.indexOfKey(pid);
if (index < 0) {
@@ -470,8 +573,11 @@
onLastRemoved(it->second, info);
}
- AIBinder_unlinkToDeath(info.client->asBinder().get(),
- mDeathRecipient.get(), info.deathNotifier.get());
+ removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+
+ if (mObserverService != nullptr && !info.resources.empty()) {
+ mObserverService->onResourceRemoved(info.uid, pid, info.resources);
+ }
infos.removeItemsAt(index);
return Status::ok();
@@ -502,8 +608,10 @@
{
Mutex::Autolock lock(mLock);
if (!mProcessInfo->isValidPid(callingPid)) {
- ALOGE("Rejected reclaimResource call with invalid callingPid.");
- return Status::fromServiceSpecificError(BAD_VALUE);
+ pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+ callingPid, actualCallingPid);
+ callingPid = actualCallingPid;
}
const MediaResourceParcel *secureCodec = NULL;
const MediaResourceParcel *nonSecureCodec = NULL;
@@ -573,13 +681,19 @@
}
}
+ *_aidl_return = reclaimInternal(clients);
+ return Status::ok();
+}
+
+bool ResourceManagerService::reclaimInternal(
+ const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
if (clients.size() == 0) {
- return Status::ok();
+ return false;
}
std::shared_ptr<IResourceManagerClient> failedClient;
for (size_t i = 0; i < clients.size(); ++i) {
- log = String8::format("reclaimResource from client %p", clients[i].get());
+ String8 log = String8::format("reclaimResource from client %p", clients[i].get());
mServiceLog->add(log);
bool success;
Status status = clients[i]->reclaimResource(&success);
@@ -590,8 +704,7 @@
}
if (failedClient == NULL) {
- *_aidl_return = true;
- return Status::ok();
+ return true;
}
{
@@ -616,7 +729,7 @@
}
}
- return Status::ok();
+ return false;
}
Status ResourceManagerService::overridePid(
@@ -649,6 +762,83 @@
return Status::ok();
}
+Status ResourceManagerService::overrideProcessInfo(
+ const std::shared_ptr<IResourceManagerClient>& client,
+ int pid,
+ int procState,
+ int oomScore) {
+ String8 log = String8::format("overrideProcessInfo(pid %d, procState %d, oomScore %d)",
+ pid, procState, oomScore);
+ mServiceLog->add(log);
+
+ // Only allow the override if the caller already can access process state and oom scores.
+ int callingPid = AIBinder_getCallingPid();
+ if (callingPid != getpid() && (callingPid != pid || !checkCallingPermission(String16(
+ "android.permission.GET_PROCESS_STATE_AND_OOM_SCORE")))) {
+ ALOGE("Permission Denial: overrideProcessInfo method from pid=%d", callingPid);
+ return Status::fromServiceSpecificError(PERMISSION_DENIED);
+ }
+
+ if (client == nullptr) {
+ return Status::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ Mutex::Autolock lock(mLock);
+ removeProcessInfoOverride_l(pid);
+
+ if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+ // Override value is rejected by ProcessInfo.
+ return Status::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+ new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+
+ mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
+
+ return Status::ok();
+}
+
+uintptr_t ResourceManagerService::addCookieAndLink_l(
+ ::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier) {
+ std::scoped_lock lock{sCookieLock};
+
+ uintptr_t cookie;
+ // Need to skip cookie 0 (if it wraps around). ResourceInfo has cookie initialized to 0
+ // indicating the death notifier is not created yet.
+ while ((cookie = ++sCookieCounter) == 0);
+ AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ sCookieToDeathNotifierMap.emplace(cookie, notifier);
+
+ return cookie;
+}
+
+void ResourceManagerService::removeCookieAndUnlink_l(
+ ::ndk::SpAIBinder binder, uintptr_t cookie) {
+ std::scoped_lock lock{sCookieLock};
+ AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ sCookieToDeathNotifierMap.erase(cookie);
+}
+
+void ResourceManagerService::removeProcessInfoOverride(int pid) {
+ Mutex::Autolock lock(mLock);
+
+ removeProcessInfoOverride_l(pid);
+}
+
+void ResourceManagerService::removeProcessInfoOverride_l(int pid) {
+ auto it = mProcessInfoOverrideMap.find(pid);
+ if (it == mProcessInfoOverrideMap.end()) {
+ return;
+ }
+
+ mProcessInfo->removeProcessInfoOverride(pid);
+
+ removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+
+ mProcessInfoOverrideMap.erase(pid);
+}
+
Status ResourceManagerService::markClientForPendingRemoval(int32_t pid, int64_t clientId) {
String8 log = String8::format(
"markClientForPendingRemoval(pid %d, clientId %lld)",
@@ -657,8 +847,10 @@
Mutex::Autolock lock(mLock);
if (!mProcessInfo->isValidPid(pid)) {
- ALOGE("Rejected markClientForPendingRemoval call with invalid pid.");
- return Status::fromServiceSpecificError(BAD_VALUE);
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+ pid, callingPid);
+ pid = callingPid;
}
ssize_t index = mMap.indexOfKey(pid);
if (index < 0) {
@@ -679,6 +871,38 @@
return Status::ok();
}
+Status ResourceManagerService::reclaimResourcesFromClientsPendingRemoval(int32_t pid) {
+ String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
+ mServiceLog->add(log);
+
+ Vector<std::shared_ptr<IResourceManagerClient>> clients;
+ {
+ Mutex::Autolock lock(mLock);
+ if (!mProcessInfo->isValidPid(pid)) {
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+ pid, callingPid);
+ pid = callingPid;
+ }
+
+ for (MediaResource::Type type : {MediaResource::Type::kSecureCodec,
+ MediaResource::Type::kNonSecureCodec,
+ MediaResource::Type::kGraphicMemory,
+ MediaResource::Type::kDrmSession}) {
+ std::shared_ptr<IResourceManagerClient> client;
+ if (getBiggestClient_l(pid, type, &client, true /* pendingRemovalOnly */)) {
+ clients.add(client);
+ break;
+ }
+ }
+ }
+
+ if (!clients.empty()) {
+ reclaimInternal(clients);
+ }
+ return Status::ok();
+}
+
bool ResourceManagerService::getPriority_l(int pid, int* priority) {
int newPid = pid;
@@ -802,7 +1026,8 @@
bool pendingRemovalOnly) {
ssize_t index = mMap.indexOfKey(pid);
if (index < 0) {
- ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
+ ALOGE_IF(!pendingRemovalOnly,
+ "getBiggestClient_l: can't find resource info for pid %d", pid);
return false;
}
@@ -826,7 +1051,9 @@
}
if (clientTemp == NULL) {
- ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", asString(type), pid);
+ ALOGE_IF(!pendingRemovalOnly,
+ "getBiggestClient_l: can't find resource type %s for pid %d",
+ asString(type), pid);
return false;
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 49c247e..9c2636e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -19,6 +19,7 @@
#define ANDROID_MEDIA_RESOURCEMANAGERSERVICE_H
#include <map>
+#include <mutex>
#include <aidl/android/media/BnResourceManagerService.h>
#include <arpa/inet.h>
@@ -33,6 +34,7 @@
class DeathNotifier;
class ResourceManagerService;
+class ResourceObserverService;
class ServiceLog;
struct ProcessInfoInterface;
@@ -50,7 +52,7 @@
int64_t clientId;
uid_t uid;
std::shared_ptr<IResourceManagerClient> client;
- sp<DeathNotifier> deathNotifier;
+ uintptr_t cookie{0};
ResourceList resources;
bool pendingRemoval{false};
};
@@ -59,22 +61,6 @@
typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
-class DeathNotifier : public RefBase {
-public:
- DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
- int pid, int64_t clientId);
-
- ~DeathNotifier() {}
-
- // Implement death recipient
- static void BinderDiedCallback(void* cookie);
- void binderDied();
-
-private:
- std::weak_ptr<ResourceManagerService> mService;
- int mPid;
- int64_t mClientId;
-};
class ResourceManagerService : public BnResourceManagerService {
public:
struct SystemCallbackInterface : public RefBase {
@@ -95,6 +81,8 @@
const sp<ProcessInfoInterface> &processInfo,
const sp<SystemCallbackInterface> &systemResource);
virtual ~ResourceManagerService();
+ void setObserverService(
+ const std::shared_ptr<ResourceObserverService>& observerService);
// IResourceManagerService interface
Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
@@ -125,12 +113,27 @@
int originalPid,
int newPid) override;
+ Status overrideProcessInfo(
+ const std::shared_ptr<IResourceManagerClient>& client,
+ int pid,
+ int procState,
+ int oomScore) override;
+
Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
+ Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
+
Status removeResource(int pid, int64_t clientId, bool checkValid);
private:
friend class ResourceManagerServiceTest;
+ friend class DeathNotifier;
+ friend class OverrideProcessInfoDeathNotifier;
+
+ // Reclaims resources from |clients|. Returns true if reclaim succeeded
+ // for all clients.
+ bool reclaimInternal(
+ const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
// Gets the list of all the clients who own the specified resource type.
// Returns false if any client belongs to a process with higher priority than the
@@ -170,6 +173,12 @@
// Get priority from process's pid
bool getPriority_l(int pid, int* priority);
+ void removeProcessInfoOverride(int pid);
+
+ void removeProcessInfoOverride_l(int pid);
+ uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
+ void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
sp<SystemCallbackInterface> mSystemCB;
@@ -179,7 +188,17 @@
bool mSupportsSecureWithNonSecureCodec;
int32_t mCpuBoostCount;
::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+ struct ProcessInfoOverride {
+ uintptr_t cookie;
+ std::shared_ptr<IResourceManagerClient> client;
+ };
std::map<int, int> mOverridePidMap;
+ std::map<pid_t, ProcessInfoOverride> mProcessInfoOverrideMap;
+ static std::mutex sCookieLock;
+ static uintptr_t sCookieCounter GUARDED_BY(sCookieLock);
+ static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
+ GUARDED_BY(sCookieLock);
+ std::shared_ptr<ResourceObserverService> mObserverService;
};
// ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
new file mode 100644
index 0000000..9cc6fe4
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -0,0 +1,320 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService"
+#include <utils/Log.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+#include "ResourceObserverService.h"
+
+namespace android {
+
+using ::aidl::android::media::MediaResourceParcel;
+using ::aidl::android::media::MediaObservableEvent;
+
+// MediaObservableEvent will be used as uint64_t flags.
+static_assert(sizeof(MediaObservableEvent) == sizeof(uint64_t));
+
+static std::vector<MediaObservableEvent> sEvents = {
+ MediaObservableEvent::kBusy,
+ MediaObservableEvent::kIdle,
+};
+
+static MediaObservableType getObservableType(const MediaResourceParcel& res) {
+ if (res.subType == MediaResourceSubType::kVideoCodec) {
+ if (res.type == MediaResourceType::kNonSecureCodec) {
+ return MediaObservableType::kVideoNonSecureCodec;
+ }
+ if (res.type == MediaResourceType::kSecureCodec) {
+ return MediaObservableType::kVideoSecureCodec;
+ }
+ }
+ return MediaObservableType::kInvalid;
+}
+
+//static
+std::mutex ResourceObserverService::sDeathRecipientLock;
+//static
+std::map<uintptr_t, std::shared_ptr<ResourceObserverService::DeathRecipient> >
+ResourceObserverService::sDeathRecipientMap;
+
+struct ResourceObserverService::DeathRecipient {
+ DeathRecipient(ResourceObserverService* _service,
+ const std::shared_ptr<IResourceObserver>& _observer)
+ : service(_service), observer(_observer) {}
+ ~DeathRecipient() {}
+
+ void binderDied() {
+ if (service != nullptr) {
+ service->unregisterObserver(observer);
+ }
+ }
+
+ ResourceObserverService* service;
+ std::shared_ptr<IResourceObserver> observer;
+};
+
+// static
+void ResourceObserverService::BinderDiedCallback(void* cookie) {
+ uintptr_t id = reinterpret_cast<uintptr_t>(cookie);
+
+ ALOGW("Observer %lld is dead", (long long)id);
+
+ std::shared_ptr<DeathRecipient> recipient;
+
+ {
+ std::scoped_lock lock{sDeathRecipientLock};
+
+ auto it = sDeathRecipientMap.find(id);
+ if (it != sDeathRecipientMap.end()) {
+ recipient = it->second;
+ }
+ }
+
+ if (recipient != nullptr) {
+ recipient->binderDied();
+ }
+}
+
+//static
+std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
+ std::shared_ptr<ResourceObserverService> observerService =
+ ::ndk::SharedRefBase::make<ResourceObserverService>();
+ binder_status_t status = AServiceManager_addService(observerService->asBinder().get(),
+ ResourceObserverService::getServiceName());
+ if (status != STATUS_OK) {
+ return nullptr;
+ }
+ return observerService;
+}
+
+ResourceObserverService::ResourceObserverService()
+ : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {}
+
+binder_status_t ResourceObserverService::dump(
+ int fd, const char** /*args*/, uint32_t /*numArgs*/) {
+ String8 result;
+
+ if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+ result.format("Permission Denial: "
+ "can't dump ResourceManagerService from pid=%d, uid=%d\n",
+ AIBinder_getCallingPid(),
+ AIBinder_getCallingUid());
+ write(fd, result.string(), result.size());
+ return PERMISSION_DENIED;
+ }
+
+ result.appendFormat("ResourceObserverService: %p\n", this);
+ result.appendFormat(" Registered Observers: %zu\n", mObserverInfoMap.size());
+
+ {
+ std::scoped_lock lock{mObserverLock};
+
+ for (auto &observer : mObserverInfoMap) {
+ result.appendFormat(" Observer %p:\n", observer.second.binder.get());
+ for (auto &observable : observer.second.filters) {
+ String8 enabledEventsStr;
+ for (auto &event : sEvents) {
+ if (((uint64_t)observable.eventFilter & (uint64_t)event) != 0) {
+ if (!enabledEventsStr.isEmpty()) {
+ enabledEventsStr.append("|");
+ }
+ enabledEventsStr.append(toString(event).c_str());
+ }
+ }
+ result.appendFormat(" %s: %s\n",
+ toString(observable.type).c_str(), enabledEventsStr.c_str());
+ }
+ }
+ }
+
+ write(fd, result.string(), result.size());
+ return OK;
+}
+
+Status ResourceObserverService::registerObserver(
+ const std::shared_ptr<IResourceObserver>& in_observer,
+ const std::vector<MediaObservableFilter>& in_filters) {
+ if ((getpid() != AIBinder_getCallingPid()) &&
+ checkCallingPermission(
+ String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+ ALOGE("Permission Denial: "
+ "can't registerObserver from pid=%d, uid=%d\n",
+ AIBinder_getCallingPid(),
+ AIBinder_getCallingUid());
+ return Status::fromServiceSpecificError(PERMISSION_DENIED);
+ }
+
+ ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+ {
+ std::scoped_lock lock{mObserverLock};
+
+ if (mObserverInfoMap.find((uintptr_t)binder.get()) != mObserverInfoMap.end()) {
+ return Status::fromServiceSpecificError(ALREADY_EXISTS);
+ }
+
+ if (in_filters.empty()) {
+ return Status::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ // Add observer info.
+ mObserverInfoMap.emplace((uintptr_t)binder.get(),
+ ObserverInfo{binder, in_observer, in_filters});
+
+ // Add observer to observable->subscribers map.
+ for (auto &filter : in_filters) {
+ for (auto &event : sEvents) {
+ if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+ continue;
+ }
+ MediaObservableFilter key{filter.type, event};
+ mObservableToSubscribersMap[key].emplace((uintptr_t)binder.get(), in_observer);
+ }
+ }
+ }
+
+ // Add death binder and link.
+ uintptr_t cookie = (uintptr_t)binder.get();
+ {
+ std::scoped_lock lock{sDeathRecipientLock};
+ sDeathRecipientMap.emplace(
+ cookie, std::make_shared<DeathRecipient>(this, in_observer));
+ }
+
+ AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
+ reinterpret_cast<void*>(cookie));
+
+ return Status::ok();
+}
+
+Status ResourceObserverService::unregisterObserver(
+ const std::shared_ptr<IResourceObserver>& in_observer) {
+ if ((getpid() != AIBinder_getCallingPid()) &&
+ checkCallingPermission(
+ String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+ ALOGE("Permission Denial: "
+ "can't unregisterObserver from pid=%d, uid=%d\n",
+ AIBinder_getCallingPid(),
+ AIBinder_getCallingUid());
+ return Status::fromServiceSpecificError(PERMISSION_DENIED);
+ }
+
+ ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+ {
+ std::scoped_lock lock{mObserverLock};
+
+ auto it = mObserverInfoMap.find((uintptr_t)binder.get());
+ if (it == mObserverInfoMap.end()) {
+ return Status::fromServiceSpecificError(NAME_NOT_FOUND);
+ }
+
+ // Remove observer from observable->subscribers map.
+ for (auto &filter : it->second.filters) {
+ for (auto &event : sEvents) {
+ if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+ continue;
+ }
+ MediaObservableFilter key{filter.type, event};
+ mObservableToSubscribersMap[key].erase((uintptr_t)binder.get());
+
+                // Remove the entry if there are no more subscribers.
+ if (mObservableToSubscribersMap[key].empty()) {
+ mObservableToSubscribersMap.erase(key);
+ }
+ }
+ }
+
+ // Remove observer info.
+ mObserverInfoMap.erase(it);
+ }
+
+ // Unlink and remove death binder.
+ uintptr_t cookie = (uintptr_t)binder.get();
+ AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+ reinterpret_cast<void*>(cookie));
+
+ {
+ std::scoped_lock lock{sDeathRecipientLock};
+ sDeathRecipientMap.erase(cookie);
+ }
+
+ return Status::ok();
+}
+
+void ResourceObserverService::notifyObservers(
+ MediaObservableEvent event, int uid, int pid, const ResourceList &resources) {
+ struct CalleeInfo {
+ std::shared_ptr<IResourceObserver> observer;
+ std::vector<MediaObservableParcel> monitors;
+ };
+ // Build a consolidated list of observers to call with their respective observables.
+ std::map<uintptr_t, CalleeInfo> calleeList;
+
+ {
+ std::scoped_lock lock{mObserverLock};
+
+ for (auto &res : resources) {
+ // Skip if this resource doesn't map to any observable type.
+ MediaObservableType observableType = getObservableType(res.second);
+ if (observableType == MediaObservableType::kInvalid) {
+ continue;
+ }
+ MediaObservableFilter key{observableType, event};
+ // Skip if no one subscribed to this observable.
+ auto observableIt = mObservableToSubscribersMap.find(key);
+ if (observableIt == mObservableToSubscribersMap.end()) {
+ continue;
+ }
+            // Loop through all subscribers.
+ for (auto &subscriber : observableIt->second) {
+ auto calleeIt = calleeList.find(subscriber.first);
+ if (calleeIt == calleeList.end()) {
+ calleeList.emplace(subscriber.first, CalleeInfo{
+ subscriber.second, {{observableType, res.second.value}}});
+ } else {
+ calleeIt->second.monitors.push_back({observableType, res.second.value});
+ }
+ }
+ }
+ }
+
+ // Finally call the observers about the status change.
+ for (auto &calleeInfo : calleeList) {
+ calleeInfo.second.observer->onStatusChanged(
+ event, uid, pid, calleeInfo.second.monitors);
+ }
+}
+
+void ResourceObserverService::onResourceAdded(
+ int uid, int pid, const ResourceList &resources) {
+ notifyObservers(MediaObservableEvent::kBusy, uid, pid, resources);
+}
+
+void ResourceObserverService::onResourceRemoved(
+ int uid, int pid, const ResourceList &resources) {
+ notifyObservers(MediaObservableEvent::kIdle, uid, pid, resources);
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceObserverService.h b/services/mediaresourcemanager/ResourceObserverService.h
new file mode 100644
index 0000000..46bc5fb
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.h
@@ -0,0 +1,95 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+#define ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+
+#include <map>
+
+#include <aidl/android/media/BnResourceObserverService.h>
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserverService;
+using ::aidl::android::media::IResourceObserver;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+using ::aidl::android::media::MediaObservableEvent;
+
+class ResourceObserverService : public BnResourceObserverService {
+public:
+
+ static char const *getServiceName() { return "media.resource_observer"; }
+ static std::shared_ptr<ResourceObserverService> instantiate();
+
+ virtual inline binder_status_t dump(
+ int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
+
+ ResourceObserverService();
+ virtual ~ResourceObserverService() {}
+
+ // IResourceObserverService interface
+ Status registerObserver(const std::shared_ptr<IResourceObserver>& in_observer,
+ const std::vector<MediaObservableFilter>& in_filters) override;
+
+ Status unregisterObserver(const std::shared_ptr<IResourceObserver>& in_observer) override;
+ // ~IResourceObserverService interface
+
+ // Called by ResourceManagerService when resources are added.
+ void onResourceAdded(int uid, int pid, const ResourceList &resources);
+
+ // Called by ResourceManagerService when resources are removed.
+ void onResourceRemoved(int uid, int pid, const ResourceList &resources);
+
+private:
+ struct ObserverInfo {
+ ::ndk::SpAIBinder binder;
+ std::shared_ptr<IResourceObserver> observer;
+ std::vector<MediaObservableFilter> filters;
+ };
+ struct DeathRecipient;
+
+ // Below maps are all keyed on the observer's binder ptr value.
+ using ObserverInfoMap = std::map<uintptr_t, ObserverInfo>;
+ using SubscriberMap = std::map<uintptr_t, std::shared_ptr<IResourceObserver>>;
+
+ std::mutex mObserverLock;
+ // Binder->ObserverInfo
+ ObserverInfoMap mObserverInfoMap GUARDED_BY(mObserverLock);
+ // Observable(<type,event>)->Subscribers
+ std::map<MediaObservableFilter, SubscriberMap> mObservableToSubscribersMap
+ GUARDED_BY(mObserverLock);
+
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+ // Binder death handling.
+ static std::mutex sDeathRecipientLock;
+ static std::map<uintptr_t, std::shared_ptr<DeathRecipient>> sDeathRecipientMap
+ GUARDED_BY(sDeathRecipientLock);
+ static void BinderDiedCallback(void* cookie);
+
+ void notifyObservers(MediaObservableEvent event,
+ int uid, int pid, const ResourceList &resources);
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
diff --git a/services/mediaresourcemanager/TEST_MAPPING b/services/mediaresourcemanager/TEST_MAPPING
index 418b159..52ad441 100644
--- a/services/mediaresourcemanager/TEST_MAPPING
+++ b/services/mediaresourcemanager/TEST_MAPPING
@@ -5,6 +5,9 @@
},
{
"name": "ServiceLog_test"
+ },
+ {
+ "name": "ResourceObserverService_test"
}
]
}
diff --git a/media/libmedia/aidl/android/media/IResourceManagerClient.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/IResourceManagerClient.aidl
rename to services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
new file mode 100644
index 0000000..7a0a50f
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceManagerClient;
+import android.media.MediaResourceParcel;
+import android.media.MediaResourcePolicyParcel;
+
+/**
+ * ResourceManagerService interface that keeps track of media resource
+ * owned by clients, and reclaims resources based on configured policies
+ * when necessary.
+ *
+ * {@hide}
+ */
+interface IResourceManagerService {
+ const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
+ = "supports-multiple-secure-codecs";
+ const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
+ = "supports-secure-with-non-secure-codec";
+
+ /**
+     * Configure the ResourceManagerService to adopt particular policies when
+ * managing the resources.
+ *
+ * @param policies an array of policies to be adopted.
+ */
+ void config(in MediaResourcePolicyParcel[] policies);
+
+ /**
+ * Add a client to a process with a list of resources.
+ *
+ * @param pid pid of the client.
+ * @param uid uid of the client.
+ * @param clientId an identifier that uniquely identifies the client within the pid.
+ * @param client interface for the ResourceManagerService to call the client.
+ * @param resources an array of resources to be added.
+ */
+ void addResource(
+ int pid,
+ int uid,
+ long clientId,
+ IResourceManagerClient client,
+ in MediaResourceParcel[] resources);
+
+ /**
+ * Remove the listed resources from a client.
+ *
+ * @param pid pid from which the list of resources will be removed.
+ * @param clientId clientId within the pid from which the list of resources will be removed.
+ * @param resources an array of resources to be removed from the client.
+ */
+ void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
+
+ /**
+ * Remove all resources from a client.
+ *
+ * @param pid pid from which the client's resources will be removed.
+ * @param clientId clientId within the pid that will be removed.
+ */
+ void removeClient(int pid, long clientId);
+
+ /**
+ * Tries to reclaim resource from processes with lower priority than the
+ * calling process according to the requested resources.
+ *
+ * @param callingPid pid of the calling process.
+ * @param resources an array of resources to be reclaimed.
+ *
+ * @return true if the reclaim was successful and false otherwise.
+ */
+ boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
+
+ /**
+ * Override the pid of original calling process with the pid of the process
+ * who actually use the requested resources.
+ *
+ * @param originalPid pid of the original calling process.
+ * @param newPid pid of the actual process who use the resources.
+ * remove existing override on originalPid if newPid is -1.
+ */
+ void overridePid(int originalPid, int newPid);
+
+ /**
+ * Override the process state and OOM score of the calling process with the
+ * the specified values. This is used by native service processes to specify
+ * these values for ResourceManagerService to use. ResourceManagerService usually
+ * gets these values from ActivityManagerService, however, ActivityManagerService
+ * doesn't track native service processes.
+ *
+ * @param client a token for the ResourceManagerService to link to the caller and
+ * receive notification if it goes away. This is needed for clearing
+ * the overrides.
+ * @param pid pid of the calling process.
+ * @param procState the process state value that ResourceManagerService should
+ * use for this pid.
+ * @param oomScore the oom score value that ResourceManagerService should
+ * use for this pid.
+ */
+ void overrideProcessInfo(
+ IResourceManagerClient client,
+ int pid,
+ int procState,
+ int oomScore);
+
+ /**
+ * Mark a client for pending removal
+ *
+ * @param pid pid from which the client's resources will be removed.
+ * @param clientId clientId within the pid that will be removed.
+ */
+ void markClientForPendingRemoval(int pid, long clientId);
+
+ /**
+ * Reclaim resources from clients pending removal, if any.
+ *
+ * @param pid pid from which resources will be reclaimed.
+ */
+ void reclaimResourcesFromClientsPendingRemoval(int pid);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
new file mode 100644
index 0000000..462009a
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableEvent;
+import android.media.MediaObservableParcel;
+
+/**
+ * IResourceObserver interface for receiving observable resource updates
+ * from IResourceObserverService.
+ *
+ * {@hide}
+ */
+interface IResourceObserver {
+ /**
+     * Called when the status of an observed resource has changed.
+ *
+ * @param event the status change that happened to the resource.
+ * @param uid uid to which the resource is associated.
+ * @param pid pid to which the resource is associated.
+ * @param observables the resources whose status has changed.
+ */
+ oneway void onStatusChanged(MediaObservableEvent event,
+ int uid, int pid, in MediaObservableParcel[] observables);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
new file mode 100644
index 0000000..08f4ca0
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceObserver;
+import android.media.MediaObservableFilter;
+
+/**
+ * IResourceObserverService interface for registering an IResourceObserver
+ * callback to receive status updates about observable media resources.
+ *
+ * {@hide}
+ */
+interface IResourceObserverService {
+
+ /**
+ * Register an observer on the IResourceObserverService to receive
+ * status updates for observable resources.
+ *
+ * @param observer the observer to register.
+ * @param filters an array of filters for resources and events to receive
+ * updates for.
+ */
+ void registerObserver(
+ IResourceObserver observer,
+ in MediaObservableFilter[] filters);
+
+ /**
+ * Unregister an observer from the IResourceObserverService.
+ * The observer will stop receiving the status updates.
+ *
+ * @param observer the observer to unregister.
+ */
+ void unregisterObserver(IResourceObserver observer);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
new file mode 100644
index 0000000..56ab24d
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Enums for media observable events.
+ *
+ * These values are used as bitmasks to indicate the events that the
+ * observer is interested in within the MediaObservableFilter objects passed to
+ * IResourceObserverService::registerObserver().
+ *
+ * {@hide}
+ */
+@Backing(type="long")
+enum MediaObservableEvent {
+ /**
+ * A media resource is granted to a client and becomes busy.
+ */
+ kBusy = 1,
+
+ /**
+ * A media resource is released by a client and becomes idle.
+ */
+ kIdle = 2,
+
+ /**
+ * A bitmask that covers all observable events defined.
+ */
+ kAll = ~0,
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
new file mode 100644
index 0000000..38f7e39
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
@@ -0,0 +1,43 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+import android.media.MediaObservableEvent;
+
+/**
+ * Description of an observable resource and its associated events that the
+ * observer is interested in.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableFilter {
+ /**
+ * Type of the observable media resource.
+ */
+ MediaObservableType type;
+
+ /**
+ * Events that the observer is interested in.
+ *
+ * This field is a bitwise-OR of the events in MediaObservableEvent. If a
+ * particular event's bit is set, it means that updates should be sent for
+ * that event. For example, if the observer is only interested in receiving
+ * updates when a resource becomes available, it should only set 'kIdle'.
+ */
+ MediaObservableEvent eventFilter;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
new file mode 100644
index 0000000..c4233e1
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
@@ -0,0 +1,37 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+
+/**
+ * Description of an observable resource whose status has changed.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableParcel {
+ /**
+ * Type of the observable media resource.
+ */
+ MediaObservableType type;// = MediaObservableType::kInvalid;
+
+ /**
+ * Number of units of the observable resource (number of codecs, bytes of
+ * graphic memory, etc.).
+ */
+ long value = 0;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
new file mode 100644
index 0000000..ed202da
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
@@ -0,0 +1,35 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Type enums of observable media resources.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum MediaObservableType {
+ kInvalid = 0,
+
+ //kVideoStart = 1000,
+ kVideoSecureCodec = 1000,
+ kVideoNonSecureCodec = 1001,
+
+ //kAudioStart = 2000,
+
+ //kGraphicMemory = 3000,
+}
diff --git a/media/libmedia/aidl/android/media/MediaResourceParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceSubType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index b6c548c..308ee91 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -3,12 +3,12 @@
name: "ResourceManagerService_test",
srcs: ["ResourceManagerService_test.cpp"],
test_suites: ["device-tests"],
+ static_libs: ["libresourcemanagerservice"],
shared_libs: [
"libbinder",
"libbinder_ndk",
"liblog",
"libmedia",
- "libresourcemanagerservice",
"libutils",
],
include_dirs: [
@@ -19,17 +19,16 @@
"-Werror",
"-Wall",
],
- compile_multilib: "32",
}
cc_test {
name: "ServiceLog_test",
srcs: ["ServiceLog_test.cpp"],
test_suites: ["device-tests"],
+ static_libs: ["libresourcemanagerservice"],
shared_libs: [
"liblog",
"libmedia",
- "libresourcemanagerservice",
"libutils",
],
include_dirs: [
@@ -40,5 +39,29 @@
"-Werror",
"-Wall",
],
- compile_multilib: "32",
+}
+
+cc_test {
+ name: "ResourceObserverService_test",
+ srcs: ["ResourceObserverService_test.cpp"],
+ test_suites: ["device-tests"],
+ static_libs: [
+ "libresourcemanagerservice",
+ "resourceobserver_aidl_interface-ndk_platform",
+ ],
+ shared_libs: [
+ "libbinder",
+ "libbinder_ndk",
+ "liblog",
+ "libmedia",
+ "libutils",
+ ],
+ include_dirs: [
+ "frameworks/av/include",
+ "frameworks/av/services/mediaresourcemanager",
+ ],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
}
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
new file mode 100644
index 0000000..8e29312
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "ResourceManagerService.h"
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceManagerClient;
+using ::aidl::android::media::IResourceManagerService;
+using ::aidl::android::media::IResourceManagerClient;
+using ::aidl::android::media::MediaResourceParcel;
+
+static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
+ return (int64_t) client.get();
+}
+
+struct TestProcessInfo : public ProcessInfoInterface {
+ TestProcessInfo() {}
+ virtual ~TestProcessInfo() {}
+
+ virtual bool getPriority(int pid, int *priority) {
+ // For testing, use pid as priority.
+ // Lower the value higher the priority.
+ *priority = pid;
+ return true;
+ }
+
+ virtual bool isValidPid(int /* pid */) {
+ return true;
+ }
+
+ virtual bool overrideProcessInfo(
+ int /* pid */, int /* procState */, int /* oomScore */) {
+ return true;
+ }
+
+ virtual void removeProcessInfoOverride(int /* pid */) {
+ }
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
+};
+
+struct TestSystemCallback :
+ public ResourceManagerService::SystemCallbackInterface {
+ TestSystemCallback() :
+ mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
+
+ enum EventType {
+ INVALID = -1,
+ VIDEO_ON = 0,
+ VIDEO_OFF = 1,
+ VIDEO_RESET = 2,
+ CPUSET_ENABLE = 3,
+ CPUSET_DISABLE = 4,
+ };
+
+ struct EventEntry {
+ EventType type;
+ int arg;
+ };
+
+ virtual void noteStartVideo(int uid) override {
+ mLastEvent = {EventType::VIDEO_ON, uid};
+ mEventCount++;
+ }
+
+ virtual void noteStopVideo(int uid) override {
+ mLastEvent = {EventType::VIDEO_OFF, uid};
+ mEventCount++;
+ }
+
+ virtual void noteResetVideo() override {
+ mLastEvent = {EventType::VIDEO_RESET, 0};
+ mEventCount++;
+ }
+
+ virtual bool requestCpusetBoost(bool enable) override {
+ mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
+ mEventCount++;
+ return true;
+ }
+
+ size_t eventCount() { return mEventCount; }
+ EventType lastEventType() { return mLastEvent.type; }
+ EventEntry lastEvent() { return mLastEvent; }
+
+protected:
+ virtual ~TestSystemCallback() {}
+
+private:
+ EventEntry mLastEvent;
+ size_t mEventCount;
+
+ DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
+};
+
+
+struct TestClient : public BnResourceManagerClient {
+ TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
+ : mReclaimed(false), mPid(pid), mService(service) {}
+
+ Status reclaimResource(bool* _aidl_return) override {
+ mService->removeClient(mPid, getId(ref<TestClient>()));
+ mReclaimed = true;
+ *_aidl_return = true;
+ return Status::ok();
+ }
+
+ Status getName(::std::string* _aidl_return) override {
+ *_aidl_return = "test_client";
+ return Status::ok();
+ }
+
+ bool reclaimed() const {
+ return mReclaimed;
+ }
+
+ void reset() {
+ mReclaimed = false;
+ }
+
+ virtual ~TestClient() {}
+
+private:
+ bool mReclaimed;
+ int mPid;
+ std::shared_ptr<ResourceManagerService> mService;
+ DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+static const int kTestPid1 = 30;
+static const int kTestUid1 = 1010;
+
+static const int kTestPid2 = 20;
+static const int kTestUid2 = 1011;
+
+static const int kLowPriorityPid = 40;
+static const int kMidPriorityPid = 25;
+static const int kHighPriorityPid = 10;
+
+using EventType = TestSystemCallback::EventType;
+using EventEntry = TestSystemCallback::EventEntry;
+bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
+ return lhs.type == rhs.type && lhs.arg == rhs.arg;
+}
+
+#define CHECK_STATUS_TRUE(condition) \
+ EXPECT_TRUE((condition).isOk() && (result))
+
+#define CHECK_STATUS_FALSE(condition) \
+ EXPECT_TRUE((condition).isOk() && !(result))
+
+class ResourceManagerServiceTestBase : public ::testing::Test {
+public:
+ ResourceManagerServiceTestBase()
+ : mSystemCB(new TestSystemCallback()),
+ mService(::ndk::SharedRefBase::make<ResourceManagerService>(
+ new TestProcessInfo, mSystemCB)),
+ mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
+ mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
+ mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
+ }
+
+ sp<TestSystemCallback> mSystemCB;
+ std::shared_ptr<ResourceManagerService> mService;
+ std::shared_ptr<IResourceManagerClient> mTestClient1;
+ std::shared_ptr<IResourceManagerClient> mTestClient2;
+ std::shared_ptr<IResourceManagerClient> mTestClient3;
+
+protected:
+ static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
+ const ResourceList &resources2) {
+ // convert resource1 to ResourceList
+ ResourceList r1;
+ for (size_t i = 0; i < resources1.size(); ++i) {
+ const auto &res = resources1[i];
+ const auto resType = std::tuple(res.type, res.subType, res.id);
+ r1[resType] = res;
+ }
+ return r1 == resources2;
+ }
+
+ static void expectEqResourceInfo(const ResourceInfo &info,
+ int uid,
+ std::shared_ptr<IResourceManagerClient> client,
+ const std::vector<MediaResourceParcel> &resources) {
+ EXPECT_EQ(uid, info.uid);
+ EXPECT_EQ(client, info.client);
+ EXPECT_TRUE(isEqualResources(resources, info.resources));
+ }
+};
+
+} // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 702935d..a029d45 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -16,197 +16,17 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ResourceManagerService_test"
+
#include <utils/Log.h>
-#include <gtest/gtest.h>
-
+#include "ResourceManagerServiceTestUtils.h"
#include "ResourceManagerService.h"
-#include <aidl/android/media/BnResourceManagerClient.h>
-#include <media/MediaResource.h>
-#include <media/MediaResourcePolicy.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
-
-namespace aidl {
-namespace android {
-namespace media {
-bool operator== (const MediaResourceParcel& lhs, const MediaResourceParcel& rhs) {
- return lhs.type == rhs.type && lhs.subType == rhs.subType &&
- lhs.id == rhs.id && lhs.value == rhs.value;
-}}}}
namespace android {
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::BnResourceManagerClient;
-using ::aidl::android::media::IResourceManagerService;
-using ::aidl::android::media::IResourceManagerClient;
-
-static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
- return (int64_t) client.get();
-}
-
-struct TestProcessInfo : public ProcessInfoInterface {
- TestProcessInfo() {}
- virtual ~TestProcessInfo() {}
-
- virtual bool getPriority(int pid, int *priority) {
- // For testing, use pid as priority.
- // Lower the value higher the priority.
- *priority = pid;
- return true;
- }
-
- virtual bool isValidPid(int /* pid */) {
- return true;
- }
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
-};
-
-struct TestSystemCallback :
- public ResourceManagerService::SystemCallbackInterface {
- TestSystemCallback() :
- mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
-
- enum EventType {
- INVALID = -1,
- VIDEO_ON = 0,
- VIDEO_OFF = 1,
- VIDEO_RESET = 2,
- CPUSET_ENABLE = 3,
- CPUSET_DISABLE = 4,
- };
-
- struct EventEntry {
- EventType type;
- int arg;
- };
-
- virtual void noteStartVideo(int uid) override {
- mLastEvent = {EventType::VIDEO_ON, uid};
- mEventCount++;
- }
-
- virtual void noteStopVideo(int uid) override {
- mLastEvent = {EventType::VIDEO_OFF, uid};
- mEventCount++;
- }
-
- virtual void noteResetVideo() override {
- mLastEvent = {EventType::VIDEO_RESET, 0};
- mEventCount++;
- }
-
- virtual bool requestCpusetBoost(bool enable) override {
- mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
- mEventCount++;
- return true;
- }
-
- size_t eventCount() { return mEventCount; }
- EventType lastEventType() { return mLastEvent.type; }
- EventEntry lastEvent() { return mLastEvent; }
-
-protected:
- virtual ~TestSystemCallback() {}
-
-private:
- EventEntry mLastEvent;
- size_t mEventCount;
-
- DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
-};
-
-
-struct TestClient : public BnResourceManagerClient {
- TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
- : mReclaimed(false), mPid(pid), mService(service) {}
-
- Status reclaimResource(bool* _aidl_return) override {
- mService->removeClient(mPid, getId(ref<TestClient>()));
- mReclaimed = true;
- *_aidl_return = true;
- return Status::ok();
- }
-
- Status getName(::std::string* _aidl_return) override {
- *_aidl_return = "test_client";
- return Status::ok();
- }
-
- bool reclaimed() const {
- return mReclaimed;
- }
-
- void reset() {
- mReclaimed = false;
- }
-
- virtual ~TestClient() {}
-
-private:
- bool mReclaimed;
- int mPid;
- std::shared_ptr<ResourceManagerService> mService;
- DISALLOW_EVIL_CONSTRUCTORS(TestClient);
-};
-
-static const int kTestPid1 = 30;
-static const int kTestUid1 = 1010;
-
-static const int kTestPid2 = 20;
-static const int kTestUid2 = 1011;
-
-static const int kLowPriorityPid = 40;
-static const int kMidPriorityPid = 25;
-static const int kHighPriorityPid = 10;
-
-using EventType = TestSystemCallback::EventType;
-using EventEntry = TestSystemCallback::EventEntry;
-bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
- return lhs.type == rhs.type && lhs.arg == rhs.arg;
-}
-
-#define CHECK_STATUS_TRUE(condition) \
- EXPECT_TRUE((condition).isOk() && (result))
-
-#define CHECK_STATUS_FALSE(condition) \
- EXPECT_TRUE((condition).isOk() && !(result))
-
-class ResourceManagerServiceTest : public ::testing::Test {
+class ResourceManagerServiceTest : public ResourceManagerServiceTestBase {
public:
- ResourceManagerServiceTest()
- : mSystemCB(new TestSystemCallback()),
- mService(::ndk::SharedRefBase::make<ResourceManagerService>(
- new TestProcessInfo, mSystemCB)),
- mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
- mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
- mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
- }
-
-protected:
- static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
- const ResourceList &resources2) {
- // convert resource1 to ResourceList
- ResourceList r1;
- for (size_t i = 0; i < resources1.size(); ++i) {
- const auto &res = resources1[i];
- const auto resType = std::tuple(res.type, res.subType, res.id);
- r1[resType] = res;
- }
- return r1 == resources2;
- }
-
- static void expectEqResourceInfo(const ResourceInfo &info,
- int uid,
- std::shared_ptr<IResourceManagerClient> client,
- const std::vector<MediaResourceParcel> &resources) {
- EXPECT_EQ(uid, info.uid);
- EXPECT_EQ(client, info.client);
- EXPECT_TRUE(isEqualResources(resources, info.resources));
- }
+ ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
void verifyClients(bool c1, bool c2, bool c3) {
TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
@@ -520,6 +340,30 @@
// clean up client 3 which still left
mService->removeClient(kTestPid2, getId(mTestClient3));
}
+
+ {
+ addResource();
+ mService->mSupportsSecureWithNonSecureCodec = true;
+
+ mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+
+ // client marked for pending removal got reclaimed
+ EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+
+ // No more clients marked for removal
+ EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+ verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+
+ mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
+
+ // client marked for pending removal got reclaimed
+ EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+ verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+
+ // clean up client 1 which still left
+ mService->removeClient(kTestPid1, getId(mTestClient1));
+ }
}
void testRemoveClient() {
@@ -881,12 +725,6 @@
EXPECT_EQ(4u, mSystemCB->eventCount());
EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
}
-
- sp<TestSystemCallback> mSystemCB;
- std::shared_ptr<ResourceManagerService> mService;
- std::shared_ptr<IResourceManagerClient> mTestClient1;
- std::shared_ptr<IResourceManagerClient> mTestClient2;
- std::shared_ptr<IResourceManagerClient> mTestClient3;
};
TEST_F(ResourceManagerServiceTest, config) {
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
new file mode 100644
index 0000000..e3d3e78
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -0,0 +1,455 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService_test"
+
+#include <iostream>
+#include <list>
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <utils/Log.h>
+#include "ResourceObserverService.h"
+#include "ResourceManagerServiceTestUtils.h"
+
+namespace android {
+
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+#define BUSY ::aidl::android::media::MediaObservableEvent::kBusy
+#define IDLE ::aidl::android::media::MediaObservableEvent::kIdle
+#define ALL ::aidl::android::media::MediaObservableEvent::kAll
+
+struct EventTracker {
+ struct Event {
+ enum { NoEvent, Busy, Idle } type = NoEvent;
+ int uid = 0;
+ int pid = 0;
+ std::vector<MediaObservableParcel> observables;
+ };
+
+ static const Event NoEvent;
+
+ static std::string toString(const MediaObservableParcel& observable) {
+ return "{" + ::aidl::android::media::toString(observable.type)
+ + ", " + std::to_string(observable.value) + "}";
+ }
+ static std::string toString(const Event& event) {
+ std::string eventStr;
+ switch (event.type) {
+ case Event::Busy:
+ eventStr = "Busy";
+ break;
+ case Event::Idle:
+ eventStr = "Idle";
+ break;
+ default:
+ return "NoEvent";
+ }
+ std::string observableStr;
+ for (auto &observable : event.observables) {
+ if (!observableStr.empty()) {
+ observableStr += ", ";
+ }
+ observableStr += toString(observable);
+ }
+ return "{" + eventStr + ", " + std::to_string(event.uid) + ", "
+ + std::to_string(event.pid) + ", {" + observableStr + "}}";
+ }
+
+ static Event Busy(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+ return { Event::Busy, uid, pid, observables };
+ }
+ static Event Idle(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+ return { Event::Idle, uid, pid, observables };
+ }
+
+ // Pop 1 event from front, wait for up to timeoutUs if empty.
+ const Event& pop(int64_t timeoutUs = 0) {
+ std::unique_lock lock(mLock);
+
+ if (mEventQueue.empty() && timeoutUs > 0) {
+ mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+ }
+
+ if (mEventQueue.empty()) {
+ mPoppedEvent = NoEvent;
+ } else {
+ mPoppedEvent = *mEventQueue.begin();
+ mEventQueue.pop_front();
+ }
+
+ return mPoppedEvent;
+ }
+
+ // Push 1 event to back.
+ void append(const Event& event) {
+ ALOGD("%s", toString(event).c_str());
+
+ std::unique_lock lock(mLock);
+
+ mEventQueue.push_back(event);
+ mCondition.notify_one();
+ }
+
+private:
+ std::mutex mLock;
+ std::condition_variable mCondition;
+ Event mPoppedEvent;
+ std::list<Event> mEventQueue;
+};
+
+const EventTracker::Event EventTracker::NoEvent;
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+ return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
+ lhs.observables == rhs.observables;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+ str << EventTracker::toString(v);
+ return str;
+}
+
+struct TestObserver : public BnResourceObserver, public EventTracker {
+ TestObserver(const char *name) : mName(name) {}
+ ~TestObserver() = default;
+ Status onStatusChanged(MediaObservableEvent event, int32_t uid, int32_t pid,
+ const std::vector<MediaObservableParcel>& observables) override {
+ ALOGD("%s: %s", mName.c_str(), __FUNCTION__);
+ if (event == MediaObservableEvent::kBusy) {
+ append(Busy(uid, pid, observables));
+ } else {
+ append(Idle(uid, pid, observables));
+ }
+
+ return Status::ok();
+ }
+ std::string mName;
+};
+
+class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
+public:
+ ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
+ mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
+ mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
+ mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
+ mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+ mService->setObserverService(mObserverService);
+ }
+
+ void registerObservers(MediaObservableEvent filter = ALL) {
+ std::vector<MediaObservableFilter> filters1, filters2, filters3;
+ filters1 = {{MediaObservableType::kVideoSecureCodec, filter}};
+ filters2 = {{MediaObservableType::kVideoNonSecureCodec, filter}};
+ filters3 = {{MediaObservableType::kVideoSecureCodec, filter},
+ {MediaObservableType::kVideoNonSecureCodec, filter}};
+
+ // mTestObserver1 monitors secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+ // mTestObserver2 monitors non-secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+ // mTestObserver3 monitors both secure & non-secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+ }
+
+protected:
+ std::shared_ptr<ResourceObserverService> mObserverService;
+ std::shared_ptr<TestObserver> mTestObserver1;
+ std::shared_ptr<TestObserver> mTestObserver2;
+ std::shared_ptr<TestObserver> mTestObserver3;
+};
+
+TEST_F(ResourceObserverServiceTest, testRegisterObserver) {
+ std::vector<MediaObservableFilter> filters1;
+ Status status;
+
+ // Register with empty observables should fail.
+ status = mObserverService->registerObserver(mTestObserver1, filters1);
+ EXPECT_FALSE(status.isOk());
+ EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
+ // mTestObserver1 monitors secure video codecs.
+ filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+ // Register duplicates should fail.
+ status = mObserverService->registerObserver(mTestObserver1, filters1);
+ EXPECT_FALSE(status.isOk());
+ EXPECT_EQ(status.getServiceSpecificError(), ALREADY_EXISTS);
+}
+
+TEST_F(ResourceObserverServiceTest, testUnregisterObserver) {
+ std::vector<MediaObservableFilter> filters1;
+ Status status;
+
+ // Unregister without registering first should fail.
+ status = mObserverService->unregisterObserver(mTestObserver1);
+ EXPECT_FALSE(status.isOk());
+ EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+
+ // mTestObserver1 monitors secure video codecs.
+ filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+ EXPECT_TRUE(mObserverService->unregisterObserver(mTestObserver1).isOk());
+
+ // Unregister again should fail.
+ status = mObserverService->unregisterObserver(mTestObserver1);
+ EXPECT_FALSE(status.isOk());
+ EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceBasic) {
+ registerObservers();
+
+ std::vector<MediaObservableParcel> observables1, observables2, observables3;
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+ std::vector<MediaResourceParcel> resources;
+ // Add secure video codec.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+
+ // Add non-secure video codec.
+ resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+ // Add secure & non-secure video codecs.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+
+ // Add additional audio codecs, should be ignored.
+ resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
+ resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+ mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceMultiple) {
+ registerObservers();
+
+ std::vector<MediaObservableParcel> observables1, observables2, observables3;
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+ std::vector<MediaResourceParcel> resources;
+
+ // Add multiple secure & non-secure video codecs.
+ // Multiple entries of the same type should be merged, count should be propagated correctly.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
+ {MediaObservableType::kVideoNonSecureCodec, 3}};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceBasic) {
+ registerObservers();
+
+ std::vector<MediaObservableParcel> observables1, observables2, observables3;
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+ std::vector<MediaResourceParcel> resources;
+ // Add secure video codec to client1.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+ // Remove secure video codec. observer 1&3 should receive updates.
+ mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+ // Remove secure video codec again, should have no event.
+ mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+ // Remove client1, should have no event.
+ mService->removeClient(kTestPid1, getId(mTestClient1));
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+ // Add non-secure video codec to client2.
+ resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ // Remove client2, observer 2&3 should receive updates.
+ mService->removeClient(kTestPid2, getId(mTestClient2));
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ // Remove non-secure codec after client2 removed, should have no event.
+ mService->removeResource(kTestPid2, getId(mTestClient2), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+ // Remove client2 again, should have no event.
+ mService->removeClient(kTestPid2, getId(mTestClient2));
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+ // Add secure & non-secure video codecs, plus audio codecs (that's ignored).
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+ // Remove one audio codec, should have no event.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+ mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+ // Remove the other audio codec and the secure video codec, only secure video codec
+ // removal should be reported.
+ resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+ MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+ mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+ // Remove client3 entirely. Non-secure video codec removal should be reported.
+ mService->removeClient(kTestPid2, getId(mTestClient3));
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceMultiple) {
+ registerObservers();
+
+ std::vector<MediaObservableParcel> observables1, observables2, observables3;
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+ std::vector<MediaResourceParcel> resources;
+
+ // Add multiple secure & non-secure video codecs, plus audio codecs (that's ignored).
+ // (ResourceManager will merge these internally.)
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
+ MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 4}};
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+ // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
+ // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
+ resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+ MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+ mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
+ observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+ {MediaObservableType::kVideoNonSecureCodec, 2}};
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables3));
+ // Remove client3 entirely. 2 non-secure video codecs removal should be reported.
+ mService->removeClient(kTestPid2, getId(mTestClient3));
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testEventFilters) {
+ // Register observers with different event filters.
+ std::vector<MediaObservableFilter> filters1, filters2, filters3;
+ filters1 = {{MediaObservableType::kVideoSecureCodec, BUSY}};
+ filters2 = {{MediaObservableType::kVideoNonSecureCodec, IDLE}};
+ filters3 = {{MediaObservableType::kVideoSecureCodec, IDLE},
+ {MediaObservableType::kVideoNonSecureCodec, BUSY}};
+
+ // mTestObserver1 monitors secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+ // mTestObserver2 monitors non-secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+ // mTestObserver3 monitors both secure & non-secure video codecs.
+ EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+
+ std::vector<MediaObservableParcel> observables1, observables2;
+ observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+ observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+
+ std::vector<MediaResourceParcel> resources;
+
+ // Add secure & non-secure video codecs.
+ resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+ MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+ // Remove secure & non-secure video codecs.
+ mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+ EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+ EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+ EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..1c4ae98
--- /dev/null
+++ b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+ if [ -z "$ANDROID_BUILD_TOP" ]; then
+ echo "Android build environment not set"
+ exit -1
+ fi
+
+ # ensure we have mm
+ . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+ mm
+
+ echo "waiting for device"
+
+ adb root && adb wait-for-device remount && adb sync
+fi
+
+echo "========================================"
+
+echo "testing ResourceManagerService"
+#adb shell /data/nativetest64/ResourceManagerService_test/ResourceManagerService_test
+adb shell /data/nativetest/ResourceManagerService_test/ResourceManagerService_test
+
+echo "testing ServiceLog"
+#adb shell /data/nativetest64/ServiceLog_test/ServiceLog_test
+adb shell /data/nativetest/ServiceLog_test/ServiceLog_test
+
+echo "testing ResourceObserverService"
+#adb shell /data/nativetest64/ResourceObserverService_test/ResourceObserverService_test
+adb shell /data/nativetest/ResourceObserverService_test/ResourceObserverService_test
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 79e9fbc..3d6821b 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -8,13 +8,15 @@
],
shared_libs: [
+ "libandroid",
"libbase",
"libbinder_ndk",
+ "libcutils",
"liblog",
"libmediatranscoding",
"libutils",
],
-
+
export_shared_lib_headers: [
"libmediatranscoding",
],
@@ -38,8 +40,7 @@
shared_libs: [
"libbase",
- // TODO(hkuang): Use libbinder_ndk
- "libbinder",
+ "libbinder_ndk",
"libutils",
"liblog",
"libbase",
@@ -51,18 +52,6 @@
"mediatranscoding_aidl_interface-ndk_platform",
],
- target: {
- android: {
- product_variables: {
- malloc_not_svelte: {
- // Scudo increases memory footprint, so only enable on
- // non-svelte devices.
- shared_libs: ["libc_scudo"],
- },
- },
- },
- },
-
init_rc: ["mediatranscoding.rc"],
cflags: [
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index b843967..64def5e 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -20,10 +20,13 @@
#include <android/binder_manager.h>
#include <android/binder_process.h>
+#include <android/permission_manager.h>
+#include <cutils/properties.h>
+#include <media/TranscoderWrapper.h>
#include <media/TranscodingClientManager.h>
-#include <media/TranscodingJobScheduler.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <media/TranscodingSessionController.h>
#include <media/TranscodingUidPolicy.h>
-#include <private/android_filesystem_config.h>
#include <utils/Log.h>
#include <utils/Vector.h>
@@ -37,33 +40,17 @@
errorCode, \
String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
-// Can MediaTranscoding service trust the caller based on the calling UID?
-// TODO(hkuang): Add MediaProvider's UID.
-static bool isTrustedCallingUid(uid_t uid) {
- switch (uid) {
- case AID_ROOT: // root user
- case AID_SYSTEM:
- case AID_SHELL:
- case AID_MEDIA: // mediaserver
- return true;
- default:
- return false;
- }
-}
-
-MediaTranscodingService::MediaTranscodingService()
- : MediaTranscodingService(std::make_shared<SimulatedTranscoder>(),
- std::make_shared<TranscodingUidPolicy>()) {}
-
MediaTranscodingService::MediaTranscodingService(
- const std::shared_ptr<TranscoderInterface>& transcoder,
- const std::shared_ptr<UidPolicyInterface>& uidPolicy)
- : mJobScheduler(new TranscodingJobScheduler(transcoder, uidPolicy)),
- mClientManager(new TranscodingClientManager(mJobScheduler)) {
+ const std::shared_ptr<TranscoderInterface>& transcoder)
+ : mUidPolicy(new TranscodingUidPolicy()),
+ mResourcePolicy(new TranscodingResourcePolicy()),
+ mSessionController(
+ new TranscodingSessionController(transcoder, mUidPolicy, mResourcePolicy)),
+ mClientManager(new TranscodingClientManager(mSessionController)) {
ALOGV("MediaTranscodingService is created");
-
- transcoder->setCallback(mJobScheduler);
- uidPolicy->setCallback(mJobScheduler);
+ transcoder->setCallback(mSessionController);
+ mUidPolicy->setCallback(mSessionController);
+ mResourcePolicy->setCallback(mSessionController);
}
MediaTranscodingService::~MediaTranscodingService() {
@@ -72,6 +59,21 @@
binder_status_t MediaTranscodingService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
String8 result;
+
+ uid_t callingUid = AIBinder_getCallingUid();
+ pid_t callingPid = AIBinder_getCallingPid();
+ int32_t permissionResult;
+ if (APermissionManager_checkPermission("android.permission.DUMP", callingPid, callingUid,
+ &permissionResult) != PERMISSION_MANAGER_STATUS_OK ||
+ permissionResult != PERMISSION_MANAGER_PERMISSION_GRANTED) {
+ result.format(
+ "Permission Denial: "
+ "can't dump MediaTranscodingService from pid=%d, uid=%d\n",
+ AIBinder_getCallingPid(), AIBinder_getCallingUid());
+ write(fd, result.string(), result.size());
+ return PERMISSION_DENIED;
+ }
+
const size_t SIZE = 256;
char buffer[SIZE];
@@ -81,13 +83,21 @@
Vector<String16> args;
mClientManager->dumpAllClients(fd, args);
+ mSessionController->dumpAllSessions(fd, args);
return OK;
}
//static
void MediaTranscodingService::instantiate() {
+ std::shared_ptr<TranscoderInterface> transcoder;
+ if (property_get_bool("debug.transcoding.simulated_transcoder", false)) {
+ transcoder = std::make_shared<SimulatedTranscoder>();
+ } else {
+ transcoder = std::make_shared<TranscoderWrapper>();
+ }
+
std::shared_ptr<MediaTranscodingService> service =
- ::ndk::SharedRefBase::make<MediaTranscodingService>();
+ ::ndk::SharedRefBase::make<MediaTranscodingService>(transcoder);
binder_status_t status =
AServiceManager_addService(service->asBinder().get(), getServiceName());
if (status != STATUS_OK) {
@@ -97,51 +107,18 @@
Status MediaTranscodingService::registerClient(
const std::shared_ptr<ITranscodingClientCallback>& in_callback,
- const std::string& in_clientName, const std::string& in_opPackageName, int32_t in_clientUid,
- int32_t in_clientPid, std::shared_ptr<ITranscodingClient>* _aidl_return) {
+ const std::string& in_clientName, const std::string& in_opPackageName,
+ std::shared_ptr<ITranscodingClient>* _aidl_return) {
if (in_callback == nullptr) {
*_aidl_return = nullptr;
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Client callback cannot be null!");
}
- int32_t callingPid = AIBinder_getCallingPid();
- int32_t callingUid = AIBinder_getCallingUid();
-
- // Check if we can trust clientUid. Only privilege caller could forward the
- // uid on app client's behalf.
- if (in_clientUid == USE_CALLING_UID) {
- in_clientUid = callingUid;
- } else if (!isTrustedCallingUid(callingUid)) {
- ALOGE("MediaTranscodingService::registerClient failed (calling PID %d, calling UID %d) "
- "rejected "
- "(don't trust clientUid %d)",
- in_clientPid, in_clientUid, in_clientUid);
- return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
- "Untrusted caller (calling PID %d, UID %d) trying to "
- "register client",
- in_clientPid, in_clientUid);
- }
-
- // Check if we can trust clientPid. Only privilege caller could forward the
- // pid on app client's behalf.
- if (in_clientPid == USE_CALLING_PID) {
- in_clientPid = callingPid;
- } else if (!isTrustedCallingUid(callingUid)) {
- ALOGE("MediaTranscodingService::registerClient client failed (calling PID %d, calling UID "
- "%d) rejected "
- "(don't trust clientPid %d)",
- in_clientPid, in_clientUid, in_clientPid);
- return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
- "Untrusted caller (calling PID %d, UID %d) trying to "
- "register client",
- in_clientPid, in_clientUid);
- }
-
// Creates the client and uses its process id as client id.
std::shared_ptr<ITranscodingClient> newClient;
- status_t err = mClientManager->addClient(in_callback, in_clientPid, in_clientUid, in_clientName,
- in_opPackageName, &newClient);
+ status_t err =
+ mClientManager->addClient(in_callback, in_clientName, in_opPackageName, &newClient);
if (err != OK) {
*_aidl_return = nullptr;
return STATUS_ERROR_FMT(err, "Failed to add client to TranscodingClientManager");
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index f7ac336..428f777 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -26,21 +26,20 @@
using ::aidl::android::media::BnMediaTranscodingService;
using ::aidl::android::media::ITranscodingClient;
using ::aidl::android::media::ITranscodingClientCallback;
-using ::aidl::android::media::TranscodingJobParcel;
using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
class TranscodingClientManager;
-class TranscodingJobScheduler;
+class TranscodingSessionController;
class TranscoderInterface;
class UidPolicyInterface;
+class ResourcePolicyInterface;
class MediaTranscodingService : public BnMediaTranscodingService {
public:
- static constexpr int32_t kInvalidJobId = -1;
+ static constexpr int32_t kInvalidSessionId = -1;
static constexpr int32_t kInvalidClientId = -1;
- MediaTranscodingService();
- MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder,
- const std::shared_ptr<UidPolicyInterface>& uidPolicy);
+ MediaTranscodingService(const std::shared_ptr<TranscoderInterface>& transcoder);
virtual ~MediaTranscodingService();
static void instantiate();
@@ -49,7 +48,6 @@
Status registerClient(const std::shared_ptr<ITranscodingClientCallback>& in_callback,
const std::string& in_clientName, const std::string& in_opPackageName,
- int32_t in_clientUid, int32_t in_clientPid,
std::shared_ptr<ITranscodingClient>* _aidl_return) override;
Status getNumOfClients(int32_t* _aidl_return) override;
@@ -61,7 +59,9 @@
mutable std::mutex mServiceLock;
- std::shared_ptr<TranscodingJobScheduler> mJobScheduler;
+ std::shared_ptr<UidPolicyInterface> mUidPolicy;
+ std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+ std::shared_ptr<TranscodingSessionController> mSessionController;
std::shared_ptr<TranscodingClientManager> mClientManager;
};
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
index 1b68d5c..03ee886 100644
--- a/services/mediatranscoding/SimulatedTranscoder.cpp
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -47,35 +47,60 @@
mCallback = cb;
}
-void SimulatedTranscoder::start(ClientIdType clientId, JobIdType jobId,
- const TranscodingRequestParcel& /*request*/) {
- queueEvent(Event::Start, clientId, jobId);
+void SimulatedTranscoder::start(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+ if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+ mSessionProcessingTimeMs = request.testConfig->processingTotalTimeMs;
+ }
+ ALOGV("%s: session {%d}: processingTime: %lld", __FUNCTION__, sessionId,
+ (long long)mSessionProcessingTimeMs);
+ queueEvent(Event::Start, clientId, sessionId, [=] {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onStarted(clientId, sessionId);
+ }
+ });
}
-void SimulatedTranscoder::pause(ClientIdType clientId, JobIdType jobId) {
- queueEvent(Event::Pause, clientId, jobId);
+void SimulatedTranscoder::pause(ClientIdType clientId, SessionIdType sessionId) {
+ queueEvent(Event::Pause, clientId, sessionId, [=] {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onPaused(clientId, sessionId);
+ }
+ });
}
-void SimulatedTranscoder::resume(ClientIdType clientId, JobIdType jobId) {
- queueEvent(Event::Resume, clientId, jobId);
+void SimulatedTranscoder::resume(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& /*request*/,
+ const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+ queueEvent(Event::Resume, clientId, sessionId, [=] {
+ auto callback = mCallback.lock();
+ if (callback != nullptr) {
+ callback->onResumed(clientId, sessionId);
+ }
+ });
}
-void SimulatedTranscoder::stop(ClientIdType clientId, JobIdType jobId) {
- queueEvent(Event::Stop, clientId, jobId);
+void SimulatedTranscoder::stop(ClientIdType clientId, SessionIdType sessionId) {
+ queueEvent(Event::Stop, clientId, sessionId, nullptr);
}
-void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId) {
- ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)clientId, jobId, toString(type));
+void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId,
+ SessionIdType sessionId, std::function<void()> runnable) {
+ ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)clientId, sessionId,
+ toString(type));
auto lock = std::scoped_lock(mLock);
- mQueue.push_back({type, clientId, jobId});
+ mQueue.push_back({type, clientId, sessionId, runnable});
mCondition.notify_one();
}
void SimulatedTranscoder::threadLoop() {
bool running = false;
- std::chrono::microseconds remainingUs(kJobDurationUs);
+ std::chrono::microseconds remainingUs(kSessionDurationUs);
std::chrono::system_clock::time_point lastRunningTime;
Event lastRunningEvent;
@@ -89,7 +114,7 @@
mCondition.wait(lock);
continue;
}
- // If running, wait for the remaining life of this job. Report finish if timed out.
+ // If running, wait for the remaining life of this session. Report finish if timed out.
std::cv_status status = mCondition.wait_for(lock, remainingUs);
if (status == std::cv_status::timeout) {
running = false;
@@ -97,7 +122,7 @@
auto callback = mCallback.lock();
if (callback != nullptr) {
lock.unlock();
- callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.jobId);
+ callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.sessionId);
lock.lock();
}
} else {
@@ -115,29 +140,28 @@
Event event = *mQueue.begin();
mQueue.pop_front();
- ALOGV("%s: job {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId, event.jobId,
- toString(event.type));
+ ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId,
+ event.sessionId, toString(event.type));
if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
running = true;
lastRunningTime = std::chrono::system_clock::now();
lastRunningEvent = event;
if (event.type == Event::Start) {
- remainingUs = std::chrono::microseconds(kJobDurationUs);
+ remainingUs = std::chrono::milliseconds(mSessionProcessingTimeMs);
}
} else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
running = false;
remainingUs -= (std::chrono::system_clock::now() - lastRunningTime);
} else {
- ALOGW("%s: discarding bad event: job {%lld, %d}: %s", __FUNCTION__,
- (long long)event.clientId, event.jobId, toString(event.type));
+ ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
+ (long long)event.clientId, event.sessionId, toString(event.type));
continue;
}
- auto callback = mCallback.lock();
- if (callback != nullptr) {
+ if (event.runnable != nullptr) {
lock.unlock();
- callback->onProgressUpdate(event.clientId, event.jobId, event.type);
+ event.runnable();
lock.lock();
}
}
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
index 646ba4e..ba2bba0 100644
--- a/services/mediatranscoding/SimulatedTranscoder.h
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -29,9 +29,11 @@
* SimulatedTranscoder is currently used to instantiate MediaTranscodingService
* on service side for testing, so that we could actually test the IPC calls of
* MediaTranscodingService to expose issues that's observable only over IPC.
+ * SimulatedTranscoder is used when useSimulatedTranscoder in TranscodingTestConfig
+ * is set to true.
*
- * SimulatedTranscoder simulates job execution by reporting finish after kJobDurationUs.
- * Job lifecycle events are reported via progress updates with special progress
+ * SimulatedTranscoder simulates session execution by reporting finish after kSessionDurationUs.
+ * Session lifecycle events are reported via progress updates with special progress
* numbers (equal to the Event's type).
*/
class SimulatedTranscoder : public TranscoderInterface {
@@ -39,20 +41,24 @@
struct Event {
enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed } type;
ClientIdType clientId;
- JobIdType jobId;
+ SessionIdType sessionId;
+ std::function<void()> runnable;
};
- static constexpr int64_t kJobDurationUs = 1000000;
+ static constexpr int64_t kSessionDurationUs = 1000000;
SimulatedTranscoder();
// TranscoderInterface
void setCallback(const std::shared_ptr<TranscoderCallbackInterface>& cb) override;
- void start(ClientIdType clientId, JobIdType jobId,
- const TranscodingRequestParcel& request) override;
- void pause(ClientIdType clientId, JobIdType jobId) override;
- void resume(ClientIdType clientId, JobIdType jobId) override;
- void stop(ClientIdType clientId, JobIdType jobId) override;
+ void start(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+ void pause(ClientIdType clientId, SessionIdType sessionId) override;
+ void resume(ClientIdType clientId, SessionIdType sessionId,
+ const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+ void stop(ClientIdType clientId, SessionIdType sessionId) override;
// ~TranscoderInterface
private:
@@ -61,8 +67,12 @@
std::condition_variable mCondition;
std::list<Event> mQueue GUARDED_BY(mLock);
+ // Minimum time spent on transcoding the video. This is used just for testing.
+ int64_t mSessionProcessingTimeMs = kSessionDurationUs / 1000;
+
static const char* toString(Event::Type type);
- void queueEvent(Event::Type type, ClientIdType clientId, JobIdType jobId);
+ void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+ std::function<void()> runnable);
void threadLoop();
};
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/services/mediatranscoding/main_mediatranscodingservice.cpp
index 7d862e6..14c568e 100644
--- a/services/mediatranscoding/main_mediatranscodingservice.cpp
+++ b/services/mediatranscoding/main_mediatranscodingservice.cpp
@@ -15,8 +15,7 @@
*/
#include <android-base/logging.h>
-#include <binder/IPCThreadState.h>
-#include <binder/ProcessState.h>
+#include <android/binder_process.h>
#include "MediaTranscodingService.h"
@@ -25,12 +24,9 @@
int main(int argc __unused, char** argv) {
LOG(INFO) << "media transcoding service starting";
- // TODO(hkuang): Start the service with libbinder_ndk.
strcpy(argv[0], "media.transcoding");
- sp<ProcessState> proc(ProcessState::self());
- sp<IServiceManager> sm = defaultServiceManager();
android::MediaTranscodingService::instantiate();
- ProcessState::self()->startThreadPool();
- IPCThreadState::self()->joinThreadPool();
+ ABinderProcess_startThreadPool();
+ ABinderProcess_joinThreadPool();
}
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index f37b39e..5a7c4cc 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -24,19 +24,36 @@
static_libs: [
"mediatranscoding_aidl_interface-ndk_platform",
+ "resourcemanager_aidl_interface-ndk_platform",
+ ],
+
+ required: [
+ "TranscodingUidPolicy_TestAppA",
+ "TranscodingUidPolicy_TestAppB",
+ "TranscodingUidPolicy_TestAppC",
],
}
-// MediaTranscodingService unit test
+// MediaTranscodingService unit test using simulated transcoder
cc_test {
- name: "mediatranscodingservice_tests",
+ name: "mediatranscodingservice_simulated_tests",
defaults: ["mediatranscodingservice_test_defaults"],
- srcs: ["mediatranscodingservice_tests.cpp"],
+ srcs: ["mediatranscodingservice_simulated_tests.cpp"],
+}
- required: [
- ":TranscodingUidPolicy_TestAppA",
- ":TranscodingUidPolicy_TestAppB",
- ":TranscodingUidPolicy_TestAppC",
- ],
-}
\ No newline at end of file
+// MediaTranscodingService unit test using real transcoder
+cc_test {
+ name: "mediatranscodingservice_real_tests",
+ defaults: ["mediatranscodingservice_test_defaults"],
+
+ srcs: ["mediatranscodingservice_real_tests.cpp"],
+}
+
+// MediaTranscodingService unit test related to resource management
+cc_test {
+ name: "mediatranscodingservice_resource_tests",
+ defaults: ["mediatranscodingservice_test_defaults"],
+
+ srcs: ["mediatranscodingservice_resource_tests.cpp"],
+}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
new file mode 100644
index 0000000..66cced5
--- /dev/null
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -0,0 +1,498 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test helper header shared by the MediaTranscodingService unit tests.
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::ITranscodingClientCallback;
+using aidl::android::media::TranscodingRequestParcel;
+using aidl::android::media::TranscodingSessionParcel;
+using aidl::android::media::TranscodingSessionPriority;
+using aidl::android::media::TranscodingVideoTrackFormat;
+
+constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
+
+constexpr uid_t kClientUid = 5000;
+#define UID(n) (kClientUid + (n))
+
+constexpr pid_t kClientPid = 10000;
+#define PID(n) (kClientPid + (n))
+
+constexpr int32_t kClientId = 0;
+#define CLIENT(n) (kClientId + (n))
+
+constexpr const char* kClientName = "TestClient";
+constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
+constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
+constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
+
+constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
+
+static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
+ PermissionController pc;
+ uid = pc.getPackageUid(packageName, 0);
+ if (uid <= 0) {
+ ALOGE("Unknown package: '%s'", String8(packageName).string());
+ return BAD_VALUE;
+ }
+
+ if (userId < 0) {
+ ALOGE("Invalid user: %d", userId);
+ return BAD_VALUE;
+ }
+
+ uid = multiuser_get_uid(userId, uid);
+ return NO_ERROR;
+}
+
+struct ShellHelper {
+ static bool RunCmd(const std::string& cmdStr) {
+ int ret = system(cmdStr.c_str());
+ if (ret != 0) {
+ ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
+ return false;
+ }
+ return true;
+ }
+
+ static bool Start(const char* packageName, const char* activityName) {
+ return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
+ " &> /dev/null");
+ }
+
+ static bool Stop(const char* packageName) {
+ return RunCmd("am force-stop " + std::string(packageName));
+ }
+};
+
+struct EventTracker {
+ struct Event {
+ enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
+ int64_t clientId;
+ int32_t sessionId;
+ };
+
+#define DECLARE_EVENT(action) \
+ static Event action(int32_t clientId, int32_t sessionId) { \
+ return {Event::action, clientId, sessionId}; \
+ }
+
+ DECLARE_EVENT(Start);
+ DECLARE_EVENT(Pause);
+ DECLARE_EVENT(Resume);
+ DECLARE_EVENT(Finished);
+ DECLARE_EVENT(Failed);
+
+ static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+ static std::string toString(const Event& event) {
+ std::string eventStr;
+ switch (event.type) {
+ case Event::Start:
+ eventStr = "Start";
+ break;
+ case Event::Pause:
+ eventStr = "Pause";
+ break;
+ case Event::Resume:
+ eventStr = "Resume";
+ break;
+ case Event::Finished:
+ eventStr = "Finished";
+ break;
+ case Event::Failed:
+ eventStr = "Failed";
+ break;
+ default:
+ return "NoEvent";
+ }
+ return "session {" + std::to_string(event.clientId) + ", " +
+ std::to_string(event.sessionId) + "}: " + eventStr;
+ }
+
+ // Pop 1 event from front, wait for up to timeoutUs if empty.
+ const Event& pop(int64_t timeoutUs = 0) {
+ std::unique_lock lock(mLock);
+
+ if (mEventQueue.empty() && timeoutUs > 0) {
+ mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+ }
+
+ if (mEventQueue.empty()) {
+ mPoppedEvent = NoEvent;
+ } else {
+ mPoppedEvent = *mEventQueue.begin();
+ mEventQueue.pop_front();
+ }
+
+ return mPoppedEvent;
+ }
+
+ bool waitForSpecificEventAndPop(const Event& target, std::list<Event>* outEvents,
+ int64_t timeoutUs = 0) {
+ std::unique_lock lock(mLock);
+
+ auto startTime = std::chrono::system_clock::now();
+ int64_t remainingUs = timeoutUs;
+
+ std::list<Event>::iterator it;
+ while (((it = std::find(mEventQueue.begin(), mEventQueue.end(), target)) ==
+ mEventQueue.end()) &&
+ remainingUs > 0) {
+ std::cv_status status =
+ mCondition.wait_for(lock, std::chrono::microseconds(remainingUs));
+ if (status == std::cv_status::timeout) {
+ break;
+ }
+ std::chrono::microseconds elapsedTime = std::chrono::system_clock::now() - startTime;
+ remainingUs = timeoutUs - elapsedTime.count();
+ }
+
+ if (it == mEventQueue.end()) {
+ return false;
+ }
+ *outEvents = std::list<Event>(mEventQueue.begin(), std::next(it));
+ mEventQueue.erase(mEventQueue.begin(), std::next(it));
+ return true;
+ }
+
+ // Push 1 event to back.
+ void append(const Event& event,
+ const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+ ALOGD("%s", toString(event).c_str());
+
+ std::unique_lock lock(mLock);
+
+ mEventQueue.push_back(event);
+ mLastErr = err;
+ mCondition.notify_one();
+ }
+
+ void updateProgress(int progress) {
+ std::unique_lock lock(mLock);
+ mLastProgress = progress;
+ mUpdateCount++;
+ }
+
+ int getUpdateCount(int* lastProgress) {
+ std::unique_lock lock(mLock);
+ *lastProgress = mLastProgress;
+ return mUpdateCount;
+ }
+
+ TranscodingErrorCode getLastError() {
+ std::unique_lock lock(mLock);
+ return mLastErr;
+ }
+
+private:
+ std::mutex mLock;
+ std::condition_variable mCondition;
+ Event mPoppedEvent;
+ std::list<Event> mEventQueue;
+ TranscodingErrorCode mLastErr;
+ int mUpdateCount = 0;
+ int mLastProgress = -1;
+};
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+ return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+ str << EventTracker::toString(v);
+ return str;
+}
+
+static constexpr bool success = true;
+static constexpr bool fail = false;
+
+struct TestClientCallback : public BnTranscodingClientCallback,
+ public EventTracker,
+ public std::enable_shared_from_this<TestClientCallback> {
+ TestClientCallback(const char* packageName, int32_t id)
+ : mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
+ ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
+
+ // Use package uid if that's available.
+ uid_t packageUid;
+ if (getUidForPackage(String16(packageName), 0 /*userId*/, packageUid) == NO_ERROR) {
+ mClientUid = packageUid;
+ }
+ }
+
+ virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
+
+ Status openFileDescriptor(const std::string& in_fileUri, const std::string& in_mode,
+ ::ndk::ScopedFileDescriptor* _aidl_return) override {
+ ALOGD("@@@ openFileDescriptor: %s", in_fileUri.c_str());
+ int fd;
+ if (in_mode == "w" || in_mode == "rw") {
+ int kOpenFlags;
+ if (in_mode == "w") {
+ // Write-only, create file if non-existent, truncate existing file.
+ kOpenFlags = O_WRONLY | O_CREAT | O_TRUNC;
+ } else {
+ // Read-Write, create if non-existent, no truncate (service will truncate if needed)
+ kOpenFlags = O_RDWR | O_CREAT;
+ }
+ // User R+W permission.
+ constexpr int kFileMode = S_IRUSR | S_IWUSR;
+ fd = open(in_fileUri.c_str(), kOpenFlags, kFileMode);
+ } else {
+ fd = open(in_fileUri.c_str(), O_RDONLY);
+ }
+ _aidl_return->set(fd);
+ return Status::ok();
+ }
+
+ Status onTranscodingStarted(int32_t in_sessionId) override {
+ append(EventTracker::Start(mClientId, in_sessionId));
+ return Status::ok();
+ }
+
+ Status onTranscodingPaused(int32_t in_sessionId) override {
+ append(EventTracker::Pause(mClientId, in_sessionId));
+ return Status::ok();
+ }
+
+ Status onTranscodingResumed(int32_t in_sessionId) override {
+ append(EventTracker::Resume(mClientId, in_sessionId));
+ return Status::ok();
+ }
+
+ Status onTranscodingFinished(
+ int32_t in_sessionId,
+ const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+ append(Finished(mClientId, in_sessionId));
+ return Status::ok();
+ }
+
+ Status onTranscodingFailed(int32_t in_sessionId,
+ ::aidl::android::media::TranscodingErrorCode in_errorCode) override {
+ append(Failed(mClientId, in_sessionId), in_errorCode);
+ return Status::ok();
+ }
+
+ Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+ int32_t /* in_oldAwaitNumber */,
+ int32_t /* in_newAwaitNumber */) override {
+ return Status::ok();
+ }
+
+ Status onProgressUpdate(int32_t /* in_sessionId */, int32_t in_progress) override {
+ updateProgress(in_progress);
+ return Status::ok();
+ }
+
+ Status registerClient(const char* packageName,
+ const std::shared_ptr<IMediaTranscodingService>& service) {
+ // Override the default uid if the package uid is found.
+ uid_t uid;
+ if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) == NO_ERROR) {
+ mClientUid = uid;
+ }
+
+ ALOGD("registering %s with uid %d", packageName, mClientUid);
+
+ std::shared_ptr<ITranscodingClient> client;
+ Status status =
+ service->registerClient(shared_from_this(), kClientName, packageName, &client);
+
+ mClient = status.isOk() ? client : nullptr;
+ return status;
+ }
+
+ Status unregisterClient() {
+ Status status;
+ if (mClient != nullptr) {
+ status = mClient->unregister();
+ mClient = nullptr;
+ }
+ return status;
+ }
+
+    // Submit a transcoding request and verify the outcome against `expectation`
+    // (success/fail). On expected success, also checks that the service assigned
+    // the anticipated session id. pid/uid may be overridden to emulate a request
+    // made on behalf of another client. Returns true iff all expectations held.
+    template <bool expectation = success>
+    bool submit(int32_t sessionId, const char* sourceFilePath, const char* destinationFilePath,
+                TranscodingSessionPriority priority = TranscodingSessionPriority::kNormal,
+                int bitrateBps = -1, int overridePid = -1, int overrideUid = -1) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingRequestParcel request;
+        TranscodingSessionParcel session;
+
+        request.sourceFilePath = sourceFilePath;
+        request.destinationFilePath = destinationFilePath;
+        request.priority = priority;
+        request.clientPid = (overridePid == -1) ? mClientPid : overridePid;
+        request.clientUid = (overrideUid == -1) ? mClientUid : overrideUid;
+        // NOTE(review): package name is cleared when a uid override is used —
+        // presumably so the service can't match it against the caller; confirm
+        // against the service-side permission checks.
+        request.clientPackageName = (overrideUid == -1) ? mPackageName : "";
+        if (bitrateBps > 0) {
+            // Requesting a video format forces a transcode (vs. passthrough).
+            request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
+            request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
+        }
+        Status status = mClient->submitRequest(request, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || session.sessionId == sessionId);
+    }
+
+    // Cancel a previously submitted session; verifies the call result matches
+    // `expectation` and returns true iff it did.
+    template <bool expectation = success>
+    bool cancel(int32_t sessionId) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = mClient->cancelSession(sessionId, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    // Look up a session by id. On expected success, also verifies the stored
+    // request still carries the original source/destination paths. Returns true
+    // iff every expectation held.
+    template <bool expectation = success>
+    bool getSession(int32_t sessionId, const char* sourceFilePath,
+                    const char* destinationFilePath) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingSessionParcel session;
+        Status status = mClient->getSessionWithId(sessionId, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+            EXPECT_EQ(session.request.sourceFilePath, sourceFilePath);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || (session.sessionId == sessionId &&
+                                   session.request.sourceFilePath == sourceFilePath &&
+                                   session.request.destinationFilePath == destinationFilePath));
+    }
+
+ int32_t mClientId;
+ pid_t mClientPid;
+ uid_t mClientUid;
+ std::string mPackageName;
+ std::shared_ptr<ITranscodingClient> mClient;
+};
+
+// Common fixture for the MediaTranscodingService tests: connects to the
+// "media.transcoding" binder service in SetUp() and creates three test client
+// callbacks with distinct package names / client ids.
+class MediaTranscodingServiceTestBase : public ::testing::Test {
+public:
+    MediaTranscodingServiceTestBase() { ALOGI("MediaTranscodingServiceTestBase created"); }
+
+    virtual ~MediaTranscodingServiceTestBase() {
+        ALOGI("MediaTranscodingServiceTestBase destroyed");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+        mService = IMediaTranscodingService::fromBinder(binder);
+        if (mService == nullptr) {
+            // Fixed typo in the service name ("trascoding" -> "transcoding").
+            ALOGE("Failed to connect to the media.transcoding service.");
+            return;
+        }
+
+        mClient1 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageA, 1);
+        mClient2 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageB, 2);
+        mClient3 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageC, 3);
+    }
+
+    // Register a single callback with the service; on success the returned
+    // client interface is stored on the callback, on failure it is cleared.
+    Status registerOneClient(const std::shared_ptr<TestClientCallback>& callback) {
+        ALOGD("registering %s with uid %d", callback->mPackageName.c_str(), callback->mClientUid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status =
+                mService->registerClient(callback, kClientName, callback->mPackageName, &client);
+
+        if (status.isOk()) {
+            callback->mClient = client;
+        } else {
+            callback->mClient = nullptr;
+        }
+        return status;
+    }
+
+    // Register all three test clients and sanity-check the service-side count.
+    // Uses >= 3 rather than == 3 since other clients may already be registered.
+    void registerMultipleClients() {
+        // Register 3 clients.
+        EXPECT_TRUE(registerOneClient(mClient1).isOk());
+        EXPECT_TRUE(registerOneClient(mClient2).isOk());
+        EXPECT_TRUE(registerOneClient(mClient3).isOk());
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        Status status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_GE(numOfClients, 3);
+    }
+
+    void unregisterMultipleClients() {
+        // Unregister the clients.
+        EXPECT_TRUE(mClient1->unregisterClient().isOk());
+        EXPECT_TRUE(mClient2->unregisterClient().isOk());
+        EXPECT_TRUE(mClient3->unregisterClient().isOk());
+    }
+
+    // Best-effort removal of a test output file (ignores missing files).
+    void deleteFile(const char* path) { unlink(path); }
+
+    std::shared_ptr<IMediaTranscodingService> mService;
+    std::shared_ptr<TestClientCallback> mClient1;
+    std::shared_ptr<TestClientCallback> mClient2;
+    std::shared_ptr<TestClientCallback> mClient3;
+};
+
+} // namespace media
+} // namespace android
diff --git a/services/mediatranscoding/tests/README.txt b/services/mediatranscoding/tests/README.txt
new file mode 100644
index 0000000..cde465e
--- /dev/null
+++ b/services/mediatranscoding/tests/README.txt
@@ -0,0 +1,8 @@
+mediatranscodingservice_simulated_tests:
+ Tests media transcoding service with simulated transcoder.
+
+mediatranscodingservice_real_tests:
+ Tests media transcoding service with real transcoder. Uses the same test assets
+ as the MediaTranscoder unit tests. Before running the test, please make sure
+ to push the test assets to /data/local/tmp:
+ adb push $TOP/frameworks/av/media/libmediatranscoding/tests/assets /data/local/tmp/TranscodingTestAssets
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
index 56e34f5..0dff171 100644
--- a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
@@ -20,7 +20,16 @@
android:versionName="1.0" >
<application android:label="TestAppA">
- <activity android:name="com.android.tests.transcoding.MainActivity">
+ <activity android:name="com.android.tests.transcoding.MainActivity"
+ android:exported="true">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.DEFAULT"/>
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ <activity android:name="com.android.tests.transcoding.ResourcePolicyTestActivity"
+ android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.DEFAULT"/>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
index e1e7857..4baa35a 100644
--- a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
@@ -20,7 +20,8 @@
android:versionName="1.0" >
<application android:label="TestAppB">
- <activity android:name="com.android.tests.transcoding.MainActivity">
+ <activity android:name="com.android.tests.transcoding.MainActivity"
+ android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.DEFAULT"/>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
index 55693a4..3dde3af 100644
--- a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
@@ -20,7 +20,8 @@
android:versionName="1.0" >
<application android:label="TestAppC">
- <activity android:name="com.android.tests.transcoding.MainActivity">
+ <activity android:name="com.android.tests.transcoding.MainActivity"
+ android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.DEFAULT"/>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
index 7295073..b79164d 100644
--- a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
@@ -46,7 +46,7 @@
// Called before subsequent visible lifetimes
// for an activity process.
@Override
- public void onRestart(){
+ public void onRestart() {
super.onRestart();
// Load changes knowing that the Activity has already
// been visible within this process.
@@ -54,14 +54,14 @@
// Called at the start of the visible lifetime.
@Override
- public void onStart(){
+ public void onStart() {
super.onStart();
// Apply any required UI change now that the Activity is visible.
}
// Called at the start of the active lifetime.
@Override
- public void onResume(){
+ public void onResume() {
super.onResume();
// Resume any paused UI updates, threads, or processes required
// by the Activity but suspended when it was inactive.
@@ -80,7 +80,7 @@
// Called at the end of the active lifetime.
@Override
- public void onPause(){
+ public void onPause() {
// Suspend UI updates, threads, or CPU intensive processes
// that don't need to be updated when the Activity isn't
// the active foreground Activity.
@@ -89,7 +89,7 @@
// Called at the end of the visible lifetime.
@Override
- public void onStop(){
+ public void onStop() {
// Suspend remaining UI updates, threads, or processing
// that aren't required when the Activity isn't visible.
// Persist all edits or state changes
@@ -99,10 +99,9 @@
// Sometimes called at the end of the full lifetime.
@Override
- public void onDestroy(){
+ public void onDestroy() {
// Clean up any resources including ending threads,
// closing database connections etc.
super.onDestroy();
}
-
}
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
new file mode 100644
index 0000000..c9e2ddb
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.VideoCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Log;
+import java.io.IOException;
+import java.util.Vector;
+
+public class ResourcePolicyTestActivity extends Activity {
+    public static final int TYPE_NONSECURE = 0;
+    public static final int TYPE_SECURE = 1;
+    public static final int TYPE_MIX = 2;
+
+    protected String TAG;
+    private static final int FRAME_RATE = 10;
+    private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+    private static final String MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private static final int TIMEOUT_MS = 5000;
+
+    // Codecs currently held by this activity; released in finishWithResult().
+    private Vector<MediaCodec> mCodecs = new Vector<MediaCodec>();
+
+    // Minimal async-mode callback: the test only needs the codecs started (to
+    // hold resources), not to process buffers, so every event is just logged.
+    private class TestCodecCallback extends MediaCodec.Callback {
+        @Override
+        public void onInputBufferAvailable(MediaCodec codec, int index) {
+            Log.d(TAG, "onInputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onOutputBufferAvailable(
+                MediaCodec codec, int index, MediaCodec.BufferInfo info) {
+            Log.d(TAG, "onOutputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+            Log.d(TAG, "onError " + codec.toString() + " errorCode " + e.getErrorCode());
+        }
+
+        @Override
+        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+            Log.d(TAG, "onOutputFormatChanged " + codec.toString());
+        }
+    }
+
+    private MediaCodec.Callback mCallback = new TestCodecCallback();
+
+    // Build the smallest format the codec supports (lowest width/height/bitrate),
+    // optionally with the secure-playback feature enabled.
+    private MediaFormat getTestFormat(CodecCapabilities caps, boolean securePlayback) {
+        VideoCapabilities vcaps = caps.getVideoCapabilities();
+        int width = vcaps.getSupportedWidths().getLower();
+        int height = vcaps.getSupportedHeightsFor(width).getLower();
+        int bitrate = vcaps.getBitrateRange().getLower();
+
+        MediaFormat format = MediaFormat.createVideoFormat(MIME, width, height);
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, caps.colorFormats[0]);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+        format.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, securePlayback);
+        return format;
+    }
+
+    // Find an AVC decoder matching the requested secure-playback mode, or null
+    // if no such codec exists on this device.
+    private MediaCodecInfo getTestCodecInfo(boolean securePlayback) {
+        // Use avc decoder for testing.
+        boolean isEncoder = false;
+
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : mcl.getCodecInfos()) {
+            if (info.isEncoder() != isEncoder) {
+                continue;
+            }
+            CodecCapabilities caps;
+            try {
+                caps = info.getCapabilitiesForType(MIME);
+                boolean securePlaybackSupported =
+                        caps.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
+                boolean securePlaybackRequired =
+                        caps.isFeatureRequired(CodecCapabilities.FEATURE_SecurePlayback);
+                if ((securePlayback && securePlaybackSupported)
+                        || (!securePlayback && !securePlaybackRequired)) {
+                    Log.d(TAG, "securePlayback " + securePlayback + " will use " + info.getName());
+                } else {
+                    Log.d(TAG, "securePlayback " + securePlayback + " skip " + info.getName());
+                    continue;
+                }
+            } catch (IllegalArgumentException e) {
+                // mime is not supported
+                continue;
+            }
+            return info;
+        }
+
+        return null;
+    }
+
+    // Allocate up to `max` codecs of the kind requested via the "test-type"
+    // intent extra. If no suitable codec exists the test is skipped (activity
+    // finishes with RESULT_OK). Returns the number of codecs actually held.
+    protected int allocateCodecs(int max) {
+        Bundle extras = getIntent().getExtras();
+        int type = TYPE_NONSECURE;
+        if (extras != null) {
+            type = extras.getInt("test-type", type);
+            Log.d(TAG, "type is: " + type);
+        }
+
+        boolean shouldSkip = false;
+        boolean securePlayback;
+        if (type == TYPE_NONSECURE || type == TYPE_MIX) {
+            securePlayback = false;
+            MediaCodecInfo info = getTestCodecInfo(securePlayback);
+            if (info != null) {
+                allocateCodecs(max, info, securePlayback);
+            } else {
+                shouldSkip = true;
+            }
+        }
+
+        if (!shouldSkip) {
+            if (type == TYPE_SECURE || type == TYPE_MIX) {
+                securePlayback = true;
+                MediaCodecInfo info = getTestCodecInfo(securePlayback);
+                if (info != null) {
+                    allocateCodecs(max, info, securePlayback);
+                } else {
+                    shouldSkip = true;
+                }
+            }
+        }
+
+        if (shouldSkip) {
+            Log.d(TAG, "test skipped as there's no supported codec.");
+            finishWithResult(RESULT_OK);
+            // The activity is finishing and all codecs have been released;
+            // report zero so the caller doesn't keep exercising codecs.
+            return 0;
+        }
+
+        Log.d(TAG, "allocateCodecs returned " + mCodecs.size());
+        return mCodecs.size();
+    }
+
+    // Create/configure/start codecs until `max` are held or creation fails
+    // (e.g. once the device's codec-instance limit is reached).
+    protected void allocateCodecs(int max, MediaCodecInfo info, boolean securePlayback) {
+        String name = info.getName();
+        CodecCapabilities caps = info.getCapabilitiesForType(MIME);
+        MediaFormat format = getTestFormat(caps, securePlayback);
+        MediaCodec codec = null;
+        for (int i = mCodecs.size(); i < max; ++i) {
+            try {
+                Log.d(TAG, "Create codec " + name + " #" + i);
+                codec = MediaCodec.createByCodecName(name);
+                codec.setCallback(mCallback);
+                Log.d(TAG, "Configure codec " + format);
+                codec.configure(format, null, null, 0);
+                Log.d(TAG, "Start codec " + format);
+                codec.start();
+                mCodecs.add(codec);
+                // Ownership transferred to mCodecs; null out so the finally
+                // block doesn't release it.
+                codec = null;
+            } catch (IllegalArgumentException e) {
+                Log.d(TAG, "IllegalArgumentException " + e.getMessage());
+                break;
+            } catch (IOException e) {
+                Log.d(TAG, "IOException " + e.getMessage());
+                break;
+            } catch (MediaCodec.CodecException e) {
+                Log.d(TAG, "CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+                break;
+            } finally {
+                if (codec != null) {
+                    Log.d(TAG, "release codec");
+                    codec.release();
+                    codec = null;
+                }
+            }
+        }
+    }
+
+    // Release every held codec and finish the activity with the given result.
+    protected void finishWithResult(int result) {
+        for (int i = 0; i < mCodecs.size(); ++i) {
+            Log.d(TAG, "release codec #" + i);
+            mCodecs.get(i).release();
+        }
+        mCodecs.clear();
+        setResult(result);
+        finish();
+        Log.d(TAG, "activity finished");
+    }
+
+    // Touch every held codec; if one has been reclaimed by the resource
+    // manager, release it, drop it from the list, and stop the polling loop.
+    private void doUseCodecs() {
+        int current = 0;
+        try {
+            for (current = 0; current < mCodecs.size(); ++current) {
+                mCodecs.get(current).getName();
+            }
+        } catch (MediaCodec.CodecException e) {
+            Log.d(TAG, "useCodecs got CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+            if (e.getErrorCode() == MediaCodec.CodecException.ERROR_RECLAIMED) {
+                Log.d(TAG, "Remove codec " + current + " from the list");
+                mCodecs.get(current).release();
+                mCodecs.remove(current);
+                mGotReclaimedException = true;
+                mUseCodecs = false;
+            }
+            return;
+        }
+    }
+
+    private Thread mWorkerThread;
+    private volatile boolean mUseCodecs = true;
+    private volatile boolean mGotReclaimedException = false;
+
+    // Poll the codecs on a worker thread until a reclaim happens or TIMEOUT_MS
+    // elapses, then finish the activity with RESULT_OK.
+    protected void useCodecs() {
+        mWorkerThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                long start = System.currentTimeMillis();
+                long timeSinceStartedMs = 0;
+                while (mUseCodecs && (timeSinceStartedMs < TIMEOUT_MS)) {
+                    doUseCodecs();
+                    try {
+                        Thread.sleep(50 /* millis */);
+                    } catch (InterruptedException e) {
+                        // Restore the interrupt status instead of swallowing it.
+                        Thread.currentThread().interrupt();
+                    }
+                    timeSinceStartedMs = System.currentTimeMillis() - start;
+                }
+                if (mGotReclaimedException) {
+                    Log.d(TAG, "Got expected reclaim exception.");
+                }
+                finishWithResult(RESULT_OK);
+            }
+        });
+        mWorkerThread.start();
+    }
+
+    private static final int MAX_INSTANCES = 32;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        TAG = "ResourcePolicyTestActivity";
+
+        Log.d(TAG, "onCreate called.");
+        super.onCreate(savedInstanceState);
+
+        if (allocateCodecs(MAX_INSTANCES) == MAX_INSTANCES) {
+            // haven't reached the limit with MAX_INSTANCES, no need to wait for reclaim exception.
+            //mWaitForReclaim = false;
+            Log.d(TAG, "Didn't hit resource limitation");
+        }
+
+        // Don't start the polling thread if the test was skipped above (the
+        // activity is already finishing and all codecs are released).
+        if (!isFinishing()) {
+            useCodecs();
+        }
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.d(TAG, "onDestroy called.");
+        super.onDestroy();
+    }
+}
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
old mode 100644
new mode 100755
index bdc0394..edf6778
--- a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -13,6 +13,9 @@
mm
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/push_assets.sh
+
echo "[==========] installing test apps"
adb root
adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppA/arm64/TranscodingUidPolicy_TestAppA.apk
@@ -21,7 +24,24 @@
echo "[==========] waiting for device and sync"
adb wait-for-device remount && adb sync
-adb shell kill -9 `pid media.transcoding`
-#adb shell /data/nativetest64/mediatranscodingservice_tests/mediatranscodingservice_tests
-adb shell /data/nativetest/mediatranscodingservice_tests/mediatranscodingservice_tests
+echo "[==========] running simulated tests"
+adb shell setprop debug.transcoding.simulated_transcoder true
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+adb shell /data/nativetest/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+
+echo "[==========] running real tests"
+adb shell setprop debug.transcoding.simulated_transcoder false
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+adb shell /data/nativetest/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+
+echo "[==========] running resource tests"
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+adb shell /data/nativetest/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+
+echo "[==========] removing debug properties"
+adb shell setprop debug.transcoding.simulated_transcoder \"\"
+adb shell kill -9 `pid media.transcoding`
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
new file mode 100644
index 0000000..0550d77
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -0,0 +1,305 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceRealTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /data/local/tmp:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int64_t kSessionWithPaddingUs = 10000000 + kPaddingUs;
+constexpr int32_t kBitRate = 8 * 1000 * 1000; // 8Mbs
+
+constexpr const char* kShortSrcPath =
+ "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+// Fixture for the real-transcoder service tests.
+class MediaTranscodingServiceRealTest : public MediaTranscodingServiceTestBase {
+public:
+    // Fixed copy-paste from the resource test: log the correct fixture name.
+    MediaTranscodingServiceRealTest() { ALOGI("MediaTranscodingServiceRealTest created"); }
+
+    virtual ~MediaTranscodingServiceRealTest() {
+        ALOGI("MediaTranscodingServiceRealTest destroyed");
+    }
+};
+
+// Submitting a session whose source file does not exist should produce a
+// Failed event with TranscodingErrorCode::kErrorIO.
+TEST_F(MediaTranscodingServiceRealTest, TestInvalidSource) {
+    registerMultipleClients();
+
+    const char* srcPath = "bad_file_uri";
+    const char* dstPath = OUTPATH(TestInvalidSource);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath, dstPath, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Check expected error.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kErrorIO);
+
+    unregisterMultipleClients();
+}
+
+// Submit without a requested video track format: the session should start and
+// complete within the expected duration.
+TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestPassthru);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+// Submit with an explicit video bitrate; the session should start and run to
+// completion.
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideo);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+// Transcode a ~15s clip and verify progress callbacks: at least 10 updates,
+// ending at 100.
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideoProgress) {
+    registerMultipleClients();
+
+    const char* dstPath = OUTPATH(TestTranscodeVideoProgress);
+    deleteFile(dstPath);
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    // Check the progress update messages are received. For this clip (around ~15 second long),
+    // expect at least 10 updates, and the last update should be 100.
+    // Initialize to an invalid value so the EXPECT below reads defined data
+    // even if no progress update was ever delivered.
+    int lastProgress = -1;
+    EXPECT_GE(mClient1->getUpdateCount(&lastProgress), 10);
+    EXPECT_EQ(lastProgress, 100);
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel immediately after start: after cancelling, the session must no
+ * longer be queryable via getSession, and a newly submitted session must start
+ * and finish normally.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelImmediately) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelImmediately_Session0);
+    const char* dstPath1 = OUTPATH(TestCancelImmediately_Session1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit one session, should start immediately.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Test cancel session immediately, getSession should fail after cancel.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    // Submit new session, new session should start immediately and finish.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel in the middle of transcoding: session 0 (long clip) is cancelled
+ * while running; the queued session 1 should then start and run to completion.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelWhileRunning) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestCancelWhileRunning_Session0);
+    const char* dstPath1 = OUTPATH(TestCancelWhileRunning_Session1);
+
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+    // Submit two sessions, session 0 should start immediately, session 1 should be queued.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+    EXPECT_TRUE(mClient1->getSession(1, srcPath1, dstPath1));
+
+    // Session 0 (longtest) shouldn't finish in 1 seconds.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Now cancel session 0. Session 1 should start immediately and finish.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test pause/resume within a single client: a running offline session
+ * (kUnspecified priority) is paused when a realtime session arrives, and
+ * resumed (then finished) after the realtime session completes.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeSingleClient) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeSingleClient_Session0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeSingleClient_Session1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    // Submit one offline session, should start immediately.
+    EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kUnspecified,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    // Test get session after starts.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Submit one realtime session.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Offline session should pause.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Realtime session should start immediately, and run to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(1, "", ""));
+
+    // Then offline session should resume.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    // Test get session after resume.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Offline session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Basic test for pause/resume with two clients, with one session each.
+ * Top app's session should preempt the other app's session. The test drives
+ * foreground/background state by starting/stopping the test apps via shell.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
+    ALOGD("TestPauseResumeMultiClients starting...");
+
+    // Wake and unlock the device, and stop any stale test-app instances so
+    // foreground state is deterministic.
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
+    const char* dstPath1 = OUTPATH(TestPauseResumeMultiClients_Client1);
+    deleteFile(dstPath0);
+    deleteFile(dstPath1);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit session to Client1.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Client1's session should continue to run, since Client2 (app B) doesn't have any session.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Submit session to Client2.
+    ALOGD("Submitting session to client2 (app B) ...");
+    EXPECT_TRUE(
+            mClient2->submit(0, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should pause, client2's session should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    // Client2's session should finish, then Client1's session should resume.
+    EXPECT_EQ(mClient2->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // Client1's session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestPauseResumeMultiClients finished.");
+}
+
+} // namespace media
+} // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
new file mode 100644
index 0000000..790e80b
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceResourceTest"
+
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <aidl/android/media/IResourceManagerService.h>
+#include <binder/ActivityManager.h>
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /sdcard:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int32_t kBitRate = 8 * 1000 * 1000; // 8Mbs
+
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+constexpr const char* kResourcePolicyTestActivity =
+ "/com.android.tests.transcoding.ResourcePolicyTestActivity";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+/*
+ * The OOM score we're going to ask ResourceManager to use for our native transcoding
+ * service. ResourceManager issues reclaims based on these scores. It gets the scores
+ * from ActivityManagerService, which doesn't track native services. The values of the
+ * OOM scores are defined in:
+ * frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+ * We use SERVICE_ADJ which is lower priority than an app possibly visible to the
+ * user, but higher priority than a cached app (which could be killed without disruption
+ * to the user).
+ */
+constexpr static int32_t SERVICE_ADJ = 500;
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnResourceManagerClient;
+using aidl::android::media::IResourceManagerService;
+
+/*
+ * Placeholder ResourceManagerClient for registering process info override
+ * with the IResourceManagerService. This is only used as a token by the service
+ * to get notifications about binder death, not used for reclaiming resources.
+ */
+struct ResourceManagerClient : public BnResourceManagerClient {
+ explicit ResourceManagerClient() = default;
+
+ Status reclaimResource(bool* _aidl_return) override {
+ *_aidl_return = false;
+ return Status::ok();
+ }
+
+ Status getName(::std::string* _aidl_return) override {
+ _aidl_return->clear();
+ return Status::ok();
+ }
+
+ virtual ~ResourceManagerClient() = default;
+};
+
+static std::shared_ptr<ResourceManagerClient> gResourceManagerClient =
+ ::ndk::SharedRefBase::make<ResourceManagerClient>();
+
+void TranscodingHelper_setProcessInfoOverride(int32_t procState, int32_t oomScore) {
+ ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
+ if (service == nullptr) {
+ ALOGE("Failed to get IResourceManagerService");
+ return;
+ }
+ Status status =
+ service->overrideProcessInfo(gResourceManagerClient, getpid(), procState, oomScore);
+ if (!status.isOk()) {
+ ALOGW("Failed to setProcessInfoOverride.");
+ }
+}
+
+class MediaTranscodingServiceResourceTest : public MediaTranscodingServiceTestBase {
+public:
+ MediaTranscodingServiceResourceTest() { ALOGI("MediaTranscodingServiceResourceTest created"); }
+
+ virtual ~MediaTranscodingServiceResourceTest() {
+ ALOGI("MediaTranscodingServiceResourceTest destroyed");
+ }
+};
+
+/**
+ * Basic testing for handling resource lost.
+ *
+ * This test starts a transcoding session (that's somewhat long and takes several seconds),
+ * then launches an activity that allocates video codec instances until it hits insufficient
+ * resource error. Because the activity is running in foreground,
+ * ResourceManager would reclaim codecs from transcoding service which should
+ * cause the session to be paused. The activity will hold the codecs for a few seconds
+ * before releasing them, and the transcoding service should be able to resume
+ * and complete the session.
+ *
+ * Note that this test must run as root. We need to simulate submitting a request for a
+ * client {uid,pid} running at lower priority. As a cmd line test, it's not easy to get the
+ * pid of a living app, so we use our own {uid,pid} to submit. However, since we're a native
+ * process, RM doesn't have our proc info and the reclaim will fail. So we need to use
+ * RM's setProcessInfoOverride to override our proc info, which requires permission (unless root).
+ */
+TEST_F(MediaTranscodingServiceResourceTest, TestResourceLost) {
+ ALOGD("TestResourceLost starting..., pid %d", ::getpid());
+
+ // We're going to submit the request using our own {uid,pid}. Since we're a native
+ // process, RM doesn't have our proc info and the reclaim will fail. So we need to use
+ // RM's setProcessInfoOverride to override our proc info.
+ TranscodingHelper_setProcessInfoOverride(ActivityManager::PROCESS_STATE_SERVICE, SERVICE_ADJ);
+
+ EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+ EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+
+ registerMultipleClients();
+
+ const char* srcPath0 = kLongSrcPath;
+ const char* dstPath0 = OUTPATH(TestResourceLost_Client0);
+ deleteFile(dstPath0);
+
+ ALOGD("Moving app A to top...");
+ EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+ // Submit session to Client1.
+ ALOGD("Submitting session to client1 (app A) ...");
+ EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal,
+ kBitRate, ::getpid(), ::getuid()));
+
+ // Client1's session should start immediately.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+ // Launch ResourcePolicyTestActivity, which will try to allocate up to 32
+ // instances, which should trigger insufficient resources on most devices.
+ // (Note that it's possible that the device supports a very high number of
+ // resource instances, in which case we'll simply require that the session completes.)
+ ALOGD("Launch ResourcePolicyTestActivity...");
+ EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kResourcePolicyTestActivity));
+
+ // The basic requirement is that the session should complete. Wait for finish
+ // event to come and pop up all events received.
+ std::list<EventTracker::Event> events;
+ EXPECT_TRUE(mClient1->waitForSpecificEventAndPop(EventTracker::Finished(CLIENT(1), 0), &events,
+ 15000000));
+
+ // If there is only 1 event, it must be finish (otherwise waitForSpecificEventAndPop
+ // wouldn't pop up anything), and we're ok.
+ //
+ // TODO: If there is only 1 event (finish), and no pause/resume happened, we need
+ // to verify that the ResourcePolicyTestActivity actually was able to allocate
+ // all 32 instances without hitting insufficient resources. Otherwise, it could
+ // be that ResourceManager was not able to reclaim codecs from the transcoding
+ // service at all, which means the resource management is broken.
+ if (events.size() > 1) {
+ EXPECT_TRUE(events.size() >= 3);
+ size_t i = 0;
+ for (auto& event : events) {
+ if (i == 0) {
+ EXPECT_EQ(event, EventTracker::Pause(CLIENT(1), 0));
+ } else if (i == events.size() - 2) {
+ EXPECT_EQ(event, EventTracker::Resume(CLIENT(1), 0));
+ } else if (i == events.size() - 1) {
+ EXPECT_EQ(event, EventTracker::Finished(CLIENT(1), 0));
+ } else {
+ EXPECT_TRUE(event == EventTracker::Pause(CLIENT(1), 0) ||
+ event == EventTracker::Resume(CLIENT(1), 0));
+ }
+ i++;
+ }
+ }
+
+ unregisterMultipleClients();
+
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+}
+
+} // namespace media
+} // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
new file mode 100644
index 0000000..7dfda44
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -0,0 +1,358 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceSimulatedTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "MediaTranscodingServiceTestHelper.h"
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+// Note that -1 is valid and means using calling pid/uid for the service. But only a privileged
+// caller could use them. This test is not a privileged caller.
+constexpr int32_t kInvalidClientPid = -5;
+constexpr int32_t kInvalidClientUid = -10;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientOpPackageName = "";
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kSessionWithPaddingUs = SimulatedTranscoder::kSessionDurationUs + kPaddingUs;
+
+constexpr const char* kClientOpPackageName = "TestClientPackage";
+
+class MediaTranscodingServiceSimulatedTest : public MediaTranscodingServiceTestBase {
+public:
+ MediaTranscodingServiceSimulatedTest() { ALOGI("MediaTranscodingServiceSimulatedTest created"); }
+
+ virtual ~MediaTranscodingServiceSimulatedTest() {
+ ALOGI("MediaTranscodingServiceSimulatedTest destroyed");
+ }
+};
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ // Register the client with null callback.
+ Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName, &client);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientName) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ // Register the client with the service.
+ Status status = mService->registerClient(mClient1, kInvalidClientName,
+ kInvalidClientOpPackageName, &client);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPackageName) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ // Register the client with the service.
+ Status status =
+ mService->registerClient(mClient1, kClientName, kInvalidClientOpPackageName, &client);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterOneClient) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+ EXPECT_TRUE(status.isOk());
+
+ // Validate the client.
+ EXPECT_TRUE(client != nullptr);
+
+ // Check the number of Clients.
+ int32_t numOfClients;
+ status = mService->getNumOfClients(&numOfClients);
+ EXPECT_TRUE(status.isOk());
+ EXPECT_GE(numOfClients, 1);
+
+ // Unregister the client.
+ status = client->unregister();
+ EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientTwice) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+ EXPECT_TRUE(status.isOk());
+
+ // Validate the client.
+ EXPECT_TRUE(client != nullptr);
+
+ // Register the client again and expects failure.
+ std::shared_ptr<ITranscodingClient> client1;
+ status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client1);
+ EXPECT_FALSE(status.isOk());
+
+ // Unregister the client.
+ status = client->unregister();
+ EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterMultipleClients) {
+ registerMultipleClients();
+ unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSessionIdIndependence) {
+ registerMultipleClients();
+
+ // Submit 2 requests on client1 first.
+ EXPECT_TRUE(mClient1->submit(0, "test_source_file", "test_destination_file"));
+ EXPECT_TRUE(mClient1->submit(1, "test_source_file", "test_destination_file"));
+
+ // Submit 2 requests on client2, sessionId should be independent for each client.
+ EXPECT_TRUE(mClient2->submit(0, "test_source_file", "test_destination_file"));
+ EXPECT_TRUE(mClient2->submit(1, "test_source_file", "test_destination_file"));
+
+ // Cancel all sessions.
+ EXPECT_TRUE(mClient1->cancel(0));
+ EXPECT_TRUE(mClient1->cancel(1));
+ EXPECT_TRUE(mClient2->cancel(0));
+ EXPECT_TRUE(mClient2->cancel(1));
+
+ unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelSessions) {
+ registerMultipleClients();
+
+ // Test sessionId assignment.
+ EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file"));
+ EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file"));
+ EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file"));
+
+ // Test submit bad request (no valid sourceFilePath) fails.
+ EXPECT_TRUE(mClient1->submit<fail>(0, "", ""));
+
+ // Test submit bad request (no valid sourceFilePath) fails.
+ EXPECT_TRUE(mClient1->submit<fail>(0, "src", "dst", TranscodingSessionPriority::kNormal,
+ 1000000, kInvalidClientPid, kInvalidClientUid));
+
+ // Test cancel non-existent session fails.
+ EXPECT_TRUE(mClient1->cancel<fail>(100));
+
+ // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+ // Test cancel valid sessionId in random order.
+ // Test cancel finished session fails.
+ EXPECT_TRUE(mClient1->cancel(2));
+ EXPECT_TRUE(mClient1->cancel<fail>(0));
+ EXPECT_TRUE(mClient1->cancel(1));
+
+ // Test cancel session again fails.
+ EXPECT_TRUE(mClient1->cancel<fail>(1));
+
+ // Test no more events arriving after cancel.
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::NoEvent);
+
+ unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestGetSessions) {
+ registerMultipleClients();
+
+ // Submit 3 requests.
+ EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+ EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+ EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+ // Test get sessions by id.
+ EXPECT_TRUE(mClient1->getSession(2, "test_source_file_2", "test_destination_file_2"));
+ EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+ EXPECT_TRUE(mClient1->getSession(0, "test_source_file_0", "test_destination_file_0"));
+
+ // Test get session by invalid id fails.
+ EXPECT_TRUE(mClient1->getSession<fail>(100, "", ""));
+ EXPECT_TRUE(mClient1->getSession<fail>(-1, "", ""));
+
+ // Test get session after cancel fails.
+ EXPECT_TRUE(mClient1->cancel(2));
+ EXPECT_TRUE(mClient1->getSession<fail>(2, "", ""));
+
+ // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+ // Test get session after finish fails.
+ EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+ // Test get the remaining session 1.
+ EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+
+ // Cancel remaining session 1.
+ EXPECT_TRUE(mClient1->cancel(1));
+
+ unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineSessions) {
+ registerMultipleClients();
+
+ // Submit some offline sessions first.
+ EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0",
+ TranscodingSessionPriority::kUnspecified));
+ EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1",
+ TranscodingSessionPriority::kUnspecified));
+
+ // Session 0 should start immediately.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+ // Submit more real-time sessions.
+ EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+ EXPECT_TRUE(mClient1->submit(3, "test_source_file_3", "test_destination_file_3"));
+
+ // Session 0 should pause immediately and session 2 should start.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+ // Session 2 should finish in 2 seconds and session 3 should start.
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
+
+ // Cancel session 3 now
+ EXPECT_TRUE(mClient1->cancel(3));
+
+ // Session 0 should resume and finish in 2 seconds, followed by session 1 start.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+ // Cancel remaining session 1.
+ EXPECT_TRUE(mClient1->cancel(1));
+
+ unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestClientUseAfterUnregister) {
+ std::shared_ptr<ITranscodingClient> client;
+
+ // Register a client, then unregister.
+ Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+ EXPECT_TRUE(status.isOk());
+
+ status = client->unregister();
+ EXPECT_TRUE(status.isOk());
+
+ // Test various operations on the client, should fail with ERROR_DISCONNECTED.
+ TranscodingSessionParcel session;
+ bool result;
+ status = client->getSessionWithId(0, &session, &result);
+ EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+ status = client->cancelSession(0, &result);
+ EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+ TranscodingRequestParcel request;
+ status = client->submitRequest(request, &session, &result);
+ EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicy) {
+ ALOGD("TestTranscodingUidPolicy starting...");
+
+ EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+ EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+ registerMultipleClients();
+
+ ALOGD("Moving app A to top...");
+ EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+ // Submit 3 requests.
+ ALOGD("Submitting session to client1 (app A) ...");
+ EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+ EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+ EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+ // Session 0 should start immediately.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+ ALOGD("Moving app B to top...");
+ EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+ // Session 0 should continue and finish in 2 seconds, then session 1 should start.
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+ ALOGD("Submitting session to client2 (app B) ...");
+ EXPECT_TRUE(mClient2->submit(0, "test_source_file_0", "test_destination_file_0"));
+
+ // Client1's session should pause, client2's session should start.
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
+ EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+ ALOGD("Moving app A back to top...");
+ EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+ // Client2's session should pause, client1's session 1 should resume.
+ EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
+
+ // Client1's session 1 should finish in 2 seconds, then its session 2 should start.
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+ EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+ // After client1's sessions finish, client2's session should resume.
+ EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+ EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
+
+ unregisterMultipleClients();
+
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+ EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+ ALOGD("TestTranscodingUidPolicy finished.");
+}
+
+} // namespace media
+} // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
deleted file mode 100644
index a8d4241..0000000
--- a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
+++ /dev/null
@@ -1,701 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for MediaTranscodingService.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaTranscodingServiceTest"
-
-#include <aidl/android/media/BnTranscodingClientCallback.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingClient.h>
-#include <aidl/android/media/ITranscodingClientCallback.h>
-#include <aidl/android/media/TranscodingJobParcel.h>
-#include <aidl/android/media/TranscodingJobPriority.h>
-#include <aidl/android/media/TranscodingRequestParcel.h>
-#include <android-base/logging.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <binder/PermissionController.h>
-#include <cutils/multiuser.h>
-#include <gtest/gtest.h>
-#include <utils/Log.h>
-
-#include <iostream>
-#include <list>
-
-#include "SimulatedTranscoder.h"
-
-namespace android {
-
-namespace media {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingClientCallback;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingClient;
-using aidl::android::media::ITranscodingClientCallback;
-using aidl::android::media::TranscodingJobParcel;
-using aidl::android::media::TranscodingJobPriority;
-using aidl::android::media::TranscodingRequestParcel;
-
-// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
-// use them. This test is not a privilege caller.
-constexpr int32_t kInvalidClientPid = -5;
-constexpr const char* kInvalidClientName = "";
-constexpr const char* kInvalidClientOpPackageName = "";
-
-constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
-constexpr int32_t kClientUseCallingUid = IMediaTranscodingService::USE_CALLING_UID;
-
-constexpr uid_t kClientUid = 5000;
-#define UID(n) (kClientUid + (n))
-
-constexpr int32_t kClientId = 0;
-#define CLIENT(n) (kClientId + (n))
-
-constexpr int64_t kPaddingUs = 200000;
-constexpr int64_t kJobWithPaddingUs = SimulatedTranscoder::kJobDurationUs + kPaddingUs;
-
-constexpr const char* kClientName = "TestClient";
-constexpr const char* kClientOpPackageName = "TestClientPackage";
-constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
-constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
-constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
-constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
-
-static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
- PermissionController pc;
- uid = pc.getPackageUid(packageName, 0);
- if (uid <= 0) {
- ALOGE("Unknown package: '%s'", String8(packageName).string());
- return BAD_VALUE;
- }
-
- if (userId < 0) {
- ALOGE("Invalid user: %d", userId);
- return BAD_VALUE;
- }
-
- uid = multiuser_get_uid(userId, uid);
- return NO_ERROR;
-}
-
-struct ShellHelper {
- static bool RunCmd(const std::string& cmdStr) {
- int ret = system(cmdStr.c_str());
- if (ret != 0) {
- ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
- return false;
- }
- return true;
- }
-
- static bool Start(const char* packageName, const char* activityName) {
- return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
- " &> /dev/null");
- }
-
- static bool Stop(const char* packageName) {
- return RunCmd("am force-stop " + std::string(packageName));
- }
-};
-
-struct EventTracker {
- struct Event {
- enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
- int64_t clientId;
- int32_t jobId;
- };
-
-#define DECLARE_EVENT(action) \
- static Event action(int32_t clientId, int32_t jobId) { \
- return {Event::action, clientId, jobId}; \
- }
-
- DECLARE_EVENT(Start);
- DECLARE_EVENT(Pause);
- DECLARE_EVENT(Resume);
- DECLARE_EVENT(Finished);
- DECLARE_EVENT(Failed);
-
- static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
-
- static std::string toString(const Event& event) {
- std::string eventStr;
- switch (event.type) {
- case Event::Start:
- eventStr = "Start";
- break;
- case Event::Pause:
- eventStr = "Pause";
- break;
- case Event::Resume:
- eventStr = "Resume";
- break;
- case Event::Finished:
- eventStr = "Finished";
- break;
- case Event::Failed:
- eventStr = "Failed";
- break;
- default:
- return "NoEvent";
- }
- return "job {" + std::to_string(event.clientId) + ", " + std::to_string(event.jobId) +
- "}: " + eventStr;
- }
-
- // Pop 1 event from front, wait for up to timeoutUs if empty.
- const Event& pop(int64_t timeoutUs = 0) {
- std::unique_lock lock(mLock);
-
- if (mEventQueue.empty() && timeoutUs > 0) {
- mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
- }
-
- if (mEventQueue.empty()) {
- mPoppedEvent = NoEvent;
- } else {
- mPoppedEvent = *mEventQueue.begin();
- mEventQueue.pop_front();
- }
-
- return mPoppedEvent;
- }
-
- // Push 1 event to back.
- void append(const Event& event) {
- ALOGD("%s", toString(event).c_str());
-
- std::unique_lock lock(mLock);
-
- mEventQueue.push_back(event);
- mCondition.notify_one();
- }
-
-private:
- std::mutex mLock;
- std::condition_variable mCondition;
- Event mPoppedEvent;
- std::list<Event> mEventQueue;
-};
-
-// Operators for GTest macros.
-bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
- return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.jobId == rhs.jobId;
-}
-
-std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
- str << EventTracker::toString(v);
- return str;
-}
-
-struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
- TestClientCallback(int32_t id) : mClientId(id) {
- ALOGI("TestClientCallback %d Created", mClientId);
- }
-
- virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
-
- Status onTranscodingFinished(
- int32_t in_jobId,
- const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
- append(Finished(mClientId, in_jobId));
- return Status::ok();
- }
-
- Status onTranscodingFailed(
- int32_t in_jobId,
- ::aidl::android::media::TranscodingErrorCode /* in_errorCode */) override {
- append(Failed(mClientId, in_jobId));
- return Status::ok();
- }
-
- Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
- int32_t /* in_newAwaitNumber */) override {
- return Status::ok();
- }
-
- Status onProgressUpdate(int32_t in_jobId, int32_t in_progress) override {
- // The progress numbers from the SimulatedTranscoder represents the
- // event's type in the transcoder.
- switch (in_progress) {
- case SimulatedTranscoder::Event::Start:
- append(EventTracker::Start(mClientId, in_jobId));
- break;
- case SimulatedTranscoder::Event::Pause:
- append(EventTracker::Pause(mClientId, in_jobId));
- break;
- case SimulatedTranscoder::Event::Resume:
- append(EventTracker::Resume(mClientId, in_jobId));
- break;
- default:
- ALOGE("unrecognized progress number %d, ignored by test", in_progress);
- break;
- }
- return Status::ok();
- }
-
- int32_t mClientId;
-};
-
-class MediaTranscodingServiceTest : public ::testing::Test {
-public:
- MediaTranscodingServiceTest() { ALOGI("MediaTranscodingServiceTest created"); }
-
- ~MediaTranscodingServiceTest() { ALOGI("MediaTranscodingingServiceTest destroyed"); }
-
- void SetUp() override {
- // Need thread pool to receive callbacks, otherwise oneway callbacks are
- // silently ignored.
- ABinderProcess_startThreadPool();
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
- mService = IMediaTranscodingService::fromBinder(binder);
- if (mService == nullptr) {
- ALOGE("Failed to connect to the media.trascoding service.");
- return;
- }
- mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(1));
- mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(2));
- mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(CLIENT(3));
- }
-
- std::shared_ptr<ITranscodingClient> registerOneClient(
- const char* packageName, const std::shared_ptr<TestClientCallback>& callback,
- uid_t defaultUid) {
- uid_t uid;
- if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) != NO_ERROR) {
- uid = defaultUid;
- }
-
- ALOGD("registering %s with uid %d", packageName, uid);
-
- std::shared_ptr<ITranscodingClient> client;
- Status status = mService->registerClient(callback, kClientName, packageName, uid,
- kClientUseCallingPid, &client);
- return status.isOk() ? client : nullptr;
- }
-
- void registerMultipleClients() {
- // Register 3 clients.
- mClient1 = registerOneClient(kClientPackageA, mClientCallback1, UID(1));
- EXPECT_TRUE(mClient1 != nullptr);
-
- mClient2 = registerOneClient(kClientPackageB, mClientCallback2, UID(2));
- EXPECT_TRUE(mClient2 != nullptr);
-
- mClient3 = registerOneClient(kClientPackageC, mClientCallback3, UID(3));
- EXPECT_TRUE(mClient3 != nullptr);
-
- // Check the number of clients.
- int32_t numOfClients;
- Status status = mService->getNumOfClients(&numOfClients);
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(3, numOfClients);
- }
-
- void unregisterMultipleClients() {
- Status status;
-
- // Unregister the clients.
- status = mClient1->unregister();
- EXPECT_TRUE(status.isOk());
-
- status = mClient2->unregister();
- EXPECT_TRUE(status.isOk());
-
- status = mClient3->unregister();
- EXPECT_TRUE(status.isOk());
-
- // Check the number of clients.
- int32_t numOfClients;
- status = mService->getNumOfClients(&numOfClients);
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(0, numOfClients);
- }
-
- static constexpr bool success = true;
- static constexpr bool fail = false;
-
- template <bool expectation = success>
- bool submit(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
- const char* filename,
- TranscodingJobPriority priority = TranscodingJobPriority::kNormal) {
- constexpr bool shouldSucceed = (expectation == success);
- bool result;
- TranscodingRequestParcel request;
- TranscodingJobParcel job;
-
- request.fileName = filename;
- request.priority = priority;
- Status status = client->submitRequest(request, &job, &result);
-
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(result, shouldSucceed);
- if (shouldSucceed) {
- EXPECT_EQ(job.jobId, jobId);
- }
-
- return status.isOk() && (result == shouldSucceed) && (!shouldSucceed || job.jobId == jobId);
- }
-
- template <bool expectation = success>
- bool cancel(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId) {
- constexpr bool shouldSucceed = (expectation == success);
- bool result;
- Status status = client->cancelJob(jobId, &result);
-
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(result, shouldSucceed);
-
- return status.isOk() && (result == shouldSucceed);
- }
-
- template <bool expectation = success>
- bool getJob(const std::shared_ptr<ITranscodingClient>& client, int32_t jobId,
- const char* filename) {
- constexpr bool shouldSucceed = (expectation == success);
- bool result;
- TranscodingJobParcel job;
- Status status = client->getJobWithId(jobId, &job, &result);
-
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(result, shouldSucceed);
- if (shouldSucceed) {
- EXPECT_EQ(job.jobId, jobId);
- EXPECT_EQ(job.request.fileName, filename);
- }
-
- return status.isOk() && (result == shouldSucceed) &&
- (!shouldSucceed || (job.jobId == jobId && job.request.fileName == filename));
- }
-
- std::shared_ptr<IMediaTranscodingService> mService;
- std::shared_ptr<TestClientCallback> mClientCallback1;
- std::shared_ptr<TestClientCallback> mClientCallback2;
- std::shared_ptr<TestClientCallback> mClientCallback3;
- std::shared_ptr<ITranscodingClient> mClient1;
- std::shared_ptr<ITranscodingClient> mClient2;
- std::shared_ptr<ITranscodingClient> mClient3;
-};
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterNullClient) {
- std::shared_ptr<ITranscodingClient> client;
-
- // Register the client with null callback.
- Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client);
- EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPid) {
- std::shared_ptr<ITranscodingClient> client;
-
- // Register the client with the service.
- Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kInvalidClientPid, &client);
- EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientName) {
- std::shared_ptr<ITranscodingClient> client;
-
- // Register the client with the service.
- Status status = mService->registerClient(mClientCallback1, kInvalidClientName,
- kInvalidClientOpPackageName, kClientUseCallingUid,
- kClientUseCallingPid, &client);
- EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPackageName) {
- std::shared_ptr<ITranscodingClient> client;
-
- // Register the client with the service.
- Status status =
- mService->registerClient(mClientCallback1, kClientName, kInvalidClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client);
- EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterOneClient) {
- std::shared_ptr<ITranscodingClient> client;
-
- Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client);
- EXPECT_TRUE(status.isOk());
-
- // Validate the client.
- EXPECT_TRUE(client != nullptr);
-
- // Check the number of Clients.
- int32_t numOfClients;
- status = mService->getNumOfClients(&numOfClients);
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(1, numOfClients);
-
- // Unregister the client.
- status = client->unregister();
- EXPECT_TRUE(status.isOk());
-
- // Check the number of Clients.
- status = mService->getNumOfClients(&numOfClients);
- EXPECT_TRUE(status.isOk());
- EXPECT_EQ(0, numOfClients);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientTwice) {
- std::shared_ptr<ITranscodingClient> client;
-
- Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client);
- EXPECT_TRUE(status.isOk());
-
- // Validate the client.
- EXPECT_TRUE(client != nullptr);
-
- // Register the client again and expects failure.
- std::shared_ptr<ITranscodingClient> client1;
- status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client1);
- EXPECT_FALSE(status.isOk());
-
- // Unregister the client.
- status = client->unregister();
- EXPECT_TRUE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterMultipleClients) {
- registerMultipleClients();
- unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestJobIdIndependence) {
- registerMultipleClients();
-
- // Submit 2 requests on client1 first.
- EXPECT_TRUE(submit(mClient1, 0, "test_file"));
- EXPECT_TRUE(submit(mClient1, 1, "test_file"));
-
- // Submit 2 requests on client2, jobId should be independent for each client.
- EXPECT_TRUE(submit(mClient2, 0, "test_file"));
- EXPECT_TRUE(submit(mClient2, 1, "test_file"));
-
- // Cancel all jobs.
- EXPECT_TRUE(cancel(mClient1, 0));
- EXPECT_TRUE(cancel(mClient1, 1));
- EXPECT_TRUE(cancel(mClient2, 0));
- EXPECT_TRUE(cancel(mClient2, 1));
-
- unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestSubmitCancelJobs) {
- registerMultipleClients();
-
- // Test jobId assignment.
- EXPECT_TRUE(submit(mClient1, 0, "test_file_0"));
- EXPECT_TRUE(submit(mClient1, 1, "test_file_1"));
- EXPECT_TRUE(submit(mClient1, 2, "test_file_2"));
-
- // Test submit bad request (no valid fileName) fails.
- EXPECT_TRUE(submit<fail>(mClient1, 0, ""));
-
- // Test cancel non-existent job fails.
- EXPECT_TRUE(cancel<fail>(mClient1, 100));
-
- // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
- // Test cancel valid jobId in random order.
- // Test cancel finished job fails.
- EXPECT_TRUE(cancel(mClient1, 2));
- EXPECT_TRUE(cancel<fail>(mClient1, 0));
- EXPECT_TRUE(cancel(mClient1, 1));
-
- // Test cancel job again fails.
- EXPECT_TRUE(cancel<fail>(mClient1, 1));
-
- // Test no more events arriving after cancel.
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::NoEvent);
-
- unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestGetJobs) {
- registerMultipleClients();
-
- // Submit 3 requests.
- EXPECT_TRUE(submit(mClient1, 0, "test_file_0"));
- EXPECT_TRUE(submit(mClient1, 1, "test_file_1"));
- EXPECT_TRUE(submit(mClient1, 2, "test_file_2"));
-
- // Test get jobs by id.
- EXPECT_TRUE(getJob(mClient1, 2, "test_file_2"));
- EXPECT_TRUE(getJob(mClient1, 1, "test_file_1"));
- EXPECT_TRUE(getJob(mClient1, 0, "test_file_0"));
-
- // Test get job by invalid id fails.
- EXPECT_TRUE(getJob<fail>(mClient1, 100, ""));
- EXPECT_TRUE(getJob<fail>(mClient1, -1, ""));
-
- // Test get job after cancel fails.
- EXPECT_TRUE(cancel(mClient1, 2));
- EXPECT_TRUE(getJob<fail>(mClient1, 2, ""));
-
- // Job 0 should start immediately and finish in 2 seconds, followed by Job 1 start.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
- // Test get job after finish fails.
- EXPECT_TRUE(getJob<fail>(mClient1, 0, ""));
-
- // Test get the remaining job 1.
- EXPECT_TRUE(getJob(mClient1, 1, "test_file_1"));
-
- // Cancel remaining job 1.
- EXPECT_TRUE(cancel(mClient1, 1));
-
- unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestSubmitCancelWithOfflineJobs) {
- registerMultipleClients();
-
- // Submit some offline jobs first.
- EXPECT_TRUE(submit(mClient1, 0, "test_file_0", TranscodingJobPriority::kUnspecified));
- EXPECT_TRUE(submit(mClient1, 1, "test_file_1", TranscodingJobPriority::kUnspecified));
-
- // Job 0 should start immediately.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-
- // Submit more real-time jobs.
- EXPECT_TRUE(submit(mClient1, 2, "test_file_2"));
- EXPECT_TRUE(submit(mClient1, 3, "test_file_3"));
-
- // Job 0 should pause immediately and job 2 should start.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
-
- // Job 2 should finish in 2 seconds and job 3 should start.
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
-
- // Cancel job 3 now
- EXPECT_TRUE(cancel(mClient1, 3));
-
- // Job 0 should resume and finish in 2 seconds, followed by job 1 start.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
- // Cancel remaining job 1.
- EXPECT_TRUE(cancel(mClient1, 1));
-
- unregisterMultipleClients();
-}
-
-TEST_F(MediaTranscodingServiceTest, TestClientUseAfterUnregister) {
- std::shared_ptr<ITranscodingClient> client;
-
- // Register a client, then unregister.
- Status status = mService->registerClient(mClientCallback1, kClientName, kClientOpPackageName,
- kClientUseCallingUid, kClientUseCallingPid, &client);
- EXPECT_TRUE(status.isOk());
-
- status = client->unregister();
- EXPECT_TRUE(status.isOk());
-
- // Test various operations on the client, should fail with ERROR_DISCONNECTED.
- TranscodingJobParcel job;
- bool result;
- status = client->getJobWithId(0, &job, &result);
- EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-
- status = client->cancelJob(0, &result);
- EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-
- TranscodingRequestParcel request;
- status = client->submitRequest(request, &job, &result);
- EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestTranscodingUidPolicy) {
- ALOGD("TestTranscodingUidPolicy starting...");
-
- EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
- EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
-
- registerMultipleClients();
-
- ALOGD("Moving app A to top...");
- EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
-
- // Submit 3 requests.
- ALOGD("Submitting job to client1 (app A) ...");
- EXPECT_TRUE(submit(mClient1, 0, "test_file_0"));
- EXPECT_TRUE(submit(mClient1, 1, "test_file_1"));
- EXPECT_TRUE(submit(mClient1, 2, "test_file_2"));
-
- // Job 0 should start immediately.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
-
- ALOGD("Moving app B to top...");
- EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
-
- // Job 0 should continue and finish in 2 seconds, then job 1 should start.
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
-
- ALOGD("Submitting job to client2 (app B) ...");
- EXPECT_TRUE(submit(mClient2, 0, "test_file_0"));
-
- // Client1's job should pause, client2's job should start.
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
- EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
-
- ALOGD("Moving app A back to top...");
- EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
-
- // Client2's job should pause, client1's job 1 should resume.
- EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
-
- // Client2's job 1 should finish in 2 seconds, then its job 2 should start.
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
- EXPECT_EQ(mClientCallback1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
-
- // After client2's jobs finish, client1's job should resume.
- EXPECT_EQ(mClientCallback1->pop(kJobWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
- EXPECT_EQ(mClientCallback2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
-
- unregisterMultipleClients();
-
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
- EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
-
- ALOGD("TestTranscodingUidPolicy finished.");
-}
-
-} // namespace media
-} // namespace android
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 5ea6d1e..b057968 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -18,6 +18,7 @@
name: "libavservices_minijail",
defaults: ["libavservices_minijail_defaults"],
vendor_available: true,
+ min_sdk_version: "29",
export_include_dirs: ["."],
}
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 6e14434..054a896 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -41,7 +41,7 @@
: Singleton<AAudioClientTracker>() {
}
-std::string AAudioClientTracker::dump() const {
+std::string AAudioClientTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
const bool isLocked = AAudio_tryUntilTrue(
[this]()->bool { return mLock.try_lock(); } /* f */,
@@ -106,18 +106,9 @@
aaudio_result_t
AAudioClientTracker::registerClientStream(pid_t pid, sp<AAudioServiceStreamBase> serviceStream) {
- aaudio_result_t result = AAUDIO_OK;
ALOGV("registerClientStream(%d,)\n", pid);
std::lock_guard<std::mutex> lock(mLock);
- sp<NotificationClient> notificationClient = mNotificationClients[pid];
- if (notificationClient == 0) {
- // This will get called the first time the audio server registers an internal stream.
- ALOGV("registerClientStream(%d,) unrecognized pid\n", pid);
- notificationClient = new NotificationClient(pid, nullptr);
- mNotificationClients[pid] = notificationClient;
- }
- notificationClient->registerClientStream(serviceStream);
- return result;
+ return getNotificationClient_l(pid)->registerClientStream(serviceStream);
}
// Find the tracker for this process and remove it.
@@ -136,6 +127,33 @@
return AAUDIO_OK;
}
+void AAudioClientTracker::setExclusiveEnabled(pid_t pid, bool enabled) {
+ ALOGD("%s(%d, %d)\n", __func__, pid, enabled);
+ std::lock_guard<std::mutex> lock(mLock);
+ getNotificationClient_l(pid)->setExclusiveEnabled(enabled);
+}
+
+bool AAudioClientTracker::isExclusiveEnabled(pid_t pid) {
+ std::lock_guard<std::mutex> lock(mLock);
+ return getNotificationClient_l(pid)->isExclusiveEnabled();
+}
+
+sp<AAudioClientTracker::NotificationClient>
+ AAudioClientTracker::getNotificationClient_l(pid_t pid) {
+ sp<NotificationClient> notificationClient = mNotificationClients[pid];
+ if (notificationClient == nullptr) {
+ // This will get called the first time the audio server uses this PID.
+ ALOGV("%s(%d,) unrecognized PID\n", __func__, pid);
+ notificationClient = new AAudioClientTracker::NotificationClient(pid, nullptr);
+ mNotificationClients[pid] = notificationClient;
+ }
+ return notificationClient;
+}
+
+// =======================================
+// AAudioClientTracker::NotificationClient
+// =======================================
+
AAudioClientTracker::NotificationClient::NotificationClient(pid_t pid, const sp<IBinder>& binder)
: mProcessId(pid), mBinder(binder) {
}
@@ -180,7 +198,7 @@
for (const auto& serviceStream : streamsToClose) {
aaudio_handle_t handle = serviceStream->getHandle();
ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
- aaudioService->closeStream(handle);
+ aaudioService->asAAudioServiceInterface().closeStream(handle);
}
// mStreams should be empty now
}
@@ -189,7 +207,7 @@
}
-std::string AAudioClientTracker::NotificationClient::dump() const {
+std::string AAudioClientTracker::NotificationClient::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
const bool isLocked = AAudio_tryUntilTrue(
[this]()->bool { return mLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 00ff467..2b38621 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -21,10 +21,11 @@
#include <mutex>
#include <set>
+#include <android-base/thread_annotations.h>
#include <utils/Singleton.h>
#include <aaudio/AAudio.h>
-#include "binding/IAAudioClient.h"
+#include <aaudio/IAAudioClient.h>
#include "AAudioService.h"
namespace aaudio {
@@ -46,7 +47,7 @@
*/
std::string dump() const;
- aaudio_result_t registerClient(pid_t pid, const android::sp<android::IAAudioClient>& client);
+ aaudio_result_t registerClient(pid_t pid, const android::sp<IAAudioClient>& client);
void unregisterClient(pid_t pid);
@@ -58,6 +59,15 @@
aaudio_result_t unregisterClientStream(pid_t pid,
android::sp<AAudioServiceStreamBase> serviceStream);
+ /**
+ * Specify whether a process is allowed to create an EXCLUSIVE MMAP stream.
+ * @param pid
+ * @param enabled
+ */
+ void setExclusiveEnabled(pid_t pid, bool enabled);
+
+ bool isExclusiveEnabled(pid_t pid);
+
android::AAudioService *getAAudioService() const {
return mAAudioService;
}
@@ -84,19 +94,33 @@
aaudio_result_t unregisterClientStream(android::sp<AAudioServiceStreamBase> serviceStream);
+ void setExclusiveEnabled(bool enabled) {
+ mExclusiveEnabled = enabled;
+ }
+
+ bool isExclusiveEnabled() {
+ return mExclusiveEnabled;
+ }
+
// IBinder::DeathRecipient
virtual void binderDied(const android::wp<IBinder>& who);
- protected:
+ private:
mutable std::mutex mLock;
const pid_t mProcessId;
std::set<android::sp<AAudioServiceStreamBase>> mStreams;
// hold onto binder to receive death notifications
android::sp<IBinder> mBinder;
+ bool mExclusiveEnabled = true;
};
+ // This must be called under mLock
+ android::sp<NotificationClient> getNotificationClient_l(pid_t pid)
+ REQUIRES(mLock);
+
mutable std::mutex mLock;
- std::map<pid_t, android::sp<NotificationClient>> mNotificationClients;
+ std::map<pid_t, android::sp<NotificationClient>> mNotificationClients
+ GUARDED_BY(mLock);
android::AAudioService *mAAudioService = nullptr;
};
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index c9bf72f..407f6d5 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <utility/AAudioUtilities.h>
+#include "AAudioClientTracker.h"
#include "AAudioEndpointManager.h"
#include "AAudioServiceEndpointShared.h"
#include "AAudioServiceEndpointMMAP.h"
@@ -42,7 +43,7 @@
, mExclusiveStreams() {
}
-std::string AAudioEndpointManager::dump() const {
+std::string AAudioEndpointManager::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
int index = 0;
@@ -174,7 +175,15 @@
&& !request.isSharingModeMatchRequired()) { // app did not request a shared stream
ALOGD("%s() endpoint in EXCLUSIVE use. Steal it!", __func__);
mExclusiveStolenCount++;
- endpointToSteal = endpoint;
+ // Prevent this process from getting another EXCLUSIVE stream.
+ // This will prevent two clients from colliding after a DISCONNECTION
+ // when they both try to open an exclusive stream at the same time.
+ // That can result in a stream getting disconnected between the OPEN
+ // and START calls. This will help preserve app compatibility.
+ // An app can avoid having this happen by closing their streams when
+ // the app is paused.
+ AAudioClientTracker::getInstance().setExclusiveEnabled(request.getProcessId(), false);
+ endpointToSteal = endpoint; // return it to caller
}
return nullptr;
} else {
@@ -297,6 +306,7 @@
mSharedStreams.end());
serviceEndpoint->close();
+
mSharedCloseCount++;
ALOGV("%s(%p) closed for device %d",
__func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index ae776b1..b07bcef 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -20,6 +20,8 @@
#include <map>
#include <mutex>
#include <sys/types.h>
+
+#include <android-base/thread_annotations.h>
#include <utils/Singleton.h>
#include "binding/AAudioServiceMessage.h"
@@ -70,10 +72,12 @@
const aaudio::AAudioStreamRequest &request);
android::sp<AAudioServiceEndpoint> findExclusiveEndpoint_l(
- const AAudioStreamConfiguration& configuration);
+ const AAudioStreamConfiguration& configuration)
+ REQUIRES(mExclusiveLock);
android::sp<AAudioServiceEndpointShared> findSharedEndpoint_l(
- const AAudioStreamConfiguration& configuration);
+ const AAudioStreamConfiguration& configuration)
+ REQUIRES(mSharedLock);
void closeExclusiveEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
void closeSharedEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
@@ -83,23 +87,25 @@
// Lock mSharedLock before mExclusiveLock.
// it is OK to only lock mExclusiveLock.
mutable std::mutex mSharedLock;
- std::vector<android::sp<AAudioServiceEndpointShared>> mSharedStreams;
+ std::vector<android::sp<AAudioServiceEndpointShared>> mSharedStreams
+ GUARDED_BY(mSharedLock);
mutable std::mutex mExclusiveLock;
- std::vector<android::sp<AAudioServiceEndpointMMAP>> mExclusiveStreams;
+ std::vector<android::sp<AAudioServiceEndpointMMAP>> mExclusiveStreams
+ GUARDED_BY(mExclusiveLock);
- // Modified under a lock.
- int32_t mExclusiveSearchCount = 0; // number of times we SEARCHED for an exclusive endpoint
- int32_t mExclusiveFoundCount = 0; // number of times we FOUND an exclusive endpoint
- int32_t mExclusiveOpenCount = 0; // number of times we OPENED an exclusive endpoint
- int32_t mExclusiveCloseCount = 0; // number of times we CLOSED an exclusive endpoint
- int32_t mExclusiveStolenCount = 0; // number of times we STOLE an exclusive endpoint
+ // Counts related to an exclusive endpoint.
+ int32_t mExclusiveSearchCount GUARDED_BY(mExclusiveLock) = 0; // # SEARCHED
+ int32_t mExclusiveFoundCount GUARDED_BY(mExclusiveLock) = 0; // # FOUND
+ int32_t mExclusiveOpenCount GUARDED_BY(mExclusiveLock) = 0; // # OPENED
+ int32_t mExclusiveCloseCount GUARDED_BY(mExclusiveLock) = 0; // # CLOSED
+ int32_t mExclusiveStolenCount GUARDED_BY(mExclusiveLock) = 0; // # STOLEN
// Same as above but for SHARED endpoints.
- int32_t mSharedSearchCount = 0;
- int32_t mSharedFoundCount = 0;
- int32_t mSharedOpenCount = 0;
- int32_t mSharedCloseCount = 0;
+ int32_t mSharedSearchCount GUARDED_BY(mSharedLock) = 0;
+ int32_t mSharedFoundCount GUARDED_BY(mSharedLock) = 0;
+ int32_t mSharedOpenCount GUARDED_BY(mSharedLock) = 0;
+ int32_t mSharedCloseCount GUARDED_BY(mSharedLock) = 0;
// For easily disabling the stealing of exclusive streams.
static constexpr bool kStealingEnabled = true;
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 1c03b7f..ad4b830 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -33,25 +33,21 @@
using android::FifoBuffer;
using android::fifo_frames_t;
-AAudioMixer::~AAudioMixer() {
- delete[] mOutputBuffer;
-}
-
void AAudioMixer::allocate(int32_t samplesPerFrame, int32_t framesPerBurst) {
mSamplesPerFrame = samplesPerFrame;
mFramesPerBurst = framesPerBurst;
int32_t samplesPerBuffer = samplesPerFrame * framesPerBurst;
- mOutputBuffer = new float[samplesPerBuffer];
+ mOutputBuffer = std::make_unique<float[]>(samplesPerBuffer);
mBufferSizeInBytes = samplesPerBuffer * sizeof(float);
}
void AAudioMixer::clear() {
- memset(mOutputBuffer, 0, mBufferSizeInBytes);
+ memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
}
-int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(int streamIndex, std::shared_ptr<FifoBuffer> fifo, bool allowUnderflow) {
WrappingBuffer wrappingBuffer;
- float *destination = mOutputBuffer;
+ float *destination = mOutputBuffer.get();
#if AAUDIO_MIXER_ATRACE_ENABLED
ATRACE_BEGIN("aaMix");
@@ -117,5 +113,5 @@
}
float *AAudioMixer::getOutputBuffer() {
- return mOutputBuffer;
+ return mOutputBuffer.get();
}
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index d5abc5b..1a120f2 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -25,7 +25,6 @@
class AAudioMixer {
public:
AAudioMixer() {}
- ~AAudioMixer();
void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
@@ -38,7 +37,7 @@
* @param allowUnderflow if true then allow mixer to advance read index past the write index
* @return frames read from this stream
*/
- int32_t mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
+ int32_t mix(int streamIndex, std::shared_ptr<android::FifoBuffer> fifo, bool allowUnderflow);
float *getOutputBuffer();
@@ -47,7 +46,7 @@
private:
void mixPart(float *destination, float *source, int32_t numFrames);
- float *mOutputBuffer = nullptr;
+ std::unique_ptr<float[]> mOutputBuffer;
int32_t mSamplesPerFrame = 0;
int32_t mFramesPerBurst = 0;
int32_t mBufferSizeInBytes = 0;
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index ecbcb7e..69e58f6 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -23,7 +23,6 @@
#include <sstream>
#include <aaudio/AAudio.h>
-#include <mediautils/SchedulingPolicyService.h>
#include <mediautils/ServiceUtilities.h>
#include <utils/String16.h>
@@ -33,26 +32,26 @@
#include "AAudioService.h"
#include "AAudioServiceStreamMMAP.h"
#include "AAudioServiceStreamShared.h"
-#include "binding/IAAudioService.h"
using namespace android;
using namespace aaudio;
#define MAX_STREAMS_PER_PROCESS 8
+#define AIDL_RETURN(x) *_aidl_return = (x); return Status::ok();
+
using android::AAudioService;
+using binder::Status;
android::AAudioService::AAudioService()
- : BnAAudioService() {
+ : BnAAudioService(),
+ mAdapter(this) {
mAudioClient.clientUid = getuid(); // TODO consider using geteuid()
mAudioClient.clientPid = getpid();
mAudioClient.packageName = String16("");
AAudioClientTracker::getInstance().setAAudioService(this);
}
-AAudioService::~AAudioService() {
-}
-
status_t AAudioService::dump(int fd, const Vector<String16>& args) {
std::string result;
@@ -73,18 +72,21 @@
return NO_ERROR;
}
-void AAudioService::registerClient(const sp<IAAudioClient>& client) {
+Status AAudioService::registerClient(const sp<IAAudioClient> &client) {
pid_t pid = IPCThreadState::self()->getCallingPid();
AAudioClientTracker::getInstance().registerClient(pid, client);
+ return Status::ok();
}
-bool AAudioService::isCallerInService() {
- return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
- mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
-}
+Status
+AAudioService::openStream(const StreamRequest &_request, StreamParameters* _paramsOut,
+ int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
-aaudio_handle_t AAudioService::openStream(const aaudio::AAudioStreamRequest &request,
- aaudio::AAudioStreamConfiguration &configurationOutput) {
+ // Create wrapper objects for simple usage of the parcelables.
+ const AAudioStreamRequest request(_request);
+ AAudioStreamConfiguration paramsOut;
+
// A lock in is used to order the opening of endpoints when an
// EXCLUSIVE endpoint is stolen. We want the order to be:
// 1) Thread A opens exclusive MMAP endpoint
@@ -109,16 +111,17 @@
if (count >= MAX_STREAMS_PER_PROCESS) {
ALOGE("openStream(): exceeded max streams per process %d >= %d",
count, MAX_STREAMS_PER_PROCESS);
- return AAUDIO_ERROR_UNAVAILABLE;
+ AIDL_RETURN(AAUDIO_ERROR_UNAVAILABLE);
}
}
if (sharingMode != AAUDIO_SHARING_MODE_EXCLUSIVE && sharingMode != AAUDIO_SHARING_MODE_SHARED) {
ALOGE("openStream(): unrecognized sharing mode = %d", sharingMode);
- return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ AIDL_RETURN(AAUDIO_ERROR_ILLEGAL_ARGUMENT);
}
- if (sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE) {
+ if (sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE
+ && AAudioClientTracker::getInstance().isExclusiveEnabled(request.getProcessId())) {
// only trust audioserver for in service indication
bool inService = false;
if (isCallerInService()) {
@@ -147,65 +150,141 @@
if (result != AAUDIO_OK) {
serviceStream.clear();
- return result;
+ AIDL_RETURN(result);
} else {
aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
serviceStream->setHandle(handle);
pid_t pid = request.getProcessId();
AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
- configurationOutput.copyFrom(*serviceStream);
+ paramsOut.copyFrom(*serviceStream);
+ *_paramsOut = std::move(paramsOut).parcelable();
// Log open in MediaMetrics after we have the handle because we need the handle to
// create the metrics ID.
serviceStream->logOpen(handle);
ALOGV("%s(): return handle = 0x%08X", __func__, handle);
- return handle;
+ AIDL_RETURN(handle);
}
}
-// If a close request is pending then close the stream
-bool AAudioService::releaseStream(const sp<AAudioServiceStreamBase> &serviceStream) {
- bool closed = false;
- // decrementAndRemoveStreamByHandle() uses a lock so that if there are two simultaneous closes
- // then only one will get the pointer and do the close.
- sp<AAudioServiceStreamBase> foundStream = mStreamTracker.decrementAndRemoveStreamByHandle(
- serviceStream->getHandle());
- if (foundStream.get() != nullptr) {
- foundStream->close();
- pid_t pid = foundStream->getOwnerProcessId();
- AAudioClientTracker::getInstance().unregisterClientStream(pid, foundStream);
- closed = true;
- }
- return closed;
-}
+Status AAudioService::closeStream(int32_t streamHandle, int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
-aaudio_result_t AAudioService::checkForPendingClose(
- const sp<AAudioServiceStreamBase> &serviceStream,
- aaudio_result_t defaultResult) {
- return releaseStream(serviceStream) ? AAUDIO_ERROR_INVALID_STATE : defaultResult;
-}
-
-aaudio_result_t AAudioService::closeStream(aaudio_handle_t streamHandle) {
// Check permission and ownership first.
sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
}
- return closeStream(serviceStream);
+ AIDL_RETURN(closeStream(serviceStream));
+}
+
+Status AAudioService::getStreamDescription(int32_t streamHandle, Endpoint* endpoint,
+ int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AudioEndpointParcelable endpointParcelable;
+ aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
+ if (result == AAUDIO_OK) {
+ *endpoint = std::move(endpointParcelable).parcelable();
+ }
+ AIDL_RETURN(result);
+}
+
+Status AAudioService::startStream(int32_t streamHandle, int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AIDL_RETURN(serviceStream->start());
+}
+
+Status AAudioService::pauseStream(int32_t streamHandle, int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AIDL_RETURN(serviceStream->pause());
+}
+
+Status AAudioService::stopStream(int32_t streamHandle, int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AIDL_RETURN(serviceStream->stop());
+}
+
+Status AAudioService::flushStream(int32_t streamHandle, int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AIDL_RETURN(serviceStream->flush());
+}
+
+Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+ int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ int32_t priority = isCallerInService()
+ ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
+ AIDL_RETURN(serviceStream->registerAudioThread(clientThreadId, priority));
+}
+
+Status AAudioService::unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+ int32_t *_aidl_return) {
+ static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+ sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ if (serviceStream.get() == nullptr) {
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+ AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+ }
+ AIDL_RETURN(serviceStream->unregisterAudioThread(clientThreadId));
+}
+
+bool AAudioService::isCallerInService() {
+ return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
+ mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
}
aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
+ // This is protected by a lock in AAudioClientTracker.
+ // It is safe to unregister the same stream twice.
pid_t pid = serviceStream->getOwnerProcessId();
AAudioClientTracker::getInstance().unregisterClientStream(pid, serviceStream);
+ // This is protected by a lock in mStreamTracker.
+ // It is safe to remove the same stream twice.
+ mStreamTracker.removeStreamByHandle(serviceStream->getHandle());
- serviceStream->markCloseNeeded();
- (void) releaseStream(serviceStream);
- return AAUDIO_OK;
+ return serviceStream->close();
}
sp<AAudioServiceStreamBase> AAudioService::convertHandleToServiceStream(
aaudio_handle_t streamHandle) {
- sp<AAudioServiceStreamBase> serviceStream = mStreamTracker.getStreamByHandleAndIncrement(
+ sp<AAudioServiceStreamBase> serviceStream = mStreamTracker.getStreamByHandle(
streamHandle);
if (serviceStream.get() != nullptr) {
// Only allow owner or the aaudio service to access the stream.
@@ -218,136 +297,32 @@
if (!allowed) {
ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
callingUserId, streamHandle, ownerUserId);
- // We incremented the reference count so we must check if it needs to be closed.
- checkForPendingClose(serviceStream, AAUDIO_OK);
serviceStream.clear();
}
}
return serviceStream;
}
-aaudio_result_t AAudioService::getStreamDescription(
- aaudio_handle_t streamHandle,
- aaudio::AudioEndpointParcelable &parcelable) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
-
- aaudio_result_t result = serviceStream->getDescription(parcelable);
- // parcelable.dump();
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::startStream(aaudio_handle_t streamHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("startStream(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
-
- aaudio_result_t result = serviceStream->start();
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::pauseStream(aaudio_handle_t streamHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("pauseStream(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
- aaudio_result_t result = serviceStream->pause();
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::stopStream(aaudio_handle_t streamHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("stopStream(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
- aaudio_result_t result = serviceStream->stop();
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::flushStream(aaudio_handle_t streamHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("flushStream(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
- aaudio_result_t result = serviceStream->flush();
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::registerAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId,
- int64_t periodNanoseconds) {
- aaudio_result_t result = AAUDIO_OK;
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("registerAudioThread(), illegal stream handle = 0x%0x", streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
- if (serviceStream->getRegisteredThread() != AAudioServiceStreamBase::ILLEGAL_THREAD_ID) {
- ALOGE("AAudioService::registerAudioThread(), thread already registered");
- result = AAUDIO_ERROR_INVALID_STATE;
- } else {
- const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
- int32_t priority = isCallerInService()
- ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
- serviceStream->setRegisteredThread(clientThreadId);
- int err = android::requestPriority(ownerPid, clientThreadId,
- priority, true /* isForApp */);
- if (err != 0) {
- ALOGE("AAudioService::registerAudioThread(%d) failed, errno = %d, priority = %d",
- clientThreadId, errno, priority);
- result = AAUDIO_ERROR_INTERNAL;
- }
- }
- return checkForPendingClose(serviceStream, result);
-}
-
-aaudio_result_t AAudioService::unregisterAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId) {
- aaudio_result_t result = AAUDIO_OK;
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
- if (serviceStream.get() == nullptr) {
- ALOGE("%s(), illegal stream handle = 0x%0x", __func__, streamHandle);
- return AAUDIO_ERROR_INVALID_HANDLE;
- }
- if (serviceStream->getRegisteredThread() != clientThreadId) {
- ALOGE("%s(), wrong thread", __func__);
- result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
- } else {
- serviceStream->setRegisteredThread(0);
- }
- return checkForPendingClose(serviceStream, result);
-}
-
aaudio_result_t AAudioService::startClient(aaudio_handle_t streamHandle,
const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) {
sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
- ALOGE("%s(), illegal stream handle = 0x%0x", __func__, streamHandle);
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
}
- aaudio_result_t result = serviceStream->startClient(client, attr, clientHandle);
- return checkForPendingClose(serviceStream, result);
+ return serviceStream->startClient(client, attr, clientHandle);
}
aaudio_result_t AAudioService::stopClient(aaudio_handle_t streamHandle,
audio_port_handle_t portHandle) {
sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
- ALOGE("%s(), illegal stream handle = 0x%0x", __func__, streamHandle);
+ ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
}
- aaudio_result_t result = serviceStream->stopClient(portHandle);
- return checkForPendingClose(serviceStream, result);
+ return serviceStream->stopClient(portHandle);
}
// This is only called internally when AudioFlinger wants to tear down a stream.
@@ -355,12 +330,13 @@
aaudio_result_t AAudioService::disconnectStreamByPortHandle(audio_port_handle_t portHandle) {
ALOGD("%s(%d) called", __func__, portHandle);
sp<AAudioServiceStreamBase> serviceStream =
- mStreamTracker.findStreamByPortHandleAndIncrement(portHandle);
+ mStreamTracker.findStreamByPortHandle(portHandle);
if (serviceStream.get() == nullptr) {
ALOGE("%s(), could not find stream with portHandle = %d", __func__, portHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
}
+ // This is protected by a lock and will just return if already stopped.
aaudio_result_t result = serviceStream->stop();
serviceStream->disconnect();
- return checkForPendingClose(serviceStream, result);
+ return result;
}
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index 6a2ac1f..7c1b796 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -24,69 +24,71 @@
#include <media/AudioClient.h>
#include <aaudio/AAudio.h>
+#include <aaudio/BnAAudioService.h>
#include "binding/AAudioCommon.h"
+#include "binding/AAudioBinderAdapter.h"
#include "binding/AAudioServiceInterface.h"
-#include "binding/IAAudioService.h"
#include "AAudioServiceStreamBase.h"
#include "AAudioStreamTracker.h"
namespace android {
+#define AAUDIO_SERVICE_NAME "media.aaudio"
+
class AAudioService :
public BinderService<AAudioService>,
- public BnAAudioService,
- public aaudio::AAudioServiceInterface
+ public aaudio::BnAAudioService
{
friend class BinderService<AAudioService>;
public:
AAudioService();
- virtual ~AAudioService();
+ virtual ~AAudioService() = default;
+
+ aaudio::AAudioServiceInterface& asAAudioServiceInterface() {
+ return mAdapter;
+ }
static const char* getServiceName() { return AAUDIO_SERVICE_NAME; }
virtual status_t dump(int fd, const Vector<String16>& args) override;
- virtual void registerClient(const sp<IAAudioClient>& client);
+ binder::Status registerClient(const ::android::sp<::aaudio::IAAudioClient>& client) override;
- aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
- aaudio::AAudioStreamConfiguration &configurationOutput)
- override;
+ binder::Status openStream(const ::aaudio::StreamRequest& request,
+ ::aaudio::StreamParameters* paramsOut,
+ int32_t* _aidl_return) override;
- /*
- * This is called from Binder. It checks for permissions
- * and converts the handle passed through Binder to a stream pointer.
- */
- aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) override;
+ binder::Status closeStream(int32_t streamHandle, int32_t* _aidl_return) override;
- aaudio_result_t getStreamDescription(
- aaudio::aaudio_handle_t streamHandle,
- aaudio::AudioEndpointParcelable &parcelable) override;
+ binder::Status
+ getStreamDescription(int32_t streamHandle, ::aaudio::Endpoint* endpoint,
+ int32_t* _aidl_return) override;
- aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) override;
+ binder::Status startStream(int32_t streamHandle, int32_t* _aidl_return) override;
- aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) override;
+ binder::Status pauseStream(int32_t streamHandle, int32_t* _aidl_return) override;
- aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) override;
+ binder::Status stopStream(int32_t streamHandle, int32_t* _aidl_return) override;
- aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) override;
+ binder::Status flushStream(int32_t streamHandle, int32_t* _aidl_return) override;
- aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
- pid_t tid,
- int64_t periodNanoseconds) override;
+ binder::Status
+ registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+ int32_t* _aidl_return) override;
- aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
- pid_t tid) override;
+ binder::Status unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+ int32_t* _aidl_return) override;
aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
const android::AudioClient& client,
const audio_attributes_t *attr,
- audio_port_handle_t *clientHandle) override;
+ audio_port_handle_t *clientHandle);
aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
- audio_port_handle_t clientHandle) override;
+ audio_port_handle_t clientHandle);
// ===============================================================================
// The following public methods are only called from the service and NOT by Binder.
@@ -101,6 +103,29 @@
aaudio_result_t closeStream(sp<aaudio::AAudioServiceStreamBase> serviceStream);
private:
+ class Adapter : public aaudio::AAudioBinderAdapter {
+ public:
+ explicit Adapter(AAudioService *service)
+ : aaudio::AAudioBinderAdapter(service),
+ mService(service) {}
+
+ aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+ const android::AudioClient &client,
+ const audio_attributes_t *attr,
+ audio_port_handle_t *clientHandle) override {
+ return mService->startClient(streamHandle, client, attr, clientHandle);
+ }
+
+ aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+ audio_port_handle_t clientHandle) override {
+ return mService->stopClient(streamHandle, clientHandle);
+ }
+
+ private:
+ AAudioService* const mService;
+ };
+
+ Adapter mAdapter;
/** @return true if the client is the audioserver
*/
@@ -114,11 +139,6 @@
sp<aaudio::AAudioServiceStreamBase> convertHandleToServiceStream(
aaudio::aaudio_handle_t streamHandle);
- bool releaseStream(const sp<aaudio::AAudioServiceStreamBase> &serviceStream);
-
- aaudio_result_t checkForPendingClose(const sp<aaudio::AAudioServiceStreamBase> &serviceStream,
- aaudio_result_t defaultResult);
-
android::AudioClient mAudioClient;
aaudio::AAudioStreamTracker mStreamTracker;
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b09cbf4..faea58f 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,7 +38,7 @@
using namespace android; // TODO just import names needed
using namespace aaudio; // TODO just import names needed
-std::string AAudioServiceEndpoint::dump() const {
+std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
const bool isLocked = AAudio_tryUntilTrue(
@@ -90,14 +90,23 @@
std::vector<android::sp<AAudioServiceStreamBase>>
AAudioServiceEndpoint::disconnectRegisteredStreams() {
std::vector<android::sp<AAudioServiceStreamBase>> streamsDisconnected;
- std::lock_guard<std::mutex> lock(mLockStreams);
+ {
+ std::lock_guard<std::mutex> lock(mLockStreams);
+ mRegisteredStreams.swap(streamsDisconnected);
+ }
mConnected.store(false);
- for (const auto &stream : mRegisteredStreams) {
- ALOGD("%s() - stop and disconnect port %d", __func__, stream->getPortHandle());
+ // We need to stop all the streams before we disconnect them.
+ // Otherwise there is a race condition where the first disconnected app
+ // tries to reopen a stream as MMAP but is blocked by the second stream,
+ // which hasn't stopped yet. Then the first app ends up with a Legacy stream.
+ for (const auto &stream : streamsDisconnected) {
+ ALOGD("%s() - stop(), port = %d", __func__, stream->getPortHandle());
stream->stop();
+ }
+ for (const auto &stream : streamsDisconnected) {
+ ALOGD("%s() - disconnect(), port = %d", __func__, stream->getPortHandle());
stream->disconnect();
}
- mRegisteredStreams.swap(streamsDisconnected);
return streamsDisconnected;
}
@@ -173,11 +182,12 @@
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
if (direction == AAUDIO_DIRECTION_OUTPUT) {
- flags = AUDIO_FLAG_LOW_LATENCY
- | AAudioConvert_allowCapturePolicyToAudioFlagsMask(params->getAllowedCapturePolicy());
+ flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+ | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+ params->getAllowedCapturePolicy()));
} else {
- flags = AUDIO_FLAG_LOW_LATENCY
- | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive());
+ flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+ | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
}
return {
.content_type = contentType,
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index a171cb0..72090c2 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -22,6 +22,8 @@
#include <mutex>
#include <vector>
+#include <android-base/thread_annotations.h>
+
#include "client/AudioStreamInternal.h"
#include "client/AudioStreamInternalPlay.h"
#include "core/AAudioStreamParameters.h"
@@ -47,7 +49,11 @@
virtual aaudio_result_t open(const aaudio::AAudioStreamRequest &request) = 0;
- virtual aaudio_result_t close() = 0;
+ /*
+ * Perform any cleanup necessary before deleting the stream.
+ * This might include releasing and closing internal streams.
+ */
+ virtual void close() = 0;
aaudio_result_t registerStream(android::sp<AAudioServiceStreamBase> stream);
@@ -137,7 +143,8 @@
std::vector<android::sp<AAudioServiceStreamBase>> disconnectRegisteredStreams();
mutable std::mutex mLockStreams;
- std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams;
+ std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams
+ GUARDED_BY(mLockStreams);
SimpleDoubleBuffer<Timestamp> mAtomicEndpointTimestamp;
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 37d105b..bc769f0 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -35,22 +35,17 @@
using namespace android; // TODO just import names needed
using namespace aaudio; // TODO just import names needed
-AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService &audioService)
- : mStreamInternalCapture(audioService, true) {
- mStreamInternal = &mStreamInternalCapture;
-}
-
-AAudioServiceEndpointCapture::~AAudioServiceEndpointCapture() {
- delete mDistributionBuffer;
+AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService& audioService)
+ : AAudioServiceEndpointShared(
+ new AudioStreamInternalCapture(audioService.asAAudioServiceInterface(), true)) {
}
aaudio_result_t AAudioServiceEndpointCapture::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAudioServiceEndpointShared::open(request);
if (result == AAUDIO_OK) {
- delete mDistributionBuffer;
int distributionBufferSizeBytes = getStreamInternal()->getFramesPerBurst()
* getStreamInternal()->getBytesPerFrame();
- mDistributionBuffer = new uint8_t[distributionBufferSizeBytes];
+ mDistributionBuffer = std::make_unique<uint8_t[]>(distributionBufferSizeBytes);
}
return result;
}
@@ -67,9 +62,12 @@
int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
// Read audio data from stream using a blocking read.
- result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
+ result = getStreamInternal()->read(mDistributionBuffer.get(),
+ getFramesPerBurst(), timeoutNanos);
if (result == AAUDIO_ERROR_DISCONNECTED) {
- disconnectRegisteredStreams();
+ ALOGD("%s() read() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+ // We do not need the returned vector.
+ (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
break;
} else if (result != getFramesPerBurst()) {
ALOGW("callbackLoop() read %d / %d",
@@ -79,48 +77,14 @@
// Distribute data to each active stream.
{ // brackets are for lock_guard
-
std::lock_guard <std::mutex> lock(mLockStreams);
for (const auto& clientStream : mRegisteredStreams) {
if (clientStream->isRunning() && !clientStream->isSuspended()) {
- int64_t clientFramesWritten = 0;
-
sp<AAudioServiceStreamShared> streamShared =
static_cast<AAudioServiceStreamShared *>(clientStream.get());
-
- {
- // Lock the AudioFifo to protect against close.
- std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
- FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
- if (fifo != nullptr) {
-
- // Determine offset between framePosition in client's stream
- // vs the underlying MMAP stream.
- clientFramesWritten = fifo->getWriteCounter();
- // There are two indices that refer to the same frame.
- int64_t positionOffset = mmapFramesRead - clientFramesWritten;
- streamShared->setTimestampPositionOffset(positionOffset);
-
- // Is the buffer too full to write a burst?
- if (fifo->getEmptyFramesAvailable() <
- getFramesPerBurst()) {
- streamShared->incrementXRunCount();
- } else {
- fifo->write(mDistributionBuffer, getFramesPerBurst());
- }
- clientFramesWritten = fifo->getWriteCounter();
- }
- }
-
- if (clientFramesWritten > 0) {
- // This timestamp represents the completion of data being written into the
- // client buffer. It is sent to the client and used in the timing model
- // to decide when data will be available to read.
- Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
- streamShared->markTransferTime(timestamp);
- }
-
+ streamShared->writeDataIfRoom(mmapFramesRead,
+ mDistributionBuffer.get(),
+ getFramesPerBurst());
}
}
}
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 971da9a..2ca43cf 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -17,6 +17,8 @@
#ifndef AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
#define AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
+#include <memory>
+
#include "client/AudioStreamInternal.h"
#include "client/AudioStreamInternalCapture.h"
@@ -28,16 +30,14 @@
class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
public:
explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
- virtual ~AAudioServiceEndpointCapture();
+ virtual ~AAudioServiceEndpointCapture() = default;
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
-
void *callbackLoop() override;
private:
- AudioStreamInternalCapture mStreamInternalCapture;
- uint8_t *mDistributionBuffer = nullptr;
+ std::unique_ptr<uint8_t[]> mDistributionBuffer;
};
} /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 0843e0b..85b2057 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -72,24 +72,46 @@
aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAUDIO_OK;
- audio_config_base_t config;
- audio_port_handle_t deviceId;
-
copyFrom(request.getConstantConfiguration());
-
- const audio_attributes_t attributes = getAudioAttributesFrom(this);
-
mMmapClient.clientUid = request.getUserId();
mMmapClient.clientPid = request.getProcessId();
mMmapClient.packageName.setTo(String16(""));
+ audio_format_t audioFormat = getFormat();
+
+ // FLOAT is not directly supported by the HAL so ask for a 24-bit.
+ bool isHighResRequested = audioFormat == AUDIO_FORMAT_PCM_FLOAT
+ || audioFormat == AUDIO_FORMAT_PCM_32_BIT;
+ if (isHighResRequested) {
+ // TODO remove these logs when finished debugging.
+ ALOGD("%s() change format from %d to 24_BIT_PACKED", __func__, audioFormat);
+ audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ }
+
+ result = openWithFormat(audioFormat);
+ if (result == AAUDIO_OK) return result;
+
+ // TODO The HAL and AudioFlinger should be recommending a format if the open fails.
+ // But that recommendation is not propagating back from the HAL.
+ // So for now just try something very likely to work.
+ if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_24_BIT_PACKED) {
+ ALOGD("%s() 24_BIT failed, perhaps due to format. Try again with 16_BIT", __func__);
+ audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+ result = openWithFormat(audioFormat);
+ }
+ return result;
+}
+
+aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(audio_format_t audioFormat) {
+ aaudio_result_t result = AAUDIO_OK;
+ audio_config_base_t config;
+ audio_port_handle_t deviceId;
+
+ const audio_attributes_t attributes = getAudioAttributesFrom(this);
+
mRequestedDeviceId = deviceId = getDeviceId();
// Fill in config
- audio_format_t audioFormat = getFormat();
- if (audioFormat == AUDIO_FORMAT_DEFAULT || audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
- audioFormat = AUDIO_FORMAT_PCM_16_BIT;
- }
config.format = audioFormat;
int32_t aaudioSampleRate = getSampleRate();
@@ -226,7 +248,7 @@
return result;
}
-aaudio_result_t AAudioServiceEndpointMMAP::close() {
+void AAudioServiceEndpointMMAP::close() {
if (mMmapStream != nullptr) {
// Needs to be explicitly cleared or CTS will fail but it is not clear why.
mMmapStream.clear();
@@ -235,8 +257,6 @@
// FIXME Make closing synchronous.
AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
}
-
- return AAUDIO_OK;
}
aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
@@ -380,3 +400,18 @@
parcelable.mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
return AAUDIO_OK;
}
+
+aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
+ int64_t *timeNanos)
+{
+ if (!mExternalPositionSupported) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ status_t status = mMmapStream->getExternalPosition(positionFrames, timeNanos);
+ if (status == INVALID_OPERATION) {
+ // getExternalPosition is not supported. Set mExternalPositionSupported as false
+ // so that the call will not go to the HAL next time.
+ mExternalPositionSupported = false;
+ }
+ return AAudioConvert_androidToAAudioResult(status);
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3d10861..24b161d 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -50,7 +50,7 @@
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
- aaudio_result_t close() override;
+ void close() override;
aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
audio_port_handle_t *clientHandle) override;
@@ -85,7 +85,12 @@
return mHardwareTimeOffsetNanos;
}
+ aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+
private:
+
+ aaudio_result_t openWithFormat(audio_format_t audioFormat);
+
MonotonicCounter mFramesTransferred;
// Interface to the AudioFlinger MMAP support.
@@ -101,6 +106,8 @@
int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
+ bool mExternalPositionSupported = true;
+
};
} /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index bda4b90..4e46033 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -41,10 +41,9 @@
#define BURSTS_PER_BUFFER_DEFAULT 2
-AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService &audioService)
- : mStreamInternalPlay(audioService, true) {
- mStreamInternal = &mStreamInternalPlay;
-}
+AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService& audioService)
+ : AAudioServiceEndpointShared(
+ new AudioStreamInternalPlay(audioService.asAAudioServiceInterface(), true)) {}
aaudio_result_t AAudioServiceEndpointPlay::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAudioServiceEndpointShared::open(request);
@@ -99,10 +98,11 @@
{
// Lock the AudioFifo to protect against close.
- std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
- FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
- if (fifo != nullptr) {
+ std::lock_guard <std::mutex> lock(streamShared->audioDataQueueLock);
+ std::shared_ptr<SharedRingBuffer> audioDataQueue
+ = streamShared->getAudioDataQueue_l();
+ std::shared_ptr<FifoBuffer> fifo;
+ if (audioDataQueue && (fifo = audioDataQueue->getFifoBuffer())) {
// Determine offset between framePosition in client's stream
// vs the underlying MMAP stream.
@@ -145,7 +145,9 @@
result = getStreamInternal()->write(mMixer.getOutputBuffer(),
getFramesPerBurst(), timeoutNanos);
if (result == AAUDIO_ERROR_DISCONNECTED) {
- AAudioServiceEndpointShared::disconnectRegisteredStreams();
+ ALOGD("%s() write() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+ // We do not need the returned vector.
+ (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
break;
} else if (result != getFramesPerBurst()) {
ALOGW("callbackLoop() wrote %d / %d",
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.h b/services/oboeservice/AAudioServiceEndpointPlay.h
index 981e430..160a1de 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.h
+++ b/services/oboeservice/AAudioServiceEndpointPlay.h
@@ -45,7 +45,6 @@
void *callbackLoop() override;
private:
- AudioStreamInternalPlay mStreamInternalPlay; // for playing output of mixer
bool mLatencyTuningEnabled = false; // TODO implement tuning
AAudioMixer mMixer; //
};
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 21253c8..501e8c0 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -40,6 +40,9 @@
// This is the maximum size in frames. The effective size can be tuned smaller at runtime.
#define DEFAULT_BUFFER_CAPACITY (48 * 8)
+AAudioServiceEndpointShared::AAudioServiceEndpointShared(AudioStreamInternal *streamInternal)
+ : mStreamInternal(streamInternal) {}
+
std::string AAudioServiceEndpointShared::dump() const {
std::stringstream result;
@@ -84,24 +87,31 @@
return result;
}
-aaudio_result_t AAudioServiceEndpointShared::close() {
- return getStreamInternal()->releaseCloseFinal();
+void AAudioServiceEndpointShared::close() {
+ stopSharingThread();
+ getStreamInternal()->safeReleaseClose();
}
// Glue between C and C++ callbacks.
static void *aaudio_endpoint_thread_proc(void *arg) {
assert(arg != nullptr);
+ ALOGD("%s() called", __func__);
- // The caller passed in a smart pointer to prevent the endpoint from getting deleted
- // while the thread was launching.
- sp<AAudioServiceEndpointShared> *endpointForThread =
- static_cast<sp<AAudioServiceEndpointShared> *>(arg);
- sp<AAudioServiceEndpointShared> endpoint = *endpointForThread;
- delete endpointForThread; // Just use scoped smart pointer. Don't need this anymore.
+ // Prevent the stream from being deleted while being used.
+ // This is just for extra safety. It is probably not needed because
+ // this callback should be joined before the stream is closed.
+ AAudioServiceEndpointShared *endpointPtr =
+ static_cast<AAudioServiceEndpointShared *>(arg);
+ android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
+ // Balance the incStrong() in startSharingThread_l().
+ endpoint->decStrong(nullptr);
+
void *result = endpoint->callbackLoop();
// Close now so that the HW resource is freed and we can open a new device.
if (!endpoint->isConnected()) {
- endpoint->close();
+ ALOGD("%s() call safeReleaseCloseFromCallback()", __func__);
+ // Release and close under a lock with no check for callback collisions.
+ endpoint->getStreamInternal()->safeReleaseCloseFromCallback();
}
return result;
@@ -113,38 +123,39 @@
* AAUDIO_NANOS_PER_SECOND
/ getSampleRate();
mCallbackEnabled.store(true);
- // Pass a smart pointer so the thread can hold a reference.
- sp<AAudioServiceEndpointShared> *endpointForThread = new sp<AAudioServiceEndpointShared>(this);
- aaudio_result_t result = getStreamInternal()->createThread(periodNanos,
- aaudio_endpoint_thread_proc,
- endpointForThread);
+ // Prevent this object from getting deleted before the thread has a chance to create
+ // its strong pointer. Assume the thread will call decStrong().
+ this->incStrong(nullptr);
+ aaudio_result_t result = getStreamInternal()->createThread_l(periodNanos,
+ aaudio_endpoint_thread_proc,
+ this);
if (result != AAUDIO_OK) {
- // The thread can't delete it so we have to do it here.
- delete endpointForThread;
+ this->decStrong(nullptr); // Because the thread won't do it.
}
return result;
}
aaudio_result_t aaudio::AAudioServiceEndpointShared::stopSharingThread() {
mCallbackEnabled.store(false);
- aaudio_result_t result = getStreamInternal()->joinThread(NULL);
- return result;
+ return getStreamInternal()->joinThread(NULL);
}
-aaudio_result_t AAudioServiceEndpointShared::startStream(sp<AAudioServiceStreamBase> sharedStream,
- audio_port_handle_t *clientHandle) {
+aaudio_result_t AAudioServiceEndpointShared::startStream(
+ sp<AAudioServiceStreamBase> sharedStream,
+ audio_port_handle_t *clientHandle)
+ NO_THREAD_SAFETY_ANALYSIS {
aaudio_result_t result = AAUDIO_OK;
{
std::lock_guard<std::mutex> lock(mLockStreams);
if (++mRunningStreamCount == 1) { // atomic
- result = getStreamInternal()->requestStart();
+ result = getStreamInternal()->systemStart();
if (result != AAUDIO_OK) {
--mRunningStreamCount;
} else {
result = startSharingThread_l();
if (result != AAUDIO_OK) {
- getStreamInternal()->requestStop();
+ getStreamInternal()->systemStopFromApp();
--mRunningStreamCount;
}
}
@@ -158,7 +169,7 @@
if (result != AAUDIO_OK) {
if (--mRunningStreamCount == 0) { // atomic
stopSharingThread();
- getStreamInternal()->requestStop();
+ getStreamInternal()->systemStopFromApp();
}
}
}
@@ -168,14 +179,12 @@
aaudio_result_t AAudioServiceEndpointShared::stopStream(sp<AAudioServiceStreamBase> sharedStream,
audio_port_handle_t clientHandle) {
- // Don't lock here because the disconnectRegisteredStreams also uses the lock.
-
// Ignore result.
(void) getStreamInternal()->stopClient(clientHandle);
if (--mRunningStreamCount == 0) { // atomic
- stopSharingThread();
- getStreamInternal()->requestStop();
+ stopSharingThread(); // the sharing thread locks mLockStreams
+ getStreamInternal()->systemStopFromApp();
}
return AAUDIO_OK;
}
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index bfc1744..8357567 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -20,6 +20,8 @@
#include <atomic>
#include <mutex>
+#include <android-base/thread_annotations.h>
+
#include "AAudioServiceEndpoint.h"
#include "client/AudioStreamInternal.h"
#include "client/AudioStreamInternalPlay.h"
@@ -35,12 +37,15 @@
class AAudioServiceEndpointShared : public AAudioServiceEndpoint {
public:
+ explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
+
+ virtual ~AAudioServiceEndpointShared() = default;
std::string dump() const override;
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
- aaudio_result_t close() override;
+ void close() override;
aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
audio_port_handle_t *clientHandle) override;
@@ -54,18 +59,18 @@
virtual void *callbackLoop() = 0;
-protected:
-
AudioStreamInternal *getStreamInternal() const {
- return mStreamInternal;
+ return mStreamInternal.get();
};
- aaudio_result_t startSharingThread_l();
+protected:
+
+ aaudio_result_t startSharingThread_l() REQUIRES(mLockStreams);
aaudio_result_t stopSharingThread();
- // pointer to object statically allocated in subclasses
- AudioStreamInternal *mStreamInternal = nullptr;
+ // An MMAP stream that is shared by multiple clients.
+ android::sp<AudioStreamInternal> mStreamInternal;
std::atomic<bool> mCallbackEnabled{false};
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 531bfa1..7edc25c 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -24,8 +24,8 @@
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
+#include <mediautils/SchedulingPolicyService.h>
-#include "binding/IAAudioService.h"
#include "binding/AAudioServiceMessage.h"
#include "core/AudioGlobal.h"
#include "utility/AudioClock.h"
@@ -45,8 +45,7 @@
*/
AAudioServiceStreamBase::AAudioServiceStreamBase(AAudioService &audioService)
- : mUpMessageQueue(nullptr)
- , mTimestampThread("AATime")
+ : mTimestampThread("AATime")
, mAtomicStreamTimestamp()
, mAudioService(audioService) {
mMmapClient.clientUid = -1;
@@ -55,6 +54,8 @@
}
AAudioServiceStreamBase::~AAudioServiceStreamBase() {
+ ALOGD("%s() called", __func__);
+
// May not be set if open failed.
if (mMetricsId.size() > 0) {
mediametrics::LogItem(mMetricsId)
@@ -139,7 +140,7 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- mUpMessageQueue = new SharedRingBuffer();
+ mUpMessageQueue = std::make_shared<SharedRingBuffer>();
result = mUpMessageQueue->allocate(sizeof(AAudioServiceMessage),
QUEUE_UP_CAPACITY_COMMANDS);
if (result != AAUDIO_OK) {
@@ -169,11 +170,18 @@
}
aaudio_result_t AAudioServiceStreamBase::close() {
+ std::lock_guard<std::mutex> lock(mLock);
+ return close_l();
+}
+
+aaudio_result_t AAudioServiceStreamBase::close_l() {
if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
return AAUDIO_OK;
}
- stop();
+ // This will call stopTimestampThread() and also stop the stream,
+ // just in case it was not already stopped.
+ stop_l();
aaudio_result_t result = AAUDIO_OK;
sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
@@ -185,14 +193,7 @@
endpointManager.closeEndpoint(endpoint);
// AAudioService::closeStream() prevents two threads from closing at the same time.
- mServiceEndpoint.clear(); // endpoint will hold the pointer until this method returns.
- }
-
- {
- std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
- stopTimestampThread();
- delete mUpMessageQueue;
- mUpMessageQueue = nullptr;
+ mServiceEndpoint.clear(); // endpoint will hold the pointer after this method returns.
}
setState(AAUDIO_STREAM_STATE_CLOSED);
@@ -219,9 +220,18 @@
* An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
*/
aaudio_result_t AAudioServiceStreamBase::start() {
+ std::lock_guard<std::mutex> lock(mLock);
+
const int64_t beginNs = AudioClock::getNanoseconds();
aaudio_result_t result = AAUDIO_OK;
+ if (auto state = getState();
+ state == AAUDIO_STREAM_STATE_CLOSED || state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ ALOGW("%s() already CLOSED, returns INVALID_STATE, handle = %d",
+ __func__, getHandle());
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+
mediametrics::Defer defer([&] {
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
@@ -231,7 +241,7 @@
.record(); });
if (isRunning()) {
- return AAUDIO_OK;
+ return result;
}
setFlowing(false);
@@ -254,16 +264,21 @@
return result;
error:
- disconnect();
+ disconnect_l();
return result;
}
aaudio_result_t AAudioServiceStreamBase::pause() {
- const int64_t beginNs = AudioClock::getNanoseconds();
+ std::lock_guard<std::mutex> lock(mLock);
+ return pause_l();
+}
+
+aaudio_result_t AAudioServiceStreamBase::pause_l() {
aaudio_result_t result = AAUDIO_OK;
if (!isRunning()) {
return result;
}
+ const int64_t beginNs = AudioClock::getNanoseconds();
mediametrics::Defer defer([&] {
mediametrics::LogItem(mMetricsId)
@@ -279,7 +294,7 @@
result = stopTimestampThread();
if (result != AAUDIO_OK) {
- disconnect();
+ disconnect_l();
return result;
}
@@ -292,7 +307,7 @@
result = endpoint->stopStream(this, mClientHandle);
if (result != AAUDIO_OK) {
ALOGE("%s() mServiceEndpoint returned %d, %s", __func__, result, getTypeText());
- disconnect(); // TODO should we return or pause Base first?
+ disconnect_l(); // TODO should we return or pause Base first?
}
sendServiceEvent(AAUDIO_SERVICE_EVENT_PAUSED);
@@ -301,11 +316,16 @@
}
aaudio_result_t AAudioServiceStreamBase::stop() {
- const int64_t beginNs = AudioClock::getNanoseconds();
+ std::lock_guard<std::mutex> lock(mLock);
+ return stop_l();
+}
+
+aaudio_result_t AAudioServiceStreamBase::stop_l() {
aaudio_result_t result = AAUDIO_OK;
if (!isRunning()) {
return result;
}
+ const int64_t beginNs = AudioClock::getNanoseconds();
mediametrics::Defer defer([&] {
mediametrics::LogItem(mMetricsId)
@@ -322,7 +342,7 @@
sendCurrentTimestamp(); // warning - this calls a virtual function
result = stopTimestampThread();
if (result != AAUDIO_OK) {
- disconnect();
+ disconnect_l();
return result;
}
@@ -336,7 +356,7 @@
result = endpoint->stopStream(this, mClientHandle);
if (result != AAUDIO_OK) {
ALOGE("%s() stopStream returned %d, %s", __func__, result, getTypeText());
- disconnect();
+ disconnect_l();
// TODO what to do with result here?
}
@@ -355,11 +375,12 @@
}
aaudio_result_t AAudioServiceStreamBase::flush() {
- const int64_t beginNs = AudioClock::getNanoseconds();
+ std::lock_guard<std::mutex> lock(mLock);
aaudio_result_t result = AAudio_isFlushAllowed(getState());
if (result != AAUDIO_OK) {
return result;
}
+ const int64_t beginNs = AudioClock::getNanoseconds();
mediametrics::Defer defer([&] {
mediametrics::LogItem(mMetricsId)
@@ -404,16 +425,66 @@
}
void AAudioServiceStreamBase::disconnect() {
- if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+ std::lock_guard<std::mutex> lock(mLock);
+ disconnect_l();
+}
+
+void AAudioServiceStreamBase::disconnect_l() {
+ if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+ && getState() != AAUDIO_STREAM_STATE_CLOSED) {
+
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
.set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
.record();
+
sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
setState(AAUDIO_STREAM_STATE_DISCONNECTED);
}
}
+aaudio_result_t AAudioServiceStreamBase::registerAudioThread(pid_t clientThreadId,
+ int priority) {
+ std::lock_guard<std::mutex> lock(mLock);
+ aaudio_result_t result = AAUDIO_OK;
+ if (getRegisteredThread() != AAudioServiceStreamBase::ILLEGAL_THREAD_ID) {
+ ALOGE("AAudioService::registerAudioThread(), thread already registered");
+ result = AAUDIO_ERROR_INVALID_STATE;
+ } else {
+ const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
+ setRegisteredThread(clientThreadId);
+ int err = android::requestPriority(ownerPid, clientThreadId,
+ priority, true /* isForApp */);
+ if (err != 0) {
+ ALOGE("AAudioService::registerAudioThread(%d) failed, errno = %d, priority = %d",
+ clientThreadId, errno, priority);
+ result = AAUDIO_ERROR_INTERNAL;
+ }
+ }
+ return result;
+}
+
+aaudio_result_t AAudioServiceStreamBase::unregisterAudioThread(pid_t clientThreadId) {
+ std::lock_guard<std::mutex> lock(mLock);
+ aaudio_result_t result = AAUDIO_OK;
+ if (getRegisteredThread() != clientThreadId) {
+ ALOGE("%s(), wrong thread", __func__);
+ result = AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ } else {
+ setRegisteredThread(0);
+ }
+ return result;
+}
+
+void AAudioServiceStreamBase::setState(aaudio_stream_state_t state) {
+ // CLOSED is a final state.
+ if (mState != AAUDIO_STREAM_STATE_CLOSED) {
+ mState = state;
+ } else {
+ ALOGW_IF(mState != state, "%s(%d) when already CLOSED", __func__, state);
+ }
+}
+
aaudio_result_t AAudioServiceStreamBase::sendServiceEvent(aaudio_service_event_t event,
double dataDouble) {
AAudioServiceMessage command;
@@ -438,12 +509,8 @@
ALOGE("%s(): mUpMessageQueue null! - stream not open", __func__);
return true;
}
- int32_t framesAvailable = mUpMessageQueue->getFifoBuffer()
- ->getFullFramesAvailable();
- int32_t capacity = mUpMessageQueue->getFifoBuffer()
- ->getBufferCapacityInFrames();
// Is it half full or more
- return framesAvailable >= (capacity / 2);
+ return mUpMessageQueue->getFractionalFullness() >= 0.5;
}
aaudio_result_t AAudioServiceStreamBase::writeUpMessageQueue(AAudioServiceMessage *command) {
@@ -511,6 +578,7 @@
* used to communicate with the underlying HAL or Service.
*/
aaudio_result_t AAudioServiceStreamBase::getDescription(AudioEndpointParcelable &parcelable) {
+ std::lock_guard<std::mutex> lock(mLock);
{
std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
if (mUpMessageQueue == nullptr) {
@@ -527,14 +595,3 @@
void AAudioServiceStreamBase::onVolumeChanged(float volume) {
sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
}
-
-int32_t AAudioServiceStreamBase::incrementServiceReferenceCount_l() {
- return ++mCallingCount;
-}
-
-int32_t AAudioServiceStreamBase::decrementServiceReferenceCount_l() {
- int32_t count = --mCallingCount;
- // Each call to increment should be balanced with one call to decrement.
- assert(count >= 0);
- return count;
-}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 79dd738..0f752b7 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,13 +20,15 @@
#include <assert.h>
#include <mutex>
+#include <android-base/thread_annotations.h>
#include <media/AudioClient.h>
#include <utils/RefBase.h>
#include "fifo/FifoBuffer.h"
-#include "binding/IAAudioService.h"
#include "binding/AudioEndpointParcelable.h"
#include "binding/AAudioServiceMessage.h"
+#include "binding/AAudioStreamRequest.h"
+#include "core/AAudioStreamParameters.h"
#include "utility/AAudioUtilities.h"
#include "utility/AudioClock.h"
@@ -77,7 +79,7 @@
// because we had to wait until we generated the handle.
void logOpen(aaudio_handle_t streamHandle);
- virtual aaudio_result_t close();
+ aaudio_result_t close();
/**
* Start the flow of audio data.
@@ -85,7 +87,7 @@
* This is not guaranteed to be synchronous but it currently is.
* An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
*/
- virtual aaudio_result_t start();
+ aaudio_result_t start();
/**
* Stop the flow of data so that start() can resume without loss of data.
@@ -93,7 +95,7 @@
* This is not guaranteed to be synchronous but it currently is.
* An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
*/
- virtual aaudio_result_t pause();
+ aaudio_result_t pause();
/**
* Stop the flow of data after the currently queued data has finished playing.
@@ -102,17 +104,14 @@
* An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
*
*/
- virtual aaudio_result_t stop();
-
- aaudio_result_t stopTimestampThread();
+ aaudio_result_t stop();
/**
* Discard any data held by the underlying HAL or Service.
*
* An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
*/
- virtual aaudio_result_t flush();
-
+ aaudio_result_t flush();
virtual aaudio_result_t startClient(const android::AudioClient& client,
const audio_attributes_t *attr __unused,
@@ -126,29 +125,19 @@
return AAUDIO_ERROR_UNAVAILABLE;
}
+ aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+
+ aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+
bool isRunning() const {
return mState == AAUDIO_STREAM_STATE_STARTED;
}
- // -------------------------------------------------------------------
-
- /**
- * Send a message to the client with an int64_t data value.
- */
- aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
- int64_t dataLong = 0);
- /**
- * Send a message to the client with an double data value.
- */
- aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
- double dataDouble);
-
/**
* Fill in a parcelable description of stream.
*/
aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
-
void setRegisteredThread(pid_t pid) {
mRegisteredClientThread = pid;
}
@@ -221,25 +210,6 @@
return mSuspended;
}
- /**
- * Atomically increment the number of active references to the stream by AAudioService.
- *
- * This is called under a global lock in AAudioStreamTracker.
- *
- * @return value after the increment
- */
- int32_t incrementServiceReferenceCount_l();
-
- /**
- * Atomically decrement the number of active references to the stream by AAudioService.
- * This should only be called after incrementServiceReferenceCount_l().
- *
- * This is called under a global lock in AAudioStreamTracker.
- *
- * @return value after the decrement
- */
- int32_t decrementServiceReferenceCount_l();
-
bool isCloseNeeded() const {
return mCloseNeeded.load();
}
@@ -262,9 +232,12 @@
aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
aaudio_sharing_mode_t sharingMode);
- void setState(aaudio_stream_state_t state) {
- mState = state;
- }
+ virtual aaudio_result_t close_l() REQUIRES(mLock);
+ virtual aaudio_result_t pause_l() REQUIRES(mLock);
+ virtual aaudio_result_t stop_l() REQUIRES(mLock);
+ void disconnect_l() REQUIRES(mLock);
+
+ void setState(aaudio_stream_state_t state);
/**
* Device specific startup.
@@ -293,8 +266,8 @@
pid_t mRegisteredClientThread = ILLEGAL_THREAD_ID;
- SharedRingBuffer* mUpMessageQueue;
std::mutex mUpMessageQueueLock;
+ std::shared_ptr<SharedRingBuffer> mUpMessageQueue;
AAudioThread mTimestampThread;
// This is used by one thread to tell another thread to exit. So it must be atomic.
@@ -319,6 +292,19 @@
private:
+ aaudio_result_t stopTimestampThread();
+
+ /**
+ * Send a message to the client with an int64_t data value.
+ */
+ aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
+ int64_t dataLong = 0);
+ /**
+ * Send a message to the client with a double data value.
+ */
+ aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
+ double dataDouble);
+
/**
* @return true if the queue is getting full.
*/
@@ -327,15 +313,18 @@
aaudio_handle_t mHandle = -1;
bool mFlowing = false;
- // This is modified under a global lock in AAudioStreamTracker.
- int32_t mCallingCount = 0;
-
- // This indicates that a stream that is being referenced by a binder call needs to closed.
- std::atomic<bool> mCloseNeeded{false};
+ // This indicates that a stream that is being referenced by a binder call
+ // and needs to closed.
+ std::atomic<bool> mCloseNeeded{false}; // TODO remove
// This indicate that a running stream should not be processed because of an error,
// for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
std::atomic<bool> mSuspended{false};
+
+protected:
+ // Locking order is important.
+ // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
+ std::mutex mLock; // Prevent start/stop/close etcetera from colliding
};
} /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 639a0a8..57dc1ab 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -19,6 +19,7 @@
#include <utils/Log.h>
#include <atomic>
+#include <inttypes.h>
#include <iomanip>
#include <iostream>
#include <stdint.h>
@@ -92,11 +93,11 @@
}
// Stop the flow of data such that start() can resume with loss of data.
-aaudio_result_t AAudioServiceStreamMMAP::pause() {
+aaudio_result_t AAudioServiceStreamMMAP::pause_l() {
if (!isRunning()) {
return AAUDIO_OK;
}
- aaudio_result_t result = AAudioServiceStreamBase::pause();
+ aaudio_result_t result = AAudioServiceStreamBase::pause_l();
// TODO put before base::pause()?
if (!mInService) {
(void) stopClient(mClientHandle);
@@ -104,11 +105,11 @@
return result;
}
-aaudio_result_t AAudioServiceStreamMMAP::stop() {
+aaudio_result_t AAudioServiceStreamMMAP::stop_l() {
if (!isRunning()) {
return AAUDIO_OK;
}
- aaudio_result_t result = AAudioServiceStreamBase::stop();
+ aaudio_result_t result = AAudioServiceStreamBase::stop_l();
// TODO put before base::stop()?
if (!mInService) {
(void) stopClient(mClientHandle);
@@ -162,7 +163,8 @@
return result;
}
-// Get timestamp that was written by getFreeRunningPosition()
+// Get timestamp from presentation position.
+// If it fails, get timestamp that was written by getFreeRunningPosition()
aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
int64_t *timeNanos) {
@@ -174,7 +176,17 @@
sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
- // TODO Get presentation timestamp from the HAL
+ // Disable this code temporarily because the HAL is not returning
+ // a useful result.
+#if 0
+ uint64_t position;
+ if (serviceEndpointMMAP->getExternalPosition(&position, timeNanos) == AAUDIO_OK) {
+ ALOGD("%s() getExternalPosition() says pos = %" PRIi64 ", time = %" PRIi64,
+ __func__, position, *timeNanos);
+ *positionFrames = (int64_t) position;
+ return AAUDIO_OK;
+ } else
+#endif
if (mAtomicStreamTimestamp.isValid()) {
Timestamp timestamp = mAtomicStreamTimestamp.read();
*positionFrames = timestamp.getPosition();
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 9105469..6ba1725 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -19,6 +19,7 @@
#include <atomic>
+#include <android-base/thread_annotations.h>
#include <android-base/unique_fd.h>
#include <media/audiohal/StreamHalInterface.h>
#include <media/MmapStreamCallback.h>
@@ -34,10 +35,8 @@
#include "TimestampScheduler.h"
#include "utility/MonotonicCounter.h"
-
namespace aaudio {
-
/**
* These corresponds to an EXCLUSIVE mode MMAP client stream.
* It has exclusive use of one AAudioServiceEndpointMMAP to communicate with the underlying
@@ -52,16 +51,6 @@
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
- /**
- * Stop the flow of data so that start() can resume without loss of data.
- *
- * This is not guaranteed to be synchronous but it currently is.
- * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
- */
- aaudio_result_t pause() override;
-
- aaudio_result_t stop() override;
-
aaudio_result_t startClient(const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) override;
@@ -72,6 +61,16 @@
protected:
+ /**
+ * Stop the flow of data so that start() can resume without loss of data.
+ *
+ * This is not guaranteed to be synchronous but it currently is.
+ * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
+ */
+ aaudio_result_t pause_l() REQUIRES(mLock) override;
+
+ aaudio_result_t stop_l() REQUIRES(mLock) override;
+
aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 2ca847a..c665cda 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -24,8 +24,6 @@
#include <aaudio/AAudio.h>
-#include "binding/IAAudioService.h"
-
#include "binding/AAudioServiceMessage.h"
#include "AAudioServiceStreamBase.h"
#include "AAudioServiceStreamShared.h"
@@ -54,19 +52,26 @@
return result.str();
}
-std::string AAudioServiceStreamShared::dump() const {
+std::string AAudioServiceStreamShared::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
+ const bool isLocked = AAudio_tryUntilTrue(
+ [this]()->bool { return audioDataQueueLock.try_lock(); } /* f */,
+ 50 /* times */,
+ 20 /* sleepMs */);
+ if (!isLocked) {
+ result << "AAudioServiceStreamShared may be deadlocked\n";
+ }
+
result << AAudioServiceStreamBase::dump();
- auto fifo = mAudioDataQueue->getFifoBuffer();
- int32_t readCounter = fifo->getReadCounter();
- int32_t writeCounter = fifo->getWriteCounter();
- result << std::setw(10) << writeCounter;
- result << std::setw(10) << readCounter;
- result << std::setw(8) << (writeCounter - readCounter);
+ result << mAudioDataQueue->dump();
result << std::setw(8) << getXRunCount();
+ if (isLocked) {
+ audioDataQueueLock.unlock();
+ }
+
return result.str();
}
@@ -105,7 +110,7 @@
}
int32_t capacityInFrames = numBursts * framesPerBurst;
- // Final sanity check.
+ // Final range check.
if (capacityInFrames > MAX_FRAMES_PER_BUFFER) {
ALOGE("calculateBufferCapacity() calc capacity %d > max %d",
capacityInFrames, MAX_FRAMES_PER_BUFFER);
@@ -178,9 +183,9 @@
}
{
- std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+ std::lock_guard<std::mutex> lock(audioDataQueueLock);
// Create audio data shared memory buffer for client.
- mAudioDataQueue = new SharedRingBuffer();
+ mAudioDataQueue = std::make_shared<SharedRingBuffer>();
result = mAudioDataQueue->allocate(calculateBytesPerFrame(), getBufferCapacity());
if (result != AAUDIO_OK) {
ALOGE("%s() could not allocate FIFO with %d frames",
@@ -203,26 +208,13 @@
return result;
}
-
-aaudio_result_t AAudioServiceStreamShared::close() {
- aaudio_result_t result = AAudioServiceStreamBase::close();
-
- {
- std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
- delete mAudioDataQueue;
- mAudioDataQueue = nullptr;
- }
-
- return result;
-}
-
/**
* Get an immutable description of the data queue created by this service.
*/
aaudio_result_t AAudioServiceStreamShared::getAudioDataDescription(
AudioEndpointParcelable &parcelable)
{
- std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+ std::lock_guard<std::mutex> lock(audioDataQueueLock);
if (mAudioDataQueue == nullptr) {
ALOGW("%s(): mUpMessageQueue null! - stream not open", __func__);
return AAUDIO_ERROR_NULL;
@@ -274,3 +266,37 @@
*positionFrames = position;
return result;
}
+
+void AAudioServiceStreamShared::writeDataIfRoom(int64_t mmapFramesRead,
+ const void *buffer, int32_t numFrames) {
+ int64_t clientFramesWritten = 0;
+
+ // Lock the AudioFifo to protect against close.
+ std::lock_guard <std::mutex> lock(audioDataQueueLock);
+
+ if (mAudioDataQueue != nullptr) {
+ std::shared_ptr<FifoBuffer> fifo = mAudioDataQueue->getFifoBuffer();
+ // Determine offset between framePosition in client's stream
+ // vs the underlying MMAP stream.
+ clientFramesWritten = fifo->getWriteCounter();
+ // There are two indices that refer to the same frame.
+ int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+ setTimestampPositionOffset(positionOffset);
+
+ // Is the buffer too full to write a burst?
+ if (fifo->getEmptyFramesAvailable() < getFramesPerBurst()) {
+ incrementXRunCount();
+ } else {
+ fifo->write(buffer, numFrames);
+ }
+ clientFramesWritten = fifo->getWriteCounter();
+ }
+
+ if (clientFramesWritten > 0) {
+ // This timestamp represents the completion of data being written into the
+ // client buffer. It is sent to the client and used in the timing model
+ // to decide when data will be available to read.
+ Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
+ markTransferTime(timestamp);
+ }
+}
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 61769b5..4fae5b4 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -52,23 +52,16 @@
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
- aaudio_result_t close() override;
+ void writeDataIfRoom(int64_t mmapFramesRead, const void *buffer, int32_t numFrames);
/**
- * This must be locked when calling getAudioDataFifoBuffer_l() and while
- * using the FifoBuffer it returns.
- */
- std::mutex &getAudioDataQueueLock() {
- return mAudioDataQueueLock;
- }
-
- /**
- * This must only be call under getAudioDataQueueLock().
+ * This must only be called under getAudioDataQueueLock().
* @return
*/
- android::FifoBuffer *getAudioDataFifoBuffer_l() { return (mAudioDataQueue == nullptr)
- ? nullptr
- : mAudioDataQueue->getFifoBuffer(); }
+ std::shared_ptr<SharedRingBuffer> getAudioDataQueue_l()
+ REQUIRES(audioDataQueueLock) {
+ return mAudioDataQueue;
+ }
/* Keep a record of when a buffer transfer completed.
* This allows for a more accurate timing model.
@@ -89,6 +82,10 @@
const char *getTypeText() const override { return "Shared"; }
+ // This is public so that the thread safety annotation, GUARDED_BY(),
+ // can work when another object takes the lock.
+ mutable std::mutex audioDataQueueLock;
+
protected:
aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
@@ -106,8 +103,8 @@
int32_t framesPerBurst);
private:
- SharedRingBuffer *mAudioDataQueue = nullptr; // protected by mAudioDataQueueLock
- std::mutex mAudioDataQueueLock;
+
+ std::shared_ptr<SharedRingBuffer> mAudioDataQueue GUARDED_BY(audioDataQueueLock);
std::atomic<int64_t> mTimestampPositionOffset;
std::atomic<int32_t> mXRunCount;
diff --git a/services/oboeservice/AAudioStreamTracker.cpp b/services/oboeservice/AAudioStreamTracker.cpp
index 3328159..9bbbc73 100644
--- a/services/oboeservice/AAudioStreamTracker.cpp
+++ b/services/oboeservice/AAudioStreamTracker.cpp
@@ -30,32 +30,20 @@
using namespace android;
using namespace aaudio;
-sp<AAudioServiceStreamBase> AAudioStreamTracker::decrementAndRemoveStreamByHandle(
+int32_t AAudioStreamTracker::removeStreamByHandle(
aaudio_handle_t streamHandle) {
std::lock_guard<std::mutex> lock(mHandleLock);
- sp<AAudioServiceStreamBase> serviceStream;
- auto it = mStreamsByHandle.find(streamHandle);
- if (it != mStreamsByHandle.end()) {
- sp<AAudioServiceStreamBase> tempStream = it->second;
- // Does the caller need to close the stream?
- // The reference count should never be negative.
- // But it is safer to check for <= 0 than == 0.
- if ((tempStream->decrementServiceReferenceCount_l() <= 0) && tempStream->isCloseNeeded()) {
- serviceStream = tempStream; // Only return stream if ready to be closed.
- mStreamsByHandle.erase(it);
- }
- }
- return serviceStream;
+ auto count = mStreamsByHandle.erase(streamHandle);
+ return static_cast<int32_t>(count);
}
-sp<AAudioServiceStreamBase> AAudioStreamTracker::getStreamByHandleAndIncrement(
+sp<AAudioServiceStreamBase> AAudioStreamTracker::getStreamByHandle(
aaudio_handle_t streamHandle) {
std::lock_guard<std::mutex> lock(mHandleLock);
sp<AAudioServiceStreamBase> serviceStream;
auto it = mStreamsByHandle.find(streamHandle);
if (it != mStreamsByHandle.end()) {
serviceStream = it->second;
- serviceStream->incrementServiceReferenceCount_l();
}
return serviceStream;
}
@@ -63,7 +51,7 @@
// The port handle is only available when the stream is started.
// So we have to iterate over all the streams.
// Luckily this rarely happens.
-sp<AAudioServiceStreamBase> AAudioStreamTracker::findStreamByPortHandleAndIncrement(
+sp<AAudioServiceStreamBase> AAudioStreamTracker::findStreamByPortHandle(
audio_port_handle_t portHandle) {
std::lock_guard<std::mutex> lock(mHandleLock);
sp<AAudioServiceStreamBase> serviceStream;
@@ -72,7 +60,6 @@
auto candidate = it->second;
if (candidate->getPortHandle() == portHandle) {
serviceStream = candidate;
- serviceStream->incrementServiceReferenceCount_l();
break;
}
it++;
@@ -109,7 +96,7 @@
return handle;
}
-std::string AAudioStreamTracker::dump() const {
+std::string AAudioStreamTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
std::stringstream result;
const bool isLocked = AAudio_tryUntilTrue(
[this]()->bool { return mHandleLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index 57ec426..43870fc 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -17,13 +17,13 @@
#ifndef AAUDIO_AAUDIO_STREAM_TRACKER_H
#define AAUDIO_AAUDIO_STREAM_TRACKER_H
+#include <mutex>
#include <time.h>
-#include <pthread.h>
+#include <android-base/thread_annotations.h>
#include <aaudio/AAudio.h>
#include "binding/AAudioCommon.h"
-
#include "AAudioServiceStreamBase.h"
namespace aaudio {
@@ -32,25 +32,20 @@
public:
/**
- * Find the stream associated with the handle.
- * Decrement its reference counter. If zero and the stream needs
- * to be closed then remove the stream and return a pointer to the stream.
- * Otherwise return null if it does not need to be closed.
+ * Remove any streams with the matching handle.
*
* @param streamHandle
- * @return strong pointer to the stream if it needs to be closed, or nullptr
+ * @return number of streams removed
*/
- android::sp<AAudioServiceStreamBase> decrementAndRemoveStreamByHandle(
- aaudio_handle_t streamHandle);
+ int32_t removeStreamByHandle(aaudio_handle_t streamHandle);
/**
* Look up a stream based on the handle.
- * Increment its service reference count if found.
*
* @param streamHandle
* @return strong pointer to the stream if found, or nullptr
*/
- android::sp<aaudio::AAudioServiceStreamBase> getStreamByHandleAndIncrement(
+ android::sp<aaudio::AAudioServiceStreamBase> getStreamByHandle(
aaudio_handle_t streamHandle);
/**
@@ -60,7 +55,7 @@
* @param portHandle
* @return strong pointer to the stream if found, or nullptr
*/
- android::sp<aaudio::AAudioServiceStreamBase> findStreamByPortHandleAndIncrement(
+ android::sp<aaudio::AAudioServiceStreamBase> findStreamByPortHandle(
audio_port_handle_t portHandle);
/**
@@ -80,11 +75,10 @@
static aaudio_handle_t bumpHandle(aaudio_handle_t handle);
// Track stream using a unique handle that wraps. Only use positive half.
- mutable std::mutex mHandleLock;
- // protected by mHandleLock
- aaudio_handle_t mPreviousHandle = 0;
- // protected by mHandleLock
- std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>> mStreamsByHandle;
+ mutable std::mutex mHandleLock;
+ aaudio_handle_t mPreviousHandle GUARDED_BY(mHandleLock) = 0;
+ std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>>
+ mStreamsByHandle GUARDED_BY(mHandleLock);
};
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ed7895b..68496ac 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -37,10 +37,13 @@
setup("AAudio");
}
-void AAudioThread::setup(const char *prefix) {
- // mThread is a pthread_t of unknown size so we need memset().
- memset(&mThread, 0, sizeof(mThread));
+AAudioThread::~AAudioThread() {
+ ALOGE_IF(pthread_equal(pthread_self(), mThread),
+ "%s() destructor running in thread", __func__);
+ ALOGE_IF(mHasThread, "%s() thread never joined", __func__);
+}
+void AAudioThread::setup(const char *prefix) {
// Name the thread with an increasing index, "prefix_#", for debugging.
uint32_t index = mNextThreadIndex++;
// Wrap the index so that we do not hit the 16 char limit
@@ -57,7 +60,7 @@
}
}
-// This is the entry point for the new thread created by createThread().
+// This is the entry point for the new thread created by createThread_l().
// It converts the 'C' function call to a C++ method call.
static void * AAudioThread_internalThreadProc(void *arg) {
AAudioThread *aaudioThread = (AAudioThread *) arg;
@@ -90,13 +93,18 @@
ALOGE("stop() but no thread running");
return AAUDIO_ERROR_INVALID_STATE;
}
+ // Check to see if the thread is trying to stop itself.
+ if (pthread_equal(pthread_self(), mThread)) {
+ ALOGE("%s() attempt to pthread_join() from launched thread!", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+
int err = pthread_join(mThread, nullptr);
- mHasThread = false;
if (err != 0) {
ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
return AAudioConvert_androidToAAudioResult(-err);
} else {
+ mHasThread = false;
return AAUDIO_OK;
}
}
-
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index dcce68a..08a8a98 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -46,7 +46,7 @@
explicit AAudioThread(const char *prefix);
- virtual ~AAudioThread() = default;
+ virtual ~AAudioThread();
/**
* Start the thread running.
@@ -73,7 +73,7 @@
Runnable *mRunnable = nullptr;
bool mHasThread = false;
- pthread_t mThread; // initialized in constructor
+ pthread_t mThread = {};
static std::atomic<uint32_t> mNextThreadIndex;
char mName[16]; // max length for a pthread_name
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 8b1e2c0..9da4867 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-cc_library_shared {
+cc_library {
name: "libaaudioservice",
@@ -37,6 +37,7 @@
],
cflags: [
+ "-Wthread-safety",
"-Wno-unused-parameter",
"-Wall",
"-Werror",
@@ -55,6 +56,11 @@
"libcutils",
"liblog",
"libutils",
+ "aaudio-aidl-cpp",
+ ],
+
+ export_shared_lib_headers: [
+ "libaaudio_internal",
],
header_libs: [
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 2454446..c1d4e16 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -18,6 +18,8 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <iomanip>
+#include <iostream>
#include <sys/mman.h>
#include "binding/RingBufferParcelable.h"
@@ -30,8 +32,8 @@
SharedRingBuffer::~SharedRingBuffer()
{
+ mFifoBuffer.reset(); // uses mSharedMemory
if (mSharedMemory != nullptr) {
- delete mFifoBuffer;
munmap(mSharedMemory, mSharedMemorySizeInBytes);
mSharedMemory = nullptr;
}
@@ -58,16 +60,18 @@
return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
}
- // Map the fd to memory addresses.
- mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+ // Map the fd to memory addresses. Use a temporary pointer to keep the mmap result and update
+ // it to `mSharedMemory` only when mmap operates successfully.
+ uint8_t* tmpPtr = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
PROT_READ|PROT_WRITE,
MAP_SHARED,
mFileDescriptor.get(), 0);
- if (mSharedMemory == MAP_FAILED) {
+ if (tmpPtr == MAP_FAILED) {
ALOGE("allocate() mmap() failed %d", errno);
mFileDescriptor.reset();
return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
}
+ mSharedMemory = tmpPtr;
// Get addresses for our counters and data from the shared memory.
fifo_counter_t *readCounterAddress =
@@ -76,7 +80,7 @@
(fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
- mFifoBuffer = new FifoBuffer(bytesPerFrame, capacityInFrames,
+ mFifoBuffer = std::make_shared<FifoBufferIndirect>(bytesPerFrame, capacityInFrames,
readCounterAddress, writeCounterAddress, dataAddress);
return AAUDIO_OK;
}
@@ -94,3 +98,19 @@
ringBufferParcelable.setFramesPerBurst(1);
ringBufferParcelable.setCapacityInFrames(mCapacityInFrames);
}
+
+double SharedRingBuffer::getFractionalFullness() const {
+ int32_t framesAvailable = mFifoBuffer->getFullFramesAvailable();
+ int32_t capacity = mFifoBuffer->getBufferCapacityInFrames();
+ return framesAvailable / (double) capacity;
+}
+
+std::string SharedRingBuffer::dump() const {
+ std::stringstream result;
+ int32_t readCounter = mFifoBuffer->getReadCounter();
+ int32_t writeCounter = mFifoBuffer->getWriteCounter();
+ result << std::setw(10) << writeCounter;
+ result << std::setw(10) << readCounter;
+ result << std::setw(8) << (writeCounter - readCounter);
+ return result.str();
+}
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index 79169bc..c3a9bb7 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -18,8 +18,9 @@
#define AAUDIO_SHARED_RINGBUFFER_H
#include <android-base/unique_fd.h>
-#include <stdint.h>
#include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
#include <sys/mman.h>
#include "fifo/FifoBuffer.h"
@@ -47,15 +48,25 @@
void fillParcelable(AudioEndpointParcelable &endpointParcelable,
RingBufferParcelable &ringBufferParcelable);
- android::FifoBuffer * getFifoBuffer() {
+ /**
+ * Return available frames as a fraction of the capacity.
+ * @return fullness between 0.0 and 1.0
+ */
+ double getFractionalFullness() const;
+
+ // dump: write# read# available
+ std::string dump() const;
+
+ std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
return mFifoBuffer;
}
private:
android::base::unique_fd mFileDescriptor;
- android::FifoBuffer *mFifoBuffer = nullptr;
- uint8_t *mSharedMemory = nullptr;
+ std::shared_ptr<android::FifoBufferIndirect> mFifoBuffer;
+ uint8_t *mSharedMemory = nullptr; // mmap
int32_t mSharedMemorySizeInBytes = 0;
+ // size of memory used for data vs counters
int32_t mDataMemorySizeInBytes = 0;
android::fifo_frames_t mCapacityInFrames = 0;
};
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
new file mode 100644
index 0000000..5327289
--- /dev/null
+++ b/services/tuner/Android.bp
@@ -0,0 +1,125 @@
+filegroup {
+ name: "tv_tuner_aidl",
+ srcs: [
+ "aidl/android/media/tv/tuner/ITunerFrontend.aidl",
+ "aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl",
+ "aidl/android/media/tv/tuner/ITunerService.aidl",
+ "aidl/android/media/tv/tuner/TunerAtsc3PlpInfo.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendSettings.aidl",
+ "aidl/android/media/tv/tuner/TunerFrontendStatus.aidl",
+ "aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl",
+ ],
+ path: "aidl",
+}
+
+aidl_interface {
+ name: "tv_tuner_aidl_interface",
+ unstable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ ":tv_tuner_aidl",
+ ],
+ imports: [
+ "android.hardware.common.fmq",
+ ],
+
+ backend: {
+ java: {
+ enabled: false,
+ },
+ cpp: {
+ enabled: false,
+ },
+ ndk: {
+ enabled: true,
+ },
+ },
+}
+
+cc_library {
+ name: "libtunerservice",
+
+ srcs: [
+ "TunerService.cpp",
+ "TunerFrontend.cpp",
+ ],
+
+ shared_libs: [
+ "android.hardware.tv.tuner@1.0",
+ "libbase",
+ "libbinder_ndk",
+ "libcutils",
+ "libfmq",
+ "libhidlbase",
+ "liblog",
+ "libmedia",
+ "libutils",
+ "tv_tuner_aidl_interface-ndk_platform",
+ ],
+
+ static_libs: [
+ "android.hardware.common.fmq-unstable-ndk_platform",
+ ],
+
+ include_dirs: [
+ "frameworks/av/include"
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ export_include_dirs: ["."],
+}
+
+
+cc_binary {
+ name: "mediatuner",
+
+ srcs: [
+ "main_tunerservice.cpp",
+ ],
+
+ shared_libs: [
+ "android.hardware.tv.tuner@1.0",
+ "libbase",
+ "libbinder",
+ "libfmq",
+ "liblog",
+ "libtunerservice",
+ "libutils",
+ ],
+
+ static_libs: [
+ "tv_tuner_aidl_interface-ndk_platform",
+ ],
+
+ init_rc: ["mediatuner.rc"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/services/tuner/OWNERS b/services/tuner/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
new file mode 100644
index 0000000..ba4553b
--- /dev/null
+++ b/services/tuner/TunerFrontend.cpp
@@ -0,0 +1,463 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerFrontend"
+
+#include "TunerFrontend.h"
+#include "TunerService.h"
+
+using ::aidl::android::media::tv::tuner::TunerAtsc3PlpInfo;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3PlpSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogSifStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogType;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtscModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Bandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3CodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3DemodOutputFormat;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Fec;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3TimeInterleaveMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcAnnex;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcOuterFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcSpectralInversion;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsPilot;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsVcmMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtConstellation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtHierarchy;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtPlpMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtTransmissionMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendInnerFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Coderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Rolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Settings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsStreamIdType;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanAtsc3PlpInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanType;
+using ::android::hardware::tv::tuner::V1_0::FrontendSettings;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+TunerFrontend::TunerFrontend(sp<ITuner> tuner, int frontendHandle) {
+ mTuner = tuner;
+ mId = TunerService::getResourceIdFromHandle(frontendHandle);
+
+ if (mTuner != NULL) {
+ Result status;
+ mTuner->openFrontendById(mId, [&](Result result, const sp<IFrontend>& frontend) {
+ mFrontend = frontend;
+ status = result;
+ });
+ if (status != Result::SUCCESS) {
+ mFrontend = NULL;
+ }
+ }
+}
+
+TunerFrontend::~TunerFrontend() {}
+
+Status TunerFrontend::setCallback(
+ const std::shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
+ if (mFrontend == NULL) {
+ ALOGE("IFrontend is not initialized");
+ return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+ }
+
+ if (tunerFrontendCallback == NULL) {
+ return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+ }
+
+ sp<IFrontendCallback> frontendCallback = new FrontendCallback(tunerFrontendCallback);
+ Result status = mFrontend->setCallback(frontendCallback);
+ if (status == Result::SUCCESS) {
+ return Status::ok();
+ }
+
+ return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::tune(const TunerFrontendSettings& /*settings*/) {
+ return Status::ok();
+}
+
+Status TunerFrontend::stopTune() {
+ return Status::ok();
+}
+
+Status TunerFrontend::scan(const TunerFrontendSettings& settings, int frontendScanType) {
+ if (mFrontend == NULL) {
+ ALOGD("IFrontend is not initialized");
+ return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+ }
+
+ FrontendSettings frontendSettings;
+ switch (settings.getTag()) {
+ case TunerFrontendSettings::analog:
+ frontendSettings.analog({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::analog>().frequency),
+ .type = static_cast<FrontendAnalogType>(
+ settings.get<TunerFrontendSettings::analog>().signalType),
+ .sifStandard = static_cast<FrontendAnalogSifStandard>(
+ settings.get<TunerFrontendSettings::analog>().sifStandard),
+ });
+ break;
+ case TunerFrontendSettings::atsc:
+ frontendSettings.atsc({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::atsc>().frequency),
+ .modulation = static_cast<FrontendAtscModulation>(
+ settings.get<TunerFrontendSettings::atsc>().modulation),
+ });
+ break;
+ case TunerFrontendSettings::atsc3:
+ frontendSettings.atsc3({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::atsc3>().frequency),
+ .bandwidth = static_cast<FrontendAtsc3Bandwidth>(
+ settings.get<TunerFrontendSettings::atsc3>().bandwidth),
+ .demodOutputFormat = static_cast<FrontendAtsc3DemodOutputFormat>(
+ settings.get<TunerFrontendSettings::atsc3>().demodOutputFormat),
+ .plpSettings = getAtsc3PlpSettings(settings.get<TunerFrontendSettings::atsc3>()),
+ });
+ break;
+ case TunerFrontendSettings::cable:
+ frontendSettings.dvbc({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::cable>().frequency),
+ .modulation = static_cast<FrontendDvbcModulation>(
+ settings.get<TunerFrontendSettings::cable>().modulation),
+ .fec = static_cast<FrontendInnerFec>(
+ settings.get<TunerFrontendSettings::cable>().innerFec),
+ .symbolRate = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::cable>().symbolRate),
+ .outerFec = static_cast<FrontendDvbcOuterFec>(
+ settings.get<TunerFrontendSettings::cable>().outerFec),
+ .annex = static_cast<FrontendDvbcAnnex>(
+ settings.get<TunerFrontendSettings::cable>().annex),
+ .spectralInversion = static_cast<FrontendDvbcSpectralInversion>(
+ settings.get<TunerFrontendSettings::cable>().spectralInversion),
+ });
+ break;
+ case TunerFrontendSettings::dvbs:
+ frontendSettings.dvbs({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::dvbs>().frequency),
+ .modulation = static_cast<FrontendDvbsModulation>(
+ settings.get<TunerFrontendSettings::dvbs>().modulation),
+ .coderate = getDvbsCodeRate(
+ settings.get<TunerFrontendSettings::dvbs>().codeRate),
+ .symbolRate = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::dvbs>().symbolRate),
+ .rolloff = static_cast<FrontendDvbsRolloff>(
+ settings.get<TunerFrontendSettings::dvbs>().rolloff),
+ .pilot = static_cast<FrontendDvbsPilot>(
+ settings.get<TunerFrontendSettings::dvbs>().pilot),
+ .inputStreamId = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::dvbs>().inputStreamId),
+ .standard = static_cast<FrontendDvbsStandard>(
+ settings.get<TunerFrontendSettings::dvbs>().standard),
+ .vcmMode = static_cast<FrontendDvbsVcmMode>(
+ settings.get<TunerFrontendSettings::dvbs>().vcm),
+ });
+ break;
+ case TunerFrontendSettings::dvbt:
+ frontendSettings.dvbt({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::dvbt>().frequency),
+ .transmissionMode = static_cast<FrontendDvbtTransmissionMode>(
+ settings.get<TunerFrontendSettings::dvbt>().transmissionMode),
+ .bandwidth = static_cast<FrontendDvbtBandwidth>(
+ settings.get<TunerFrontendSettings::dvbt>().bandwidth),
+ .constellation = static_cast<FrontendDvbtConstellation>(
+ settings.get<TunerFrontendSettings::dvbt>().constellation),
+ .hierarchy = static_cast<FrontendDvbtHierarchy>(
+ settings.get<TunerFrontendSettings::dvbt>().hierarchy),
+ .hpCoderate = static_cast<FrontendDvbtCoderate>(
+ settings.get<TunerFrontendSettings::dvbt>().hpCodeRate),
+ .lpCoderate = static_cast<FrontendDvbtCoderate>(
+ settings.get<TunerFrontendSettings::dvbt>().lpCodeRate),
+ .guardInterval = static_cast<FrontendDvbtGuardInterval>(
+ settings.get<TunerFrontendSettings::dvbt>().guardInterval),
+ .isHighPriority = settings.get<TunerFrontendSettings::dvbt>().isHighPriority,
+ .standard = static_cast<FrontendDvbtStandard>(
+ settings.get<TunerFrontendSettings::dvbt>().standard),
+ .isMiso = settings.get<TunerFrontendSettings::dvbt>().isMiso,
+ .plpMode = static_cast<FrontendDvbtPlpMode>(
+ settings.get<TunerFrontendSettings::dvbt>().plpMode),
+ .plpId = static_cast<uint8_t>(
+ settings.get<TunerFrontendSettings::dvbt>().plpId),
+ .plpGroupId = static_cast<uint8_t>(
+ settings.get<TunerFrontendSettings::dvbt>().plpGroupId),
+ });
+ break;
+ case TunerFrontendSettings::isdbs:
+ frontendSettings.isdbs({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbs>().frequency),
+ .streamId = static_cast<uint16_t>(
+ settings.get<TunerFrontendSettings::isdbs>().streamId),
+ .streamIdType = static_cast<FrontendIsdbsStreamIdType>(
+ settings.get<TunerFrontendSettings::isdbs>().streamIdType),
+ .modulation = static_cast<FrontendIsdbsModulation>(
+ settings.get<TunerFrontendSettings::isdbs>().modulation),
+ .coderate = static_cast<FrontendIsdbsCoderate>(
+ settings.get<TunerFrontendSettings::isdbs>().codeRate),
+ .symbolRate = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbs>().symbolRate),
+ .rolloff = static_cast<FrontendIsdbsRolloff>(
+ settings.get<TunerFrontendSettings::isdbs>().rolloff),
+ });
+ break;
+ case TunerFrontendSettings::isdbs3:
+ frontendSettings.isdbs3({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbs3>().frequency),
+ .streamId = static_cast<uint16_t>(
+ settings.get<TunerFrontendSettings::isdbs3>().streamId),
+ .streamIdType = static_cast<FrontendIsdbsStreamIdType>(
+ settings.get<TunerFrontendSettings::isdbs3>().streamIdType),
+ .modulation = static_cast<FrontendIsdbs3Modulation>(
+ settings.get<TunerFrontendSettings::isdbs3>().modulation),
+ .coderate = static_cast<FrontendIsdbs3Coderate>(
+ settings.get<TunerFrontendSettings::isdbs3>().codeRate),
+ .symbolRate = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbs3>().symbolRate),
+ .rolloff = static_cast<FrontendIsdbs3Rolloff>(
+ settings.get<TunerFrontendSettings::isdbs3>().rolloff),
+ });
+ break;
+ case TunerFrontendSettings::isdbt:
+ frontendSettings.isdbt({
+ .frequency = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbt>().frequency),
+ .modulation = static_cast<FrontendIsdbtModulation>(
+ settings.get<TunerFrontendSettings::isdbt>().modulation),
+ .bandwidth = static_cast<FrontendIsdbtBandwidth>(
+ settings.get<TunerFrontendSettings::isdbt>().bandwidth),
+ .mode = static_cast<FrontendIsdbtMode>(
+ settings.get<TunerFrontendSettings::isdbt>().mode),
+ .coderate = static_cast<FrontendIsdbtCoderate>(
+ settings.get<TunerFrontendSettings::isdbt>().codeRate),
+ .guardInterval = static_cast<FrontendIsdbtGuardInterval>(
+ settings.get<TunerFrontendSettings::isdbt>().guardInterval),
+ .serviceAreaId = static_cast<uint32_t>(
+ settings.get<TunerFrontendSettings::isdbt>().serviceAreaId),
+ });
+ break;
+ default:
+ break;
+ }
+ Result status = mFrontend->scan(
+ frontendSettings, static_cast<FrontendScanType>(frontendScanType));
+ if (status == Result::SUCCESS) {
+ return Status::ok();
+ }
+
+ return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::stopScan() {
+ return Status::ok();
+}
+
+Status TunerFrontend::setLnb(int /*lnbHandle*/) {
+ return Status::ok();
+}
+
+Status TunerFrontend::setLna(bool /*bEnable*/) {
+ return Status::ok();
+}
+
+Status TunerFrontend::close() {
+ return Status::ok();
+}
+
+Status TunerFrontend::getStatus(const std::vector<int32_t>& /*statusTypes*/,
+ std::vector<TunerFrontendStatus>* /*_aidl_return*/) {
+ return Status::ok();
+}
+
+/////////////// FrontendCallback ///////////////////////
+
+Return<void> TunerFrontend::FrontendCallback::onEvent(FrontendEventType frontendEventType) {
+ ALOGD("FrontendCallback::onEvent, type=%d", static_cast<int>(frontendEventType));
+ mTunerFrontendCallback->onEvent((int)frontendEventType);
+ return Void();
+}
+
+Return<void> TunerFrontend::FrontendCallback::onScanMessage(
+ FrontendScanMessageType type, const FrontendScanMessage& message) {
+ ALOGD("FrontendCallback::onScanMessage, type=%d", static_cast<int>(type));
+ switch(type) {
+ case FrontendScanMessageType::LOCKED: {
+ if (message.isLocked()) {
+ mTunerFrontendCallback->onLocked();
+ }
+ break;
+ }
+ case FrontendScanMessageType::END: {
+ if (message.isEnd()) {
+ mTunerFrontendCallback->onScanStopped();
+ }
+ break;
+ }
+ case FrontendScanMessageType::PROGRESS_PERCENT: {
+ mTunerFrontendCallback->onProgress((int)message.progressPercent());
+ break;
+ }
+ case FrontendScanMessageType::FREQUENCY: {
+ auto f = message.frequencies();
+ std::vector<int32_t> frequencies(std::begin(f), std::end(f));
+ mTunerFrontendCallback->onFrequenciesReport(frequencies);
+ break;
+ }
+ case FrontendScanMessageType::SYMBOL_RATE: {
+ auto s = message.symbolRates();
+ std::vector<int32_t> symbolRates(std::begin(s), std::end(s));
+ mTunerFrontendCallback->onSymbolRates(symbolRates);
+ break;
+ }
+ case FrontendScanMessageType::HIERARCHY: {
+ mTunerFrontendCallback->onHierarchy((int)message.hierarchy());
+ break;
+ }
+ case FrontendScanMessageType::ANALOG_TYPE: {
+ mTunerFrontendCallback->onSignalType((int)message.analogType());
+ break;
+ }
+ case FrontendScanMessageType::PLP_IDS: {
+ auto p = message.plpIds();
+ std::vector<int32_t> plpIds(std::begin(p), std::end(p));
+ mTunerFrontendCallback->onPlpIds(plpIds);
+ break;
+ }
+ case FrontendScanMessageType::GROUP_IDS: {
+ auto g = message.groupIds();
+ std::vector<int32_t> groupIds(std::begin(g), std::end(g));
+ mTunerFrontendCallback->onGroupIds(groupIds);
+ break;
+ }
+ case FrontendScanMessageType::INPUT_STREAM_IDS: {
+ auto i = message.inputStreamIds();
+ std::vector<int32_t> streamIds(std::begin(i), std::end(i));
+ mTunerFrontendCallback->onInputStreamIds(streamIds);
+ break;
+ }
+ case FrontendScanMessageType::STANDARD: {
+ FrontendScanMessage::Standard std = message.std();
+ int standard;
+ if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::sStd) {
+ standard = (int) std.sStd();
+ mTunerFrontendCallback->onDvbsStandard(standard);
+ } else if (std.getDiscriminator() ==
+ FrontendScanMessage::Standard::hidl_discriminator::tStd) {
+ standard = (int) std.tStd();
+ mTunerFrontendCallback->onDvbsStandard(standard);
+ } else if (std.getDiscriminator() ==
+ FrontendScanMessage::Standard::hidl_discriminator::sifStd) {
+ standard = (int) std.sifStd();
+ mTunerFrontendCallback->onAnalogSifStandard(standard);
+ }
+ break;
+ }
+ case FrontendScanMessageType::ATSC3_PLP_INFO: {
+ std::vector<FrontendScanAtsc3PlpInfo> plpInfos = message.atsc3PlpInfos();
+ std::vector<TunerAtsc3PlpInfo> tunerPlpInfos;
+ for (int i = 0; i < plpInfos.size(); i++) {
+ auto info = plpInfos[i];
+ int plpId = (int) info.plpId;
+ bool lls = (bool) info.bLlsFlag;
+ TunerAtsc3PlpInfo plpInfo{
+ .plpId = plpId,
+ .llsFlag = lls,
+ };
+ tunerPlpInfos.push_back(plpInfo);
+ }
+ mTunerFrontendCallback->onAtsc3PlpInfos(tunerPlpInfos);
+ break;
+ }
+ default:
+ break;
+ }
+ return Void();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+hidl_vec<FrontendAtsc3PlpSettings> TunerFrontend::getAtsc3PlpSettings(
+ const TunerFrontendAtsc3Settings& settings) {
+ int len = settings.plpSettings.size();
+ hidl_vec<FrontendAtsc3PlpSettings> plps = hidl_vec<FrontendAtsc3PlpSettings>(len);
+ // parse PLP settings
+ for (int i = 0; i < len; i++) {
+ uint8_t plpId = static_cast<uint8_t>(settings.plpSettings[i].plpId);
+ FrontendAtsc3Modulation modulation =
+ static_cast<FrontendAtsc3Modulation>(settings.plpSettings[i].modulation);
+ FrontendAtsc3TimeInterleaveMode interleaveMode =
+ static_cast<FrontendAtsc3TimeInterleaveMode>(
+ settings.plpSettings[i].interleaveMode);
+ FrontendAtsc3CodeRate codeRate =
+ static_cast<FrontendAtsc3CodeRate>(settings.plpSettings[i].codeRate);
+ FrontendAtsc3Fec fec =
+ static_cast<FrontendAtsc3Fec>(settings.plpSettings[i].fec);
+ FrontendAtsc3PlpSettings frontendAtsc3PlpSettings {
+ .plpId = plpId,
+ .modulation = modulation,
+ .interleaveMode = interleaveMode,
+ .codeRate = codeRate,
+ .fec = fec,
+ };
+ plps[i] = frontendAtsc3PlpSettings;
+ }
+ return plps;
+}
+
+FrontendDvbsCodeRate TunerFrontend::getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate) {
+ FrontendInnerFec innerFec = static_cast<FrontendInnerFec>(codeRate.fec);
+ bool isLinear = codeRate.isLinear;
+ bool isShortFrames = codeRate.isShortFrames;
+ uint32_t bitsPer1000Symbol = static_cast<uint32_t>(codeRate.bitsPer1000Symbol);
+ FrontendDvbsCodeRate coderate {
+ .fec = innerFec,
+ .isLinear = isLinear,
+ .isShortFrames = isShortFrames,
+ .bitsPer1000Symbol = bitsPer1000Symbol,
+ };
+ return coderate;
+}
+} // namespace android
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
new file mode 100644
index 0000000..c7d3ddd
--- /dev/null
+++ b/services/tuner/TunerFrontend.h
@@ -0,0 +1,87 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERFRONTEND_H
+#define ANDROID_MEDIA_TUNERFRONTEND_H
+
+#include <aidl/android/media/tv/tuner/BnTunerFrontend.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerFrontend;
+using ::aidl::android::media::tv::tuner::ITunerFrontendCallback;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Settings;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCodeRate;
+using ::aidl::android::media::tv::tuner::TunerFrontendSettings;
+using ::aidl::android::media::tv::tuner::TunerFrontendStatus;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3PlpSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsCodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendEventType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessage;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
+using ::android::hardware::tv::tuner::V1_0::IFrontend;
+using ::android::hardware::tv::tuner::V1_0::IFrontendCallback;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+
+
+namespace android {
+
+// Service-side AIDL wrapper of one tuner HAL frontend. Forwards tune/scan/
+// LNB/LNA requests to the HIDL IFrontend and relays HAL events and scan
+// messages back to the client's ITunerFrontendCallback.
+class TunerFrontend : public BnTunerFrontend {
+
+public:
+    // |tuner| is the HAL entry point. |frontendHandle| is the resource handle
+    // granted by TRM; mId is presumably derived from it (see
+    // TunerService::getResourceIdFromHandle) — TODO confirm in the .cpp.
+    TunerFrontend(sp<ITuner> tuner, int frontendHandle);
+    virtual ~TunerFrontend();
+    Status setCallback(
+            const std::shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) override;
+    Status tune(const TunerFrontendSettings& settings) override;
+    Status stopTune() override;
+    Status scan(const TunerFrontendSettings& settings, int frontendScanType) override;
+    Status stopScan() override;
+    Status setLnb(int lnbHandle) override;
+    Status setLna(bool bEnable) override;
+    Status close() override;
+    Status getStatus(const std::vector<int32_t>& statusTypes,
+            std::vector<TunerFrontendStatus>* _aidl_return) override;
+
+    // Bridges HIDL IFrontendCallback notifications from the HAL into the
+    // client's AIDL ITunerFrontendCallback.
+    struct FrontendCallback : public IFrontendCallback {
+        FrontendCallback(const std::shared_ptr<ITunerFrontendCallback> tunerFrontendCallback)
+                : mTunerFrontendCallback(tunerFrontendCallback) {};
+
+        virtual Return<void> onEvent(FrontendEventType frontendEventType);
+        virtual Return<void> onScanMessage(
+                FrontendScanMessageType type, const FrontendScanMessage& message);
+
+        std::shared_ptr<ITunerFrontendCallback> mTunerFrontendCallback;
+    };
+
+private:
+    // Helpers converting AIDL settings parcelables into their HAL structs.
+    hidl_vec<FrontendAtsc3PlpSettings> getAtsc3PlpSettings(
+            const TunerFrontendAtsc3Settings& settings);
+    FrontendDvbsCodeRate getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate);
+    int mId;               // frontend id
+    sp<ITuner> mTuner;     // tuner HAL entry point
+    sp<IFrontend> mFrontend;  // HAL frontend this object wraps
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERFRONTEND_H
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
new file mode 100644
index 0000000..56cb34c
--- /dev/null
+++ b/services/tuner/TunerService.cpp
@@ -0,0 +1,390 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerService"
+
+#include <android/binder_manager.h>
+#include <utils/Log.h>
+#include "TunerFrontend.h"
+#include "TunerService.h"
+
+using ::aidl::android::media::tv::tuner::TunerFrontendAnalogCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtscCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCableCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbtCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbs3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbtCapabilities;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendType;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+TunerService::TunerService() {}
+TunerService::~TunerService() {}
+
+// Creates the TunerService instance and registers it with servicemanager
+// under the name returned by getServiceName() ("media.tuner").
+// NOTE(review): |service| goes out of scope here; this relies on the binder
+// registration keeping the object alive — confirm.
+void TunerService::instantiate() {
+    std::shared_ptr<TunerService> service =
+            ::ndk::SharedRefBase::make<TunerService>();
+    AServiceManager_addService(service->asBinder().get(), getServiceName());
+}
+
+// Copies a HIDL MQDescriptor into its AIDL equivalent field by field:
+// file descriptor, grantor table, quantum and flags. Returns false (after
+// logging) when any unsigned HIDL field would not fit in the corresponding
+// signed AIDL field; in that case |aidlDesc| may be left partially filled.
+template <typename HidlPayload, typename AidlPayload, typename AidlFlavor>
+bool TunerService::unsafeHidlToAidlMQDescriptor(
+        const hardware::MQDescriptor<HidlPayload, FlavorTypeToValue<AidlFlavor>::value>& hidlDesc,
+        MQDescriptor<AidlPayload, AidlFlavor>* aidlDesc) {
+    // TODO: use the builtin conversion method when it's merged.
+    ALOGD("unsafeHidlToAidlMQDescriptor");
+    static_assert(sizeof(HidlPayload) == sizeof(AidlPayload), "Payload types are incompatible");
+    static_assert(
+            has_typedef_fixed_size<AidlPayload>::value == true ||
+            std::is_fundamental<AidlPayload>::value ||
+            std::is_enum<AidlPayload>::value,
+            "Only fundamental types, enums, and AIDL parcelables annotated with @FixedSize "
+            "and built for the NDK backend are supported as AIDL payload types.");
+    // dup() the fd so the AIDL descriptor owns its own copy.
+    // NOTE(review): assumes the HIDL handle carries at least one fd at
+    // data[0] — confirm against the FMQ descriptor layout.
+    aidlDesc->fileDescriptor = ndk::ScopedFileDescriptor(dup(hidlDesc.handle()->data[0]));
+    for (const auto& grantor : hidlDesc.grantors()) {
+        // A negative value after the cast means the unsigned source
+        // overflowed the signed AIDL field.
+        if (static_cast<int32_t>(grantor.offset) < 0 || static_cast<int64_t>(grantor.extent) < 0) {
+            ALOGD("Unsafe static_cast of grantor fields. offset=%d, extend=%ld",
+                static_cast<int32_t>(grantor.offset), static_cast<long>(grantor.extent));
+            logError(
+                    "Unsafe static_cast of grantor fields. Either the hardware::MQDescriptor is "
+                    "invalid, or the MessageQueue is too large to be described by AIDL.");
+            return false;
+        }
+        aidlDesc->grantors.push_back(
+                GrantorDescriptor {
+                    .offset = static_cast<int32_t>(grantor.offset),
+                    .extent = static_cast<int64_t>(grantor.extent)
+                });
+    }
+    // Same overflow check for the scalar fields.
+    if (static_cast<int32_t>(hidlDesc.getQuantum()) < 0 ||
+            static_cast<int32_t>(hidlDesc.getFlags()) < 0) {
+        ALOGD("Unsafe static_cast of quantum or flags. Quantum=%d, flags=%d",
+                static_cast<int32_t>(hidlDesc.getQuantum()),
+                static_cast<int32_t>(hidlDesc.getFlags()));
+        logError(
+                "Unsafe static_cast of quantum or flags. Either the hardware::MQDescriptor is "
+                "invalid, or the MessageQueue is too large to be described by AIDL.");
+        return false;
+    }
+    aidlDesc->quantum = static_cast<int32_t>(hidlDesc.getQuantum());
+    aidlDesc->flags = static_cast<int32_t>(hidlDesc.getFlags());
+    return true;
+}
+
+// Lazily acquires and caches the ITuner HAL handle.
+// Returns true when a usable handle is available, false otherwise.
+bool TunerService::getITuner() {
+    ALOGD("getITuner");
+    if (mTuner == nullptr) {
+        mTuner = ITuner::getService();
+    }
+    if (mTuner == nullptr) {
+        ALOGE("Failed to get ITuner service");
+        return false;
+    }
+    return true;
+}
+
+// Opens (and caches) a demux from the tuner HAL. Idempotent: returns
+// SUCCESS immediately if a demux is already open.
+Result TunerService::openDemux() {
+    ALOGD("openDemux");
+    if (!getITuner()) {
+        return Result::NOT_INITIALIZED;
+    }
+    if (mDemux != nullptr) {
+        return Result::SUCCESS;
+    }
+    // Default to failure: if the HIDL transport dies, the result callback
+    // below never runs and |res| must not be read uninitialized.
+    Result res = Result::UNKNOWN_ERROR;
+    uint32_t id;
+    sp<IDemux> demuxSp;
+    mTuner->openDemux([&](Result r, uint32_t demuxId, const sp<IDemux>& demux) {
+        demuxSp = demux;
+        id = demuxId;
+        res = r;
+        ALOGD("open demux, id = %d", demuxId);
+    });
+    if (res == Result::SUCCESS) {
+        mDemux = demuxSp;
+    } else {
+        ALOGD("open demux failed, res = %d", res);
+    }
+    return res;
+}
+
+// Opens a TS video filter on the previously opened demux and stores it in
+// mFilter. Requires openDemux() to have succeeded first.
+Result TunerService::openFilter() {
+    ALOGD("openFilter");
+    if (!getITuner()) {
+        return Result::NOT_INITIALIZED;
+    }
+    // Guard against a missing demux instead of crashing on the call below.
+    if (mDemux == nullptr) {
+        ALOGD("Failed to open filter: demux not opened");
+        return Result::NOT_INITIALIZED;
+    }
+    DemuxFilterMainType mainType = DemuxFilterMainType::TS;
+    DemuxFilterType filterType {
+        .mainType = mainType,
+    };
+    filterType.subType.tsFilterType(DemuxTsFilterType::VIDEO);
+
+    sp<FilterCallback> callback = new FilterCallback();
+    // Default to failure in case the HIDL callback is never invoked.
+    Result res = Result::UNKNOWN_ERROR;
+    mDemux->openFilter(filterType, 16000000, callback,
+            [&](Result r, const sp<IFilter>& filter) {
+                mFilter = filter;
+                res = r;
+            });
+    if (res != Result::SUCCESS || mFilter == NULL) {
+        ALOGD("Failed to open filter, type = %d", filterType.mainType);
+        return res;
+    }
+
+    return Result::SUCCESS;
+}
+
+// Configures mFilter for A/V payload on TS PID 256, fetches its FMQ
+// descriptor, converts it to AIDL and builds the AIDL-side message queue
+// plus its event flag.
+Result TunerService::configFilter() {
+    ALOGD("configFilter");
+    if (mFilter == NULL) {
+        ALOGD("Failed to configure filter: filter not found");
+        return Result::NOT_INITIALIZED;
+    }
+    DemuxFilterSettings filterSettings;
+    DemuxTsFilterSettings tsFilterSettings {
+        .tpid = 256,
+    };
+    DemuxFilterAvSettings filterAvSettings {
+        .isPassthrough = false,
+    };
+    tsFilterSettings.filterSettings.av(filterAvSettings);
+    filterSettings.ts(tsFilterSettings);
+    Result res = mFilter->configure(filterSettings);
+
+    if (res != Result::SUCCESS) {
+        ALOGD("config filter failed, res = %d", res);
+        return res;
+    }
+
+    // Default to failure in case the HIDL callback is never invoked.
+    Result getQueueDescResult = Result::UNKNOWN_ERROR;
+    mFilter->getQueueDesc(
+            [&](Result r, const MQDescriptorSync<uint8_t>& desc) {
+                mFilterMQDesc = desc;
+                getQueueDescResult = r;
+                ALOGD("getFilterQueueDesc");
+            });
+    if (getQueueDescResult != Result::SUCCESS) {
+        ALOGD("get MQDesc failed, res = %d", getQueueDescResult);
+        return getQueueDescResult;
+    }
+    // The original code ignored the conversion result and could build a
+    // queue from a partially-converted descriptor; bail out instead.
+    if (!unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
+            mFilterMQDesc, &mAidlMQDesc)) {
+        ALOGD("MQDescriptor conversion failed");
+        return Result::UNKNOWN_ERROR;
+    }
+    mAidlMq = new (std::nothrow) AidlMessageQueue(mAidlMQDesc);
+    // new (std::nothrow) yields nullptr on failure; guard before use.
+    if (mAidlMq == nullptr) {
+        ALOGD("Failed to allocate AidlMessageQueue");
+        return Result::OUT_OF_MEMORY;
+    }
+    EventFlag::createEventFlag(mAidlMq->getEventFlagWord(), &mEventFlag);
+    return Result::SUCCESS;
+}
+
+// Fills |ids| with all frontend ids reported by the tuner HAL.
+// The int32_t result slot is unused; errors are reported via the Status.
+Status TunerService::getFrontendIds(std::vector<int32_t>* ids, int32_t* /* _aidl_return */) {
+    if (!getITuner()) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::NOT_INITIALIZED));
+    }
+    hidl_vec<FrontendId> feIds;
+    // Default to failure: if the HIDL transport dies, the callback below
+    // never runs and |res| must not be read uninitialized.
+    Result res = Result::UNKNOWN_ERROR;
+    mTuner->getFrontendIds([&](Result r, const hidl_vec<FrontendId>& frontendIds) {
+        feIds = frontendIds;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    ids->resize(feIds.size());
+    std::copy(feIds.begin(), feIds.end(), ids->begin());
+
+    return Status::ok();
+}
+
+// Queries the HAL for the info of the frontend identified by |frontendHandle|
+// (a TRM resource handle) and returns it converted to the AIDL parcelable.
+// Requires the ITuner connection to already exist.
+Status TunerService::getFrontendInfo(
+        int32_t frontendHandle, TunerServiceFrontendInfo* _aidl_return) {
+    if (mTuner == nullptr) {
+        ALOGE("ITuner service is not init.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    // Default to failure in case the HIDL callback is never invoked.
+    Result res = Result::UNKNOWN_ERROR;
+    FrontendInfo info;
+    int feId = getResourceIdFromHandle(frontendHandle);
+    mTuner->getFrontendInfo(feId, [&](Result r, const FrontendInfo& feInfo) {
+        info = feInfo;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    TunerServiceFrontendInfo tunerInfo = convertToAidlFrontendInfo(feId, info);
+    *_aidl_return = tunerInfo;
+    return Status::ok();
+}
+
+// Creates an AIDL TunerFrontend wrapper for the given resource handle.
+// Requires the ITuner connection to already exist; no lazy init here.
+Status TunerService::openFrontend(
+        int32_t frontendHandle, std::shared_ptr<ITunerFrontend>* _aidl_return) {
+    if (mTuner != nullptr) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerFrontend>(mTuner, frontendHandle);
+        return Status::ok();
+    }
+    ALOGE("ITuner service is not init.");
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+// Flattens a HAL FrontendInfo into the AIDL TunerServiceFrontendInfo:
+// scalar fields are widened to int, and the capability union member matching
+// the frontend type is selected. For an unrecognized type, |caps| is left in
+// its default-constructed state.
+TunerServiceFrontendInfo TunerService::convertToAidlFrontendInfo(int feId, FrontendInfo halInfo) {
+    TunerServiceFrontendInfo info{
+        .id = feId,
+        .type = (int)halInfo.type,
+        .minFrequency = (int)halInfo.minFrequency,
+        .maxFrequency = (int)halInfo.maxFrequency,
+        .minSymbolRate = (int)halInfo.minSymbolRate,
+        .maxSymbolRate = (int)halInfo.maxSymbolRate,
+        .acquireRange = (int)halInfo.acquireRange,
+        .exclusiveGroupId = (int)halInfo.exclusiveGroupId,
+    };
+    for (int i = 0; i < halInfo.statusCaps.size(); i++) {
+        info.statusCaps.push_back((int)halInfo.statusCaps[i]);
+    }
+
+    TunerFrontendCapabilities caps;
+    switch (halInfo.type) {
+        case FrontendType::ANALOG: {
+            TunerFrontendAnalogCapabilities analogCaps{
+                .typeCap = (int)halInfo.frontendCaps.analogCaps().typeCap,
+                .sifStandardCap = (int)halInfo.frontendCaps.analogCaps().sifStandardCap,
+            };
+            caps.set<TunerFrontendCapabilities::analogCaps>(analogCaps);
+            break;
+        }
+        case FrontendType::ATSC: {
+            TunerFrontendAtscCapabilities atscCaps{
+                .modulationCap = (int)halInfo.frontendCaps.atscCaps().modulationCap,
+            };
+            caps.set<TunerFrontendCapabilities::atscCaps>(atscCaps);
+            break;
+        }
+        case FrontendType::ATSC3: {
+            TunerFrontendAtsc3Capabilities atsc3Caps{
+                .bandwidthCap = (int)halInfo.frontendCaps.atsc3Caps().bandwidthCap,
+                .modulationCap = (int)halInfo.frontendCaps.atsc3Caps().modulationCap,
+                .timeInterleaveModeCap =
+                        (int)halInfo.frontendCaps.atsc3Caps().timeInterleaveModeCap,
+                .codeRateCap = (int)halInfo.frontendCaps.atsc3Caps().codeRateCap,
+                .demodOutputFormatCap = (int)halInfo.frontendCaps.atsc3Caps().demodOutputFormatCap,
+                .fecCap = (int)halInfo.frontendCaps.atsc3Caps().fecCap,
+            };
+            caps.set<TunerFrontendCapabilities::atsc3Caps>(atsc3Caps);
+            break;
+        }
+        case FrontendType::DVBC: {
+            // The HAL field is named fecCap; AIDL exposes it as codeRateCap.
+            TunerFrontendCableCapabilities cableCaps{
+                .modulationCap = (int)halInfo.frontendCaps.dvbcCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.dvbcCaps().fecCap,
+                .annexCap = (int)halInfo.frontendCaps.dvbcCaps().annexCap,
+            };
+            caps.set<TunerFrontendCapabilities::cableCaps>(cableCaps);
+            break;
+        }
+        case FrontendType::DVBS: {
+            // codeRateCap is a 64-bit bitmask (AIDL long) built from the
+            // HAL's innerfecCap.
+            TunerFrontendDvbsCapabilities dvbsCaps{
+                .modulationCap = (int)halInfo.frontendCaps.dvbsCaps().modulationCap,
+                .codeRateCap = (long)halInfo.frontendCaps.dvbsCaps().innerfecCap,
+                .standard = (int)halInfo.frontendCaps.dvbsCaps().standard,
+            };
+            caps.set<TunerFrontendCapabilities::dvbsCaps>(dvbsCaps);
+            break;
+        }
+        case FrontendType::DVBT: {
+            TunerFrontendDvbtCapabilities dvbtCaps{
+                .transmissionModeCap = (int)halInfo.frontendCaps.dvbtCaps().transmissionModeCap,
+                .bandwidthCap = (int)halInfo.frontendCaps.dvbtCaps().bandwidthCap,
+                .constellationCap = (int)halInfo.frontendCaps.dvbtCaps().constellationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.dvbtCaps().coderateCap,
+                .hierarchyCap = (int)halInfo.frontendCaps.dvbtCaps().hierarchyCap,
+                .guardIntervalCap = (int)halInfo.frontendCaps.dvbtCaps().guardIntervalCap,
+                .isT2Supported = (bool)halInfo.frontendCaps.dvbtCaps().isT2Supported,
+                .isMisoSupported = (bool)halInfo.frontendCaps.dvbtCaps().isMisoSupported,
+            };
+            caps.set<TunerFrontendCapabilities::dvbtCaps>(dvbtCaps);
+            break;
+        }
+        case FrontendType::ISDBS: {
+            TunerFrontendIsdbsCapabilities isdbsCaps{
+                .modulationCap = (int)halInfo.frontendCaps.isdbsCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbsCaps().coderateCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbsCaps>(isdbsCaps);
+            break;
+        }
+        case FrontendType::ISDBS3: {
+            TunerFrontendIsdbs3Capabilities isdbs3Caps{
+                .modulationCap = (int)halInfo.frontendCaps.isdbs3Caps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbs3Caps().coderateCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbs3Caps>(isdbs3Caps);
+            break;
+        }
+        case FrontendType::ISDBT: {
+            TunerFrontendIsdbtCapabilities isdbtCaps{
+                .modeCap = (int)halInfo.frontendCaps.isdbtCaps().modeCap,
+                .bandwidthCap = (int)halInfo.frontendCaps.isdbtCaps().bandwidthCap,
+                .modulationCap = (int)halInfo.frontendCaps.isdbtCaps().modulationCap,
+                .codeRateCap = (int)halInfo.frontendCaps.isdbtCaps().coderateCap,
+                .guardIntervalCap = (int)halInfo.frontendCaps.isdbtCaps().guardIntervalCap,
+            };
+            caps.set<TunerFrontendCapabilities::isdbtCaps>(isdbtCaps);
+            break;
+        }
+        default:
+            break;
+    }
+
+    info.caps = caps;
+    return info;
+}
+
+// Sets up the demo demux/filter pipeline, starts the filter, and hands the
+// filter's FMQ descriptor to the caller. *_aidl_return reports success.
+Status TunerService::getFmqSyncReadWrite(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* mqDesc, bool* _aidl_return) {
+    ALOGD("getFmqSyncReadWrite");
+    // TODO: put the following methods AIDL, and should be called from clients.
+    // The original code ignored these results and would dereference a null
+    // mFilter below when any setup step failed; abort instead.
+    if (openDemux() != Result::SUCCESS || openFilter() != Result::SUCCESS
+            || configFilter() != Result::SUCCESS) {
+        ALOGD("getFmqSyncReadWrite failed to set up the filter.");
+        *_aidl_return = false;
+        return ndk::ScopedAStatus::ok();
+    }
+    mFilter->start();
+    if (mqDesc == nullptr) {
+        ALOGD("getFmqSyncReadWrite null MQDescriptor.");
+        *_aidl_return = false;
+    } else {
+        ALOGD("getFmqSyncReadWrite true");
+        *_aidl_return = true;
+        // mAidlMQDesc is consumed here; a second call re-runs the setup path.
+        *mqDesc = std::move(mAidlMQDesc);
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+} // namespace android
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
new file mode 100644
index 0000000..26591ab
--- /dev/null
+++ b/services/tuner/TunerService.h
@@ -0,0 +1,117 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERSERVICE_H
+#define ANDROID_MEDIA_TUNERSERVICE_H
+
+#include <aidl/android/media/tv/tuner/BnTunerService.h>
+#include <aidl/android/media/tv/tuner/TunerServiceFrontendInfo.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <fmq/AidlMessageQueue.h>
+#include <fmq/EventFlag.h>
+#include <fmq/MessageQueue.h>
+
+using ::aidl::android::hardware::common::fmq::GrantorDescriptor;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::media::tv::tuner::BnTunerService;
+using ::aidl::android::media::tv::tuner::ITunerFrontend;
+using ::aidl::android::media::tv::tuner::TunerServiceFrontendInfo;
+
+using ::android::hardware::details::logError;
+using ::android::hardware::EventFlag;
+using ::android::hardware::kSynchronizedReadWrite;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendInfo;
+using ::android::hardware::tv::tuner::V1_0::IDemux;
+using ::android::hardware::tv::tuner::V1_0::IFilter;
+using ::android::hardware::tv::tuner::V1_0::IFilterCallback;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+using Status = ::ndk::ScopedAStatus;
+
+namespace android {
+
+
+// No-op HIDL filter callback: filter events and status changes are ignored
+// here (the filter's payload is exposed through the FMQ set up in
+// TunerService::configFilter()).
+struct FilterCallback : public IFilterCallback {
+    ~FilterCallback() {}
+    Return<void> onFilterEvent(const DemuxFilterEvent&) {
+        return Void();
+    }
+    Return<void> onFilterStatus(const DemuxFilterStatus) {
+        return Void();
+    }
+};
+
+// Implementation of the "media.tuner" binder service. Bridges the AIDL
+// tuner API to the HIDL tuner HAL and owns a demux/filter pipeline whose
+// output FMQ is exposed via getFmqSyncReadWrite().
+class TunerService : public BnTunerService {
+    typedef AidlMessageQueue<int8_t, SynchronizedReadWrite> AidlMessageQueue;
+    typedef MessageQueue<uint8_t, kSynchronizedReadWrite> HidlMessageQueue;
+    typedef MQDescriptor<int8_t, SynchronizedReadWrite> AidlMQDesc;
+
+public:
+    static char const *getServiceName() { return "media.tuner"; }
+    // Creates the service instance and registers it with servicemanager.
+    static void instantiate();
+    TunerService();
+    virtual ~TunerService();
+
+    // Extracts the resource id from bits 16-23 of a TRM resource handle.
+    static int getResourceIdFromHandle(int resourceHandle) {
+        return (resourceHandle & 0x00ff0000) >> 16;
+    }
+
+    Status getFrontendIds(std::vector<int32_t>* ids, int32_t* _aidl_return) override;
+    Status getFrontendInfo(int32_t frontendHandle, TunerServiceFrontendInfo* _aidl_return) override;
+    Status openFrontend(
+            int32_t frontendHandle, std::shared_ptr<ITunerFrontend>* _aidl_return) override;
+    Status getFmqSyncReadWrite(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* mqDesc, bool* _aidl_return) override;
+
+private:
+    // Field-by-field HIDL->AIDL MQDescriptor copy; returns false when a
+    // field would overflow its signed AIDL counterpart.
+    template <typename HidlPayload, typename AidlPayload, typename AidlFlavor>
+    bool unsafeHidlToAidlMQDescriptor(
+            const hardware::MQDescriptor<HidlPayload, FlavorTypeToValue<AidlFlavor>::value>& hidl,
+            MQDescriptor<AidlPayload, AidlFlavor>* aidl);
+
+    // Lazily connects to the ITuner HAL; false if unavailable.
+    bool getITuner();
+    Result openFilter();
+    Result openDemux();
+    Result configFilter();
+
+    sp<ITuner> mTuner;   // tuner HAL handle, set by getITuner()
+    sp<IDemux> mDemux;   // demux opened by openDemux()
+    sp<IFilter> mFilter; // TS video filter opened by openFilter()
+    AidlMessageQueue* mAidlMq;                // AIDL-side view of the filter FMQ
+    MQDescriptorSync<uint8_t> mFilterMQDesc;  // HIDL descriptor from the filter
+    AidlMQDesc mAidlMQDesc;                   // converted AIDL descriptor
+    EventFlag* mEventFlag;                    // event flag bound to mAidlMq
+    TunerServiceFrontendInfo convertToAidlFrontendInfo(int feId, FrontendInfo halInfo);
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERSERVICE_H
diff --git a/services/tuner/aidl/android/media/tv/OWNERS b/services/tuner/aidl/android/media/tv/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
new file mode 100644
index 0000000..08d20a9
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
@@ -0,0 +1,85 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.ITunerFrontendCallback;
+import android.media.tv.tuner.TunerFrontendSettings;
+import android.media.tv.tuner.TunerFrontendStatus;
+import android.media.tv.tuner.TunerServiceFrontendInfo;
+
+/**
+ * Tuner Frontend interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+interface ITunerFrontend {
+ /**
+ * Set the frontend callback.
+ *
+ * @param tunerFrontendCallback the callback to receive frontend related info.
+ */
+ void setCallback(in ITunerFrontendCallback tunerFrontendCallback);
+
+ /**
+ * Tunes the frontend using the given settings.
+ *
+ * @param settings the settings to tune with.
+ */
+ void tune(in TunerFrontendSettings settings);
+
+ /**
+ * Stop the previous tuning.
+ */
+ void stopTune();
+
+ /**
+ * Scan the frontend to use the settings given.
+ *
+ * @param settings the settings to scan with.
+ * @param frontendScanType scan with given type.
+ */
+ void scan(in TunerFrontendSettings settings, in int frontendScanType);
+
+ /**
+ * Stop the previous scanning.
+ */
+ void stopScan();
+
+ /**
+ * Sets Low-Noise Block downconverter (LNB) for satellite frontend.
+ *
+ * @param lnbHandle lnb handle in use.
+ */
+ void setLnb(in int lnbHandle);
+
+ /**
+ * Enable or Disable Low Noise Amplifier (LNA).
+ *
+ * @param bEnable enable Lna or not.
+ */
+ void setLna(in boolean bEnable);
+
+ /**
+ * Releases the ITunerFrontend instance.
+ */
+ void close();
+
+ /**
+ * Gets the statuses of the frontend.
+ */
+ TunerFrontendStatus[] getStatus(in int[] statusTypes);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
new file mode 100644
index 0000000..ae62c15
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
@@ -0,0 +1,96 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerAtsc3PlpInfo;
+
+/**
+ * TunerFrontendCallback interface handles tuner frontend related callbacks.
+ *
+ * {@hide}
+ */
+interface ITunerFrontendCallback {
+ /**
+ * Notify the client that a new event happened on the frontend.
+ */
+ void onEvent(in int frontendEventType);
+
+ /**
+ * notify locked message to client from the ongoing scan.
+ */
+ void onLocked();
+
+ /**
+ * notify scan stopped message to client from the ongoing scan.
+ */
+ void onScanStopped();
+
+ /**
+ * notify progress message to client from the ongoing scan.
+ */
+ void onProgress(in int percent);
+
+ /**
+ * notify Frequencies message to client from the ongoing scan.
+ */
+ void onFrequenciesReport(in int[] frequency);
+
+ /**
+ * notify SymbolRates message to client from the ongoing scan.
+ */
+ void onSymbolRates(in int[] rates);
+
+ /**
+ * notify Hierarchy message to client from the ongoing scan.
+ */
+ void onHierarchy(in int hierarchy);
+
+ /**
+ * notify SignalType message to client from the ongoing scan.
+ */
+ void onSignalType(in int signalType);
+
+ /**
+ * notify PlpIds message to client from the ongoing scan.
+ */
+ void onPlpIds(in int[] plpIds);
+
+ /**
+ * notify GroupIds message to client from the ongoing scan.
+ */
+ void onGroupIds(in int[] groupIds);
+
+ /**
+ * notify InputStreamIds message to client from the ongoing scan.
+ */
+ void onInputStreamIds(in int[] inputStreamIds);
+
+ /**
+ * notify DvbsStandard message to client from the ongoing scan.
+ */
+ void onDvbsStandard(in int dvbsStandandard);
+
+ /**
+ * notify AnalogSifStandard message to client from the ongoing scan.
+ */
+ void onAnalogSifStandard(in int sifStandandard);
+
+ /**
+ * notify Atsc3PlpInfos message to client from the ongoing scan.
+ */
+ void onAtsc3PlpInfos(in TunerAtsc3PlpInfo[] atsc3PlpInfos);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
new file mode 100644
index 0000000..5c1bce7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -0,0 +1,62 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.fmq.MQDescriptor;
+import android.hardware.common.fmq.SynchronizedReadWrite;
+import android.hardware.common.fmq.UnsynchronizedWrite;
+import android.media.tv.tuner.ITunerFrontend;
+import android.media.tv.tuner.TunerServiceFrontendInfo;
+
+/**
+ * TunerService interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+//@VintfStability
+interface ITunerService {
+
+ /**
+ * Gets frontend IDs.
+ *
+ * @return the result code of the operation.
+ */
+ int getFrontendIds(out int[] ids);
+
+ /**
+ * Retrieve the frontend's information.
+ *
+ * @param frontendHandle the handle of the frontend granted by TRM.
+ * @return the information of the frontend.
+ */
+ TunerServiceFrontendInfo getFrontendInfo(in int frontendHandle);
+
+ /**
+ * Open a Tuner Frontend interface.
+ *
+ * @param frontendHandle the handle of the frontend granted by TRM.
+ * @return the aidl interface of the frontend.
+ */
+ ITunerFrontend openFrontend(in int frontendHandle);
+
+ /**
+ * Gets synchronized fast message queue.
+ *
+ * @param mqDesc the descriptor of the synchronized fast message queue.
+ * @return true if succeeds, false otherwise.
+ */
+ boolean getFmqSyncReadWrite(out MQDescriptor<byte, SynchronizedReadWrite> mqDesc);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerAtsc3PlpInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerAtsc3PlpInfo.aidl
new file mode 100644
index 0000000..a0648a5
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerAtsc3PlpInfo.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc3 Frontend Physical Layer Pipe Info.
+ *
+ * {@hide}
+ */
+parcelable TunerAtsc3PlpInfo {
+ int plpId;
+
+ boolean llsFlag;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
new file mode 100644
index 0000000..74bf04e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Analog Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAnalogCapabilities {
+ /**
+ * Signal Type capability
+ */
+ int typeCap;
+
+ /**
+ * Standard Interchange Format (SIF) capability
+ */
+ int sifStandardCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
new file mode 100644
index 0000000..b6d07c3
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Analog Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAnalogSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int signalType;
+
+ /**
+ * Standard Interchange Format (SIF) setting
+ */
+ int sifStandard;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
new file mode 100644
index 0000000..6c9be77
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3Capabilities {
+ /**
+ * Bandwidth capability
+ */
+ int bandwidthCap;
+
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * TimeInterleaveMode capability
+ */
+ int timeInterleaveModeCap;
+
+ /**
+ * CodeRate capability
+ */
+ int codeRateCap;
+
+ /**
+ * FEC capability
+ */
+ int fecCap;
+
+ /**
+ * Demodulator Output Format capability
+ */
+ int demodOutputFormatCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
new file mode 100644
index 0000000..b29e1f7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc3 Frontend Physical Layer Pipe Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3PlpSettings {
+ int plpId;
+
+ int modulation;
+
+ int interleaveMode;
+
+ int codeRate;
+
+ /**
+ * Forward Error Correction Type.
+ */
+ int fec;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
new file mode 100644
index 0000000..32fb8c7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAtsc3PlpSettings;
+
+/**
+ * Atsc3 Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3Settings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ /**
+ * Bandwidth of tuning band.
+ */
+ int bandwidth;
+
+ int demodOutputFormat;
+
+ TunerFrontendAtsc3PlpSettings[] plpSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
new file mode 100644
index 0000000..2b6c2fc
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtscCapabilities {
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
new file mode 100644
index 0000000..c7a8c07
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtscSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int modulation;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
new file mode 100644
index 0000000..7df452a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Cable(DVBC) Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendCableCapabilities {
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * Code Rate capability
+ */
+ int codeRateCap; // inner FEC will converge to codeRate
+
+ /**
+ * Annex capability
+ */
+ int annexCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
new file mode 100644
index 0000000..3984f2c
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Cable Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendCableSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int modulation;
+
+ /**
+ * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
+ * and ETSI EN 302 307-2 V1.1.1.
+ */
+ long innerFec;
+
+ /**
+ * Symbols per second
+ */
+ int symbolRate;
+
+ /**
+ * Outer Forward Error Correction (FEC) Type.
+ */
+ int outerFec;
+
+ int annex;
+
+ /**
+ * Spectral Inversion Type.
+ */
+ int spectralInversion;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
new file mode 100644
index 0000000..19f31f1
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
@@ -0,0 +1,85 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAnalogCapabilities;
+import android.media.tv.tuner.TunerFrontendAtscCapabilities;
+import android.media.tv.tuner.TunerFrontendAtsc3Capabilities;
+import android.media.tv.tuner.TunerFrontendCableCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbsCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbtCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbsCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbs3Capabilities;
+import android.media.tv.tuner.TunerFrontendIsdbtCapabilities;
+
+/**
+ * Frontend Capabilities interface.
+ *
+ * This is an AIDL union: exactly one of the capability fields below is set
+ * at a time.
+ *
+ * Client may use FrontendInfo.type as the discriminator to determine which
+ * field of this union carries the valid value.
+ *
+ * {@hide}
+ */
+union TunerFrontendCapabilities {
+ /**
+ * Analog Frontend Capabilities
+ */
+ TunerFrontendAnalogCapabilities analogCaps;
+
+ /**
+ * ATSC Frontend Capabilities
+ */
+ TunerFrontendAtscCapabilities atscCaps;
+
+ /**
+ * ATSC3 Frontend Capabilities
+ */
+ TunerFrontendAtsc3Capabilities atsc3Caps;
+
+ /**
+ * Cable Frontend Capabilities
+ */
+ TunerFrontendCableCapabilities cableCaps;
+
+ /**
+ * DVBS Frontend Capabilities
+ */
+ TunerFrontendDvbsCapabilities dvbsCaps;
+
+ /**
+ * DVBT Frontend Capabilities
+ */
+ TunerFrontendDvbtCapabilities dvbtCaps;
+
+ /**
+ * ISDB-S Frontend Capabilities
+ */
+ TunerFrontendIsdbsCapabilities isdbsCaps;
+
+ /**
+ * ISDB-S3 Frontend Capabilities
+ */
+ TunerFrontendIsdbs3Capabilities isdbs3Caps;
+
+ /**
+ * ISDB-T Frontend Capabilities
+ */
+ TunerFrontendIsdbtCapabilities isdbtCaps;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
new file mode 100644
index 0000000..5e4322c
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBS Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsCapabilities {
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * Code Rate capability
+ */
+ long codeRateCap; // inner FEC will converge to codeRate
+
+ /**
+ * Sub standards capability
+ */
+ int standard;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
new file mode 100644
index 0000000..59b7de3
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
@@ -0,0 +1,42 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Dvbs Frontend CodeRate interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsCodeRate {
+ /**
+ * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
+ * and ETSI EN 302 307-2 V1.1.1.
+ */
+ long fec;
+
+ boolean isLinear;
+
+ /**
+ * true if enable short frame
+ */
+ boolean isShortFrames;
+
+ /**
+ * bits number in 1000 symbol. 0 if use the default.
+ */
+ int bitsPer1000Symbol;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
new file mode 100644
index 0000000..554a502
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendDvbsCodeRate;
+
+/**
+ * Dvbs Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int modulation;
+
+ TunerFrontendDvbsCodeRate codeRate;
+
+ int symbolRate;
+
+ /**
+ * Roll off type.
+ */
+ int rolloff;
+
+ /**
+ * Pilot mode.
+ */
+ int pilot;
+
+ int inputStreamId;
+
+ int standard;
+
+ /**
+ * Vcm mode.
+ */
+ int vcm;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
new file mode 100644
index 0000000..73f16dd
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBT Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbtCapabilities {
+ /**
+ * Transmission Mode capability
+ */
+ int transmissionModeCap;
+
+ /**
+ * Bandwidth capability
+ */
+ int bandwidthCap;
+
+ /**
+ * Constellation capability
+ */
+ int constellationCap;
+
+ /**
+ * Code Rate capability
+ */
+ int codeRateCap;
+
+ /**
+ * Hierarchy Type capability
+ */
+ int hierarchyCap;
+
+ /**
+ * Guard Interval capability
+ */
+ int guardIntervalCap;
+
+ /**
+ * T2 Support capability
+ */
+ boolean isT2Supported;
+
+ /**
+ * Miso Support capability
+ */
+ boolean isMisoSupported;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
new file mode 100644
index 0000000..c72396b
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
@@ -0,0 +1,70 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Dvbt Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbtSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int transmissionMode;
+
+ int bandwidth;
+
+ int constellation;
+
+ int hierarchy;
+
+ /**
+ * Code Rate for High Priority level
+ */
+ int hpCodeRate;
+
+ /**
+ * Code Rate for Low Priority level
+ */
+ int lpCodeRate;
+
+ int guardInterval;
+
+ boolean isHighPriority;
+
+ int standard;
+
+ boolean isMiso;
+
+ /**
+ * Physical Layer Pipe (PLP) mode
+ */
+ int plpMode;
+
+ /**
+ * Physical Layer Pipe (PLP) Id
+ */
+ int plpId;
+
+ /**
+ * Physical Layer Pipe (PLP) Group Id
+ */
+ int plpGroupId;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
new file mode 100644
index 0000000..84dd67a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbs3Capabilities {
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * Code Rate capability
+ */
+ int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
new file mode 100644
index 0000000..0923868
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbs3 Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbs3Settings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int streamId;
+
+ int streamIdType;
+
+ int modulation;
+
+ int codeRate;
+
+ /**
+ * Symbols per second
+ */
+ int symbolRate;
+
+ int rolloff;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
new file mode 100644
index 0000000..15dfdf7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbsCapabilities {
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * Code Rate capability
+ */
+ int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
new file mode 100644
index 0000000..2ae9092
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbs Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbsSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int streamId;
+
+ int streamIdType;
+
+ int modulation;
+
+ int codeRate;
+
+ /**
+ * Symbols per second
+ */
+ int symbolRate;
+
+ int rolloff;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
new file mode 100644
index 0000000..c9295d8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-T Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbtCapabilities {
+ /**
+ * ISDB-T Mode capability
+ */
+ int modeCap;
+
+ /**
+ * Bandwidth capability
+ */
+ int bandwidthCap;
+
+ /**
+ * Modulation capability
+ */
+ int modulationCap;
+
+ /**
+ * Code Rate capability
+ */
+ int codeRateCap;
+
+ /**
+ * Guard Interval capability
+ */
+ int guardIntervalCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
new file mode 100644
index 0000000..191f3a6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbt Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbtSettings {
+ /**
+ * Signal frequency in Hertz
+ */
+ int frequency;
+
+ int modulation;
+
+ int bandwidth;
+
+ int mode;
+
+ int codeRate;
+
+ int guardInterval;
+
+ int serviceAreaId;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
new file mode 100644
index 0000000..a382941
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
@@ -0,0 +1,52 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAnalogSettings;
+import android.media.tv.tuner.TunerFrontendAtscSettings;
+import android.media.tv.tuner.TunerFrontendAtsc3Settings;
+import android.media.tv.tuner.TunerFrontendCableSettings;
+import android.media.tv.tuner.TunerFrontendDvbsSettings;
+import android.media.tv.tuner.TunerFrontendDvbtSettings;
+import android.media.tv.tuner.TunerFrontendIsdbsSettings;
+import android.media.tv.tuner.TunerFrontendIsdbs3Settings;
+import android.media.tv.tuner.TunerFrontendIsdbtSettings;
+
+/**
+ * Frontend Settings union for all supported frontend types.
+ *
+ * {@hide}
+ */
+union TunerFrontendSettings {
+ TunerFrontendAnalogSettings analog;
+
+ TunerFrontendAtscSettings atsc;
+
+ TunerFrontendAtsc3Settings atsc3;
+
+ TunerFrontendCableSettings cable;
+
+ TunerFrontendDvbsSettings dvbs;
+
+ TunerFrontendDvbtSettings dvbt;
+
+ TunerFrontendIsdbsSettings isdbs;
+
+ TunerFrontendIsdbs3Settings isdbs3;
+
+ TunerFrontendIsdbtSettings isdbt;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
new file mode 100644
index 0000000..41f9f0e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
@@ -0,0 +1,24 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Tuner Frontend Status interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendStatus {}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl
new file mode 100644
index 0000000..ddcbcdc
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerServiceFrontendInfo.aidl
@@ -0,0 +1,77 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendCapabilities;
+
+/**
+ * FrontendInfo interface that carries tuner frontend information.
+ *
+ * {@hide}
+ */
+parcelable TunerServiceFrontendInfo {
+    /**
+     * Frontend Id
+     */
+    int id;
+
+    /**
+     * Frontend Type (NOTE(review): presumably a frontend-type enum value — confirm against the Tuner HAL)
+     */
+    int type;
+
+    /**
+     * Minimum Frequency in Hertz (NOTE(review): int caps at ~2.1 GHz; some bands exceed this — consider a wider type)
+     */
+    int minFrequency;
+
+    /**
+     * Maximum Frequency in Hertz (NOTE(review): same int-width concern as minFrequency)
+     */
+    int maxFrequency;
+
+    /**
+     * Minimum symbols per second
+     */
+    int minSymbolRate;
+
+    /**
+     * Maximum symbols per second
+     */
+    int maxSymbolRate;
+
+    /**
+     * Range in Hertz
+     */
+    int acquireRange;
+
+    /**
+     * Frontends are assigned with the same exclusiveGroupId if they can't
+     * function at same time. For instance, they share same hardware module.
+     */
+    int exclusiveGroupId;
+
+    /**
+     * A list of supported status types which client can inquiry
+     */
+    int[] statusCaps;
+
+    /**
+     * Frontend Capabilities
+     */
+    TunerFrontendCapabilities caps;
+}
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
new file mode 100644
index 0000000..a0e7a9f
--- /dev/null
+++ b/services/tuner/main_tunerservice.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <hidl/HidlTransportSupport.h>
+
+#include "TunerService.h"
+
+using namespace android;
+
+int main(int argc __unused, char** argv) {
+    ALOGD("Tuner service starting");
+
+    strcpy(argv[0], "media.tuner"); // rename process; assumes argv[0] ("/system/bin/mediatuner" per the .rc) is at least as long as the new name
+    sp<ProcessState> proc(ProcessState::self()); // ensure the binder ProcessState singleton is initialized
+    sp<IServiceManager> sm = defaultServiceManager();
+    ALOGD("ServiceManager: %p", sm.get());
+
+    TunerService::instantiate(); // bring up the service (see TunerService.h)
+
+    ProcessState::self()->startThreadPool();
+    IPCThreadState::self()->joinThreadPool(); // blocks servicing binder calls; main does not return normally
+}
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
new file mode 100644
index 0000000..b0347be
--- /dev/null
+++ b/services/tuner/mediatuner.rc
@@ -0,0 +1,6 @@
+service media.tuner /system/bin/mediatuner
+ class main
+ user media
+ group media
+ ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
new file mode 100755
index 0000000..3dc6163
--- /dev/null
+++ b/tools/mainline_hook_partial.sh
@@ -0,0 +1,200 @@
+#!/bin/bash
+#set -x
+
+# used for projects where some files are mainline, some are not
+# we get a list of the files/directories out of the project's root.
+#
+# invocation $0 ${repo_root} ${preupload_files}
+#
+# Example PREUPLOAD.cfg:
+#
+# [Hook Scripts]
+# mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+#
+# MainlineFiles.cfg syntax:
+#
+# ignore comment (#) lines and blank lines
+# rest are path prefixes starting at root of the project
+# (so OWNERS, not frameworks/av/OWNERS)
+#
+# path
+# INCLUDE path
+# EXCLUDE path
+#
+# 'path' and 'INCLUDE path' are identical -- they both indicate that this path
+# is part of mainline
+# EXCLUDE indicates that this is not part of mainline,
+# so 'foo/' and 'EXCLUDE foo/nope'
+# means everything under foo/ is part of mainline EXCEPT foo/nope.
+# INCLUDE/EXCLUDE/INCLUDE nested structuring is not supported
+#
+# matching is purely prefix
+# so 'foo' will match 'foo', 'foo.c', 'foo/bar/baz'
+# if you want to exclude a directory, best to use a pattern like "foo/"
+#
+
+## tunables:
+##
+DEV_BRANCH=rvc-dev
+filelist_file=MainlineFiles.cfg
+
+###
+
+REPO_ROOT=$1; shift
+# the rest of the command line is the file list
+PREUPLOAD_FILES="$*"
+
+RED=$(tput setaf 1)
+NORMAL=$(tput sgr0)
+
+## get the active branch:
+## * <localbranch> <shainfo> [goog/master] Fix to handle missing checks on error returned
+## strip this down to "master"
+##
+current=`git branch -vv | grep -P "^\*[^\[]+\[goog/"|sed -e 's/^.*\[//' | sed -e 's/:.*$//'| sed -e 's/^goog\///'`
+if [ "${current}" = "" ] ; then
+ current=unknown
+fi
+
+## figure out whether which files are for mainline and which are not
+if [ "${PREUPLOAD_FILES}" = "" ] ; then
+ # empty files? what's up there, i suppose we'll let that go
+ exit 0
+fi
+
+## get the list of files out of the project's root
+## figure out which way I'm going ..
+## use list of files to scan PREUPLOAD_FILES
+## use PREUPLOAD_FILES to scan the list of good/bad from the project root
+##
+## remember to do an exclude, so I can say
+## include/these/files/
+## EXCLUDE include/these/files/nested/
+##
+## and it should all be prefix based stuff...
+
+if [ ! -f ${REPO_ROOT}/${REPO_PATH}/${filelist_file} ] ; then
+ echo "Poorly Configured project, missing ${filelist_file} in root of project"
+ exit 1
+fi
+
+# is 1st arg a prefix of 2nd arg
+beginswith() { case $2 in "$1"*) true;; *) false;; esac; }
+
+exclusions=""
+inclusions=""
+while read p1 p2
+do
+    # ignore comment lines in the file
+    # ignore empty lines in the file
+    if beginswith "#" "${p1}" ; then
+        # ignore this line
+        true
+    elif [ -z "${p1}" ] ; then
+        # ignore blanks
+        true
+    elif [ ${p1} = "EXCLUDE" ] ; then
+        # add to the exclusion list (was "exlusions", so the list never accumulated)
+        # NOTE(review): the match loop below does not consult ${exclusions} yet
+        if [ ! -z ${p2} ] ; then
+            exclusions="${exclusions} ${p2}"
+        fi
+    elif [ ${p1} = "INCLUDE" ] ; then
+        # add to the inclusion list
+        if [ ! -z ${p2} ] ; then
+            inclusions="${inclusions} ${p2}"
+        fi
+    elif [ ! -z ${p1} ] ; then
+        inclusions="${inclusions} ${p1}"
+    fi
+done < ${REPO_ROOT}/${REPO_PATH}/${filelist_file}
+
+# so we can play with array syntax
+#INCLUSIONS=( ${inclusions} )
+#EXCLUSIONS=( ${exclusions} )
+
+mainline_yes=""
+mainline_no=""
+
+# is it part of the list of mainline files/directories?
+for path in ${PREUPLOAD_FILES} ; do
+ #echo is ${path} a mainline file...
+ for aprefix in ${inclusions} .. ; do
+ #echo compare against ${aprefix} ...
+ if [ "${aprefix}" = ".." ] ; then
+ mainline_no="${mainline_no} ${path}"
+ elif beginswith ${aprefix} ${path} ; then
+ mainline_yes="${mainline_yes} ${path}"
+ break # on to next uploaded file
+ fi
+ done
+done
+
+# TODO: audit the yes list to see if some should be moved to the no list
+
+# 3 situations
+# -- everything is on mainline (mainline_yes non-empty, other empty)
+# -- some is mainline, some is not (files_* both non-empty)
+# -- none is mainline (mainline_yes empty, other non-empty)
+# -- both empty only happens if PREUPLOAD_FILES is empty, covered above
+
+if [ -z "${mainline_yes}" ] ; then
+ # no mainline files, everything else is non-mainline, let it go
+ exit 0
+fi
+
+result=0
+if [ ! -z "${mainline_no}" ] ; then
+ # mixed bag, suggest (not insist) that developer split them.
+ result=1
+ cat - <<EOF
+This CL contains both mainline and non-mainline files. Consider separating
+them into separate CLs. It may also be appropriate to update the list of mainline
+files in ${RED}${REPO_ROOT}/${filelist_file}${NORMAL}.
+
+EOF
+ echo "===== Mainline files ====="
+ echo -e ${RED}
+ echo ${mainline_yes} | sed -e 's/ /
/g'
+ echo -e ${NORMAL}
+
+ echo "===== Non-Mainline files ====="
+ echo -e ${RED}
+ echo ${mainline_no} | sed -e 's/ /
/g'
+ echo -e ${NORMAL}
+
+fi
+
+if [ "${current}" != "${DEV_BRANCH}" ] ; then
+ # Change is not in the desired mainline dev branch
+ result=1
+
+ #echo -e "${RED}"
+ cat - <<EOF
+
+You are uploading repo ${RED}${REPO_PATH}${NORMAL} to branch ${RED}${current}${NORMAL}.
+The source of truth for ${RED}${REPO_PATH}${NORMAL} is branch ${RED}${DEV_BRANCH}${NORMAL}.
+
+Please upload this change to branch ${RED}${DEV_BRANCH}${NORMAL} unless one or more of
+the following apply:
+- this is a security bug prohibited from disclosure before the next dessert release.
+ (moderate security bugs fall into this category).
+- this is new functionality prohibited from disclosure before the next dessert release.
+EOF
+ #echo -e "${NORMAL}"
+
+fi
+
+## since stdout is buffered in a way that complicates the below, we're just going
+## to tell the user what they can do to get around this check instead of asking them
+## as part of this run of the command.
+
+if [ ${result} != 0 ] ; then
+ cat - <<EOF
+
+If you are sure you want to proceed uploading to branch ${RED}${current}${NORMAL},
+re-run your repo upload command with the '--no-verify' option
+
+EOF
+fi
+exit ${result}
+
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
new file mode 100755
index 0000000..8d35470
--- /dev/null
+++ b/tools/mainline_hook_project.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#set -x
+
+# called for repo projects that are part of the media mainline modules
+# this is for projects where the entire project is part of mainline.
+# we have a separate script for projects where only part of that project gets
+# pulled into mainline.
+#
+# if the project's PREUPLOAD.cfg points to this script, it is by definition a project
+# which is entirely within mainline.
+#
+# example PREUPLOAD.cfg using this script
+# [Hook Scripts]
+# mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_project.sh
+#
+
+
+# tunables
+DEV_BRANCH=rvc-dev
+
+###
+RED=$(tput setaf 1)
+NORMAL=$(tput sgr0)
+
+## check the active branch:
+## * b131183694 d198c6a [goog/master] Fix to handle missing checks on error returned
+##
+current=`git branch -vv | grep -P "^\*[^\[]+\[goog/"|sed -e 's/^.*\[//' | sed -e 's/:.*$//'| sed -e 's/^goog\///'`
+if [ "${current}" = "" ] ; then
+ current=unknown
+fi
+
+if [ "${current}" = "${DEV_BRANCH}" ] ; then
+ # Change appears to be in mainline dev branch
+ exit 0
+fi
+
+## warn the user that about not being on the typical/desired branch.
+
+cat - <<EOF
+
+You are uploading repo ${RED}${REPO_PATH}${NORMAL} to branch ${RED}${current}${NORMAL}.
+The source of truth for ${RED}${REPO_PATH}${NORMAL} is branch ${RED}${DEV_BRANCH}${NORMAL}.
+
+Please upload this change to branch ${RED}${DEV_BRANCH}${NORMAL} unless one or more of
+the following apply:
+- this is a security bug prohibited from disclosure before the next dessert release.
+ (moderate security bugs fall into this category).
+- this is new functionality prohibited from disclosure before the next dessert release.
+EOF
+
+
+##
+## TODO: prompt the user y/n to continue right now instead of re-invoking with no-verify
+## this has to get around how repo buffers stdout from this script such that the output
+## is not flushed before we try to read the input.
+##
+
+cat - <<EOF
+If you are sure you want to proceed uploading to branch ${RED}${current}${NORMAL},
+re-run your repo upload command with the '--no-verify' option
+
+EOF
+exit 1
+