Merge "hdr: fix dataspace range"
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index e6c0d00..928a6bc 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -60,6 +60,39 @@
return OK;
}
+status_t PhysicalCaptureResultInfo::readFromParcel(const android::Parcel* parcel) {
+ status_t res;
+
+ mPhysicalCameraId.remove(mPhysicalCameraId.size());
+ mPhysicalCameraMetadata.clear();
+
+ if ((res = parcel->readString16(&mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ if ((res = mPhysicalCameraMetadata.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+ return OK;
+}
+
+status_t PhysicalCaptureResultInfo::writeToParcel(android::Parcel* parcel) const {
+ status_t res;
+ if ((res = parcel->writeString16(mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to write physical camera ID to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ if ((res = mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ return OK;
+}
+
CaptureResult::CaptureResult() :
mMetadata(), mResultExtras() {
}
@@ -67,6 +100,7 @@
CaptureResult::CaptureResult(const CaptureResult &otherResult) {
mResultExtras = otherResult.mResultExtras;
mMetadata = otherResult.mMetadata;
+ mPhysicalMetadatas = otherResult.mPhysicalMetadatas;
}
status_t CaptureResult::readFromParcel(android::Parcel *parcel) {
@@ -79,6 +113,7 @@
}
mMetadata.clear();
+ mPhysicalMetadatas.clear();
status_t res = OK;
res = mMetadata.readFromParcel(parcel);
@@ -89,6 +124,34 @@
}
ALOGV("%s: Read metadata from parcel", __FUNCTION__);
+ int32_t physicalMetadataCount;
+ if ((res = parcel->readInt32(&physicalMetadataCount)) != OK) {
+ ALOGE("%s: Failed to read the physical metadata count from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+ if (physicalMetadataCount < 0) {
+ ALOGE("%s: Invalid physical metadata count from parcel: %d",
+ __FUNCTION__, physicalMetadataCount);
+ return BAD_VALUE;
+ }
+
+ for (int32_t i = 0; i < physicalMetadataCount; i++) {
+ String16 cameraId;
+ if ((res = parcel->readString16(&cameraId)) != OK) {
+ ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ CameraMetadata physicalMetadata;
+ if ((res = physicalMetadata.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), cameraId, physicalMetadata);
+ }
+ ALOGV("%s: Read physical metadata from parcel", __FUNCTION__);
+
res = mResultExtras.readFromParcel(parcel);
if (res != OK) {
ALOGE("%s: Failed to read result extras from parcel.",
@@ -118,6 +181,27 @@
}
ALOGV("%s: Wrote metadata to parcel", __FUNCTION__);
+ int32_t physicalMetadataCount = static_cast<int32_t>(mPhysicalMetadatas.size());
+ res = parcel->writeInt32(physicalMetadataCount);
+ if (res != OK) {
+ ALOGE("%s: Failed to write physical metadata count to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ for (const auto& physicalMetadata : mPhysicalMetadatas) {
+ if ((res = parcel->writeString16(physicalMetadata.mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to write physical camera ID to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ if ((res = physicalMetadata.mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ }
+ ALOGV("%s: Wrote physical camera metadata to parcel", __FUNCTION__);
+
res = mResultExtras.writeToParcel(parcel);
if (res != OK) {
ALOGE("%s: Failed to write result extras to parcel", __FUNCTION__);
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 4db7f85..58b19a3 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -18,6 +18,7 @@
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.impl.PhysicalCaptureResultInfo;
/** @hide */
interface ICameraDeviceCallbacks
@@ -36,7 +37,8 @@
oneway void onDeviceIdle();
oneway void onCaptureStarted(in CaptureResultExtras resultExtras, long timestamp);
oneway void onResultReceived(in CameraMetadataNative result,
- in CaptureResultExtras resultExtras);
+ in CaptureResultExtras resultExtras,
+ in PhysicalCaptureResultInfo[] physicalCaptureResultInfos);
oneway void onPrepared(int streamId);
/**
diff --git a/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl b/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl
new file mode 100644
index 0000000..78d9b7b
--- /dev/null
+++ b/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+/** @hide */
+parcelable PhysicalCaptureResultInfo cpp_header "camera/CaptureResult.h";
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index 917d953..56fa178 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -91,14 +91,36 @@
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
};
+
+struct PhysicalCaptureResultInfo : public android::Parcelable {
+
+ PhysicalCaptureResultInfo()
+ : mPhysicalCameraId(),
+ mPhysicalCameraMetadata() {
+ }
+ PhysicalCaptureResultInfo(const String16& cameraId,
+ const CameraMetadata& cameraMetadata)
+ : mPhysicalCameraId(cameraId),
+ mPhysicalCameraMetadata(cameraMetadata) {
+ }
+
+ String16 mPhysicalCameraId;
+ CameraMetadata mPhysicalCameraMetadata;
+
+ virtual status_t readFromParcel(const android::Parcel* parcel) override;
+ virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
} // namespace impl
} // namespace camera2
} // namespace hardware
using hardware::camera2::impl::CaptureResultExtras;
+using hardware::camera2::impl::PhysicalCaptureResultInfo;
struct CaptureResult : public virtual LightRefBase<CaptureResult> {
CameraMetadata mMetadata;
+ std::vector<PhysicalCaptureResultInfo> mPhysicalMetadatas;
CaptureResultExtras mResultExtras;
CaptureResult();
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index ef1c61f..907debc 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -1406,7 +1406,9 @@
binder::Status
CameraDevice::ServiceCallback::onResultReceived(
const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) {
+ const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
+ (void) physicalResultInfos;
binder::Status ret = binder::Status::ok();
sp<CameraDevice> dev = mDevice.promote();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 1db3dfb..1369148 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -74,7 +74,8 @@
binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
int64_t timestamp) override;
binder::Status onResultReceived(const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) override;
+ const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) override;
binder::Status onPrepared(int streamId) override;
binder::Status onRequestQueueEmpty() override;
binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 24c0c51..1de7013 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -198,9 +198,11 @@
virtual binder::Status onResultReceived(const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) {
+ const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
(void) metadata;
(void) resultExtras;
+ (void) physicalResultInfos;
Mutex::Autolock l(mLock);
mLastStatus = SENT_RESULT;
mStatusesHit.push_back(mLastStatus);
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f9e4639..46bd8f0 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -73,6 +73,7 @@
static enum {
FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4; // data format for output
+static AString gCodecName = ""; // codec name override
static bool gSizeSpecified = false; // was size explicitly requested?
static bool gWantInfoScreen = false; // do we want initial info screen?
static bool gWantFrameTime = false; // do we want times on each frame?
@@ -154,6 +155,7 @@
if (gVerbose) {
printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
+ fflush(stdout);
}
sp<AMessage> format = new AMessage;
@@ -169,11 +171,21 @@
looper->setName("screenrecord_looper");
looper->start();
ALOGV("Creating codec");
- sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
- if (codec == NULL) {
- fprintf(stderr, "ERROR: unable to create %s codec instance\n",
- kMimeTypeAvc);
- return UNKNOWN_ERROR;
+ sp<MediaCodec> codec;
+ if (gCodecName.empty()) {
+ codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ kMimeTypeAvc);
+ return UNKNOWN_ERROR;
+ }
+ } else {
+ codec = MediaCodec::CreateByComponentName(looper, gCodecName);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ gCodecName.c_str());
+ return UNKNOWN_ERROR;
+ }
}
err = codec->configure(format, NULL, NULL,
@@ -275,9 +287,11 @@
if (gRotate) {
printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
outHeight, outWidth, offY, offX);
+ fflush(stdout);
} else {
printf("Content area is %ux%u at offset x=%d y=%d\n",
outWidth, outHeight, offX, offY);
+ fflush(stdout);
}
}
@@ -346,6 +360,7 @@
if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
if (gVerbose) {
printf("Time limit reached\n");
+ fflush(stdout);
}
break;
}
@@ -483,6 +498,7 @@
printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
debugNumFrames, nanoseconds_to_seconds(
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
+ fflush(stdout);
}
return NO_ERROR;
}
@@ -556,6 +572,7 @@
printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
mainDpyInfo.orientation);
+ fflush(stdout);
}
bool rotated = isDeviceRotated(mainDpyInfo.orientation);
@@ -623,6 +640,7 @@
}
if (gVerbose) {
printf("Bugreport overlay created\n");
+ fflush(stdout);
}
} else {
// Use the encoder's input surface as the virtual display surface.
@@ -715,6 +733,7 @@
if (gVerbose) {
printf("Stopping encoder and muxer\n");
+ fflush(stdout);
}
}
@@ -761,6 +780,7 @@
printf(" %s", argv[i]);
}
putchar('\n');
+ fflush(stdout);
}
pid_t pid = fork();
@@ -898,6 +918,7 @@
{ "show-frame-time", no_argument, NULL, 'f' },
{ "rotate", no_argument, NULL, 'r' },
{ "output-format", required_argument, NULL, 'o' },
+ { "codec-name", required_argument, NULL, 'N' },
{ "monotonic-time", no_argument, NULL, 'm' },
{ NULL, 0, NULL, 0 }
};
@@ -978,6 +999,9 @@
return 2;
}
break;
+ case 'N':
+ gCodecName = optarg;
+ break;
case 'm':
gMonotonicTime = true;
break;
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 2e8da4b..ea239c5 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -18,6 +18,7 @@
"PluginMetricsReporting.cpp",
"SharedLibrary.cpp",
"DrmHal.cpp",
+ "DrmMetrics.cpp",
"CryptoHal.cpp",
"protos/plugin_metrics.proto",
],
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index e51ec4d..7ab0bcd 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -30,6 +30,7 @@
#include <media/DrmHal.h>
#include <media/DrmSessionClientInterface.h>
#include <media/DrmSessionManager.h>
+#include <media/EventMetric.h>
#include <media/PluginMetricsReporting.h>
#include <media/drm/DrmAPI.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -53,6 +54,14 @@
using ::android::hidl::manager::V1_0::IServiceManager;
using ::android::sp;
+namespace {
+
+// This constant corresponds to the PROPERTY_DEVICE_UNIQUE_ID constant
+// in the MediaDrm API.
+constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
+
+}
+
namespace android {
#define INIT_CHECK() {if (mInitCheck != OK) return mInitCheck;}
@@ -227,15 +236,14 @@
DrmHal::DrmHal()
: mDrmSessionClient(new DrmSessionClient(this)),
mFactories(makeDrmFactories()),
- mOpenSessionCounter("/drm/mediadrm/open_session", "status"),
mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {
}
void DrmHal::closeOpenSessions() {
if (mPlugin != NULL) {
- for (size_t i = 0; i < mOpenSessions.size(); i++) {
- mPlugin->closeSession(toHidlVec(mOpenSessions[i]));
- DrmSessionManager::Instance()->removeSession(mOpenSessions[i]);
+ auto openSessions = mOpenSessions;
+ for (size_t i = 0; i < openSessions.size(); i++) {
+ closeSession(openSessions[i]);
}
}
mOpenSessions.clear();
@@ -320,6 +328,7 @@
Return<void> DrmHal::sendEvent(EventType hEventType,
const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data) {
+ mMetrics.mEventCounter.Increment(hEventType);
mEventLock.lock();
sp<IDrmClient> listener = mListener;
@@ -410,12 +419,21 @@
break;
}
obj.writeInt32(type);
+ mMetrics.mKeyStatusChangeCounter.Increment(keyStatus.type);
}
obj.writeInt32(hasNewUsableKey);
Mutex::Autolock lock(mNotifyLock);
listener->notify(DrmPlugin::kDrmPluginEventKeysChange, 0, &obj);
+ } else {
+ // There's no listener. But we still want to count the key change
+ // events.
+ size_t nKeys = keyStatusList.size();
+ for (size_t i = 0; i < nKeys; i++) {
+ mMetrics.mKeyStatusChangeCounter.Increment(keyStatusList[i].type);
+ }
}
+
return Void();
}
@@ -519,7 +537,7 @@
mOpenSessions.push(sessionId);
}
- mOpenSessionCounter.Increment(err);
+ mMetrics.mOpenSessionCounter.Increment(err);
return err;
}
@@ -539,8 +557,11 @@
}
}
reportMetrics();
- return toStatusT(status);
+ status_t response = toStatusT(status);
+ mMetrics.mCloseSessionCounter.Increment(response);
+ return response;
}
+ mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
return DEAD_OBJECT;
}
@@ -551,6 +572,7 @@
String8 &defaultUrl, DrmPlugin::KeyRequestType *keyRequestType) {
Mutex::Autolock autoLock(mLock);
INIT_CHECK();
+ EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTiming);
DrmSessionManager::Instance()->useSession(sessionId);
@@ -562,6 +584,7 @@
} else if (keyType == DrmPlugin::kKeyType_Release) {
hKeyType = KeyType::RELEASE;
} else {
+ keyRequestTimer.SetAttribute(BAD_VALUE);
return BAD_VALUE;
}
@@ -636,12 +659,16 @@
}
});
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ keyRequestTimer.SetAttribute(err);
+ return err;
}
status_t DrmHal::provideKeyResponse(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &response, Vector<uint8_t> &keySetId) {
Mutex::Autolock autoLock(mLock);
+ EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTiming);
+
INIT_CHECK();
DrmSessionManager::Instance()->useSession(sessionId);
@@ -657,8 +684,9 @@
err = toStatusT(status);
}
);
-
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ keyResponseTimer.SetAttribute(err);
+ return err;
}
status_t DrmHal::removeKeys(Vector<uint8_t> const &keySetId) {
@@ -722,7 +750,9 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ mMetrics.mGetProvisionRequestCounter.Increment(err);
+ return err;
}
status_t DrmHal::provideProvisionResponse(Vector<uint8_t> const &response,
@@ -743,7 +773,9 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ mMetrics.mProvideProvisionResponseCounter.Increment(err);
+ return err;
}
status_t DrmHal::getSecureStops(List<Vector<uint8_t>> &secureStops) {
@@ -965,7 +997,11 @@
}
);
- return hResult.isOk() ? err : DEAD_OBJECT;
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ if (name == kPropertyDeviceUniqueId) {
+ mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
+ }
+ return err;
}
status_t DrmHal::setPropertyString(String8 const &name, String8 const &value ) const {
@@ -987,26 +1023,12 @@
return toStatusT(status);
}
-status_t DrmHal::getMetrics(MediaAnalyticsItem* metrics) {
- // TODO: Move mOpenSessionCounter and suffixes to a separate class
- // that manages the collection of metrics and exporting them.
- std::string success_count_name =
- mOpenSessionCounter.metric_name() + "/ok/count";
- std::string error_count_name =
- mOpenSessionCounter.metric_name() + "/error/count";
- mOpenSessionCounter.ExportValues(
- [&] (status_t status, int64_t value) {
- if (status == OK) {
- metrics->setInt64(success_count_name.c_str(), value);
- } else {
- int64_t total_errors(0);
- metrics->getInt64(error_count_name.c_str(), &total_errors);
- metrics->setInt64(error_count_name.c_str(),
- total_errors + value);
- // TODO: Add support for exporting the list of error values.
- // This probably needs to be added to MediaAnalyticsItem.
- }
- });
+status_t DrmHal::getMetrics(MediaAnalyticsItem* item) {
+ if (item == nullptr) {
+ return UNEXPECTED_NULL;
+ }
+
+ mMetrics.Export(item);
return OK;
}
diff --git a/drm/libmediadrm/DrmMetrics.cpp b/drm/libmediadrm/DrmMetrics.cpp
new file mode 100644
index 0000000..258c4b0
--- /dev/null
+++ b/drm/libmediadrm/DrmMetrics.cpp
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/macros.h>
+#include <media/DrmMetrics.h>
+
+using ::android::hardware::drm::V1_0::EventType;
+using ::android::hardware::drm::V1_0::KeyStatusType;
+
+namespace {
+
+template<typename T>
+std::string GetAttributeName(T type);
+
+template<>
+std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
+ static const char* type_names[] = {
+ "USABLE", "EXPIRED", "OUTPUT_NOT_ALLOWED",
+ "STATUS_PENDING", "INTERNAL_ERROR" };
+ if (((size_t) type) >= arraysize(type_names)) {
+ return "UNKNOWN_TYPE";
+ }
+ return type_names[(size_t) type];
+}
+
+template<>
+std::string GetAttributeName<EventType>(EventType type) {
+ static const char* type_names[] = {
+ "PROVISION_REQUIRED", "KEY_NEEDED", "KEY_EXPIRED",
+ "VENDOR_DEFINED", "SESSION_RECLAIMED" };
+ if (((size_t) type) >= arraysize(type_names)) {
+ return "UNKNOWN_TYPE";
+ }
+ return type_names[(size_t) type];
+}
+
+template<typename T>
+void ExportCounterMetric(const android::CounterMetric<T>& counter,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ std::string success_count_name = counter.metric_name() + ".ok.count";
+ std::string error_count_name = counter.metric_name() + ".error.count";
+ counter.ExportValues(
+ [&] (const android::status_t status, const int64_t value) {
+ if (status == android::OK) {
+ item->setInt64(success_count_name.c_str(), value);
+ } else {
+ int64_t total_errors(0);
+ item->getInt64(error_count_name.c_str(), &total_errors);
+ item->setInt64(error_count_name.c_str(), total_errors + value);
+ // TODO: Add support for exporting the list of error values.
+ // This probably needs to be added to MediaAnalyticsItem.
+ }
+ });
+}
+
+template<typename T>
+void ExportCounterMetricWithAttributeNames(
+ const android::CounterMetric<T>& counter,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ counter.ExportValues(
+ [&] (const T& attribute, const int64_t value) {
+ std::string name = counter.metric_name()
+ + "." + GetAttributeName(attribute) + ".count";
+ item->setInt64(name.c_str(), value);
+ });
+}
+
+template<typename T>
+void ExportEventMetric(const android::EventMetric<T>& event,
+ android::MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ std::string success_count_name = event.metric_name() + ".ok.count";
+ std::string error_count_name = event.metric_name() + ".error.count";
+ std::string timing_name = event.metric_name() + ".ok.average_time_micros";
+ event.ExportValues(
+ [&] (const android::status_t& status,
+ const android::EventStatistics& value) {
+ if (status == android::OK) {
+ item->setInt64(success_count_name.c_str(), value.count);
+ item->setInt64(timing_name.c_str(), value.mean);
+ } else {
+ int64_t total_errors(0);
+ item->getInt64(error_count_name.c_str(), &total_errors);
+ item->setInt64(error_count_name.c_str(),
+ total_errors + value.count);
+ // TODO: Add support for exporting the list of error values.
+ // This probably needs to be added to MediaAnalyticsItem.
+ }
+ });
+}
+
+} // namespace anonymous
+
+namespace android {
+
+MediaDrmMetrics::MediaDrmMetrics()
+ : mOpenSessionCounter("drm.mediadrm.open_session", "status"),
+ mCloseSessionCounter("drm.mediadrm.close_session", "status"),
+ mGetKeyRequestTiming("drm.mediadrm.get_key_request", "status"),
+ mProvideKeyResponseTiming("drm.mediadrm.provide_key_response", "status"),
+ mGetProvisionRequestCounter(
+ "drm.mediadrm.get_provision_request", "status"),
+ mProvideProvisionResponseCounter(
+ "drm.mediadrm.provide_provision_response", "status"),
+ mKeyStatusChangeCounter(
+ "drm.mediadrm.key_status_change", "key_status_type"),
+ mEventCounter("drm.mediadrm.event", "event_type"),
+ mGetDeviceUniqueIdCounter(
+ "drm.mediadrm.get_device_unique_id", "status") {
+}
+
+void MediaDrmMetrics::Export(MediaAnalyticsItem* item) {
+ if (!item) {
+ ALOGE("item was unexpectedly null.");
+ return;
+ }
+ ExportCounterMetric(mOpenSessionCounter, item);
+ ExportCounterMetric(mCloseSessionCounter, item);
+ ExportEventMetric(mGetKeyRequestTiming, item);
+ ExportEventMetric(mProvideKeyResponseTiming, item);
+ ExportCounterMetric(mGetProvisionRequestCounter, item);
+ ExportCounterMetric(mProvideProvisionResponseCounter, item);
+ ExportCounterMetricWithAttributeNames(mKeyStatusChangeCounter, item);
+ ExportCounterMetricWithAttributeNames(mEventCounter, item);
+ ExportCounterMetric(mGetDeviceUniqueIdCounter, item);
+}
+
+} // namespace android
diff --git a/drm/libmediadrm/tests/Android.bp b/drm/libmediadrm/tests/Android.bp
index 674d3eb..fdc982d 100644
--- a/drm/libmediadrm/tests/Android.bp
+++ b/drm/libmediadrm/tests/Android.bp
@@ -10,3 +10,35 @@
"-Wall",
],
}
+
+cc_test {
+ name: "DrmMetrics_test",
+ srcs: ["DrmMetrics_test.cpp"],
+ shared_libs: [
+ "android.hardware.drm@1.0",
+ "liblog",
+ "libmediadrm",
+ "libmediametrics",
+ "libutils",
+ ],
+ include_dirs: ["frameworks/av/include/media"],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
+
+cc_test {
+ name: "EventMetric_test",
+ srcs: ["EventMetric_test.cpp"],
+ shared_libs: [
+ "liblog",
+ "libmediadrm",
+ "libutils",
+ ],
+ include_dirs: ["frameworks/av/include/media"],
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/drm/libmediadrm/tests/DrmMetrics_test.cpp b/drm/libmediadrm/tests/DrmMetrics_test.cpp
new file mode 100644
index 0000000..d1948b4
--- /dev/null
+++ b/drm/libmediadrm/tests/DrmMetrics_test.cpp
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "DrmMetrics.h"
+
+using ::android::hardware::drm::V1_0::EventType;
+using ::android::hardware::drm::V1_0::KeyStatusType;
+
+namespace android {
+
+/**
+ * Unit tests for the MediaDrmMetrics class.
+ */
+class MediaDrmMetricsTest : public ::testing::Test {
+};
+
+TEST_F(MediaDrmMetricsTest, EmptySuccess) {
+ MediaDrmMetrics metrics;
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(0, item.count());
+}
+
+TEST_F(MediaDrmMetricsTest, AllValuesSuccessCounts) {
+ MediaDrmMetrics metrics;
+
+ metrics.mOpenSessionCounter.Increment(OK);
+ metrics.mCloseSessionCounter.Increment(OK);
+
+ {
+ EventTimer<status_t> get_key_request_timer(&metrics.mGetKeyRequestTiming);
+ EventTimer<status_t> provide_key_response_timer(
+ &metrics.mProvideKeyResponseTiming);
+ get_key_request_timer.SetAttribute(OK);
+ provide_key_response_timer.SetAttribute(OK);
+ }
+
+ metrics.mGetProvisionRequestCounter.Increment(OK);
+ metrics.mProvideProvisionResponseCounter.Increment(OK);
+ metrics.mGetDeviceUniqueIdCounter.Increment(OK);
+
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
+ metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
+
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(11, item.count());
+
+ // Verify the list of pairs of int64 metrics.
+ std::vector<std::pair<std::string, int64_t>> expected_values = {
+ { "drm.mediadrm.open_session.ok.count", 1 },
+ { "drm.mediadrm.close_session.ok.count", 1 },
+ { "drm.mediadrm.get_key_request.ok.count", 1 },
+ { "drm.mediadrm.provide_key_response.ok.count", 1 },
+ { "drm.mediadrm.get_provision_request.ok.count", 1 },
+ { "drm.mediadrm.provide_provision_response.ok.count", 1 },
+ { "drm.mediadrm.key_status_change.USABLE.count", 1 },
+ { "drm.mediadrm.event.PROVISION_REQUIRED.count", 1 },
+ { "drm.mediadrm.get_device_unique_id.ok.count", 1 }};
+ for (const auto& expected_pair : expected_values) {
+ int64_t value = -1;
+ EXPECT_TRUE(item.getInt64(expected_pair.first.c_str(), &value))
+ << "Failed to get " << expected_pair.first;
+ EXPECT_EQ(expected_pair.second, value)
+ << "Unexpected value for " << expected_pair.first;
+ }
+
+ // Validate timing values exist.
+ int64_t value = -1;
+ EXPECT_TRUE(
+ item.getInt64("drm.mediadrm.get_key_request.ok.average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+
+ value = -1;
+ EXPECT_TRUE(
+ item.getInt64("drm.mediadrm.provide_key_response.ok.average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+}
+
+TEST_F(MediaDrmMetricsTest, AllValuesFull) {
+ MediaDrmMetrics metrics;
+
+ metrics.mOpenSessionCounter.Increment(OK);
+ metrics.mOpenSessionCounter.Increment(UNEXPECTED_NULL);
+
+ metrics.mCloseSessionCounter.Increment(OK);
+ metrics.mCloseSessionCounter.Increment(UNEXPECTED_NULL);
+
+ for (status_t s : {OK, UNEXPECTED_NULL}) {
+ {
+ EventTimer<status_t> get_key_request_timer(&metrics.mGetKeyRequestTiming);
+ EventTimer<status_t> provide_key_response_timer(
+ &metrics.mProvideKeyResponseTiming);
+ get_key_request_timer.SetAttribute(s);
+ provide_key_response_timer.SetAttribute(s);
+ }
+ }
+
+ metrics.mGetProvisionRequestCounter.Increment(OK);
+ metrics.mGetProvisionRequestCounter.Increment(UNEXPECTED_NULL);
+ metrics.mProvideProvisionResponseCounter.Increment(OK);
+ metrics.mProvideProvisionResponseCounter.Increment(UNEXPECTED_NULL);
+ metrics.mGetDeviceUniqueIdCounter.Increment(OK);
+ metrics.mGetDeviceUniqueIdCounter.Increment(UNEXPECTED_NULL);
+
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::EXPIRED);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::OUTPUTNOTALLOWED);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::STATUSPENDING);
+ metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::INTERNALERROR);
+ metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
+ metrics.mEventCounter.Increment(EventType::KEY_NEEDED);
+ metrics.mEventCounter.Increment(EventType::KEY_EXPIRED);
+ metrics.mEventCounter.Increment(EventType::VENDOR_DEFINED);
+ metrics.mEventCounter.Increment(EventType::SESSION_RECLAIMED);
+
+ MediaAnalyticsItem item;
+
+ metrics.Export(&item);
+ EXPECT_EQ(26, item.count());
+
+ // Verify the list of pairs of int64 metrics.
+ std::vector<std::pair<std::string, int64_t>> expected_values = {
+ { "drm.mediadrm.open_session.ok.count", 1 },
+ { "drm.mediadrm.close_session.ok.count", 1 },
+ { "drm.mediadrm.get_key_request.ok.count", 1 },
+ { "drm.mediadrm.provide_key_response.ok.count", 1 },
+ { "drm.mediadrm.get_provision_request.ok.count", 1 },
+ { "drm.mediadrm.provide_provision_response.ok.count", 1 },
+ { "drm.mediadrm.get_device_unique_id.ok.count", 1 },
+ { "drm.mediadrm.open_session.error.count", 1 },
+ { "drm.mediadrm.close_session.error.count", 1 },
+ { "drm.mediadrm.get_key_request.error.count", 1 },
+ { "drm.mediadrm.provide_key_response.error.count", 1 },
+ { "drm.mediadrm.get_provision_request.error.count", 1 },
+ { "drm.mediadrm.provide_provision_response.error.count", 1 },
+ { "drm.mediadrm.get_device_unique_id.error.count", 1 },
+ { "drm.mediadrm.key_status_change.USABLE.count", 1 },
+ { "drm.mediadrm.key_status_change.EXPIRED.count", 1 },
+ { "drm.mediadrm.key_status_change.OUTPUT_NOT_ALLOWED.count", 1 },
+ { "drm.mediadrm.key_status_change.STATUS_PENDING.count", 1 },
+ { "drm.mediadrm.key_status_change.INTERNAL_ERROR.count", 1 },
+ { "drm.mediadrm.event.PROVISION_REQUIRED.count", 1 },
+ { "drm.mediadrm.event.KEY_NEEDED.count", 1 },
+ { "drm.mediadrm.event.KEY_EXPIRED.count", 1 },
+ { "drm.mediadrm.event.VENDOR_DEFINED.count", 1 },
+ { "drm.mediadrm.event.SESSION_RECLAIMED.count", 1 }};
+ for (const auto& expected_pair : expected_values) {
+ int64_t value = -1;
+ EXPECT_TRUE(item.getInt64(expected_pair.first.c_str(), &value))
+ << "Failed to get " << expected_pair.first;
+ EXPECT_EQ(expected_pair.second, value)
+ << "Unexpected value for " << expected_pair.first;
+ }
+
+ // Validate timing values exist.
+ int64_t value = -1;
+ EXPECT_TRUE(
+ item.getInt64("drm.mediadrm.get_key_request.ok.average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+
+ value = -1;
+ EXPECT_TRUE(
+ item.getInt64("drm.mediadrm.provide_key_response.ok.average_time_micros",
+ &value));
+ EXPECT_GE(value, 0);
+}
+
+} // namespace android
diff --git a/drm/libmediadrm/tests/EventMetric_test.cpp b/drm/libmediadrm/tests/EventMetric_test.cpp
new file mode 100644
index 0000000..eb6c4f6
--- /dev/null
+++ b/drm/libmediadrm/tests/EventMetric_test.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "EventMetric.h"
+
+namespace android {
+
+/**
+ * Unit tests for the EventMetric class.
+ */
+
+TEST(EventMetricTest, IntDataTypeEmpty) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<int, EventStatistics> values;
+
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ EXPECT_TRUE(values.empty());
+}
+
+TEST(EventMetricTest, IntDataType) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<int, EventStatistics> values;
+
+ metric.Record(4, 7);
+ metric.Record(5, 8);
+ metric.Record(5, 8);
+ metric.Record(5, 8);
+ metric.Record(6, 8);
+ metric.Record(6, 8);
+ metric.Record(6, 8);
+
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_EQ(4, values[7].min);
+ EXPECT_EQ(4, values[7].max);
+ EXPECT_EQ(4, values[7].mean);
+ EXPECT_EQ(1, values[7].count);
+
+ EXPECT_EQ(5, values[8].min);
+ EXPECT_EQ(6, values[8].max);
+ // The mean is approximate (hence EXPECT_NEAR below) due to the estimation technique used.
+ EXPECT_NEAR(5.5, values[8].mean, 0.2);
+ EXPECT_EQ(6, values[8].count);
+}
+
+TEST(EventMetricTest, StringDataType) {
+ EventMetric<std::string> metric("MyMetricName", "MetricAttributeName");
+
+ std::map<std::string, EventStatistics> values;
+
+ metric.Record(1, "a");
+ metric.Record(2, "b");
+ metric.Record(2, "b");
+ metric.Record(3, "b");
+ metric.Record(3, "b");
+
+ metric.ExportValues(
+ [&] (std::string attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_EQ(1, values["a"].min);
+ EXPECT_EQ(1, values["a"].max);
+ EXPECT_EQ(1, values["a"].mean);
+ EXPECT_EQ(1, values["a"].count);
+
+ EXPECT_EQ(2, values["b"].min);
+ EXPECT_EQ(3, values["b"].max);
+ EXPECT_NEAR(2.5, values["b"].mean, 0.2);
+ EXPECT_EQ(4, values["b"].count);
+}
+
+// Helper class that allows us to mock the clock.
+template<typename AttributeType>
+class MockEventTimer : public EventTimer<AttributeType> {
+ public:
+ explicit MockEventTimer(nsecs_t time_delta_ns,
+ EventMetric<AttributeType>* metric)
+ : EventTimer<AttributeType>(metric) {
+ // Pretend the event started earlier.
+ this->start_time_ = systemTime() - time_delta_ns;
+ }
+};
+
+TEST(EventTimerTest, IntDataType) {
+ EventMetric<int> metric("MyMetricName", "MetricAttributeName");
+
+ for (int i = 0; i < 5; i++) {
+ {
+ // Add a mock time delta.
+ MockEventTimer<int> metric_timer(i * 1000000, &metric);
+ metric_timer.SetAttribute(i % 2);
+ }
+ }
+
+ std::map<int, EventStatistics> values;
+ metric.ExportValues(
+ [&] (int attribute_value, const EventStatistics& value) {
+ values[attribute_value] = value;
+ });
+
+ ASSERT_EQ(2u, values.size());
+ EXPECT_LT(values[0].min, values[0].max);
+ EXPECT_GE(4000, values[0].max);
+ EXPECT_GT(values[0].mean, values[0].min);
+ EXPECT_LE(values[0].mean, values[0].max);
+ EXPECT_EQ(3, values[0].count);
+
+ EXPECT_LT(values[1].min, values[1].max);
+ EXPECT_GE(3000, values[1].max);
+ EXPECT_GT(values[1].mean, values[1].min);
+ EXPECT_LE(values[1].mean, values[1].max);
+ EXPECT_EQ(2, values[1].count);
+}
+
+} // namespace android
diff --git a/include/media/DrmMetrics.h b/include/media/DrmMetrics.h
new file mode 120000
index 0000000..abc966b
--- /dev/null
+++ b/include/media/DrmMetrics.h
@@ -0,0 +1 @@
+../../media/libmedia/include/media/DrmMetrics.h
\ No newline at end of file
diff --git a/include/media/EventMetric.h b/include/media/EventMetric.h
new file mode 120000
index 0000000..5707d9a
--- /dev/null
+++ b/include/media/EventMetric.h
@@ -0,0 +1 @@
+../../media/libmedia/include/media/EventMetric.h
\ No newline at end of file
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index d23d907..f2254ce 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -160,16 +160,9 @@
static void usage() {
printf("Usage: aaudio_loopback [OPTION]...\n\n");
- printf(" -c{channels} number of output channels\n");
+ AAudioArgsParser::usage();
printf(" -C{channels} number of input channels\n");
printf(" -g{gain} recirculating loopback gain\n");
- printf(" -m{0|1|2|3} set MMAP policy\n");
- printf(" 0 = _UNSPECIFIED\n");
- printf(" 1 = _NEVER\n");
- printf(" 2 = _AUTO, also if -m is used with no number\n");
- printf(" 3 = _ALWAYS\n");
- printf(" -n{numBursts} buffer size, for example 2 for double buffered\n");
- printf(" -p{outPerf} set output AAUDIO_PERFORMANCE_MODE*\n");
printf(" -P{inPerf} set input AAUDIO_PERFORMANCE_MODE*\n");
printf(" n for _NONE\n");
printf(" l for _LATENCY\n");
@@ -178,8 +171,7 @@
printf(" m for sine magnitude\n");
printf(" e for echo latency (default)\n");
printf(" f for file latency, analyzes %s\n\n", FILENAME_ECHOS);
- printf(" -x use EXCLUSIVE mode for output\n");
- printf(" -X use EXCLUSIVE mode for input\n");
+ printf(" -X use EXCLUSIVE mode for input\n");
printf("Example: aaudio_loopback -n2 -pl -Pl -x\n");
}
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 142b295..4fc5b9f 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -88,6 +88,30 @@
mPerformanceMode = performanceMode;
}
+ aaudio_usage_t getUsage() const {
+ return mUsage;
+ }
+
+ void setUsage(aaudio_usage_t usage) {
+ mUsage = usage;
+ }
+
+ aaudio_content_type_t getContentType() const {
+ return mContentType;
+ }
+
+ void setContentType(aaudio_content_type_t contentType) {
+ mContentType = contentType;
+ }
+
+ aaudio_input_preset_t getInputPreset() const {
+ return mInputPreset;
+ }
+
+ void setInputPreset(aaudio_input_preset_t inputPreset) {
+ mInputPreset = inputPreset;
+ }
+
int32_t getDeviceId() const {
return mDeviceId;
}
@@ -116,6 +140,9 @@
AAudioStreamBuilder_setDeviceId(builder, mDeviceId);
AAudioStreamBuilder_setSharingMode(builder, mSharingMode);
AAudioStreamBuilder_setPerformanceMode(builder, mPerformanceMode);
+ AAudioStreamBuilder_setUsage(builder, mUsage);
+ AAudioStreamBuilder_setContentType(builder, mContentType);
+ AAudioStreamBuilder_setInputPreset(builder, mInputPreset);
}
private:
@@ -128,6 +155,10 @@
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
aaudio_performance_mode_t mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+ aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
+ aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
+
int32_t mNumberOfBursts = AAUDIO_UNSPECIFIED;
};
@@ -158,8 +189,8 @@
case 'd':
setDeviceId(atoi(&arg[2]));
break;
- case 's':
- mDurationSeconds = atoi(&arg[2]);
+ case 'i':
+ setInputPreset(atoi(&arg[2]));
break;
case 'm': {
aaudio_policy_t policy = AAUDIO_POLICY_AUTO;
@@ -177,9 +208,18 @@
case 'r':
setSampleRate(atoi(&arg[2]));
break;
+ case 's':
+ mDurationSeconds = atoi(&arg[2]);
+ break;
+ case 'u':
+ setUsage(atoi(&arg[2]));
+ break;
case 'x':
setSharingMode(AAUDIO_SHARING_MODE_EXCLUSIVE);
break;
+ case 'y':
+ setContentType(atoi(&arg[2]));
+ break;
default:
unrecognized = true;
break;
@@ -207,24 +247,28 @@
}
static void usage() {
- printf("-c{channels} -d{duration} -m -n{burstsPerBuffer} -p{perfMode} -r{rate} -x\n");
+ printf("-c{channels} -d{deviceId} -m{mmapPolicy} -n{burstsPerBuffer} -p{perfMode}");
+ printf(" -r{rate} -s{seconds} -x\n");
printf(" Default values are UNSPECIFIED unless otherwise stated.\n");
printf(" -b{bufferCapacity} frames\n");
printf(" -c{channels} for example 2 for stereo\n");
printf(" -d{deviceId} default is %d\n", AAUDIO_UNSPECIFIED);
- printf(" -s{duration} in seconds, default is %d\n", DEFAULT_DURATION_SECONDS);
+ printf(" -i{inputPreset} eg. 5 for AAUDIO_INPUT_PRESET_CAMCORDER\n");
printf(" -m{0|1|2|3} set MMAP policy\n");
- printf(" 0 = _UNSPECIFIED, default\n");
- printf(" 1 = _NEVER\n");
- printf(" 2 = _AUTO, also if -m is used with no number\n");
- printf(" 3 = _ALWAYS\n");
+ printf(" 0 = _UNSPECIFIED, use aaudio.mmap_policy system property, default\n");
+ printf(" 1 = _NEVER, never use MMAP\n");
+ printf(" 2 = _AUTO, use MMAP if available, default for -m with no number\n");
+ printf(" 3 = _ALWAYS, use MMAP or fail\n");
printf(" -n{numberOfBursts} for setBufferSize\n");
printf(" -p{performanceMode} set output AAUDIO_PERFORMANCE_MODE*, default NONE\n");
printf(" n for _NONE\n");
printf(" l for _LATENCY\n");
printf(" p for _POWER_SAVING;\n");
printf(" -r{sampleRate} for example 44100\n");
+ printf(" -s{duration} in seconds, default is %d\n", DEFAULT_DURATION_SECONDS);
+ printf(" -u{usage} eg. 14 for AAUDIO_USAGE_GAME\n");
printf(" -x to use EXCLUSIVE mode\n");
+ printf(" -y{contentType} eg. 1 for AAUDIO_CONTENT_TYPE_SPEECH\n");
}
static aaudio_performance_mode_t parsePerformanceMode(char c) {
@@ -287,6 +331,17 @@
printf(" PerformanceMode: requested = %d, actual = %d\n",
getPerformanceMode(), AAudioStream_getPerformanceMode(stream));
+
+ printf(" Usage: requested = %d, actual = %d\n",
+ getUsage(), AAudioStream_getUsage(stream));
+ printf(" ContentType: requested = %d, actual = %d\n",
+ getContentType(), AAudioStream_getContentType(stream));
+
+ if (AAudioStream_getDirection(stream) == AAUDIO_DIRECTION_INPUT) {
+ printf(" InputPreset: requested = %d, actual = %d\n",
+ getInputPreset(), AAudioStream_getInputPreset(stream));
+ }
+
printf(" Is MMAP used? %s\n", AAudioStream_isMMapUsed(stream)
? "yes" : "no");
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index 5d41fd0..e167773 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -204,7 +204,7 @@
AAudioArgsParser::usage();
printf(" -l{count} loopCount start/stop, every other one is silent\n");
printf(" -t{msec} play a high pitched tone at the beginning\n");
- printf(" -u force periodic Underruns by sleeping in callback\n");
+ printf(" -f force periodic underruns by sleeping in callback\n");
}
int main(int argc, const char **argv)
@@ -234,7 +234,7 @@
case 't':
prefixToneMsec = atoi(&arg[2]);
break;
- case 'u':
+ case 'f':
forceUnderruns = true;
break;
default:
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 3bb09d2..7783ad3 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -904,6 +904,7 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
@@ -912,35 +913,29 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return NO_INIT;
return aps->getInputForAttr(
- attr, input, session, pid, uid,
+ attr, input, session, pid, uid, opPackageName,
config, flags, selectedDeviceId, portId);
}
-status_t AudioSystem::startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
- bool *silenced)
+status_t AudioSystem::startInput(audio_port_handle_t portId, bool *silenced)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->startInput(input, session, device, uid, silenced);
+ return aps->startInput(portId, silenced);
}
-status_t AudioSystem::stopInput(audio_io_handle_t input,
- audio_session_t session)
+status_t AudioSystem::stopInput(audio_port_handle_t portId)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->stopInput(input, session);
+ return aps->stopInput(portId);
}
-void AudioSystem::releaseInput(audio_io_handle_t input,
- audio_session_t session)
+void AudioSystem::releaseInput(audio_port_handle_t portId)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return;
- aps->releaseInput(input, session);
+ aps->releaseInput(portId);
}
status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 6478975..b91e4cf 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -285,6 +285,7 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
@@ -313,6 +314,7 @@
data.writeInt32(session);
data.writeInt32(pid);
data.writeInt32(uid);
+ data.writeString16(opPackageName);
data.write(config, sizeof(audio_config_base_t));
data.writeInt32(flags);
data.writeInt32(*selectedDeviceId);
@@ -331,18 +333,12 @@
return NO_ERROR;
}
- virtual status_t startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
+ virtual status_t startInput(audio_port_handle_t portId,
bool *silenced)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32(input);
- data.writeInt32(session);
- data.writeInt32(device);
- data.writeInt32(uid);
+ data.writeInt32(portId);
data.writeInt32(*silenced ? 1 : 0);
remote()->transact(START_INPUT, data, &reply);
status_t status = static_cast <status_t> (reply.readInt32());
@@ -350,24 +346,20 @@
return status;
}
- virtual status_t stopInput(audio_io_handle_t input,
- audio_session_t session)
+ virtual status_t stopInput(audio_port_handle_t portId)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32(input);
- data.writeInt32(session);
+ data.writeInt32(portId);
remote()->transact(STOP_INPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual void releaseInput(audio_io_handle_t input,
- audio_session_t session)
+ virtual void releaseInput(audio_port_handle_t portId)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32(input);
- data.writeInt32(session);
+ data.writeInt32(portId);
remote()->transact(RELEASE_INPUT, data, &reply);
}
@@ -1034,6 +1026,7 @@
audio_session_t session = (audio_session_t)data.readInt32();
pid_t pid = (pid_t)data.readInt32();
uid_t uid = (uid_t)data.readInt32();
+ const String16 opPackageName = data.readString16();
audio_config_base_t config;
memset(&config, 0, sizeof(audio_config_base_t));
data.read(&config, sizeof(audio_config_base_t));
@@ -1041,7 +1034,7 @@
audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
status_t status = getInputForAttr(&attr, &input, session, pid, uid,
- &config,
+ opPackageName, &config,
flags, &selectedDeviceId, &portId);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1054,12 +1047,9 @@
case START_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- audio_session_t session = static_cast <audio_session_t>(data.readInt32());
- audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
- uid_t uid = static_cast <uid_t>(data.readInt32());
+ audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
bool silenced = data.readInt32() == 1;
- status_t status = startInput(input, session, device, uid, &silenced);
+ status_t status = startInput(portId, &silenced);
reply->writeInt32(static_cast <uint32_t>(status));
reply->writeInt32(silenced ? 1 : 0);
return NO_ERROR;
@@ -1067,17 +1057,15 @@
case STOP_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- audio_session_t session = static_cast <audio_session_t>(data.readInt32());
- reply->writeInt32(static_cast <uint32_t>(stopInput(input, session)));
+ audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
+ reply->writeInt32(static_cast <uint32_t>(stopInput(portId)));
return NO_ERROR;
} break;
case RELEASE_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- audio_session_t session = static_cast <audio_session_t>(data.readInt32());
- releaseInput(input, session);
+ audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
+ releaseInput(portId);
return NO_ERROR;
} break;
diff --git a/media/libaudioclient/include/media/AudioMixer.h b/media/libaudioclient/include/media/AudioMixer.h
index d4ce417..2e29316 100644
--- a/media/libaudioclient/include/media/AudioMixer.h
+++ b/media/libaudioclient/include/media/AudioMixer.h
@@ -18,8 +18,11 @@
#ifndef ANDROID_AUDIO_MIXER_H
#define ANDROID_AUDIO_MIXER_H
+#include <pthread.h>
+#include <sstream>
#include <stdint.h>
#include <sys/types.h>
+#include <unordered_map>
#include <media/AudioBufferProvider.h>
#include <media/AudioResampler.h>
@@ -43,20 +46,14 @@
class AudioMixer
{
public:
- AudioMixer(size_t frameCount, uint32_t sampleRate,
- uint32_t maxNumTracks = MAX_NUM_TRACKS);
+ // This mixer has a hard-coded upper limit on the number of active track inputs;
+ // the value is arbitrary but should be less than TRACK0 to avoid confusion.
+ static constexpr int32_t MAX_NUM_TRACKS = 256;
- /*virtual*/ ~AudioMixer(); // non-virtual saves a v-table, restore if sub-classed
-
-
- // This mixer has a hard-coded upper limit of 32 active track inputs.
- // Adding support for > 32 tracks would require more than simply changing this value.
- static const uint32_t MAX_NUM_TRACKS = 32;
- // maximum number of channels supported by the mixer
-
+ // Do not change these unless underlying code changes.
// This mixer has a hard-coded upper limit of 8 channels for output.
- static const uint32_t MAX_NUM_CHANNELS = 8;
- static const uint32_t MAX_NUM_VOLUMES = 2; // stereo volume only
+ static constexpr uint32_t MAX_NUM_CHANNELS = FCC_8;
+ static constexpr uint32_t MAX_NUM_VOLUMES = FCC_2; // stereo volume only
// maximum number of channels supported for the content
static const uint32_t MAX_NUM_CHANNELS_TO_DOWNMIX = AUDIO_CHANNEL_COUNT_MAX;
@@ -108,6 +105,12 @@
// parameter 'value' is a pointer to the new playback rate.
};
+ AudioMixer(size_t frameCount, uint32_t sampleRate, int32_t maxNumTracks = MAX_NUM_TRACKS)
+ : mMaxNumTracks(maxNumTracks)
+ , mSampleRate(sampleRate)
+ , mFrameCount(frameCount) {
+ pthread_once(&sOnceControl, &sInitRoutine);
+ }
// For all APIs with "name": TRACK0 <= name < TRACK0 + MAX_NUM_TRACKS
@@ -127,27 +130,38 @@
void setParameter(int name, int target, int param, void *value);
void setBufferProvider(int name, AudioBufferProvider* bufferProvider);
- void process();
- uint32_t trackNames() const { return mTrackNames; }
+ void process() {
+ (this->*mHook)();
+ }
size_t getUnreleasedFrames(int name) const;
- static inline bool isValidPcmTrackFormat(audio_format_t format) {
- switch (format) {
- case AUDIO_FORMAT_PCM_8_BIT:
- case AUDIO_FORMAT_PCM_16_BIT:
- case AUDIO_FORMAT_PCM_24_BIT_PACKED:
- case AUDIO_FORMAT_PCM_32_BIT:
- case AUDIO_FORMAT_PCM_FLOAT:
- return true;
- default:
- return false;
+ std::string trackNames() const {
+ std::stringstream ss;
+ for (const auto &pair : mTracks) {
+ ss << pair.first << " ";
}
+ return ss.str();
+ }
+
+ void setNBLogWriter(NBLog::Writer *logWriter) {
+ mNBLogWriter = logWriter;
}
private:
+ /* For multi-format functions (calls template functions
+ * in AudioMixerOps.h). The template parameters are as follows:
+ *
+ * MIXTYPE (see AudioMixerOps.h MIXTYPE_* enumeration)
+ * USEFLOATVOL (set to true if float volume is used)
+ * ADJUSTVOL (set to true if volume ramp parameters needs adjustment afterwards)
+ * TO: int32_t (Q4.27) or float
+ * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
+ * TA: int32_t (Q4.27)
+ */
+
enum {
// FIXME this representation permits up to 8 channels
NEEDS_CHANNEL_COUNT__MASK = 0x00000007,
@@ -164,14 +178,67 @@
NEEDS_AUX = 0x00010000,
};
- struct state_t;
- struct track_t;
+ // hook types
+ enum {
+ PROCESSTYPE_NORESAMPLEONETRACK, // others set elsewhere
+ };
- typedef void (*hook_t)(track_t* t, int32_t* output, size_t numOutFrames, int32_t* temp,
- int32_t* aux);
- static const int BLOCKSIZE = 16; // 4 cache lines
+ enum {
+ TRACKTYPE_NOP,
+ TRACKTYPE_RESAMPLE,
+ TRACKTYPE_NORESAMPLE,
+ TRACKTYPE_NORESAMPLEMONO,
+ };
- struct track_t {
+ // process hook functionality
+ using process_hook_t = void(AudioMixer::*)();
+
+ struct Track;
+ using hook_t = void(Track::*)(int32_t* output, size_t numOutFrames, int32_t* temp, int32_t* aux);
+
+ struct Track {
+ Track()
+ : bufferProvider(nullptr)
+ {
+ // TODO: move additional initialization here.
+ }
+
+ ~Track()
+ {
+ // bufferProvider, mInputBufferProvider need not be deleted.
+ mResampler.reset(nullptr);
+ // Ensure the order of destruction of buffer providers as they
+ // release the upstream provider in the destructor.
+ mTimestretchBufferProvider.reset(nullptr);
+ mPostDownmixReformatBufferProvider.reset(nullptr);
+ mDownmixerBufferProvider.reset(nullptr);
+ mReformatBufferProvider.reset(nullptr);
+ }
+
+ bool needsRamp() { return (volumeInc[0] | volumeInc[1] | auxInc) != 0; }
+ bool setResampler(uint32_t trackSampleRate, uint32_t devSampleRate);
+ bool doesResample() const { return mResampler.get() != nullptr; }
+ void resetResampler() { if (mResampler.get() != nullptr) mResampler->reset(); }
+ void adjustVolumeRamp(bool aux, bool useFloat = false);
+ size_t getUnreleasedFrames() const { return mResampler.get() != nullptr ?
+ mResampler->getUnreleasedFrames() : 0; };
+
+ status_t prepareForDownmix();
+ void unprepareForDownmix();
+ status_t prepareForReformat();
+ void unprepareForReformat();
+ bool setPlaybackRate(const AudioPlaybackRate &playbackRate);
+ void reconfigureBufferProviders();
+
+ static hook_t getTrackHook(int trackType, uint32_t channelCount,
+ audio_format_t mixerInFormat, audio_format_t mixerOutFormat);
+
+ void track__nop(int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux);
+
+ template <int MIXTYPE, bool USEFLOATVOL, bool ADJUSTVOL,
+ typename TO, typename TI, typename TA>
+ void volumeMix(TO *out, size_t outFrames, const TI *in, TA *aux, bool ramp);
+
uint32_t needs;
// TODO: Eventually remove legacy integer volume settings
@@ -181,16 +248,11 @@
};
int32_t prevVolume[MAX_NUM_VOLUMES];
-
- // 16-byte boundary
-
int32_t volumeInc[MAX_NUM_VOLUMES];
int32_t auxInc;
int32_t prevAuxLevel;
-
- // 16-byte boundary
-
int16_t auxLevel; // 0 <= auxLevel <= MAX_GAIN_INT, but signed for mul performance
+
uint16_t frameCount;
uint8_t channelCount; // 1 or 2, redundant with (needs & NEEDS_CHANNEL_COUNT__MASK)
@@ -202,22 +264,16 @@
// for how the Track buffer provider is wrapped by another one when dowmixing is required
AudioBufferProvider* bufferProvider;
- // 16-byte boundary
-
mutable AudioBufferProvider::Buffer buffer; // 8 bytes
hook_t hook;
- const void* in; // current location in buffer
+ const void *mIn; // current location in buffer
- // 16-byte boundary
-
- AudioResampler* resampler;
+ std::unique_ptr<AudioResampler> mResampler;
uint32_t sampleRate;
int32_t* mainBuffer;
int32_t* auxBuffer;
- // 16-byte boundary
-
/* Buffer providers are constructed to translate the track input data as needed.
*
* TODO: perhaps make a single PlaybackConverterProvider class to move
@@ -228,17 +284,17 @@
* match either mMixerInFormat or mDownmixRequiresFormat, if the downmixer
* requires reformat. For example, it may convert floating point input to
* PCM_16_bit if that's required by the downmixer.
- * 3) downmixerBufferProvider: If not NULL, performs the channel remixing to match
+ * 3) mDownmixerBufferProvider: If not NULL, performs the channel remixing to match
* the number of channels required by the mixer sink.
* 4) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from
* the downmixer requirements to the mixer engine input requirements.
* 5) mTimestretchBufferProvider: Adds timestretching for playback rate
*/
AudioBufferProvider* mInputBufferProvider; // externally provided buffer provider.
- PassthruBufferProvider* mReformatBufferProvider; // provider wrapper for reformatting.
- PassthruBufferProvider* downmixerBufferProvider; // wrapper for channel conversion.
- PassthruBufferProvider* mPostDownmixReformatBufferProvider;
- PassthruBufferProvider* mTimestretchBufferProvider;
+ std::unique_ptr<PassthruBufferProvider> mReformatBufferProvider;
+ std::unique_ptr<PassthruBufferProvider> mDownmixerBufferProvider;
+ std::unique_ptr<PassthruBufferProvider> mPostDownmixReformatBufferProvider;
+ std::unique_ptr<PassthruBufferProvider> mTimestretchBufferProvider;
int32_t sessionId;
@@ -263,129 +319,94 @@
AudioPlaybackRate mPlaybackRate;
- bool needsRamp() { return (volumeInc[0] | volumeInc[1] | auxInc) != 0; }
- bool setResampler(uint32_t trackSampleRate, uint32_t devSampleRate);
- bool doesResample() const { return resampler != NULL; }
- void resetResampler() { if (resampler != NULL) resampler->reset(); }
- void adjustVolumeRamp(bool aux, bool useFloat = false);
- size_t getUnreleasedFrames() const { return resampler != NULL ?
- resampler->getUnreleasedFrames() : 0; };
+ private:
+ // hooks
+ void track__genericResample(int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux);
+ void track__16BitsStereo(int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux);
+ void track__16BitsMono(int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux);
- status_t prepareForDownmix();
- void unprepareForDownmix();
- status_t prepareForReformat();
- void unprepareForReformat();
- bool setPlaybackRate(const AudioPlaybackRate &playbackRate);
- void reconfigureBufferProviders();
+ void volumeRampStereo(int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux);
+ void volumeStereo(int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux);
+
+ // multi-format track hooks
+ template <int MIXTYPE, typename TO, typename TI, typename TA>
+ void track__Resample(TO* out, size_t frameCount, TO* temp __unused, TA* aux);
+ template <int MIXTYPE, typename TO, typename TI, typename TA>
+ void track__NoResample(TO* out, size_t frameCount, TO* temp __unused, TA* aux);
};
- typedef void (*process_hook_t)(state_t* state);
-
- // pad to 32-bytes to fill cache line
- struct state_t {
- uint32_t enabledTracks;
- uint32_t needsChanged;
- size_t frameCount;
- process_hook_t hook; // one of process__*, never NULL
- int32_t *outputTemp;
- int32_t *resampleTemp;
- NBLog::Writer* mNBLogWriter; // associated NBLog::Writer or &mDummyLog
- int32_t reserved[1];
- // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS
- track_t tracks[MAX_NUM_TRACKS] __attribute__((aligned(32)));
- };
-
- // bitmask of allocated track names, where bit 0 corresponds to TRACK0 etc.
- uint32_t mTrackNames;
-
- // bitmask of configured track names; ~0 if maxNumTracks == MAX_NUM_TRACKS,
- // but will have fewer bits set if maxNumTracks < MAX_NUM_TRACKS
- const uint32_t mConfiguredNames;
-
- const uint32_t mSampleRate;
-
- NBLog::Writer mDummyLogWriter;
-public:
- // Called by FastMixer to inform AudioMixer of it's associated NBLog::Writer.
- // FIXME It would be safer to use TLS for this, so we don't accidentally use wrong one.
- void setNBLogWriter(NBLog::Writer* log);
-private:
- state_t mState __attribute__((aligned(32)));
-
- // Call after changing either the enabled status of a track, or parameters of an enabled track.
- // OK to call more often than that, but unnecessary.
- void invalidateState(uint32_t mask);
+ // TODO: remove BLOCKSIZE unit of processing - it isn't needed anymore.
+ static constexpr int BLOCKSIZE = 16;
bool setChannelMasks(int name,
audio_channel_mask_t trackChannelMask, audio_channel_mask_t mixerChannelMask);
- static void track__genericResample(track_t* t, int32_t* out, size_t numFrames, int32_t* temp,
- int32_t* aux);
- static void track__nop(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux);
- static void track__16BitsStereo(track_t* t, int32_t* out, size_t numFrames, int32_t* temp,
- int32_t* aux);
- static void track__16BitsMono(track_t* t, int32_t* out, size_t numFrames, int32_t* temp,
- int32_t* aux);
- static void volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
- int32_t* aux);
- static void volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
- int32_t* aux);
+ // Called when track info changes and a new process hook should be determined.
+ void invalidate() {
+ mHook = &AudioMixer::process__validate;
+ }
- static void process__validate(state_t* state);
- static void process__nop(state_t* state);
- static void process__genericNoResampling(state_t* state);
- static void process__genericResampling(state_t* state);
- static void process__OneTrack16BitsStereoNoResampling(state_t* state);
+ void process__validate();
+ void process__nop();
+ void process__genericNoResampling();
+ void process__genericResampling();
+ void process__oneTrack16BitsStereoNoResampling();
- static pthread_once_t sOnceControl;
- static void sInitRoutine();
-
- /* multi-format volume mixing function (calls template functions
- * in AudioMixerOps.h). The template parameters are as follows:
- *
- * MIXTYPE (see AudioMixerOps.h MIXTYPE_* enumeration)
- * USEFLOATVOL (set to true if float volume is used)
- * ADJUSTVOL (set to true if volume ramp parameters needs adjustment afterwards)
- * TO: int32_t (Q4.27) or float
- * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
- * TA: int32_t (Q4.27)
- */
- template <int MIXTYPE, bool USEFLOATVOL, bool ADJUSTVOL,
- typename TO, typename TI, typename TA>
- static void volumeMix(TO *out, size_t outFrames,
- const TI *in, TA *aux, bool ramp, AudioMixer::track_t *t);
-
- // multi-format process hooks
template <int MIXTYPE, typename TO, typename TI, typename TA>
- static void process_NoResampleOneTrack(state_t* state);
+ void process__noResampleOneTrack();
- // multi-format track hooks
- template <int MIXTYPE, typename TO, typename TI, typename TA>
- static void track__Resample(track_t* t, TO* out, size_t frameCount,
- TO* temp __unused, TA* aux);
- template <int MIXTYPE, typename TO, typename TI, typename TA>
- static void track__NoResample(track_t* t, TO* out, size_t frameCount,
- TO* temp __unused, TA* aux);
+ static process_hook_t getProcessHook(int processType, uint32_t channelCount,
+ audio_format_t mixerInFormat, audio_format_t mixerOutFormat);
static void convertMixerFormat(void *out, audio_format_t mixerOutFormat,
void *in, audio_format_t mixerInFormat, size_t sampleCount);
- // hook types
- enum {
- PROCESSTYPE_NORESAMPLEONETRACK,
- };
- enum {
- TRACKTYPE_NOP,
- TRACKTYPE_RESAMPLE,
- TRACKTYPE_NORESAMPLE,
- TRACKTYPE_NORESAMPLEMONO,
- };
+ static inline bool isValidPcmTrackFormat(audio_format_t format) {
+ switch (format) {
+ case AUDIO_FORMAT_PCM_8_BIT:
+ case AUDIO_FORMAT_PCM_16_BIT:
+ case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+ case AUDIO_FORMAT_PCM_32_BIT:
+ case AUDIO_FORMAT_PCM_FLOAT:
+ return true;
+ default:
+ return false;
+ }
+ }
- // functions for determining the proper process and track hooks.
- static process_hook_t getProcessHook(int processType, uint32_t channelCount,
- audio_format_t mixerInFormat, audio_format_t mixerOutFormat);
- static hook_t getTrackHook(int trackType, uint32_t channelCount,
- audio_format_t mixerInFormat, audio_format_t mixerOutFormat);
+ static void sInitRoutine();
+
+ // initialization constants
+ const int mMaxNumTracks;
+ const uint32_t mSampleRate;
+ const size_t mFrameCount;
+
+ NBLog::Writer *mNBLogWriter = nullptr; // associated NBLog::Writer
+
+ process_hook_t mHook = &AudioMixer::process__nop; // one of process__*, never nullptr
+
+ // the size of the type (int32_t) should be the largest of all types supported
+ // by the mixer.
+ std::unique_ptr<int32_t[]> mOutputTemp;
+ std::unique_ptr<int32_t[]> mResampleTemp;
+
+ // fast lookup of previously deleted track names for reuse.
+ // the AudioMixer tries to return the smallest unused name -
+ // this is an arbitrary decision (actually any non-negative
+ // integer that isn't in mTracks could be used).
+ std::set<int /* name */> mUnusedNames; // set of unused track names (may be empty)
+
+ // track names grouped by main buffer, in no particular order of main buffer.
+ // however names for a particular main buffer are in order (by construction).
+ std::unordered_map<void * /* mainBuffer */, std::vector<int /* name */>> mGroups;
+
+ // track names that are enabled, in increasing order (by construction).
+ std::vector<int /* name */> mEnabled;
+
+ // track smart pointers, by name, in increasing order of name.
+ std::map<int /* name */, std::shared_ptr<Track>> mTracks;
+
+ static pthread_once_t sOnceControl; // initialized in constructor by first new
};
// ----------------------------------------------------------------------------
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index f7c3d03..52dcfaa 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -239,20 +239,16 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId);
- static status_t startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
+ static status_t startInput(audio_port_handle_t portId,
bool *silenced);
- static status_t stopInput(audio_io_handle_t input,
- audio_session_t session);
- static void releaseInput(audio_io_handle_t input,
- audio_session_t session);
+ static status_t stopInput(audio_port_handle_t portId);
+ static void releaseInput(audio_port_handle_t portId);
static status_t initStreamVolume(audio_stream_type_t stream,
int indexMin,
int indexMax);
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 5338927..949d593 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -80,19 +80,15 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId) = 0;
- virtual status_t startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
+ virtual status_t startInput(audio_port_handle_t portId,
bool *silenced) = 0;
- virtual status_t stopInput(audio_io_handle_t input,
- audio_session_t session) = 0;
- virtual void releaseInput(audio_io_handle_t input,
- audio_session_t session) = 0;
+ virtual status_t stopInput(audio_port_handle_t portId) = 0;
+ virtual void releaseInput(audio_port_handle_t portId) = 0;
virtual status_t initStreamVolume(audio_stream_type_t stream,
int indexMin,
int indexMax) = 0;
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 5fafb8a..f1daeb4 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -62,13 +62,6 @@
#define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0]))
#endif
-// TODO: Move these macro/inlines to a header file.
-template <typename T>
-static inline
-T max(const T& x, const T& y) {
- return x > y ? x : y;
-}
-
// Set kUseNewMixer to true to use the new mixer engine always. Otherwise the
// original code will be used for stereo sinks, the new mixer for multichannel.
static constexpr bool kUseNewMixer = true;
@@ -93,88 +86,41 @@
// ----------------------------------------------------------------------------
-template <typename T>
-T min(const T& a, const T& b)
-{
- return a < b ? a : b;
-}
-
-// ----------------------------------------------------------------------------
-
-// Ensure mConfiguredNames bitmask is initialized properly on all architectures.
-// The value of 1 << x is undefined in C when x >= 32.
-
-AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks)
- : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 0 : 1 << maxNumTracks) - 1),
- mSampleRate(sampleRate)
-{
- ALOG_ASSERT(maxNumTracks <= MAX_NUM_TRACKS, "maxNumTracks %u > MAX_NUM_TRACKS %u",
- maxNumTracks, MAX_NUM_TRACKS);
-
- // AudioMixer is not yet capable of more than 32 active track inputs
- ALOG_ASSERT(32 >= MAX_NUM_TRACKS, "bad MAX_NUM_TRACKS %d", MAX_NUM_TRACKS);
-
- pthread_once(&sOnceControl, &sInitRoutine);
-
- mState.enabledTracks= 0;
- mState.needsChanged = 0;
- mState.frameCount = frameCount;
- mState.hook = process__nop;
- mState.outputTemp = NULL;
- mState.resampleTemp = NULL;
- mState.mNBLogWriter = &mDummyLogWriter;
- // mState.reserved
-
- // FIXME Most of the following initialization is probably redundant since
- // tracks[i] should only be referenced if (mTrackNames & (1 << i)) != 0
- // and mTrackNames is initially 0. However, leave it here until that's verified.
- track_t* t = mState.tracks;
- for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) {
- t->resampler = NULL;
- t->downmixerBufferProvider = NULL;
- t->mReformatBufferProvider = NULL;
- t->mTimestretchBufferProvider = NULL;
- t++;
- }
-
-}
-
-AudioMixer::~AudioMixer()
-{
- track_t* t = mState.tracks;
- for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) {
- delete t->resampler;
- delete t->downmixerBufferProvider;
- delete t->mReformatBufferProvider;
- delete t->mTimestretchBufferProvider;
- t++;
- }
- delete [] mState.outputTemp;
- delete [] mState.resampleTemp;
-}
-
-void AudioMixer::setNBLogWriter(NBLog::Writer *logWriter)
-{
- mState.mNBLogWriter = logWriter;
-}
-
static inline audio_format_t selectMixerInFormat(audio_format_t inputFormat __unused) {
return kUseFloat && kUseNewMixer ? AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT;
}
-int AudioMixer::getTrackName(audio_channel_mask_t channelMask,
- audio_format_t format, int sessionId)
+int AudioMixer::getTrackName(
+ audio_channel_mask_t channelMask, audio_format_t format, int sessionId)
{
if (!isValidPcmTrackFormat(format)) {
ALOGE("AudioMixer::getTrackName invalid format (%#x)", format);
return -1;
}
- uint32_t names = (~mTrackNames) & mConfiguredNames;
- if (names != 0) {
- int n = __builtin_ctz(names);
- ALOGV("add track (%d)", n);
+ if (mTracks.size() >= (size_t)mMaxNumTracks) {
+ ALOGE("%s: out of track names (max = %d)", __func__, mMaxNumTracks);
+ return -1;
+ }
+
+ // get a new name for the track.
+ int name;
+ if (mUnusedNames.size() != 0) {
+ // reuse first name for deleted track.
+ auto it = mUnusedNames.begin();
+ name = *it;
+ (void)mUnusedNames.erase(it);
+ } else {
+ // we're fully populated, so create a new name.
+ name = mTracks.size();
+ }
+ ALOGV("add track (%d)", name);
+
+ auto t = std::make_shared<Track>();
+ mTracks[name] = t;
+
+ {
+ // TODO: move initialization to the Track constructor.
// assume default parameters for the track, except where noted below
- track_t* t = &mState.tracks[n];
t->needs = 0;
// Integer volume.
@@ -215,17 +161,12 @@
// no initialization needed
// t->buffer.frameCount
t->hook = NULL;
- t->in = NULL;
- t->resampler = NULL;
+ t->mIn = NULL;
t->sampleRate = mSampleRate;
// setParameter(name, TRACK, MAIN_BUFFER, mixBuffer) is required before enable(name)
t->mainBuffer = NULL;
t->auxBuffer = NULL;
t->mInputBufferProvider = NULL;
- t->mReformatBufferProvider = NULL;
- t->downmixerBufferProvider = NULL;
- t->mPostDownmixReformatBufferProvider = NULL;
- t->mTimestretchBufferProvider = NULL;
t->mMixerFormat = AUDIO_FORMAT_PCM_16_BIT;
t->mFormat = format;
t->mMixerInFormat = selectMixerInFormat(format);
@@ -243,91 +184,78 @@
// prepareForDownmix() may change mDownmixRequiresFormat
ALOGVV("mMixerFormat:%#x mMixerInFormat:%#x\n", t->mMixerFormat, t->mMixerInFormat);
t->prepareForReformat();
- mTrackNames |= 1 << n;
- return TRACK0 + n;
+ return TRACK0 + name;
}
- ALOGE("AudioMixer::getTrackName out of available tracks");
- return -1;
}
-void AudioMixer::invalidateState(uint32_t mask)
-{
- if (mask != 0) {
- mState.needsChanged |= mask;
- mState.hook = process__validate;
- }
- }
-
// Called when channel masks have changed for a track name
// TODO: Fix DownmixerBufferProvider not to (possibly) change mixer input format,
// which will simplify this logic.
bool AudioMixer::setChannelMasks(int name,
audio_channel_mask_t trackChannelMask, audio_channel_mask_t mixerChannelMask) {
- track_t &track = mState.tracks[name];
+ LOG_ALWAYS_FATAL_IF(mTracks.find(name) == mTracks.end(), "invalid name: %d", name);
+ const std::shared_ptr<Track> &track = mTracks[name];
- if (trackChannelMask == track.channelMask
- && mixerChannelMask == track.mMixerChannelMask) {
+ if (trackChannelMask == track->channelMask
+ && mixerChannelMask == track->mMixerChannelMask) {
return false; // no need to change
}
// always recompute for both channel masks even if only one has changed.
const uint32_t trackChannelCount = audio_channel_count_from_out_mask(trackChannelMask);
const uint32_t mixerChannelCount = audio_channel_count_from_out_mask(mixerChannelMask);
- const bool mixerChannelCountChanged = track.mMixerChannelCount != mixerChannelCount;
+ const bool mixerChannelCountChanged = track->mMixerChannelCount != mixerChannelCount;
ALOG_ASSERT((trackChannelCount <= MAX_NUM_CHANNELS_TO_DOWNMIX)
&& trackChannelCount
&& mixerChannelCount);
- track.channelMask = trackChannelMask;
- track.channelCount = trackChannelCount;
- track.mMixerChannelMask = mixerChannelMask;
- track.mMixerChannelCount = mixerChannelCount;
+ track->channelMask = trackChannelMask;
+ track->channelCount = trackChannelCount;
+ track->mMixerChannelMask = mixerChannelMask;
+ track->mMixerChannelCount = mixerChannelCount;
// channel masks have changed, does this track need a downmixer?
// update to try using our desired format (if we aren't already using it)
- const audio_format_t prevDownmixerFormat = track.mDownmixRequiresFormat;
- const status_t status = mState.tracks[name].prepareForDownmix();
+ const audio_format_t prevDownmixerFormat = track->mDownmixRequiresFormat;
+ const status_t status = track->prepareForDownmix();
ALOGE_IF(status != OK,
"prepareForDownmix error %d, track channel mask %#x, mixer channel mask %#x",
- status, track.channelMask, track.mMixerChannelMask);
+ status, track->channelMask, track->mMixerChannelMask);
- if (prevDownmixerFormat != track.mDownmixRequiresFormat) {
- track.prepareForReformat(); // because of downmixer, track format may change!
+ if (prevDownmixerFormat != track->mDownmixRequiresFormat) {
+ track->prepareForReformat(); // because of downmixer, track format may change!
}
- if (track.resampler && mixerChannelCountChanged) {
+ if (track->mResampler.get() != nullptr && mixerChannelCountChanged) {
// resampler channels may have changed.
- const uint32_t resetToSampleRate = track.sampleRate;
- delete track.resampler;
- track.resampler = NULL;
- track.sampleRate = mSampleRate; // without resampler, track rate is device sample rate.
+ const uint32_t resetToSampleRate = track->sampleRate;
+ track->mResampler.reset(nullptr);
+ track->sampleRate = mSampleRate; // without resampler, track rate is device sample rate.
// recreate the resampler with updated format, channels, saved sampleRate.
- track.setResampler(resetToSampleRate /*trackSampleRate*/, mSampleRate /*devSampleRate*/);
+ track->setResampler(resetToSampleRate /*trackSampleRate*/, mSampleRate /*devSampleRate*/);
}
return true;
}
-void AudioMixer::track_t::unprepareForDownmix() {
+void AudioMixer::Track::unprepareForDownmix() {
ALOGV("AudioMixer::unprepareForDownmix(%p)", this);
- if (mPostDownmixReformatBufferProvider != nullptr) {
+ if (mPostDownmixReformatBufferProvider.get() != nullptr) {
// release any buffers held by the mPostDownmixReformatBufferProvider
- // before deallocating the downmixerBufferProvider.
+ // before deallocating the mDownmixerBufferProvider.
mPostDownmixReformatBufferProvider->reset();
}
mDownmixRequiresFormat = AUDIO_FORMAT_INVALID;
- if (downmixerBufferProvider != NULL) {
+ if (mDownmixerBufferProvider.get() != nullptr) {
// this track had previously been configured with a downmixer, delete it
- ALOGV(" deleting old downmixer");
- delete downmixerBufferProvider;
- downmixerBufferProvider = NULL;
+ mDownmixerBufferProvider.reset(nullptr);
reconfigureBufferProviders();
} else {
ALOGV(" nothing to do, no downmixer to delete");
}
}
-status_t AudioMixer::track_t::prepareForDownmix()
+status_t AudioMixer::Track::prepareForDownmix()
{
ALOGV("AudioMixer::prepareForDownmix(%p) with mask 0x%x",
this, channelMask);
@@ -345,40 +273,35 @@
if (audio_channel_mask_get_representation(channelMask)
== AUDIO_CHANNEL_REPRESENTATION_POSITION
&& DownmixerBufferProvider::isMultichannelCapable()) {
- DownmixerBufferProvider* pDbp = new DownmixerBufferProvider(channelMask,
+ mDownmixerBufferProvider.reset(new DownmixerBufferProvider(channelMask,
mMixerChannelMask,
AUDIO_FORMAT_PCM_16_BIT /* TODO: use mMixerInFormat, now only PCM 16 */,
- sampleRate, sessionId, kCopyBufferFrameCount);
-
- if (pDbp->isValid()) { // if constructor completed properly
+ sampleRate, sessionId, kCopyBufferFrameCount));
+ if (static_cast<DownmixerBufferProvider *>(mDownmixerBufferProvider.get())->isValid()) {
mDownmixRequiresFormat = AUDIO_FORMAT_PCM_16_BIT; // PCM 16 bit required for downmix
- downmixerBufferProvider = pDbp;
reconfigureBufferProviders();
return NO_ERROR;
}
- delete pDbp;
+ // mDownmixerBufferProvider reset below.
}
// Effect downmixer does not accept the channel conversion. Let's use our remixer.
- RemixBufferProvider* pRbp = new RemixBufferProvider(channelMask,
- mMixerChannelMask, mMixerInFormat, kCopyBufferFrameCount);
+ mDownmixerBufferProvider.reset(new RemixBufferProvider(channelMask,
+ mMixerChannelMask, mMixerInFormat, kCopyBufferFrameCount));
// Remix always finds a conversion whereas Downmixer effect above may fail.
- downmixerBufferProvider = pRbp;
reconfigureBufferProviders();
return NO_ERROR;
}
-void AudioMixer::track_t::unprepareForReformat() {
+void AudioMixer::Track::unprepareForReformat() {
ALOGV("AudioMixer::unprepareForReformat(%p)", this);
bool requiresReconfigure = false;
- if (mReformatBufferProvider != NULL) {
- delete mReformatBufferProvider;
- mReformatBufferProvider = NULL;
+ if (mReformatBufferProvider.get() != nullptr) {
+ mReformatBufferProvider.reset(nullptr);
requiresReconfigure = true;
}
- if (mPostDownmixReformatBufferProvider != NULL) {
- delete mPostDownmixReformatBufferProvider;
- mPostDownmixReformatBufferProvider = NULL;
+ if (mPostDownmixReformatBufferProvider.get() != nullptr) {
+ mPostDownmixReformatBufferProvider.reset(nullptr);
requiresReconfigure = true;
}
if (requiresReconfigure) {
@@ -386,7 +309,7 @@
}
}
-status_t AudioMixer::track_t::prepareForReformat()
+status_t AudioMixer::Track::prepareForReformat()
{
ALOGV("AudioMixer::prepareForReformat(%p) with format %#x", this, mFormat);
// discard previous reformatters
@@ -396,19 +319,19 @@
? mDownmixRequiresFormat : mMixerInFormat;
bool requiresReconfigure = false;
if (mFormat != targetFormat) {
- mReformatBufferProvider = new ReformatBufferProvider(
+ mReformatBufferProvider.reset(new ReformatBufferProvider(
audio_channel_count_from_out_mask(channelMask),
mFormat,
targetFormat,
- kCopyBufferFrameCount);
+ kCopyBufferFrameCount));
requiresReconfigure = true;
}
if (targetFormat != mMixerInFormat) {
- mPostDownmixReformatBufferProvider = new ReformatBufferProvider(
+ mPostDownmixReformatBufferProvider.reset(new ReformatBufferProvider(
audio_channel_count_from_out_mask(mMixerChannelMask),
targetFormat,
mMixerInFormat,
- kCopyBufferFrameCount);
+ kCopyBufferFrameCount));
requiresReconfigure = true;
}
if (requiresReconfigure) {
@@ -417,74 +340,63 @@
return NO_ERROR;
}
-void AudioMixer::track_t::reconfigureBufferProviders()
+void AudioMixer::Track::reconfigureBufferProviders()
{
bufferProvider = mInputBufferProvider;
- if (mReformatBufferProvider) {
+ if (mReformatBufferProvider.get() != nullptr) {
mReformatBufferProvider->setBufferProvider(bufferProvider);
- bufferProvider = mReformatBufferProvider;
+ bufferProvider = mReformatBufferProvider.get();
}
- if (downmixerBufferProvider) {
- downmixerBufferProvider->setBufferProvider(bufferProvider);
- bufferProvider = downmixerBufferProvider;
+ if (mDownmixerBufferProvider.get() != nullptr) {
+ mDownmixerBufferProvider->setBufferProvider(bufferProvider);
+ bufferProvider = mDownmixerBufferProvider.get();
}
- if (mPostDownmixReformatBufferProvider) {
+ if (mPostDownmixReformatBufferProvider.get() != nullptr) {
mPostDownmixReformatBufferProvider->setBufferProvider(bufferProvider);
- bufferProvider = mPostDownmixReformatBufferProvider;
+ bufferProvider = mPostDownmixReformatBufferProvider.get();
}
- if (mTimestretchBufferProvider) {
+ if (mTimestretchBufferProvider.get() != nullptr) {
mTimestretchBufferProvider->setBufferProvider(bufferProvider);
- bufferProvider = mTimestretchBufferProvider;
+ bufferProvider = mTimestretchBufferProvider.get();
}
}
void AudioMixer::deleteTrackName(int name)
{
- ALOGV("AudioMixer::deleteTrackName(%d)", name);
name -= TRACK0;
- LOG_ALWAYS_FATAL_IF(name < 0 || name >= (int)MAX_NUM_TRACKS, "bad track name %d", name);
+ LOG_ALWAYS_FATAL_IF(mTracks.find(name) == mTracks.end(), "invalid name: %d", name);
ALOGV("deleteTrackName(%d)", name);
- track_t& track(mState.tracks[ name ]);
- if (track.enabled) {
- track.enabled = false;
- invalidateState(1<<name);
+
+ if (mTracks[name]->enabled) {
+ invalidate();
}
- // delete the resampler
- delete track.resampler;
- track.resampler = NULL;
- // delete the downmixer
- mState.tracks[name].unprepareForDownmix();
- // delete the reformatter
- mState.tracks[name].unprepareForReformat();
- // delete the timestretch provider
- delete track.mTimestretchBufferProvider;
- track.mTimestretchBufferProvider = NULL;
- mTrackNames &= ~(1<<name);
+ mTracks.erase(name); // deallocate track
+ mUnusedNames.emplace(name); // recycle name
}
void AudioMixer::enable(int name)
{
name -= TRACK0;
- ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
- track_t& track = mState.tracks[name];
+ LOG_ALWAYS_FATAL_IF(mTracks.find(name) == mTracks.end(), "invalid name: %d", name);
+ const std::shared_ptr<Track> &track = mTracks[name];
- if (!track.enabled) {
- track.enabled = true;
+ if (!track->enabled) {
+ track->enabled = true;
ALOGV("enable(%d)", name);
- invalidateState(1 << name);
+ invalidate();
}
}
void AudioMixer::disable(int name)
{
name -= TRACK0;
- ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
- track_t& track = mState.tracks[name];
+ LOG_ALWAYS_FATAL_IF(mTracks.find(name) == mTracks.end(), "invalid name: %d", name);
+ const std::shared_ptr<Track> &track = mTracks[name];
- if (track.enabled) {
- track.enabled = false;
+ if (track->enabled) {
+ track->enabled = false;
ALOGV("disable(%d)", name);
- invalidateState(1 << name);
+ invalidate();
}
}
@@ -562,7 +474,8 @@
ALOGD_IF(*pPrevVolume != *pSetVolume, "previous float ramp hasn't finished,"
" prev:%f set_to:%f", *pPrevVolume, *pSetVolume);
const float inc = (newVolume - *pPrevVolume) / ramp; // could be inf, nan, subnormal
- const float maxv = max(newVolume, *pPrevVolume); // could be inf, cannot be nan, subnormal
+ // could be inf, cannot be nan, subnormal
+ const float maxv = std::max(newVolume, *pPrevVolume);
if (isnormal(inc) // inc must be a normal number (no subnormals, infinite, nan)
&& maxv + inc != maxv) { // inc must make forward progress
@@ -616,8 +529,8 @@
void AudioMixer::setParameter(int name, int target, int param, void *value)
{
name -= TRACK0;
- ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
- track_t& track = mState.tracks[name];
+ LOG_ALWAYS_FATAL_IF(mTracks.find(name) == mTracks.end(), "invalid name: %d", name);
+ const std::shared_ptr<Track> &track = mTracks[name];
int valueInt = static_cast<int>(reinterpret_cast<uintptr_t>(value));
int32_t *valueBuf = reinterpret_cast<int32_t*>(value);
@@ -629,33 +542,33 @@
case CHANNEL_MASK: {
const audio_channel_mask_t trackChannelMask =
static_cast<audio_channel_mask_t>(valueInt);
- if (setChannelMasks(name, trackChannelMask, track.mMixerChannelMask)) {
+ if (setChannelMasks(name, trackChannelMask, track->mMixerChannelMask)) {
ALOGV("setParameter(TRACK, CHANNEL_MASK, %x)", trackChannelMask);
- invalidateState(1 << name);
+ invalidate();
}
} break;
case MAIN_BUFFER:
- if (track.mainBuffer != valueBuf) {
- track.mainBuffer = valueBuf;
+ if (track->mainBuffer != valueBuf) {
+ track->mainBuffer = valueBuf;
ALOGV("setParameter(TRACK, MAIN_BUFFER, %p)", valueBuf);
- invalidateState(1 << name);
+ invalidate();
}
break;
case AUX_BUFFER:
- if (track.auxBuffer != valueBuf) {
- track.auxBuffer = valueBuf;
+ if (track->auxBuffer != valueBuf) {
+ track->auxBuffer = valueBuf;
ALOGV("setParameter(TRACK, AUX_BUFFER, %p)", valueBuf);
- invalidateState(1 << name);
+ invalidate();
}
break;
case FORMAT: {
audio_format_t format = static_cast<audio_format_t>(valueInt);
- if (track.mFormat != format) {
+ if (track->mFormat != format) {
ALOG_ASSERT(audio_is_linear_pcm(format), "Invalid format %#x", format);
- track.mFormat = format;
+ track->mFormat = format;
ALOGV("setParameter(TRACK, FORMAT, %#x)", format);
- track.prepareForReformat();
- invalidateState(1 << name);
+ track->prepareForReformat();
+ invalidate();
}
} break;
// FIXME do we want to support setting the downmix type from AudioFlinger?
@@ -664,17 +577,17 @@
break */
case MIXER_FORMAT: {
audio_format_t format = static_cast<audio_format_t>(valueInt);
- if (track.mMixerFormat != format) {
- track.mMixerFormat = format;
+ if (track->mMixerFormat != format) {
+ track->mMixerFormat = format;
ALOGV("setParameter(TRACK, MIXER_FORMAT, %#x)", format);
}
} break;
case MIXER_CHANNEL_MASK: {
const audio_channel_mask_t mixerChannelMask =
static_cast<audio_channel_mask_t>(valueInt);
- if (setChannelMasks(name, track.channelMask, mixerChannelMask)) {
+ if (setChannelMasks(name, track->channelMask, mixerChannelMask)) {
ALOGV("setParameter(TRACK, MIXER_CHANNEL_MASK, %#x)", mixerChannelMask);
- invalidateState(1 << name);
+ invalidate();
}
} break;
default:
@@ -686,21 +599,20 @@
switch (param) {
case SAMPLE_RATE:
ALOG_ASSERT(valueInt > 0, "bad sample rate %d", valueInt);
- if (track.setResampler(uint32_t(valueInt), mSampleRate)) {
+ if (track->setResampler(uint32_t(valueInt), mSampleRate)) {
ALOGV("setParameter(RESAMPLE, SAMPLE_RATE, %u)",
uint32_t(valueInt));
- invalidateState(1 << name);
+ invalidate();
}
break;
case RESET:
- track.resetResampler();
- invalidateState(1 << name);
+ track->resetResampler();
+ invalidate();
break;
case REMOVE:
- delete track.resampler;
- track.resampler = NULL;
- track.sampleRate = mSampleRate;
- invalidateState(1 << name);
+ track->mResampler.reset(nullptr);
+ track->sampleRate = mSampleRate;
+ invalidate();
break;
default:
LOG_ALWAYS_FATAL("setParameter resample: bad param %d", param);
@@ -712,26 +624,28 @@
switch (param) {
case AUXLEVEL:
if (setVolumeRampVariables(*reinterpret_cast<float*>(value),
- target == RAMP_VOLUME ? mState.frameCount : 0,
- &track.auxLevel, &track.prevAuxLevel, &track.auxInc,
- &track.mAuxLevel, &track.mPrevAuxLevel, &track.mAuxInc)) {
+ target == RAMP_VOLUME ? mFrameCount : 0,
+ &track->auxLevel, &track->prevAuxLevel, &track->auxInc,
+ &track->mAuxLevel, &track->mPrevAuxLevel, &track->mAuxInc)) {
ALOGV("setParameter(%s, AUXLEVEL: %04x)",
- target == VOLUME ? "VOLUME" : "RAMP_VOLUME", track.auxLevel);
- invalidateState(1 << name);
+ target == VOLUME ? "VOLUME" : "RAMP_VOLUME", track->auxLevel);
+ invalidate();
}
break;
default:
if ((unsigned)param >= VOLUME0 && (unsigned)param < VOLUME0 + MAX_NUM_VOLUMES) {
if (setVolumeRampVariables(*reinterpret_cast<float*>(value),
- target == RAMP_VOLUME ? mState.frameCount : 0,
- &track.volume[param - VOLUME0], &track.prevVolume[param - VOLUME0],
- &track.volumeInc[param - VOLUME0],
- &track.mVolume[param - VOLUME0], &track.mPrevVolume[param - VOLUME0],
- &track.mVolumeInc[param - VOLUME0])) {
+ target == RAMP_VOLUME ? mFrameCount : 0,
+ &track->volume[param - VOLUME0],
+ &track->prevVolume[param - VOLUME0],
+ &track->volumeInc[param - VOLUME0],
+ &track->mVolume[param - VOLUME0],
+ &track->mPrevVolume[param - VOLUME0],
+ &track->mVolumeInc[param - VOLUME0])) {
ALOGV("setParameter(%s, VOLUME%d: %04x)",
target == VOLUME ? "VOLUME" : "RAMP_VOLUME", param - VOLUME0,
- track.volume[param - VOLUME0]);
- invalidateState(1 << name);
+ track->volume[param - VOLUME0]);
+ invalidate();
}
} else {
LOG_ALWAYS_FATAL("setParameter volume: bad param %d", param);
@@ -744,16 +658,16 @@
const AudioPlaybackRate *playbackRate =
reinterpret_cast<AudioPlaybackRate*>(value);
ALOGW_IF(!isAudioPlaybackRateValid(*playbackRate),
- "bad parameters speed %f, pitch %f",playbackRate->mSpeed,
- playbackRate->mPitch);
- if (track.setPlaybackRate(*playbackRate)) {
+ "bad parameters speed %f, pitch %f",
+ playbackRate->mSpeed, playbackRate->mPitch);
+ if (track->setPlaybackRate(*playbackRate)) {
ALOGV("setParameter(TIMESTRETCH, PLAYBACK_RATE, STRETCH_MODE, FALLBACK_MODE "
"%f %f %d %d",
playbackRate->mSpeed,
playbackRate->mPitch,
playbackRate->mStretchMode,
playbackRate->mFallbackMode);
- // invalidateState(1 << name);
+ // invalidate(); (should not require reconfigure)
}
} break;
default:
@@ -766,12 +680,12 @@
}
}
-bool AudioMixer::track_t::setResampler(uint32_t trackSampleRate, uint32_t devSampleRate)
+bool AudioMixer::Track::setResampler(uint32_t trackSampleRate, uint32_t devSampleRate)
{
- if (trackSampleRate != devSampleRate || resampler != NULL) {
+ if (trackSampleRate != devSampleRate || mResampler.get() != nullptr) {
if (sampleRate != trackSampleRate) {
sampleRate = trackSampleRate;
- if (resampler == NULL) {
+ if (mResampler.get() == nullptr) {
ALOGV("Creating resampler from track %d Hz to device %d Hz",
trackSampleRate, devSampleRate);
AudioResampler::src_quality quality;
@@ -787,15 +701,15 @@
// TODO: Remove MONO_HACK. Resampler sees #channels after the downmixer
// but if none exists, it is the channel count (1 for mono).
- const int resamplerChannelCount = downmixerBufferProvider != NULL
+ const int resamplerChannelCount = mDownmixerBufferProvider.get() != nullptr
? mMixerChannelCount : channelCount;
ALOGVV("Creating resampler:"
" format(%#x) channels(%d) devSampleRate(%u) quality(%d)\n",
mMixerInFormat, resamplerChannelCount, devSampleRate, quality);
- resampler = AudioResampler::create(
+ mResampler.reset(AudioResampler::create(
mMixerInFormat,
resamplerChannelCount,
- devSampleRate, quality);
+ devSampleRate, quality));
}
return true;
}
@@ -803,25 +717,25 @@
return false;
}
-bool AudioMixer::track_t::setPlaybackRate(const AudioPlaybackRate &playbackRate)
+bool AudioMixer::Track::setPlaybackRate(const AudioPlaybackRate &playbackRate)
{
- if ((mTimestretchBufferProvider == NULL &&
+ if ((mTimestretchBufferProvider.get() == nullptr &&
fabs(playbackRate.mSpeed - mPlaybackRate.mSpeed) < AUDIO_TIMESTRETCH_SPEED_MIN_DELTA &&
fabs(playbackRate.mPitch - mPlaybackRate.mPitch) < AUDIO_TIMESTRETCH_PITCH_MIN_DELTA) ||
isAudioPlaybackRateEqual(playbackRate, mPlaybackRate)) {
return false;
}
mPlaybackRate = playbackRate;
- if (mTimestretchBufferProvider == NULL) {
+ if (mTimestretchBufferProvider.get() == nullptr) {
// TODO: Remove MONO_HACK. Resampler sees #channels after the downmixer
// but if none exists, it is the channel count (1 for mono).
- const int timestretchChannelCount = downmixerBufferProvider != NULL
+ const int timestretchChannelCount = mDownmixerBufferProvider.get() != nullptr
? mMixerChannelCount : channelCount;
- mTimestretchBufferProvider = new TimestretchBufferProvider(timestretchChannelCount,
- mMixerInFormat, sampleRate, playbackRate);
+ mTimestretchBufferProvider.reset(new TimestretchBufferProvider(timestretchChannelCount,
+ mMixerInFormat, sampleRate, playbackRate));
reconfigureBufferProviders();
} else {
- static_cast<TimestretchBufferProvider*>(mTimestretchBufferProvider)
+ static_cast<TimestretchBufferProvider*>(mTimestretchBufferProvider.get())
->setPlaybackRate(playbackRate);
}
return true;
@@ -840,7 +754,7 @@
*
* There is a bit of duplicated code here, but it keeps backward compatibility.
*/
-inline void AudioMixer::track_t::adjustVolumeRamp(bool aux, bool useFloat)
+inline void AudioMixer::Track::adjustVolumeRamp(bool aux, bool useFloat)
{
if (useFloat) {
for (uint32_t i = 0; i < MAX_NUM_VOLUMES; i++) {
@@ -895,8 +809,9 @@
size_t AudioMixer::getUnreleasedFrames(int name) const
{
name -= TRACK0;
- if (uint32_t(name) < MAX_NUM_TRACKS) {
- return mState.tracks[name].getUnreleasedFrames();
+ const auto it = mTracks.find(name);
+ if (it != mTracks.end()) {
+ return it->second->getUnreleasedFrames();
}
return 0;
}
@@ -904,87 +819,63 @@
void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider)
{
name -= TRACK0;
- ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
+ const std::shared_ptr<Track> &track = mTracks[name];
- if (mState.tracks[name].mInputBufferProvider == bufferProvider) {
+ if (track->mInputBufferProvider == bufferProvider) {
return; // don't reset any buffer providers if identical.
}
- if (mState.tracks[name].mReformatBufferProvider != NULL) {
- mState.tracks[name].mReformatBufferProvider->reset();
- } else if (mState.tracks[name].downmixerBufferProvider != NULL) {
- mState.tracks[name].downmixerBufferProvider->reset();
- } else if (mState.tracks[name].mPostDownmixReformatBufferProvider != NULL) {
- mState.tracks[name].mPostDownmixReformatBufferProvider->reset();
- } else if (mState.tracks[name].mTimestretchBufferProvider != NULL) {
- mState.tracks[name].mTimestretchBufferProvider->reset();
+ if (track->mReformatBufferProvider.get() != nullptr) {
+ track->mReformatBufferProvider->reset();
+ } else if (track->mDownmixerBufferProvider != nullptr) {
+ track->mDownmixerBufferProvider->reset();
+ } else if (track->mPostDownmixReformatBufferProvider.get() != nullptr) {
+ track->mPostDownmixReformatBufferProvider->reset();
+ } else if (track->mTimestretchBufferProvider.get() != nullptr) {
+ track->mTimestretchBufferProvider->reset();
}
- mState.tracks[name].mInputBufferProvider = bufferProvider;
- mState.tracks[name].reconfigureBufferProviders();
+ track->mInputBufferProvider = bufferProvider;
+ track->reconfigureBufferProviders();
}
-
-void AudioMixer::process()
+void AudioMixer::process__validate()
{
- mState.hook(&mState);
-}
-
-
-void AudioMixer::process__validate(state_t* state)
-{
- ALOGW_IF(!state->needsChanged,
- "in process__validate() but nothing's invalid");
-
- uint32_t changed = state->needsChanged;
- state->needsChanged = 0; // clear the validation flag
-
- // recompute which tracks are enabled / disabled
- uint32_t enabled = 0;
- uint32_t disabled = 0;
- while (changed) {
- const int i = 31 - __builtin_clz(changed);
- const uint32_t mask = 1<<i;
- changed &= ~mask;
- track_t& t = state->tracks[i];
- (t.enabled ? enabled : disabled) |= mask;
- }
- state->enabledTracks &= ~disabled;
- state->enabledTracks |= enabled;
-
- // compute everything we need...
- int countActiveTracks = 0;
// TODO: fix all16BitsStereNoResample logic to
// either properly handle muted tracks (it should ignore them)
// or remove altogether as an obsolete optimization.
bool all16BitsStereoNoResample = true;
bool resampling = false;
bool volumeRamp = false;
- uint32_t en = state->enabledTracks;
- while (en) {
- const int i = 31 - __builtin_clz(en);
- en &= ~(1<<i);
- countActiveTracks++;
- track_t& t = state->tracks[i];
+ mEnabled.clear();
+ mGroups.clear();
+ for (const auto &pair : mTracks) {
+ const int name = pair.first;
+ const std::shared_ptr<Track> &t = pair.second;
+ if (!t->enabled) continue;
+
+ mEnabled.emplace_back(name); // we add to mEnabled in order of name.
+ mGroups[t->mainBuffer].emplace_back(name); // mGroups also in order of name.
+
uint32_t n = 0;
// FIXME can overflow (mask is only 3 bits)
- n |= NEEDS_CHANNEL_1 + t.channelCount - 1;
- if (t.doesResample()) {
+ n |= NEEDS_CHANNEL_1 + t->channelCount - 1;
+ if (t->doesResample()) {
n |= NEEDS_RESAMPLE;
}
- if (t.auxLevel != 0 && t.auxBuffer != NULL) {
+ if (t->auxLevel != 0 && t->auxBuffer != NULL) {
n |= NEEDS_AUX;
}
- if (t.volumeInc[0]|t.volumeInc[1]) {
+ if (t->volumeInc[0]|t->volumeInc[1]) {
volumeRamp = true;
- } else if (!t.doesResample() && t.volumeRL == 0) {
+ } else if (!t->doesResample() && t->volumeRL == 0) {
n |= NEEDS_MUTE;
}
- t.needs = n;
+ t->needs = n;
if (n & NEEDS_MUTE) {
- t.hook = track__nop;
+ t->hook = &Track::track__nop;
} else {
if (n & NEEDS_AUX) {
all16BitsStereoNoResample = false;
@@ -992,23 +883,23 @@
if (n & NEEDS_RESAMPLE) {
all16BitsStereoNoResample = false;
resampling = true;
- t.hook = getTrackHook(TRACKTYPE_RESAMPLE, t.mMixerChannelCount,
- t.mMixerInFormat, t.mMixerFormat);
+ t->hook = Track::getTrackHook(TRACKTYPE_RESAMPLE, t->mMixerChannelCount,
+ t->mMixerInFormat, t->mMixerFormat);
ALOGV_IF((n & NEEDS_CHANNEL_COUNT__MASK) > NEEDS_CHANNEL_2,
"Track %d needs downmix + resample", i);
} else {
if ((n & NEEDS_CHANNEL_COUNT__MASK) == NEEDS_CHANNEL_1){
- t.hook = getTrackHook(
- (t.mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO // TODO: MONO_HACK
- && t.channelMask == AUDIO_CHANNEL_OUT_MONO)
+ t->hook = Track::getTrackHook(
+ (t->mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO // TODO: MONO_HACK
+ && t->channelMask == AUDIO_CHANNEL_OUT_MONO)
? TRACKTYPE_NORESAMPLEMONO : TRACKTYPE_NORESAMPLE,
- t.mMixerChannelCount,
- t.mMixerInFormat, t.mMixerFormat);
+ t->mMixerChannelCount,
+ t->mMixerInFormat, t->mMixerFormat);
all16BitsStereoNoResample = false;
}
if ((n & NEEDS_CHANNEL_COUNT__MASK) >= NEEDS_CHANNEL_2){
- t.hook = getTrackHook(TRACKTYPE_NORESAMPLE, t.mMixerChannelCount,
- t.mMixerInFormat, t.mMixerFormat);
+ t->hook = Track::getTrackHook(TRACKTYPE_NORESAMPLE, t->mMixerChannelCount,
+ t->mMixerInFormat, t->mMixerFormat);
ALOGV_IF((n & NEEDS_CHANNEL_COUNT__MASK) > NEEDS_CHANNEL_2,
"Track %d needs downmix", i);
}
@@ -1017,137 +908,125 @@
}
// select the processing hooks
- state->hook = process__nop;
- if (countActiveTracks > 0) {
+ mHook = &AudioMixer::process__nop;
+ if (mEnabled.size() > 0) {
if (resampling) {
- if (!state->outputTemp) {
- state->outputTemp = new int32_t[MAX_NUM_CHANNELS * state->frameCount];
+ if (mOutputTemp.get() == nullptr) {
+ mOutputTemp.reset(new int32_t[MAX_NUM_CHANNELS * mFrameCount]);
}
- if (!state->resampleTemp) {
- state->resampleTemp = new int32_t[MAX_NUM_CHANNELS * state->frameCount];
+ if (mResampleTemp.get() == nullptr) {
+ mResampleTemp.reset(new int32_t[MAX_NUM_CHANNELS * mFrameCount]);
}
- state->hook = process__genericResampling;
+ mHook = &AudioMixer::process__genericResampling;
} else {
- if (state->outputTemp) {
- delete [] state->outputTemp;
- state->outputTemp = NULL;
- }
- if (state->resampleTemp) {
- delete [] state->resampleTemp;
- state->resampleTemp = NULL;
- }
- state->hook = process__genericNoResampling;
+ // we keep temp arrays around.
+ mHook = &AudioMixer::process__genericNoResampling;
if (all16BitsStereoNoResample && !volumeRamp) {
- if (countActiveTracks == 1) {
- const int i = 31 - __builtin_clz(state->enabledTracks);
- track_t& t = state->tracks[i];
- if ((t.needs & NEEDS_MUTE) == 0) {
+ if (mEnabled.size() == 1) {
+ const std::shared_ptr<Track> &t = mTracks[mEnabled[0]];
+ if ((t->needs & NEEDS_MUTE) == 0) {
// The check prevents a muted track from acquiring a process hook.
//
// This is dangerous if the track is MONO as that requires
// special case handling due to implicit channel duplication.
// Stereo or Multichannel should actually be fine here.
- state->hook = getProcessHook(PROCESSTYPE_NORESAMPLEONETRACK,
- t.mMixerChannelCount, t.mMixerInFormat, t.mMixerFormat);
+ mHook = getProcessHook(PROCESSTYPE_NORESAMPLEONETRACK,
+ t->mMixerChannelCount, t->mMixerInFormat, t->mMixerFormat);
}
}
}
}
}
- ALOGV("mixer configuration change: %d activeTracks (%08x) "
+ ALOGV("mixer configuration change: %zu "
"all16BitsStereoNoResample=%d, resampling=%d, volumeRamp=%d",
- countActiveTracks, state->enabledTracks,
- all16BitsStereoNoResample, resampling, volumeRamp);
+ mEnabled.size(), all16BitsStereoNoResample, resampling, volumeRamp);
- state->hook(state);
+ process();
// Now that the volume ramp has been done, set optimal state and
// track hooks for subsequent mixer process
- if (countActiveTracks > 0) {
+ if (mEnabled.size() > 0) {
bool allMuted = true;
- uint32_t en = state->enabledTracks;
- while (en) {
- const int i = 31 - __builtin_clz(en);
- en &= ~(1<<i);
- track_t& t = state->tracks[i];
- if (!t.doesResample() && t.volumeRL == 0) {
- t.needs |= NEEDS_MUTE;
- t.hook = track__nop;
+
+ for (const int name : mEnabled) {
+ const std::shared_ptr<Track> &t = mTracks[name];
+ if (!t->doesResample() && t->volumeRL == 0) {
+ t->needs |= NEEDS_MUTE;
+ t->hook = &Track::track__nop;
} else {
allMuted = false;
}
}
if (allMuted) {
- state->hook = process__nop;
+ mHook = &AudioMixer::process__nop;
} else if (all16BitsStereoNoResample) {
- if (countActiveTracks == 1) {
- const int i = 31 - __builtin_clz(state->enabledTracks);
- track_t& t = state->tracks[i];
+ if (mEnabled.size() == 1) {
+ //const int i = 31 - __builtin_clz(enabledTracks);
+ const std::shared_ptr<Track> &t = mTracks[mEnabled[0]];
// Muted single tracks handled by allMuted above.
- state->hook = getProcessHook(PROCESSTYPE_NORESAMPLEONETRACK,
- t.mMixerChannelCount, t.mMixerInFormat, t.mMixerFormat);
+ mHook = getProcessHook(PROCESSTYPE_NORESAMPLEONETRACK,
+ t->mMixerChannelCount, t->mMixerInFormat, t->mMixerFormat);
}
}
}
}
-
-void AudioMixer::track__genericResample(track_t* t, int32_t* out, size_t outFrameCount,
- int32_t* temp, int32_t* aux)
+void AudioMixer::Track::track__genericResample(
+ int32_t* out, size_t outFrameCount, int32_t* temp, int32_t* aux)
{
ALOGVV("track__genericResample\n");
- t->resampler->setSampleRate(t->sampleRate);
+ mResampler->setSampleRate(sampleRate);
// ramp gain - resample to temp buffer and scale/mix in 2nd step
if (aux != NULL) {
// always resample with unity gain when sending to auxiliary buffer to be able
// to apply send level after resampling
- t->resampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
- memset(temp, 0, outFrameCount * t->mMixerChannelCount * sizeof(int32_t));
- t->resampler->resample(temp, outFrameCount, t->bufferProvider);
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1]|t->auxInc)) {
- volumeRampStereo(t, out, outFrameCount, temp, aux);
+ mResampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
+ memset(temp, 0, outFrameCount * mMixerChannelCount * sizeof(int32_t));
+ mResampler->resample(temp, outFrameCount, bufferProvider);
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1]|auxInc)) {
+ volumeRampStereo(out, outFrameCount, temp, aux);
} else {
- volumeStereo(t, out, outFrameCount, temp, aux);
+ volumeStereo(out, outFrameCount, temp, aux);
}
} else {
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1])) {
- t->resampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1])) {
+ mResampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
memset(temp, 0, outFrameCount * MAX_NUM_CHANNELS * sizeof(int32_t));
- t->resampler->resample(temp, outFrameCount, t->bufferProvider);
- volumeRampStereo(t, out, outFrameCount, temp, aux);
+ mResampler->resample(temp, outFrameCount, bufferProvider);
+ volumeRampStereo(out, outFrameCount, temp, aux);
}
// constant gain
else {
- t->resampler->setVolume(t->mVolume[0], t->mVolume[1]);
- t->resampler->resample(out, outFrameCount, t->bufferProvider);
+ mResampler->setVolume(mVolume[0], mVolume[1]);
+ mResampler->resample(out, outFrameCount, bufferProvider);
}
}
}
-void AudioMixer::track__nop(track_t* t __unused, int32_t* out __unused,
+void AudioMixer::Track::track__nop(int32_t* out __unused,
size_t outFrameCount __unused, int32_t* temp __unused, int32_t* aux __unused)
{
}
-void AudioMixer::volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
- int32_t* aux)
+void AudioMixer::Track::volumeRampStereo(
+ int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux)
{
- int32_t vl = t->prevVolume[0];
- int32_t vr = t->prevVolume[1];
- const int32_t vlInc = t->volumeInc[0];
- const int32_t vrInc = t->volumeInc[1];
+ int32_t vl = prevVolume[0];
+ int32_t vr = prevVolume[1];
+ const int32_t vlInc = volumeInc[0];
+ const int32_t vrInc = volumeInc[1];
//ALOGD("[0] %p: inc=%f, v0=%f, v1=%d, final=%f, count=%d",
- // t, vlInc/65536.0f, vl/65536.0f, t->volume[0],
+ // t, vlInc/65536.0f, vl/65536.0f, volume[0],
// (vl + vlInc*frameCount)/65536.0f, frameCount);
// ramp volume
if (CC_UNLIKELY(aux != NULL)) {
- int32_t va = t->prevAuxLevel;
- const int32_t vaInc = t->auxInc;
+ int32_t va = prevAuxLevel;
+ const int32_t vaInc = auxInc;
int32_t l;
int32_t r;
@@ -1161,7 +1040,7 @@
vr += vrInc;
va += vaInc;
} while (--frameCount);
- t->prevAuxLevel = va;
+ prevAuxLevel = va;
} else {
do {
*out++ += (vl >> 16) * (*temp++ >> 12);
@@ -1170,19 +1049,19 @@
vr += vrInc;
} while (--frameCount);
}
- t->prevVolume[0] = vl;
- t->prevVolume[1] = vr;
- t->adjustVolumeRamp(aux != NULL);
+ prevVolume[0] = vl;
+ prevVolume[1] = vr;
+ adjustVolumeRamp(aux != NULL);
}
-void AudioMixer::volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp,
- int32_t* aux)
+void AudioMixer::Track::volumeStereo(
+ int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux)
{
- const int16_t vl = t->volume[0];
- const int16_t vr = t->volume[1];
+ const int16_t vl = volume[0];
+ const int16_t vr = volume[1];
if (CC_UNLIKELY(aux != NULL)) {
- const int16_t va = t->auxLevel;
+ const int16_t va = auxLevel;
do {
int16_t l = (int16_t)(*temp++ >> 12);
int16_t r = (int16_t)(*temp++ >> 12);
@@ -1204,25 +1083,25 @@
}
}
-void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount,
- int32_t* temp __unused, int32_t* aux)
+void AudioMixer::Track::track__16BitsStereo(
+ int32_t* out, size_t frameCount, int32_t* temp __unused, int32_t* aux)
{
ALOGVV("track__16BitsStereo\n");
- const int16_t *in = static_cast<const int16_t *>(t->in);
+ const int16_t *in = static_cast<const int16_t *>(mIn);
if (CC_UNLIKELY(aux != NULL)) {
int32_t l;
int32_t r;
// ramp gain
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1]|t->auxInc)) {
- int32_t vl = t->prevVolume[0];
- int32_t vr = t->prevVolume[1];
- int32_t va = t->prevAuxLevel;
- const int32_t vlInc = t->volumeInc[0];
- const int32_t vrInc = t->volumeInc[1];
- const int32_t vaInc = t->auxInc;
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1]|auxInc)) {
+ int32_t vl = prevVolume[0];
+ int32_t vr = prevVolume[1];
+ int32_t va = prevAuxLevel;
+ const int32_t vlInc = volumeInc[0];
+ const int32_t vrInc = volumeInc[1];
+ const int32_t vaInc = auxInc;
// ALOGD("[1] %p: inc=%f, v0=%f, v1=%d, final=%f, count=%d",
- // t, vlInc/65536.0f, vl/65536.0f, t->volume[0],
+ // t, vlInc/65536.0f, vl/65536.0f, volume[0],
// (vl + vlInc*frameCount)/65536.0f, frameCount);
do {
@@ -1236,16 +1115,16 @@
va += vaInc;
} while (--frameCount);
- t->prevVolume[0] = vl;
- t->prevVolume[1] = vr;
- t->prevAuxLevel = va;
- t->adjustVolumeRamp(true);
+ prevVolume[0] = vl;
+ prevVolume[1] = vr;
+ prevAuxLevel = va;
+ adjustVolumeRamp(true);
}
// constant gain
else {
- const uint32_t vrl = t->volumeRL;
- const int16_t va = (int16_t)t->auxLevel;
+ const uint32_t vrl = volumeRL;
+ const int16_t va = (int16_t)auxLevel;
do {
uint32_t rl = *reinterpret_cast<const uint32_t *>(in);
int16_t a = (int16_t)(((int32_t)in[0] + in[1]) >> 1);
@@ -1259,14 +1138,14 @@
}
} else {
// ramp gain
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1])) {
- int32_t vl = t->prevVolume[0];
- int32_t vr = t->prevVolume[1];
- const int32_t vlInc = t->volumeInc[0];
- const int32_t vrInc = t->volumeInc[1];
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1])) {
+ int32_t vl = prevVolume[0];
+ int32_t vr = prevVolume[1];
+ const int32_t vlInc = volumeInc[0];
+ const int32_t vrInc = volumeInc[1];
// ALOGD("[1] %p: inc=%f, v0=%f, v1=%d, final=%f, count=%d",
- // t, vlInc/65536.0f, vl/65536.0f, t->volume[0],
+ // t, vlInc/65536.0f, vl/65536.0f, volume[0],
// (vl + vlInc*frameCount)/65536.0f, frameCount);
do {
@@ -1276,14 +1155,14 @@
vr += vrInc;
} while (--frameCount);
- t->prevVolume[0] = vl;
- t->prevVolume[1] = vr;
- t->adjustVolumeRamp(false);
+ prevVolume[0] = vl;
+ prevVolume[1] = vr;
+ adjustVolumeRamp(false);
}
// constant gain
else {
- const uint32_t vrl = t->volumeRL;
+ const uint32_t vrl = volumeRL;
do {
uint32_t rl = *reinterpret_cast<const uint32_t *>(in);
in += 2;
@@ -1293,27 +1172,27 @@
} while (--frameCount);
}
}
- t->in = in;
+ mIn = in;
}
-void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount,
- int32_t* temp __unused, int32_t* aux)
+void AudioMixer::Track::track__16BitsMono(
+ int32_t* out, size_t frameCount, int32_t* temp __unused, int32_t* aux)
{
ALOGVV("track__16BitsMono\n");
- const int16_t *in = static_cast<int16_t const *>(t->in);
+ const int16_t *in = static_cast<int16_t const *>(mIn);
if (CC_UNLIKELY(aux != NULL)) {
// ramp gain
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1]|t->auxInc)) {
- int32_t vl = t->prevVolume[0];
- int32_t vr = t->prevVolume[1];
- int32_t va = t->prevAuxLevel;
- const int32_t vlInc = t->volumeInc[0];
- const int32_t vrInc = t->volumeInc[1];
- const int32_t vaInc = t->auxInc;
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1]|auxInc)) {
+ int32_t vl = prevVolume[0];
+ int32_t vr = prevVolume[1];
+ int32_t va = prevAuxLevel;
+ const int32_t vlInc = volumeInc[0];
+ const int32_t vrInc = volumeInc[1];
+ const int32_t vaInc = auxInc;
// ALOGD("[2] %p: inc=%f, v0=%f, v1=%d, final=%f, count=%d",
- // t, vlInc/65536.0f, vl/65536.0f, t->volume[0],
+ // t, vlInc/65536.0f, vl/65536.0f, volume[0],
// (vl + vlInc*frameCount)/65536.0f, frameCount);
do {
@@ -1326,16 +1205,16 @@
va += vaInc;
} while (--frameCount);
- t->prevVolume[0] = vl;
- t->prevVolume[1] = vr;
- t->prevAuxLevel = va;
- t->adjustVolumeRamp(true);
+ prevVolume[0] = vl;
+ prevVolume[1] = vr;
+ prevAuxLevel = va;
+ adjustVolumeRamp(true);
}
// constant gain
else {
- const int16_t vl = t->volume[0];
- const int16_t vr = t->volume[1];
- const int16_t va = (int16_t)t->auxLevel;
+ const int16_t vl = volume[0];
+ const int16_t vr = volume[1];
+ const int16_t va = (int16_t)auxLevel;
do {
int16_t l = *in++;
out[0] = mulAdd(l, vl, out[0]);
@@ -1347,14 +1226,14 @@
}
} else {
// ramp gain
- if (CC_UNLIKELY(t->volumeInc[0]|t->volumeInc[1])) {
- int32_t vl = t->prevVolume[0];
- int32_t vr = t->prevVolume[1];
- const int32_t vlInc = t->volumeInc[0];
- const int32_t vrInc = t->volumeInc[1];
+ if (CC_UNLIKELY(volumeInc[0]|volumeInc[1])) {
+ int32_t vl = prevVolume[0];
+ int32_t vr = prevVolume[1];
+ const int32_t vlInc = volumeInc[0];
+ const int32_t vrInc = volumeInc[1];
// ALOGD("[2] %p: inc=%f, v0=%f, v1=%d, final=%f, count=%d",
- // t, vlInc/65536.0f, vl/65536.0f, t->volume[0],
+ // t, vlInc/65536.0f, vl/65536.0f, volume[0],
// (vl + vlInc*frameCount)/65536.0f, frameCount);
do {
@@ -1365,14 +1244,14 @@
vr += vrInc;
} while (--frameCount);
- t->prevVolume[0] = vl;
- t->prevVolume[1] = vr;
- t->adjustVolumeRamp(false);
+ prevVolume[0] = vl;
+ prevVolume[1] = vr;
+ adjustVolumeRamp(false);
}
// constant gain
else {
- const int16_t vl = t->volume[0];
- const int16_t vr = t->volume[1];
+ const int16_t vl = volume[0];
+ const int16_t vr = volume[1];
do {
int16_t l = *in++;
out[0] = mulAdd(l, vl, out[0]);
@@ -1381,274 +1260,214 @@
} while (--frameCount);
}
}
- t->in = in;
+ mIn = in;
}
// no-op case
-void AudioMixer::process__nop(state_t* state)
+void AudioMixer::process__nop()
{
ALOGVV("process__nop\n");
- uint32_t e0 = state->enabledTracks;
- while (e0) {
+
+ for (const auto &pair : mGroups) {
// process by group of tracks with same output buffer to
// avoid multiple memset() on same buffer
- uint32_t e1 = e0, e2 = e0;
- int i = 31 - __builtin_clz(e1);
- {
- track_t& t1 = state->tracks[i];
- e2 &= ~(1<<i);
- while (e2) {
- i = 31 - __builtin_clz(e2);
- e2 &= ~(1<<i);
- track_t& t2 = state->tracks[i];
- if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) {
- e1 &= ~(1<<i);
- }
- }
- e0 &= ~(e1);
+ const auto &group = pair.second;
- memset(t1.mainBuffer, 0, state->frameCount * t1.mMixerChannelCount
- * audio_bytes_per_sample(t1.mMixerFormat));
- }
+ const std::shared_ptr<Track> &t = mTracks[group[0]];
+ memset(t->mainBuffer, 0,
+ mFrameCount * t->mMixerChannelCount
+ * audio_bytes_per_sample(t->mMixerFormat));
- while (e1) {
- i = 31 - __builtin_clz(e1);
- e1 &= ~(1<<i);
- {
- track_t& t3 = state->tracks[i];
- size_t outFrames = state->frameCount;
- while (outFrames) {
- t3.buffer.frameCount = outFrames;
- t3.bufferProvider->getNextBuffer(&t3.buffer);
- if (t3.buffer.raw == NULL) break;
- outFrames -= t3.buffer.frameCount;
- t3.bufferProvider->releaseBuffer(&t3.buffer);
- }
+ // now consume data
+ for (const int name : group) {
+ const std::shared_ptr<Track> &t = mTracks[name];
+ size_t outFrames = mFrameCount;
+ while (outFrames) {
+ t->buffer.frameCount = outFrames;
+ t->bufferProvider->getNextBuffer(&t->buffer);
+ if (t->buffer.raw == NULL) break;
+ outFrames -= t->buffer.frameCount;
+ t->bufferProvider->releaseBuffer(&t->buffer);
}
}
}
}
// generic code without resampling
-void AudioMixer::process__genericNoResampling(state_t* state)
+void AudioMixer::process__genericNoResampling()
{
ALOGVV("process__genericNoResampling\n");
int32_t outTemp[BLOCKSIZE * MAX_NUM_CHANNELS] __attribute__((aligned(32)));
- // acquire each track's buffer
- uint32_t enabledTracks = state->enabledTracks;
- uint32_t e0 = enabledTracks;
- while (e0) {
- const int i = 31 - __builtin_clz(e0);
- e0 &= ~(1<<i);
- track_t& t = state->tracks[i];
- t.buffer.frameCount = state->frameCount;
- t.bufferProvider->getNextBuffer(&t.buffer);
- t.frameCount = t.buffer.frameCount;
- t.in = t.buffer.raw;
- }
+ for (const auto &pair : mGroups) {
+ // process by group of tracks with same output main buffer to
+ // avoid multiple memset() on same buffer
+ const auto &group = pair.second;
- e0 = enabledTracks;
- while (e0) {
- // process by group of tracks with same output buffer to
- // optimize cache use
- uint32_t e1 = e0, e2 = e0;
- int j = 31 - __builtin_clz(e1);
- track_t& t1 = state->tracks[j];
- e2 &= ~(1<<j);
- while (e2) {
- j = 31 - __builtin_clz(e2);
- e2 &= ~(1<<j);
- track_t& t2 = state->tracks[j];
- if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) {
- e1 &= ~(1<<j);
- }
+ // acquire buffer
+ for (const int name : group) {
+ const std::shared_ptr<Track> &t = mTracks[name];
+ t->buffer.frameCount = mFrameCount;
+ t->bufferProvider->getNextBuffer(&t->buffer);
+ t->frameCount = t->buffer.frameCount;
+ t->mIn = t->buffer.raw;
}
- e0 &= ~(e1);
- // this assumes output 16 bits stereo, no resampling
- int32_t *out = t1.mainBuffer;
+
+ int32_t *out = (int32_t *)pair.first;
size_t numFrames = 0;
do {
- const size_t frameCount = min((size_t)BLOCKSIZE, state->frameCount - numFrames);
+ const size_t frameCount = std::min((size_t)BLOCKSIZE, mFrameCount - numFrames);
memset(outTemp, 0, sizeof(outTemp));
- e2 = e1;
- while (e2) {
- const int i = 31 - __builtin_clz(e2);
- e2 &= ~(1<<i);
- track_t& t = state->tracks[i];
- size_t outFrames = frameCount;
+ for (const int name : group) {
+ const std::shared_ptr<Track> &t = mTracks[name];
int32_t *aux = NULL;
- if (CC_UNLIKELY(t.needs & NEEDS_AUX)) {
- aux = t.auxBuffer + numFrames;
+ if (CC_UNLIKELY(t->needs & NEEDS_AUX)) {
+ aux = t->auxBuffer + numFrames;
}
- while (outFrames) {
- // t.in == NULL can happen if the track was flushed just after having
+ for (int outFrames = frameCount; outFrames > 0; ) {
+ // t->in == nullptr can happen if the track was flushed just after having
// been enabled for mixing.
- if (t.in == NULL) {
- enabledTracks &= ~(1<<i);
- e1 &= ~(1<<i);
+ if (t->mIn == nullptr) {
break;
}
- size_t inFrames = (t.frameCount > outFrames)?outFrames:t.frameCount;
+ size_t inFrames = (t->frameCount > outFrames)?outFrames:t->frameCount;
if (inFrames > 0) {
- t.hook(&t, outTemp + (frameCount - outFrames) * t.mMixerChannelCount,
- inFrames, state->resampleTemp, aux);
- t.frameCount -= inFrames;
+ (t.get()->*t->hook)(
+ outTemp + (frameCount - outFrames) * t->mMixerChannelCount,
+ inFrames, mResampleTemp.get() /* naked ptr */, aux);
+ t->frameCount -= inFrames;
outFrames -= inFrames;
if (CC_UNLIKELY(aux != NULL)) {
aux += inFrames;
}
}
- if (t.frameCount == 0 && outFrames) {
- t.bufferProvider->releaseBuffer(&t.buffer);
- t.buffer.frameCount = (state->frameCount - numFrames) -
+ if (t->frameCount == 0 && outFrames) {
+ t->bufferProvider->releaseBuffer(&t->buffer);
+ t->buffer.frameCount = (mFrameCount - numFrames) -
(frameCount - outFrames);
- t.bufferProvider->getNextBuffer(&t.buffer);
- t.in = t.buffer.raw;
- if (t.in == NULL) {
- enabledTracks &= ~(1<<i);
- e1 &= ~(1<<i);
+ t->bufferProvider->getNextBuffer(&t->buffer);
+ t->mIn = t->buffer.raw;
+ if (t->mIn == nullptr) {
break;
}
- t.frameCount = t.buffer.frameCount;
+ t->frameCount = t->buffer.frameCount;
}
}
}
- convertMixerFormat(out, t1.mMixerFormat, outTemp, t1.mMixerInFormat,
- frameCount * t1.mMixerChannelCount);
+ const std::shared_ptr<Track> &t1 = mTracks[group[0]];
+ convertMixerFormat(out, t1->mMixerFormat, outTemp, t1->mMixerInFormat,
+ frameCount * t1->mMixerChannelCount);
// TODO: fix ugly casting due to choice of out pointer type
out = reinterpret_cast<int32_t*>((uint8_t*)out
- + frameCount * t1.mMixerChannelCount
- * audio_bytes_per_sample(t1.mMixerFormat));
+ + frameCount * t1->mMixerChannelCount
+ * audio_bytes_per_sample(t1->mMixerFormat));
numFrames += frameCount;
- } while (numFrames < state->frameCount);
- }
+ } while (numFrames < mFrameCount);
- // release each track's buffer
- e0 = enabledTracks;
- while (e0) {
- const int i = 31 - __builtin_clz(e0);
- e0 &= ~(1<<i);
- track_t& t = state->tracks[i];
- t.bufferProvider->releaseBuffer(&t.buffer);
+ // release each track's buffer
+ for (const int name : group) {
+ const std::shared_ptr<Track> &t = mTracks[name];
+ t->bufferProvider->releaseBuffer(&t->buffer);
+ }
}
}
-
// generic code with resampling
-void AudioMixer::process__genericResampling(state_t* state)
+void AudioMixer::process__genericResampling()
{
ALOGVV("process__genericResampling\n");
- // this const just means that local variable outTemp doesn't change
- int32_t* const outTemp = state->outputTemp;
- size_t numFrames = state->frameCount;
+ int32_t * const outTemp = mOutputTemp.get(); // naked ptr
+ size_t numFrames = mFrameCount;
- uint32_t e0 = state->enabledTracks;
- while (e0) {
- // process by group of tracks with same output buffer
- // to optimize cache use
- uint32_t e1 = e0, e2 = e0;
- int j = 31 - __builtin_clz(e1);
- track_t& t1 = state->tracks[j];
- e2 &= ~(1<<j);
- while (e2) {
- j = 31 - __builtin_clz(e2);
- e2 &= ~(1<<j);
- track_t& t2 = state->tracks[j];
- if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) {
- e1 &= ~(1<<j);
- }
- }
- e0 &= ~(e1);
- int32_t *out = t1.mainBuffer;
- memset(outTemp, 0, sizeof(*outTemp) * t1.mMixerChannelCount * state->frameCount);
- while (e1) {
- const int i = 31 - __builtin_clz(e1);
- e1 &= ~(1<<i);
- track_t& t = state->tracks[i];
+ for (const auto &pair : mGroups) {
+ const auto &group = pair.second;
+ const std::shared_ptr<Track> &t1 = mTracks[group[0]];
+
+ // clear temp buffer
+ memset(outTemp, 0, sizeof(*outTemp) * t1->mMixerChannelCount * mFrameCount);
+ for (const int name : group) {
+ const std::shared_ptr<Track> &t = mTracks[name];
int32_t *aux = NULL;
- if (CC_UNLIKELY(t.needs & NEEDS_AUX)) {
- aux = t.auxBuffer;
+ if (CC_UNLIKELY(t->needs & NEEDS_AUX)) {
+ aux = t->auxBuffer;
}
// this is a little goofy, on the resampling case we don't
// acquire/release the buffers because it's done by
// the resampler.
- if (t.needs & NEEDS_RESAMPLE) {
- t.hook(&t, outTemp, numFrames, state->resampleTemp, aux);
+ if (t->needs & NEEDS_RESAMPLE) {
+ (t.get()->*t->hook)(outTemp, numFrames, mResampleTemp.get() /* naked ptr */, aux);
} else {
size_t outFrames = 0;
while (outFrames < numFrames) {
- t.buffer.frameCount = numFrames - outFrames;
- t.bufferProvider->getNextBuffer(&t.buffer);
- t.in = t.buffer.raw;
- // t.in == NULL can happen if the track was flushed just after having
+ t->buffer.frameCount = numFrames - outFrames;
+ t->bufferProvider->getNextBuffer(&t->buffer);
+ t->mIn = t->buffer.raw;
+ // t->mIn == nullptr can happen if the track was flushed just after having
// been enabled for mixing.
- if (t.in == NULL) break;
+ if (t->mIn == nullptr) break;
if (CC_UNLIKELY(aux != NULL)) {
aux += outFrames;
}
- t.hook(&t, outTemp + outFrames * t.mMixerChannelCount, t.buffer.frameCount,
- state->resampleTemp, aux);
- outFrames += t.buffer.frameCount;
- t.bufferProvider->releaseBuffer(&t.buffer);
+ (t.get()->*t->hook)(
+ outTemp + outFrames * t->mMixerChannelCount, t->buffer.frameCount,
+ mResampleTemp.get() /* naked ptr */, aux);
+ outFrames += t->buffer.frameCount;
+ t->bufferProvider->releaseBuffer(&t->buffer);
}
}
}
- convertMixerFormat(out, t1.mMixerFormat,
- outTemp, t1.mMixerInFormat, numFrames * t1.mMixerChannelCount);
+ convertMixerFormat(t1->mainBuffer, t1->mMixerFormat,
+ outTemp, t1->mMixerInFormat, numFrames * t1->mMixerChannelCount);
}
}
// one track, 16 bits stereo without resampling is the most common case
-void AudioMixer::process__OneTrack16BitsStereoNoResampling(state_t* state)
+void AudioMixer::process__oneTrack16BitsStereoNoResampling()
{
- ALOGVV("process__OneTrack16BitsStereoNoResampling\n");
- // This method is only called when state->enabledTracks has exactly
- // one bit set. The asserts below would verify this, but are commented out
- // since the whole point of this method is to optimize performance.
- //ALOG_ASSERT(0 != state->enabledTracks, "no tracks enabled");
- const int i = 31 - __builtin_clz(state->enabledTracks);
- //ALOG_ASSERT((1 << i) == state->enabledTracks, "more than 1 track enabled");
- const track_t& t = state->tracks[i];
+ ALOGVV("process__oneTrack16BitsStereoNoResampling\n");
+ LOG_ALWAYS_FATAL_IF(mEnabled.size() != 1,
+ "%zu != 1 tracks enabled", mEnabled.size());
+ const int name = mEnabled[0];
+ const std::shared_ptr<Track> &t = mTracks[name];
- AudioBufferProvider::Buffer& b(t.buffer);
+ AudioBufferProvider::Buffer& b(t->buffer);
- int32_t* out = t.mainBuffer;
+ int32_t* out = t->mainBuffer;
float *fout = reinterpret_cast<float*>(out);
- size_t numFrames = state->frameCount;
+ size_t numFrames = mFrameCount;
- const int16_t vl = t.volume[0];
- const int16_t vr = t.volume[1];
- const uint32_t vrl = t.volumeRL;
+ const int16_t vl = t->volume[0];
+ const int16_t vr = t->volume[1];
+ const uint32_t vrl = t->volumeRL;
while (numFrames) {
b.frameCount = numFrames;
- t.bufferProvider->getNextBuffer(&b);
+ t->bufferProvider->getNextBuffer(&b);
const int16_t *in = b.i16;
// in == NULL can happen if the track was flushed just after having
// been enabled for mixing.
if (in == NULL || (((uintptr_t)in) & 3)) {
- if ( AUDIO_FORMAT_PCM_FLOAT == t.mMixerFormat ) {
+ if ( AUDIO_FORMAT_PCM_FLOAT == t->mMixerFormat ) {
memset((char*)fout, 0, numFrames
- * t.mMixerChannelCount * audio_bytes_per_sample(t.mMixerFormat));
+ * t->mMixerChannelCount * audio_bytes_per_sample(t->mMixerFormat));
} else {
memset((char*)out, 0, numFrames
- * t.mMixerChannelCount * audio_bytes_per_sample(t.mMixerFormat));
+ * t->mMixerChannelCount * audio_bytes_per_sample(t->mMixerFormat));
}
ALOGE_IF((((uintptr_t)in) & 3),
- "process__OneTrack16BitsStereoNoResampling: misaligned buffer"
+ "process__oneTrack16BitsStereoNoResampling: misaligned buffer"
" %p track %d, channels %d, needs %08x, volume %08x vfl %f vfr %f",
- in, i, t.channelCount, t.needs, vrl, t.mVolume[0], t.mVolume[1]);
+ in, name, t->channelCount, t->needs, vrl, t->mVolume[0], t->mVolume[1]);
return;
}
size_t outFrames = b.frameCount;
- switch (t.mMixerFormat) {
+ switch (t->mMixerFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
do {
uint32_t rl = *reinterpret_cast<const uint32_t *>(in);
@@ -1686,10 +1505,10 @@
}
break;
default:
- LOG_ALWAYS_FATAL("bad mixer format: %d", t.mMixerFormat);
+ LOG_ALWAYS_FATAL("bad mixer format: %d", t->mMixerFormat);
}
numFrames -= b.frameCount;
- t.bufferProvider->releaseBuffer(&b);
+ t->bufferProvider->releaseBuffer(&b);
}
}
@@ -1800,42 +1619,42 @@
*/
template <int MIXTYPE, bool USEFLOATVOL, bool ADJUSTVOL,
typename TO, typename TI, typename TA>
-void AudioMixer::volumeMix(TO *out, size_t outFrames,
- const TI *in, TA *aux, bool ramp, AudioMixer::track_t *t)
+void AudioMixer::Track::volumeMix(TO *out, size_t outFrames,
+ const TI *in, TA *aux, bool ramp)
{
if (USEFLOATVOL) {
if (ramp) {
- volumeRampMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
- t->mPrevVolume, t->mVolumeInc,
+ volumeRampMulti<MIXTYPE>(mMixerChannelCount, out, outFrames, in, aux,
+ mPrevVolume, mVolumeInc,
#ifdef FLOAT_AUX
- &t->mPrevAuxLevel, t->mAuxInc
+ &mPrevAuxLevel, mAuxInc
#else
- &t->prevAuxLevel, t->auxInc
+ &prevAuxLevel, auxInc
#endif
);
if (ADJUSTVOL) {
- t->adjustVolumeRamp(aux != NULL, true);
+ adjustVolumeRamp(aux != NULL, true);
}
} else {
- volumeMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
- t->mVolume,
+ volumeMulti<MIXTYPE>(mMixerChannelCount, out, outFrames, in, aux,
+ mVolume,
#ifdef FLOAT_AUX
- t->mAuxLevel
+ mAuxLevel
#else
- t->auxLevel
+ auxLevel
#endif
);
}
} else {
if (ramp) {
- volumeRampMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
- t->prevVolume, t->volumeInc, &t->prevAuxLevel, t->auxInc);
+ volumeRampMulti<MIXTYPE>(mMixerChannelCount, out, outFrames, in, aux,
+ prevVolume, volumeInc, &prevAuxLevel, auxInc);
if (ADJUSTVOL) {
- t->adjustVolumeRamp(aux != NULL);
+ adjustVolumeRamp(aux != NULL);
}
} else {
- volumeMulti<MIXTYPE>(t->mMixerChannelCount, out, outFrames, in, aux,
- t->volume, t->auxLevel);
+ volumeMulti<MIXTYPE>(mMixerChannelCount, out, outFrames, in, aux,
+ volume, auxLevel);
}
}
}
@@ -1850,19 +1669,18 @@
* TA: int32_t (Q4.27)
*/
template <int MIXTYPE, typename TO, typename TI, typename TA>
-void AudioMixer::process_NoResampleOneTrack(state_t* state)
+void AudioMixer::process__noResampleOneTrack()
{
- ALOGVV("process_NoResampleOneTrack\n");
- // CLZ is faster than CTZ on ARM, though really not sure if true after 31 - clz.
- const int i = 31 - __builtin_clz(state->enabledTracks);
- ALOG_ASSERT((1 << i) == state->enabledTracks, "more than 1 track enabled");
- track_t *t = &state->tracks[i];
+ ALOGVV("process__noResampleOneTrack\n");
+ LOG_ALWAYS_FATAL_IF(mEnabled.size() != 1,
+ "%zu != 1 tracks enabled", mEnabled.size());
+ const std::shared_ptr<Track> &t = mTracks[mEnabled[0]];
const uint32_t channels = t->mMixerChannelCount;
TO* out = reinterpret_cast<TO*>(t->mainBuffer);
TA* aux = reinterpret_cast<TA*>(t->auxBuffer);
const bool ramp = t->needsRamp();
- for (size_t numFrames = state->frameCount; numFrames; ) {
+ for (size_t numFrames = mFrameCount; numFrames > 0; ) {
AudioBufferProvider::Buffer& b(t->buffer);
// get input buffer
b.frameCount = numFrames;
@@ -1874,15 +1692,15 @@
if (in == NULL || (((uintptr_t)in) & 3)) {
memset(out, 0, numFrames
* channels * audio_bytes_per_sample(t->mMixerFormat));
- ALOGE_IF((((uintptr_t)in) & 3), "process_NoResampleOneTrack: bus error: "
+ ALOGE_IF((((uintptr_t)in) & 3), "process__noResampleOneTrack: bus error: "
"buffer %p track %p, channels %d, needs %#x",
- in, t, t->channelCount, t->needs);
+ in, &t, t->channelCount, t->needs);
return;
}
const size_t outFrames = b.frameCount;
- volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, false /* ADJUSTVOL */> (
- out, outFrames, in, aux, ramp, t);
+ t->volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, false /* ADJUSTVOL */> (
+ out, outFrames, in, aux, ramp);
out += outFrames * channels;
if (aux != NULL) {
@@ -1907,30 +1725,30 @@
* TA: int32_t (Q4.27) or float
*/
template <int MIXTYPE, typename TO, typename TI, typename TA>
-void AudioMixer::track__Resample(track_t* t, TO* out, size_t outFrameCount, TO* temp, TA* aux)
+void AudioMixer::Track::track__Resample(TO* out, size_t outFrameCount, TO* temp, TA* aux)
{
ALOGVV("track__Resample\n");
- t->resampler->setSampleRate(t->sampleRate);
- const bool ramp = t->needsRamp();
+ mResampler->setSampleRate(sampleRate);
+ const bool ramp = needsRamp();
if (ramp || aux != NULL) {
// if ramp: resample with unity gain to temp buffer and scale/mix in 2nd step.
// if aux != NULL: resample with unity gain to temp buffer then apply send level.
- t->resampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
- memset(temp, 0, outFrameCount * t->mMixerChannelCount * sizeof(TO));
- t->resampler->resample((int32_t*)temp, outFrameCount, t->bufferProvider);
+ mResampler->setVolume(UNITY_GAIN_FLOAT, UNITY_GAIN_FLOAT);
+ memset(temp, 0, outFrameCount * mMixerChannelCount * sizeof(TO));
+ mResampler->resample((int32_t*)temp, outFrameCount, bufferProvider);
volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, true /* ADJUSTVOL */>(
- out, outFrameCount, temp, aux, ramp, t);
+ out, outFrameCount, temp, aux, ramp);
} else { // constant volume gain
- t->resampler->setVolume(t->mVolume[0], t->mVolume[1]);
- t->resampler->resample((int32_t*)out, outFrameCount, t->bufferProvider);
+ mResampler->setVolume(mVolume[0], mVolume[1]);
+ mResampler->resample((int32_t*)out, outFrameCount, bufferProvider);
}
}
/* This track hook is called to mix a track, when no resampling is required.
- * The input buffer should be present in t->in.
+ * The input buffer should be present in in.
*
* MIXTYPE (see AudioMixerOps.h MIXTYPE_* enumeration)
* TO: int32_t (Q4.27) or float
@@ -1938,25 +1756,25 @@
* TA: int32_t (Q4.27) or float
*/
template <int MIXTYPE, typename TO, typename TI, typename TA>
-void AudioMixer::track__NoResample(track_t* t, TO* out, size_t frameCount,
- TO* temp __unused, TA* aux)
+void AudioMixer::Track::track__NoResample(TO* out, size_t frameCount, TO* temp __unused, TA* aux)
{
ALOGVV("track__NoResample\n");
- const TI *in = static_cast<const TI *>(t->in);
+ const TI *in = static_cast<const TI *>(mIn);
volumeMix<MIXTYPE, is_same<TI, float>::value /* USEFLOATVOL */, true /* ADJUSTVOL */>(
- out, frameCount, in, aux, t->needsRamp(), t);
+ out, frameCount, in, aux, needsRamp());
// MIXTYPE_MONOEXPAND reads a single input channel and expands to NCHAN output channels.
// MIXTYPE_MULTI reads NCHAN input channels and places to NCHAN output channels.
- in += (MIXTYPE == MIXTYPE_MONOEXPAND) ? frameCount : frameCount * t->mMixerChannelCount;
- t->in = in;
+ in += (MIXTYPE == MIXTYPE_MONOEXPAND) ? frameCount : frameCount * mMixerChannelCount;
+ mIn = in;
}
/* The Mixer engine generates either int32_t (Q4_27) or float data.
* We use this function to convert the engine buffers
* to the desired mixer output format, either int16_t (Q.15) or float.
*/
+/* static */
void AudioMixer::convertMixerFormat(void *out, audio_format_t mixerOutFormat,
void *in, audio_format_t mixerInFormat, size_t sampleCount)
{
@@ -1995,19 +1813,20 @@
/* Returns the proper track hook to use for mixing the track into the output buffer.
*/
-AudioMixer::hook_t AudioMixer::getTrackHook(int trackType, uint32_t channelCount,
+/* static */
+AudioMixer::hook_t AudioMixer::Track::getTrackHook(int trackType, uint32_t channelCount,
audio_format_t mixerInFormat, audio_format_t mixerOutFormat __unused)
{
if (!kUseNewMixer && channelCount == FCC_2 && mixerInFormat == AUDIO_FORMAT_PCM_16_BIT) {
switch (trackType) {
case TRACKTYPE_NOP:
- return track__nop;
+ return &Track::track__nop;
case TRACKTYPE_RESAMPLE:
- return track__genericResample;
+ return &Track::track__genericResample;
case TRACKTYPE_NORESAMPLEMONO:
- return track__16BitsMono;
+ return &Track::track__16BitsMono;
case TRACKTYPE_NORESAMPLE:
- return track__16BitsStereo;
+ return &Track::track__16BitsStereo;
default:
LOG_ALWAYS_FATAL("bad trackType: %d", trackType);
break;
@@ -2016,14 +1835,14 @@
LOG_ALWAYS_FATAL_IF(channelCount > MAX_NUM_CHANNELS);
switch (trackType) {
case TRACKTYPE_NOP:
- return track__nop;
+ return &Track::track__nop;
case TRACKTYPE_RESAMPLE:
switch (mixerInFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
- return (AudioMixer::hook_t)track__Resample<
+ return (AudioMixer::hook_t) &Track::track__Resample<
MIXTYPE_MULTI, float /*TO*/, float /*TI*/, TYPE_AUX>;
case AUDIO_FORMAT_PCM_16_BIT:
- return (AudioMixer::hook_t)track__Resample<
+ return (AudioMixer::hook_t) &Track::track__Resample<
MIXTYPE_MULTI, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
default:
LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
@@ -2033,10 +1852,10 @@
case TRACKTYPE_NORESAMPLEMONO:
switch (mixerInFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
- return (AudioMixer::hook_t)track__NoResample<
+ return (AudioMixer::hook_t) &Track::track__NoResample<
MIXTYPE_MONOEXPAND, float /*TO*/, float /*TI*/, TYPE_AUX>;
case AUDIO_FORMAT_PCM_16_BIT:
- return (AudioMixer::hook_t)track__NoResample<
+ return (AudioMixer::hook_t) &Track::track__NoResample<
MIXTYPE_MONOEXPAND, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
default:
LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
@@ -2046,10 +1865,10 @@
case TRACKTYPE_NORESAMPLE:
switch (mixerInFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
- return (AudioMixer::hook_t)track__NoResample<
+ return (AudioMixer::hook_t) &Track::track__NoResample<
MIXTYPE_MULTI, float /*TO*/, float /*TI*/, TYPE_AUX>;
case AUDIO_FORMAT_PCM_16_BIT:
- return (AudioMixer::hook_t)track__NoResample<
+ return (AudioMixer::hook_t) &Track::track__NoResample<
MIXTYPE_MULTI, int32_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
default:
LOG_ALWAYS_FATAL("bad mixerInFormat: %#x", mixerInFormat);
@@ -2070,7 +1889,9 @@
* a stereo output track, the input track cannot be MONO. This should be
* prevented by the caller.
*/
-AudioMixer::process_hook_t AudioMixer::getProcessHook(int processType, uint32_t channelCount,
+/* static */
+AudioMixer::process_hook_t AudioMixer::getProcessHook(
+ int processType, uint32_t channelCount,
audio_format_t mixerInFormat, audio_format_t mixerOutFormat)
{
if (processType != PROCESSTYPE_NORESAMPLEONETRACK) { // Only NORESAMPLEONETRACK
@@ -2078,17 +1899,17 @@
return NULL;
}
if (!kUseNewMixer && channelCount == FCC_2 && mixerInFormat == AUDIO_FORMAT_PCM_16_BIT) {
- return process__OneTrack16BitsStereoNoResampling;
+ return &AudioMixer::process__oneTrack16BitsStereoNoResampling;
}
LOG_ALWAYS_FATAL_IF(channelCount > MAX_NUM_CHANNELS);
switch (mixerInFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
switch (mixerOutFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
- return process_NoResampleOneTrack<
+ return &AudioMixer::process__noResampleOneTrack<
MIXTYPE_MULTI_SAVEONLY, float /*TO*/, float /*TI*/, TYPE_AUX>;
case AUDIO_FORMAT_PCM_16_BIT:
- return process_NoResampleOneTrack<
+ return &AudioMixer::process__noResampleOneTrack<
MIXTYPE_MULTI_SAVEONLY, int16_t /*TO*/, float /*TI*/, TYPE_AUX>;
default:
LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
@@ -2098,10 +1919,10 @@
case AUDIO_FORMAT_PCM_16_BIT:
switch (mixerOutFormat) {
case AUDIO_FORMAT_PCM_FLOAT:
- return process_NoResampleOneTrack<
+ return &AudioMixer::process__noResampleOneTrack<
MIXTYPE_MULTI_SAVEONLY, float /*TO*/, int16_t /*TI*/, TYPE_AUX>;
case AUDIO_FORMAT_PCM_16_BIT:
- return process_NoResampleOneTrack<
+ return &AudioMixer::process__noResampleOneTrack<
MIXTYPE_MULTI_SAVEONLY, int16_t /*TO*/, int16_t /*TI*/, TYPE_AUX>;
default:
LOG_ALWAYS_FATAL("bad mixerOutFormat: %#x", mixerOutFormat);
diff --git a/media/libmedia/include/media/CounterMetric.h b/media/libmedia/include/media/CounterMetric.h
index f39ca7c..b53470d 100644
--- a/media/libmedia/include/media/CounterMetric.h
+++ b/media/libmedia/include/media/CounterMetric.h
@@ -16,6 +16,10 @@
#ifndef ANDROID_COUNTER_METRIC_H_
#define ANDROID_COUNTER_METRIC_H_
+#include <functional>
+#include <map>
+#include <string>
+
#include <media/MediaAnalyticsItem.h>
#include <utils/Log.h>
@@ -71,13 +75,13 @@
// of Attribute.
void ExportValues(
std::function<void (const AttributeType&,
- const int64_t count)> function) {
+ const int64_t count)> function) const {
for (auto it = values_.begin(); it != values_.end(); it++) {
function(it->first, it->second);
}
}
- const std::string& metric_name() { return metric_name_; };
+ const std::string& metric_name() const { return metric_name_; };
private:
const std::string metric_name_;
diff --git a/media/libmedia/include/media/DrmHal.h b/media/libmedia/include/media/DrmHal.h
index f2b25cd..c18d845 100644
--- a/media/libmedia/include/media/DrmHal.h
+++ b/media/libmedia/include/media/DrmHal.h
@@ -23,7 +23,7 @@
#include <android/hardware/drm/1.0/IDrmPluginListener.h>
#include <android/hardware/drm/1.0/IDrmFactory.h>
-#include <media/CounterMetric.h>
+#include <media/DrmMetrics.h>
#include <media/IDrm.h>
#include <media/IDrmClient.h>
#include <media/MediaAnalyticsItem.h>
@@ -181,7 +181,8 @@
sp<IDrmPlugin> mPlugin;
sp<drm::V1_1::IDrmPlugin> mPluginV1_1;
- CounterMetric<status_t> mOpenSessionCounter;
+ // Mutable to allow modification within GetPropertyByteArray.
+ mutable MediaDrmMetrics mMetrics;
Vector<Vector<uint8_t>> mOpenSessions;
void closeOpenSessions();
diff --git a/media/libmedia/include/media/DrmMetrics.h b/media/libmedia/include/media/DrmMetrics.h
new file mode 100644
index 0000000..bb7509b
--- /dev/null
+++ b/media/libmedia/include/media/DrmMetrics.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_METRICS_H_
+#define DRM_METRICS_H_
+
+#include <map>
+
+#include <android/hardware/drm/1.0/types.h>
+#include <media/CounterMetric.h>
+#include <media/EventMetric.h>
+
+namespace android {
+
+/**
+ * This class contains the definition of metrics captured within MediaDrm.
+ * It also contains a method for exporting all of the metrics to a
+ * MediaAnalyticsItem instance.
+ */
+class MediaDrmMetrics {
+ public:
+ explicit MediaDrmMetrics();
+ // Count of openSession calls.
+ CounterMetric<status_t> mOpenSessionCounter;
+ // Count of closeSession calls.
+ CounterMetric<status_t> mCloseSessionCounter;
+ // Count and timing of getKeyRequest calls.
+ EventMetric<status_t> mGetKeyRequestTiming;
+ // Count and timing of provideKeyResponse calls.
+ EventMetric<status_t> mProvideKeyResponseTiming;
+ // Count of getProvisionRequest calls.
+ CounterMetric<status_t> mGetProvisionRequestCounter;
+ // Count of provideProvisionResponse calls.
+ CounterMetric<status_t> mProvideProvisionResponseCounter;
+
+ // Count of key status events broken out by status type.
+ CounterMetric<::android::hardware::drm::V1_0::KeyStatusType>
+ mKeyStatusChangeCounter;
+ // Count of events broken out by event type
+ CounterMetric<::android::hardware::drm::V1_0::EventType> mEventCounter;
+
+ // Count getPropertyByteArray calls to retrieve the device unique id.
+ CounterMetric<status_t> mGetDeviceUniqueIdCounter;
+
+ // TODO: Add session start and end time support. These are a special case.
+
+ // Export the metrics to a MediaAnalyticsItem.
+ void Export(MediaAnalyticsItem* item);
+};
+
+} // namespace android
+
+#endif // DRM_METRICS_H_
diff --git a/media/libmedia/include/media/EventMetric.h b/media/libmedia/include/media/EventMetric.h
new file mode 100644
index 0000000..dbb736a
--- /dev/null
+++ b/media/libmedia/include/media/EventMetric.h
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_EVENT_METRIC_H_
+#define ANDROID_EVENT_METRIC_H_
+
+#include <media/MediaAnalyticsItem.h>
+#include <utils/Timers.h>
+
+namespace android {
+
+// This is a simple holder for the statistics recorded in EventMetric.
+struct EventStatistics {
+ // The count of times the event occurred.
+ int64_t count;
+
+ // The minimum and maximum values recorded in the Record method.
+ double min;
+ double max;
+
+ // The average (mean) of all values recorded.
+ double mean;
+ // The sum of squared deviation. Variance can be calculated from
+ // this value.
+ // var = sum_squared_deviation / count;
+ double sum_squared_deviation;
+};
+
+// The EventMetric class is used to accumulate stats about an event over time.
+// A common use case is to track clock timings for a method call or operation.
+// An EventMetric can break down stats by a dimension specified by the
+// application. E.g. an application may want to track counts broken out by
+// error code or the size of some parameter.
+//
+// Example:
+//
+// struct C {
+// status_t DoWork() {
+// unsigned long start_time = now();
+// status_t result;
+//
+// // DO WORK and determine result;
+//
+// work_event_.Record(now() - start_time, result);
+//
+// return result;
+// }
+// EventMetric<status_t> work_event_;
+// };
+//
+// C c;
+// c.DoWork();
+//
+ // std::map<int, int64_t> counts;
+ // c.work_event_.ExportValues(
+ //     [&] (int attribute_value, const EventStatistics& stats) {
+ //         counts[attribute_value] = stats.count;
+ //     });
+// // Do something with the exported stat.
+//
+template<typename AttributeType>
+class EventMetric {
+ public:
+ // Instantiate the metric with the given metric name and
+ // attribute name used to break out the recorded statistics.
+ EventMetric(
+ const std::string& metric_name,
+ const std::string& attribute_name)
+ : metric_name_(metric_name),
+ attribute_name_(attribute_name) {}
+
+ // Record the given value for this attribute, updating the count,
+ // min, max, mean, and sum of squared deviation statistics.
+ void Record(double value, AttributeType attribute) {
+ if (values_.find(attribute) != values_.end()) {
+ EventStatistics* stats = values_[attribute].get();
+ // Using method of provisional means.
+ double deviation = value - stats->mean;
+ stats->mean = stats->mean + (deviation / stats->count);
+ stats->sum_squared_deviation =
+ stats->sum_squared_deviation + (deviation * (value - stats->mean));
+ stats->count++;
+
+ stats->min = stats->min < value ? stats->min : value;
+ stats->max = stats->max > value ? stats->max : value;
+ } else {
+ std::unique_ptr<EventStatistics> stats =
+ std::make_unique<EventStatistics>();
+ stats->count = 1;
+ stats->min = value;
+ stats->max = value;
+ stats->mean = value;
+ stats->sum_squared_deviation = 0;
+ values_[attribute] = std::move(stats);
+ }
+ };
+
+ // Export the metrics to the provided |function|. Each value for Attribute
+ // has a separate set of stats. As such, |function| will be called once per
+ // value of Attribute.
+ void ExportValues(
+ std::function<void (const AttributeType&,
+ const EventStatistics&)> function) const {
+ for (auto it = values_.begin(); it != values_.end(); it++) {
+ function(it->first, *(it->second));
+ }
+ }
+
+ const std::string& metric_name() const { return metric_name_; };
+
+ private:
+ const std::string metric_name_;
+ const std::string attribute_name_;
+ std::map<AttributeType, std::unique_ptr<struct EventStatistics>> values_;
+};
+
+// The EventTimer is a supporting class for EventMetric instances that are used
+// to time methods. The EventTimer starts a timer when first in scope, and
+// records the timing when exiting scope.
+//
+// Example:
+//
+// EventMetric<int> my_metric;
+//
+// {
+// EventTimer<int> my_timer(&my_metric);
+// // Set the attribute to associate with this timing.
+ // my_timer.SetAttribute(42);
+//
+// // Do some work that you want to time.
+//
+ // } // The EventTimer destructor will record the timing in my_metric;
+//
+template<typename AttributeType>
+class EventTimer {
+ public:
+ explicit EventTimer(EventMetric<AttributeType>* metric)
+ :start_time_(systemTime()), metric_(metric) {
+ }
+
+ virtual ~EventTimer() {
+ if (metric_) {
+ metric_->Record(ns2us(systemTime() - start_time_), attribute_);
+ }
+ }
+
+ // Set the attribute to associate with this timing. E.g. this can be used to
+ // record the return code from the work that was timed.
+ void SetAttribute(const AttributeType& attribute) {
+ attribute_ = attribute;
+ }
+
+ protected:
+ // Visible for testing only.
+ nsecs_t start_time_;
+
+ private:
+ EventMetric<AttributeType>* metric_;
+ AttributeType attribute_;
+};
+
+} // namespace android
+
+#endif // ANDROID_EVENT_METRIC_H_
diff --git a/media/libmedia/nuplayer2/NuPlayer2Driver.cpp b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
index 629a7eb..ccfcc47 100644
--- a/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
+++ b/media/libmedia/nuplayer2/NuPlayer2Driver.cpp
@@ -77,7 +77,7 @@
};
// key for media statistics
-static const char *kKeyPlayer = "nuplayer2";
+static const char *kKeyPlayer = "nuplayer";
// attrs for media statistics
static const char *kPlayerVMime = "android.media.mediaplayer.video.mime";
static const char *kPlayerVCodec = "android.media.mediaplayer.video.codec";
@@ -666,7 +666,7 @@
// re-init in case we prepare() and start() again.
delete mAnalyticsItem ;
- mAnalyticsItem = new MediaAnalyticsItem("nuplayer");
+ mAnalyticsItem = new MediaAnalyticsItem(kKeyPlayer);
if (mAnalyticsItem) {
mAnalyticsItem->generateSessionID();
mAnalyticsItem->setUid(mClientUid);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 0ded5be..541093a 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -126,6 +126,32 @@
}
}
+static OMX_VIDEO_CONTROLRATETYPE getVideoBitrateMode(const sp<AMessage> &msg) {
+ int32_t tmp;
+ if (msg->findInt32("bitrate-mode", &tmp)) {
+ // explicitly translate from MediaCodecInfo.EncoderCapabilities.
+ // BITRATE_MODE_* into OMX bitrate mode.
+ switch (tmp) {
+ //BITRATE_MODE_CQ
+ case 0: return OMX_Video_ControlRateConstantQuality;
+ //BITRATE_MODE_VBR
+ case 1: return OMX_Video_ControlRateVariable;
+ //BITRATE_MODE_CBR
+ case 2: return OMX_Video_ControlRateConstant;
+ default: break;
+ }
+ }
+ return OMX_Video_ControlRateVariable;
+}
+
+static bool findVideoBitrateControlInfo(const sp<AMessage> &msg,
+ OMX_VIDEO_CONTROLRATETYPE *mode, int32_t *bitrate, int32_t *quality) {
+ *mode = getVideoBitrateMode(msg);
+ bool isCQ = (*mode == OMX_Video_ControlRateConstantQuality);
+ return (!isCQ && msg->findInt32("bitrate", bitrate))
+ || (isCQ && msg->findInt32("quality", quality));
+}
+
struct MessageList : public RefBase {
MessageList() {
}
@@ -1686,6 +1712,7 @@
mConfigFormat = msg;
mIsEncoder = encoder;
+ mIsVideo = !strncasecmp(mime, "video/", 6);
mPortMode[kPortIndexInput] = IOMX::kPortModePresetByteBuffer;
mPortMode[kPortIndexOutput] = IOMX::kPortModePresetByteBuffer;
@@ -1696,19 +1723,26 @@
return err;
}
- int32_t bitRate = 0;
- // FLAC encoder doesn't need a bitrate, other encoders do
- if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
- && !msg->findInt32("bitrate", &bitRate)) {
- return INVALID_OPERATION;
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t bitrate = 0, quality;
+ // FLAC encoder or video encoder in constant quality mode doesn't need a
+ // bitrate, other encoders do.
+ if (encoder) {
+ if (mIsVideo && !findVideoBitrateControlInfo(
+ msg, &bitrateMode, &bitrate, &quality)) {
+ return INVALID_OPERATION;
+ } else if (!mIsVideo && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
+ && !msg->findInt32("bitrate", &bitrate)) {
+ return INVALID_OPERATION;
+ }
}
// propagate bitrate to the output so that the muxer has it
- if (encoder && msg->findInt32("bitrate", &bitRate)) {
+ if (encoder && msg->findInt32("bitrate", &bitrate)) {
// Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
// average bitrate. We've been setting both bitrate and max-bitrate to this same value.
- outputFormat->setInt32("bitrate", bitRate);
- outputFormat->setInt32("max-bitrate", bitRate);
+ outputFormat->setInt32("bitrate", bitrate);
+ outputFormat->setInt32("max-bitrate", bitrate);
}
int32_t storeMeta;
@@ -1765,9 +1799,7 @@
// Only enable metadata mode on encoder output if encoder can prepend
// sps/pps to idr frames, since in metadata mode the bitstream is in an
// opaque handle, to which we don't have access.
- int32_t video = !strncasecmp(mime, "video/", 6);
- mIsVideo = video;
- if (encoder && video) {
+ if (encoder && mIsVideo) {
OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
&& msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
&& storeMeta != 0);
@@ -1813,9 +1845,9 @@
// NOTE: we only use native window for video decoders
sp<RefBase> obj;
bool haveNativeWindow = msg->findObject("native-window", &obj)
- && obj != NULL && video && !encoder;
+ && obj != NULL && mIsVideo && !encoder;
mUsingNativeWindow = haveNativeWindow;
- if (video && !encoder) {
+ if (mIsVideo && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
int32_t usageProtected;
@@ -1978,7 +2010,7 @@
(void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
// invalid encodings will default to PCM-16bit in setupRawAudioFormat.
- if (video) {
+ if (mIsVideo) {
// determine need for software renderer
bool usingSwRenderer = false;
if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
@@ -2112,14 +2144,14 @@
}
err = setupAACCodec(
- encoder, numChannels, sampleRate, bitRate, aacProfile,
+ encoder, numChannels, sampleRate, bitrate, aacProfile,
isADTS != 0, sbrMode, maxOutputChannelCount, drc,
pcmLimiterEnable);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
- err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
+ err = setupAMRCodec(encoder, false /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
- err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
+ err = setupAMRCodec(encoder, true /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
// These are PCM-like formats with a fixed sample rate but
@@ -2233,7 +2265,7 @@
rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
}
if (rateFloat > 0) {
- err = setOperatingRate(rateFloat, video);
+ err = setOperatingRate(rateFloat, mIsVideo);
err = OK; // ignore errors
}
@@ -2258,7 +2290,7 @@
}
// create data converters if needed
- if (!video && err == OK) {
+ if (!mIsVideo && err == OK) {
AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
if (encoder) {
(void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
@@ -3709,10 +3741,12 @@
return err;
}
- int32_t width, height, bitrate;
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t width, height, bitrate = 0, quality;
if (!msg->findInt32("width", &width)
|| !msg->findInt32("height", &height)
- || !msg->findInt32("bitrate", &bitrate)) {
+ || !findVideoBitrateControlInfo(
+ msg, &bitrateMode, &bitrate, &quality)) {
return INVALID_OPERATION;
}
@@ -3965,15 +3999,6 @@
return ret > 0 ? ret - 1 : 0;
}
-static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
- int32_t tmp;
- if (!msg->findInt32("bitrate-mode", &tmp)) {
- return OMX_Video_ControlRateVariable;
- }
-
- return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
-}
-
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
int32_t bitrate;
float iFrameInterval;
@@ -3982,7 +4007,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4047,7 +4072,7 @@
return err;
}
- err = configureBitrate(bitrate, bitrateMode);
+ err = configureBitrate(bitrateMode, bitrate);
if (err != OK) {
return err;
@@ -4064,7 +4089,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4124,7 +4149,7 @@
return err;
}
- err = configureBitrate(bitrate, bitrateMode);
+ err = configureBitrate(bitrateMode, bitrate);
if (err != OK) {
return err;
@@ -4194,7 +4219,7 @@
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4350,18 +4375,20 @@
}
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate);
}
status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
- int32_t bitrate;
float iFrameInterval;
- if (!msg->findInt32("bitrate", &bitrate)
- || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
+ if (!msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
return INVALID_OPERATION;
}
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode;
+ int32_t bitrate, quality;
+ if (!findVideoBitrateControlInfo(msg, &bitrateMode, &bitrate, &quality)) {
+ return INVALID_OPERATION;
+ }
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4407,7 +4434,7 @@
return err;
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate, quality);
}
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
@@ -4428,7 +4455,7 @@
}
msg->findAsFloat("i-frame-interval", &iFrameInterval);
- OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getVideoBitrateMode(msg);
float frameRate;
if (!msg->findFloat("frame-rate", &frameRate)) {
@@ -4505,7 +4532,7 @@
}
}
- return configureBitrate(bitrate, bitrateMode);
+ return configureBitrate(bitrateMode, bitrate);
}
status_t ACodec::verifySupportForProfileAndLevel(
@@ -4541,7 +4568,7 @@
}
status_t ACodec::configureBitrate(
- int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode, int32_t bitrate, int32_t quality) {
OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
InitOMXParams(&bitrateType);
bitrateType.nPortIndex = kPortIndexOutput;
@@ -4554,7 +4581,13 @@
}
bitrateType.eControlRate = bitrateMode;
- bitrateType.nTargetBitrate = bitrate;
+
+ // write it out explicitly even if it's a union
+ if (bitrateMode == OMX_Video_ControlRateConstantQuality) {
+ bitrateType.nQualityFactor = quality;
+ } else {
+ bitrateType.nTargetBitrate = bitrate;
+ }
return mOMXNode->setParameter(
OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 01f73a1..90f6ed7 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -105,6 +105,7 @@
"libmedia_helper",
"libstagefright_codec2",
"libstagefright_foundation",
+ "libstagefright_gbs",
"libstagefright_omx",
"libstagefright_omx_utils",
"libstagefright_xmlparser",
diff --git a/media/libstagefright/CCodec.cpp b/media/libstagefright/CCodec.cpp
index 0103abd..b058f37 100644
--- a/media/libstagefright/CCodec.cpp
+++ b/media/libstagefright/CCodec.cpp
@@ -23,7 +23,9 @@
#include <C2PlatformSupport.h>
#include <gui/Surface.h>
+#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
+#include <media/stagefright/PersistentSurface.h>
#include "include/CCodecBufferChannel.h"
@@ -111,33 +113,35 @@
class CCodecListener : public C2Component::Listener {
public:
- CCodecListener(const std::shared_ptr<CCodecBufferChannel> &channel)
- : mChannel(channel) {
- }
+ explicit CCodecListener(const wp<CCodec> &codec) : mCodec(codec) {}
virtual void onWorkDone_nb(
std::weak_ptr<C2Component> component,
std::vector<std::unique_ptr<C2Work>> workItems) override {
- (void) component;
- mChannel->onWorkDone(std::move(workItems));
+ (void)component;
+ sp<CCodec> codec(mCodec.promote());
+ if (!codec) {
+ return;
+ }
+ codec->onWorkDone(workItems);
}
virtual void onTripped_nb(
std::weak_ptr<C2Component> component,
std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
// TODO
- (void) component;
- (void) settingResult;
+ (void)component;
+ (void)settingResult;
}
virtual void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
// TODO
- (void) component;
- (void) errorCode;
+ (void)component;
+ (void)errorCode;
}
private:
- std::shared_ptr<CCodecBufferChannel> mChannel;
+ wp<CCodec> mCodec;
};
} // namespace
@@ -159,11 +163,11 @@
void CCodec::initiateAllocateComponent(const sp<AMessage> &msg) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RELEASED) {
+ if (state->get() != RELEASED) {
mCallback->onError(INVALID_OPERATION, ACTION_CODE_FATAL);
return;
}
- state->mState = ALLOCATING;
+ state->set(ALLOCATING);
}
AString componentName;
@@ -178,14 +182,14 @@
void CCodec::allocate(const AString &componentName) {
// TODO: use C2ComponentStore to create component
- mListener.reset(new CCodecListener(mChannel));
+ mListener.reset(new CCodecListener(this));
std::shared_ptr<C2Component> comp;
c2_status_t err = GetCodec2PlatformComponentStore()->createComponent(
componentName.c_str(), &comp);
if (err != C2_OK) {
Mutexed<State>::Locked state(mState);
- state->mState = RELEASED;
+ state->set(RELEASED);
state.unlock();
mCallback->onError(err, ACTION_CODE_FATAL);
state.lock();
@@ -194,15 +198,15 @@
comp->setListener_vb(mListener, C2_MAY_BLOCK);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATING) {
- state->mState = RELEASED;
+ if (state->get() != ALLOCATING) {
+ state->set(RELEASED);
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = ALLOCATED;
- state->mComp = comp;
+ state->set(ALLOCATED);
+ state->comp = comp;
}
mChannel->setComponent(comp);
mCallback->onComponentAllocated(comp->intf()->getName().c_str());
@@ -211,7 +215,7 @@
void CCodec::initiateConfigureComponent(const sp<AMessage> &format) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATED) {
+ if (state->get() != ALLOCATED) {
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
return;
}
@@ -252,6 +256,9 @@
inputFormat->setInt32("sample-rate", 44100);
outputFormat->setInt32("channel-count", 1);
outputFormat->setInt32("sample-rate", 44100);
+ } else {
+ outputFormat->setInt32("width", 1080);
+ outputFormat->setInt32("height", 1920);
}
} else {
inputFormat->setString("mime", mime);
@@ -272,32 +279,81 @@
{
Mutexed<Formats>::Locked formats(mFormats);
- formats->mInputFormat = inputFormat;
- formats->mOutputFormat = outputFormat;
+ formats->inputFormat = inputFormat;
+ formats->outputFormat = outputFormat;
}
mCallback->onComponentConfigured(inputFormat, outputFormat);
}
-
void CCodec::initiateCreateInputSurface() {
- // TODO
+ (new AMessage(kWhatCreateInputSurface, this))->post();
+}
+
+void CCodec::createInputSurface() {
+ sp<IGraphicBufferProducer> producer;
+ sp<GraphicBufferSource> source(new GraphicBufferSource);
+
+ status_t err = source->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to initialize graphic buffer source: %d", err);
+ mCallback->onInputSurfaceCreationFailed(err);
+ return;
+ }
+ producer = source->getIGraphicBufferProducer();
+
+ err = setupInputSurface(source);
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceCreationFailed(err);
+ return;
+ }
+
+ sp<AMessage> inputFormat;
+ sp<AMessage> outputFormat;
+ {
+ Mutexed<Formats>::Locked formats(mFormats);
+ inputFormat = formats->inputFormat;
+ outputFormat = formats->outputFormat;
+ }
+ mCallback->onInputSurfaceCreated(
+ inputFormat,
+ outputFormat,
+ new BufferProducerWrapper(producer));
+}
+
+status_t CCodec::setupInputSurface(const sp<GraphicBufferSource> &source) {
+ status_t err = mChannel->setGraphicBufferSource(source);
+ if (err != OK) {
+ return err;
+ }
+
+ // TODO: configure |source| with other settings.
+ return OK;
}
void CCodec::initiateSetInputSurface(const sp<PersistentSurface> &surface) {
+ sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
+ msg->setObject("surface", surface);
+ msg->post();
+}
+
+void CCodec::setInputSurface(const sp<PersistentSurface> &surface) {
// TODO
- (void) surface;
+ (void)surface;
+
+ mCallback->onInputSurfaceDeclined(ERROR_UNSUPPORTED);
}
void CCodec::initiateStart() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != ALLOCATED) {
+ if (state->get() != ALLOCATED) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = STARTING;
+ state->set(STARTING);
}
(new AMessage(kWhatStart, this))->post();
@@ -307,13 +363,13 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState != STARTING) {
+ if (state->get() != STARTING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
c2_status_t err = comp->start();
if (err != C2_OK) {
@@ -325,20 +381,20 @@
sp<AMessage> outputFormat;
{
Mutexed<Formats>::Locked formats(mFormats);
- inputFormat = formats->mInputFormat;
- outputFormat = formats->mOutputFormat;
+ inputFormat = formats->inputFormat;
+ outputFormat = formats->outputFormat;
}
mChannel->start(inputFormat, outputFormat);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != STARTING) {
+ if (state->get() != STARTING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RUNNING;
+ state->set(RUNNING);
}
mCallback->onStartCompleted();
}
@@ -354,17 +410,17 @@
void CCodec::initiateStop() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState == ALLOCATED
- || state->mState == RELEASED
- || state->mState == STOPPING
- || state->mState == RELEASING) {
+ if (state->get() == ALLOCATED
+ || state->get() == RELEASED
+ || state->get() == STOPPING
+ || state->get() == RELEASING) {
// We're already stopped, released, or doing it right now.
state.unlock();
mCallback->onStopCompleted();
state.lock();
return;
}
- state->mState = STOPPING;
+ state->set(STOPPING);
}
(new AMessage(kWhatStop, this))->post();
@@ -374,19 +430,19 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASING) {
+ if (state->get() == RELEASING) {
state.unlock();
// We're already stopped or release is in progress.
mCallback->onStopCompleted();
state.lock();
return;
- } else if (state->mState != STOPPING) {
+ } else if (state->get() != STOPPING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
status_t err = comp->stop();
@@ -397,8 +453,8 @@
{
Mutexed<State>::Locked state(mState);
- if (state->mState == STOPPING) {
- state->mState = ALLOCATED;
+ if (state->get() == STOPPING) {
+ state->set(ALLOCATED);
}
}
mCallback->onStopCompleted();
@@ -407,7 +463,7 @@
void CCodec::initiateRelease(bool sendCallback /* = true */) {
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASED || state->mState == RELEASING) {
+ if (state->get() == RELEASED || state->get() == RELEASING) {
// We're already released or doing it right now.
if (sendCallback) {
state.unlock();
@@ -416,8 +472,8 @@
}
return;
}
- if (state->mState == ALLOCATING) {
- state->mState = RELEASING;
+ if (state->get() == ALLOCATING) {
+ state->set(RELEASING);
// With the altered state allocate() would fail and clean up.
if (sendCallback) {
state.unlock();
@@ -426,7 +482,7 @@
}
return;
}
- state->mState = RELEASING;
+ state->set(RELEASING);
}
std::thread([this, sendCallback] { release(sendCallback); }).detach();
@@ -436,7 +492,7 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState == RELEASED) {
+ if (state->get() == RELEASED) {
if (sendCallback) {
state.unlock();
mCallback->onReleaseCompleted();
@@ -444,15 +500,15 @@
}
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
comp->release();
{
Mutexed<State>::Locked state(mState);
- state->mState = RELEASED;
- state->mComp.reset();
+ state->set(RELEASED);
+ state->comp.reset();
}
if (sendCallback) {
mCallback->onReleaseCompleted();
@@ -466,11 +522,11 @@
void CCodec::signalFlush() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RUNNING) {
+ if (state->get() != RUNNING) {
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
return;
}
- state->mState = FLUSHING;
+ state->set(FLUSHING);
}
(new AMessage(kWhatFlush, this))->post();
@@ -480,13 +536,13 @@
std::shared_ptr<C2Component> comp;
{
Mutexed<State>::Locked state(mState);
- if (state->mState != FLUSHING) {
+ if (state->get() != FLUSHING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- comp = state->mComp;
+ comp = state->comp;
}
mChannel->stop();
@@ -502,7 +558,7 @@
{
Mutexed<State>::Locked state(mState);
- state->mState = FLUSHED;
+ state->set(FLUSHED);
}
mCallback->onFlushCompleted();
}
@@ -510,26 +566,26 @@
void CCodec::signalResume() {
{
Mutexed<State>::Locked state(mState);
- if (state->mState != FLUSHED) {
+ if (state->get() != FLUSHED) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RESUMING;
+ state->set(RESUMING);
}
mChannel->start(nullptr, nullptr);
{
Mutexed<State>::Locked state(mState);
- if (state->mState != RESUMING) {
+ if (state->get() != RESUMING) {
state.unlock();
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
state.lock();
return;
}
- state->mState = RUNNING;
+ state->set(RUNNING);
}
}
@@ -545,6 +601,14 @@
// TODO
}
+void CCodec::onWorkDone(std::vector<std::unique_ptr<C2Work>> &workItems) {
+ Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+ for (std::unique_ptr<C2Work> &item : workItems) {
+ queue->push_back(std::move(item));
+ }
+ (new AMessage(kWhatWorkDone, this))->post();
+}
+
void CCodec::onMessageReceived(const sp<AMessage> &msg) {
TimePoint now = std::chrono::steady_clock::now();
switch (msg->what()) {
@@ -582,6 +646,37 @@
flush();
break;
}
+ case kWhatCreateInputSurface: {
+ // Surface operations may be briefly blocking.
+ setDeadline(now + 100ms);
+ createInputSurface();
+ break;
+ }
+ case kWhatSetInputSurface: {
+ // Surface operations may be briefly blocking.
+ setDeadline(now + 100ms);
+ sp<RefBase> obj;
+ CHECK(msg->findObject("surface", &obj));
+ sp<PersistentSurface> surface(static_cast<PersistentSurface *>(obj.get()));
+ setInputSurface(surface);
+ break;
+ }
+ case kWhatWorkDone: {
+ std::unique_ptr<C2Work> work;
+ {
+ Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+ if (queue->empty()) {
+ break;
+ }
+ work.swap(queue->front());
+ queue->pop_front();
+ if (!queue->empty()) {
+ (new AMessage(kWhatWorkDone, this))->post();
+ }
+ }
+ mChannel->onWorkDone(work);
+ break;
+ }
default: {
ALOGE("unrecognized message");
break;
diff --git a/media/libstagefright/CCodecBufferChannel.cpp b/media/libstagefright/CCodecBufferChannel.cpp
index ebc9b0a..2f4a7ea 100644
--- a/media/libstagefright/CCodecBufferChannel.cpp
+++ b/media/libstagefright/CCodecBufferChannel.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#define LOG_TAG "CCodecBufferChannel"
#include <utils/Log.h>
@@ -48,10 +48,189 @@
using namespace hardware::cas::V1_0;
using namespace hardware::cas::native::V1_0;
+/**
+ * Base class for representation of buffers at one port.
+ */
+class CCodecBufferChannel::Buffers {
+public:
+ Buffers() = default;
+ virtual ~Buffers() = default;
+
+ /**
+ * Set format for MediaCodec-facing buffers.
+ */
+ void setFormat(const sp<AMessage> &format) { mFormat = format; }
+
+ /**
+ * Returns true if the buffers are operating under array mode.
+ */
+ virtual bool isArrayMode() const { return false; }
+
+ /**
+ * Fills the vector with MediaCodecBuffer's if in array mode; otherwise,
+ * no-op.
+ */
+ virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
+
+protected:
+ // Format to be used for creating MediaCodec-facing buffers.
+ sp<AMessage> mFormat;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(Buffers);
+};
+
+class CCodecBufferChannel::InputBuffers : public CCodecBufferChannel::Buffers {
+public:
+ InputBuffers() = default;
+ virtual ~InputBuffers() = default;
+
+ /**
+ * Set a block pool to obtain input memory blocks.
+ */
+ void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
+
+ /**
+ * Get a new MediaCodecBuffer for input and its corresponding index.
+ * Returns false if no new buffer can be obtained at the moment.
+ */
+ virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
+
+ /**
+ * Release the buffer obtained from requestNewBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ *
+ * XXX: this is a quick hack to be removed
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBufferIndex(size_t /* index */) { return nullptr; }
+
+ /**
+ * Release the buffer obtained from requestNewBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from requestNewBuffer() is valid.
+ */
+ virtual void flush() = 0;
+
+ /**
+ * Return array-backed version of input buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls of requestNewBuffer().
+ */
+ virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
+
+protected:
+ // Pool to obtain blocks for input buffers.
+ std::shared_ptr<C2BlockPool> mPool;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
+};
+
+class CCodecBufferChannel::InputBufferClient {
+public:
+ explicit InputBufferClient(
+ const std::shared_ptr<CCodecBufferChannel> &channel) : mChannel(channel) {}
+ virtual ~InputBufferClient() = default;
+
+ virtual void onInputBufferAdded(size_t index, const sp<MediaCodecBuffer> &buffer) {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return;
+ }
+ channel->mCallback->onInputBufferAvailable(index, buffer);
+ }
+
+ virtual void onStart() {
+ // no-op
+ }
+
+ virtual void onStop() {
+ // no-op
+ }
+
+ virtual void onRelease() {
+ // no-op
+ }
+
+ virtual void onInputBufferAvailable(size_t index, const sp<MediaCodecBuffer> &buffer) {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return;
+ }
+ channel->mCallback->onInputBufferAvailable(index, buffer);
+ }
+
+protected:
+ InputBufferClient() = default;
+ std::weak_ptr<CCodecBufferChannel> mChannel;
+
+ DISALLOW_EVIL_CONSTRUCTORS(InputBufferClient);
+};
+
+class CCodecBufferChannel::OutputBuffers : public CCodecBufferChannel::Buffers {
+public:
+ OutputBuffers() = default;
+ virtual ~OutputBuffers() = default;
+
+ /**
+ * Register output C2Buffer from the component and obtain corresponding
+ * index and MediaCodecBuffer object. Returns false if registration
+ * fails.
+ */
+ virtual bool registerBuffer(
+ const std::shared_ptr<C2Buffer> &buffer,
+ size_t *index,
+ sp<MediaCodecBuffer> *codecBuffer) = 0;
+
+ /**
+ * Register codec specific data as a buffer to be consistent with
+ * MediaCodec behavior.
+ */
+ virtual bool registerCsd(
+ const C2StreamCsdInfo::output * /* csd */,
+ size_t * /* index */,
+ sp<MediaCodecBuffer> * /* codecBuffer */) {
+ return false;
+ }
+
+ /**
+ * Release the buffer obtained from registerBuffer() and get the
+ * associated C2Buffer object back. Returns empty shared_ptr if the
+ * buffer is not on file.
+ */
+ virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
+
+ /**
+ * Flush internal state. After this call, no index or buffer previously
+ * returned from registerBuffer() is valid.
+ */
+ virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
+
+ /**
+ * Return array-backed version of output buffers. The returned object
+ * shall retain the internal state so that it will honor index and
+ * buffer from previous calls of registerBuffer().
+ */
+ virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
+};
+
namespace {
+constexpr int32_t kMaskI32 = ~0;
+
// TODO: get this info from component
const static size_t kMinBufferArraySize = 16;
+const static size_t kLinearBufferSize = 524288;
template <class T>
ssize_t findBufferSlot(
@@ -88,9 +267,14 @@
return Codec2Buffer::allocate(format, block);
}
-class LinearBuffer : public C2Buffer {
+class Buffer1D : public C2Buffer {
public:
- explicit LinearBuffer(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+ explicit Buffer1D(C2ConstLinearBlock block) : C2Buffer({ block }) {}
+};
+
+class Buffer2D : public C2Buffer {
+public:
+ explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
};
class InputBuffersArray : public CCodecBufferChannel::InputBuffers {
@@ -99,36 +283,36 @@
void add(
size_t index,
- const sp<MediaCodecBuffer> &clientBuffer,
- const std::shared_ptr<C2Buffer> &compBuffer,
+ const sp<Codec2Buffer> &clientBuffer,
bool available) {
- if (mBufferArray.size() < index) {
+ if (mBufferArray.size() <= index) {
+ // TODO: make this more efficient
mBufferArray.resize(index + 1);
}
mBufferArray[index].clientBuffer = clientBuffer;
- mBufferArray[index].compBuffer = compBuffer;
mBufferArray[index].available = available;
}
- bool isArrayMode() final { return true; }
+ bool isArrayMode() const final { return true; }
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
return nullptr;
}
- void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
array->clear();
- for (const auto &entry : mBufferArray) {
+ for (const Entry &entry : mBufferArray) {
array->push(entry.clientBuffer);
}
}
bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
- if (mBufferArray[i].available) {
+ if (mBufferArray[i].available && mBufferArray[i].compBuffer.expired()) {
mBufferArray[i].available = false;
*index = i;
*buffer = mBufferArray[i].clientBuffer;
+ (*buffer)->setRange(0, (*buffer)->capacity());
return true;
}
}
@@ -138,8 +322,10 @@
std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
if (!mBufferArray[i].available && mBufferArray[i].clientBuffer == buffer) {
+ std::shared_ptr<C2Buffer> buffer = std::make_shared<Buffer1D>(mBufferArray[i].clientBuffer->share());
mBufferArray[i].available = true;
- return std::move(mBufferArray[i].compBuffer);
+ mBufferArray[i].compBuffer = buffer;
+ return buffer;
}
}
return nullptr;
@@ -148,14 +334,13 @@
void flush() override {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
mBufferArray[i].available = true;
- mBufferArray[i].compBuffer.reset();
}
}
private:
struct Entry {
- sp<MediaCodecBuffer> clientBuffer;
- std::shared_ptr<C2Buffer> compBuffer;
+ sp<Codec2Buffer> clientBuffer;
+ std::weak_ptr<C2Buffer> compBuffer;
bool available;
};
@@ -177,7 +362,7 @@
// TODO: proper max input size and usage
// TODO: read usage from intf
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ sp<Codec2Buffer> newBuffer = allocateLinearBuffer(mPool, mFormat, kLinearBufferSize, usage);
if (newBuffer == nullptr) {
return false;
}
@@ -195,7 +380,7 @@
sp<Codec2Buffer> codecBuffer = it->promote();
// We got sp<> reference from the caller so this should never happen..
CHECK(codecBuffer != nullptr);
- return std::make_shared<LinearBuffer>(codecBuffer->share());
+ return std::make_shared<Buffer1D>(codecBuffer->share());
}
void flush() override {
@@ -203,8 +388,10 @@
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
std::unique_ptr<InputBuffersArray> array(new InputBuffersArray);
+ array->setFormat(mFormat);
// TODO
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<Codec2Buffer> clientBuffer = mBuffers[i].promote();
bool available = false;
@@ -212,13 +399,12 @@
// TODO: proper max input size
// TODO: read usage from intf
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- clientBuffer = allocateLinearBuffer(mPool, mFormat, 65536, usage);
+ clientBuffer = allocateLinearBuffer(mPool, mFormat, kLinearBufferSize, usage);
available = true;
}
array->add(
i,
clientBuffer,
- std::make_shared<LinearBuffer>(clientBuffer->share()),
available);
}
return std::move(array);
@@ -230,19 +416,30 @@
std::vector<wp<Codec2Buffer>> mBuffers;
};
-// TODO: stub
class GraphicInputBuffers : public CCodecBufferChannel::InputBuffers {
public:
- using CCodecBufferChannel::InputBuffers::InputBuffers;
+ GraphicInputBuffers() = default;
bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) override {
- (void)index;
- (void)buffer;
- return false;
+ *buffer = nullptr;
+ for (size_t i = 0; i < mAvailable.size(); ++i) {
+ if (mAvailable[i]) {
+ *index = i;
+ mAvailable[i] = false;
+ return true;
+ }
+ }
+ *index = mAvailable.size();
+ mAvailable.push_back(false);
+ return true;
}
- std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) override {
- (void)buffer;
+ std::shared_ptr<C2Buffer> releaseBufferIndex(size_t index) override {
+ mAvailable[index] = true;
+ return nullptr;
+ }
+
+ std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &) override {
return nullptr;
}
@@ -252,6 +449,9 @@
std::unique_ptr<CCodecBufferChannel::InputBuffers> toArrayMode() final {
return nullptr;
}
+
+private:
+ std::vector<bool> mAvailable;
};
class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
@@ -263,7 +463,8 @@
const sp<MediaCodecBuffer> &clientBuffer,
const std::shared_ptr<C2Buffer> &compBuffer,
bool available) {
- if (mBufferArray.size() < index) {
+ if (mBufferArray.size() <= index) {
+ // TODO: make this more efficient
mBufferArray.resize(index + 1);
}
mBufferArray[index].clientBuffer = clientBuffer;
@@ -271,7 +472,7 @@
mBufferArray[index].available = available;
}
- bool isArrayMode() final { return true; }
+ bool isArrayMode() const final { return true; }
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() final {
return nullptr;
@@ -285,6 +486,7 @@
if (mBufferArray[i].available && copy(buffer, mBufferArray[i].clientBuffer)) {
*index = i;
*codecBuffer = mBufferArray[i].clientBuffer;
+ (*codecBuffer)->setFormat(mFormat);
mBufferArray[i].compBuffer = buffer;
mBufferArray[i].available = false;
return true;
@@ -299,10 +501,17 @@
sp<MediaCodecBuffer> *codecBuffer) final {
for (size_t i = 0; i < mBufferArray.size(); ++i) {
if (mBufferArray[i].available
- && mBufferArray[i].clientBuffer->capacity() <= csd->flexCount()) {
+ && mBufferArray[i].clientBuffer->capacity() >= csd->flexCount()) {
+ // TODO: proper format update
+ sp<ABuffer> csdBuffer = ABuffer::CreateAsCopy(csd->m.value, csd->flexCount());
+ mFormat = mFormat->dup();
+ mFormat->setBuffer("csd-0", csdBuffer);
+
memcpy(mBufferArray[i].clientBuffer->base(), csd->m.value, csd->flexCount());
+ mBufferArray[i].clientBuffer->setRange(0, csd->flexCount());
*index = i;
*codecBuffer = mBufferArray[i].clientBuffer;
+ (*codecBuffer)->setFormat(mFormat);
mBufferArray[i].available = false;
return true;
}
@@ -333,9 +542,9 @@
const std::shared_ptr<C2Buffer> &buffer,
const sp<MediaCodecBuffer> &clientBuffer) = 0;
- void getArray(Vector<sp<MediaCodecBuffer>> *array) final {
+ void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
array->clear();
- for (const auto &entry : mBufferArray) {
+ for (const Entry &entry : mBufferArray) {
array->push(entry.clientBuffer);
}
}
@@ -363,6 +572,7 @@
}
C2ReadView view = buffer->data().linearBlocks().front().map().get();
if (clientBuffer->capacity() < view.capacity()) {
+ ALOGV("view.capacity() = %u", view.capacity());
return false;
}
clientBuffer->setRange(0u, view.capacity());
@@ -425,9 +635,11 @@
if (ret < 0) {
return false;
}
- sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(
- mFormat,
- ABuffer::CreateAsCopy(csd->m.value, csd->flexCount()));
+ // TODO: proper format update
+ sp<ABuffer> csdBuffer = ABuffer::CreateAsCopy(csd->m.value, csd->flexCount());
+ mFormat = mFormat->dup();
+ mFormat->setBuffer("csd-0", csdBuffer);
+ sp<MediaCodecBuffer> newBuffer = new MediaCodecBuffer(mFormat, csdBuffer);
mBuffers[ret] = { newBuffer, nullptr };
*index = ret;
*codecBuffer = newBuffer;
@@ -498,15 +710,17 @@
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
std::unique_ptr<OutputBuffersArray> array(new LinearOutputBuffersArray);
+ array->setFormat(mFormat);
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<MediaCodecBuffer> clientBuffer = mBuffers[i].clientBuffer.promote();
std::shared_ptr<C2Buffer> compBuffer = mBuffers[i].bufferRef;
bool available = false;
if (clientBuffer == nullptr) {
// TODO: proper max input size
- clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(65536));
+ clientBuffer = new MediaCodecBuffer(mFormat, new ABuffer(kLinearBufferSize));
available = true;
compBuffer.reset();
}
@@ -526,8 +740,10 @@
std::unique_ptr<CCodecBufferChannel::OutputBuffers> toArrayMode() override {
std::unique_ptr<OutputBuffersArray> array(new GraphicOutputBuffersArray);
+ array->setFormat(mFormat);
const size_t size = std::max(kMinBufferArraySize, mBuffers.size());
+ mBuffers.resize(size);
for (size_t i = 0; i < size; ++i) {
sp<MediaCodecBuffer> clientBuffer = mBuffers[i].clientBuffer.promote();
std::shared_ptr<C2Buffer> compBuffer = mBuffers[i].bufferRef;
@@ -543,8 +759,163 @@
}
};
+class BufferQueueClient : public CCodecBufferChannel::InputBufferClient {
+public:
+ explicit BufferQueueClient(const sp<GraphicBufferSource> &source) : mSource(source) {}
+ virtual ~BufferQueueClient() = default;
+
+ void onInputBufferAdded(size_t index, const sp<MediaCodecBuffer> &buffer) override {
+ (void)buffer;
+ mSource->onInputBufferAdded(index & kMaskI32);
+ }
+
+ void onStart() override {
+ mSource->start();
+ }
+
+ void onStop() override {
+ mSource->stop();
+ }
+
+ void onRelease() override {
+ mSource->release();
+ }
+
+ void onInputBufferAvailable(size_t index, const sp<MediaCodecBuffer> &buffer) override {
+ ALOGV("onInputBufferEmptied index = %zu", index);
+ (void)buffer;
+ // TODO: can we really ignore fence here?
+ mSource->onInputBufferEmptied(index & kMaskI32, -1 /* fenceFd */);
+ }
+
+private:
+ sp<GraphicBufferSource> mSource;
+};
+
+class GraphicBlock : public C2GraphicBlock {
+ using C2GraphicBlock::C2GraphicBlock;
+ friend class ::android::CCodecBufferChannel;
+};
+
} // namespace
+class CCodecBufferChannel::C2ComponentWrapper : public ComponentWrapper {
+public:
+ explicit C2ComponentWrapper(
+ const std::shared_ptr<CCodecBufferChannel> &channel)
+ : mChannel(channel), mLastTimestamp(0) {}
+
+ virtual ~C2ComponentWrapper() {
+ for (const std::pair<int32_t, C2Handle *> &entry : mHandles) {
+ native_handle_delete(entry.second);
+ }
+ }
+
+ status_t submitBuffer(
+ int32_t bufferId, const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ ALOGV("submitBuffer bufferId = %d", bufferId);
+ // TODO: Use fd to construct fence
+ (void)fenceFd;
+
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return NO_INIT;
+ }
+
+ std::shared_ptr<C2Allocator> allocator = mAllocator.lock();
+ if (!allocator) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2AllocatorStore::PLATFORM_START + 1, // GRALLOC
+ &allocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ mAllocator = allocator;
+ }
+
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ C2Handle *handle = WrapNativeCodec2GrallocHandle(
+ buffer->handle, buffer->width, buffer->height,
+ buffer->format, buffer->usage, buffer->stride);
+ c2_status_t err = allocator->priorGraphicAllocation(handle, &alloc);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ std::shared_ptr<C2GraphicBlock> block(new GraphicBlock(alloc));
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = (C2FrameData::flags_t)0;
+ work->input.ordinal.timestamp = timestamp;
+ work->input.ordinal.frameIndex = channel->mFrameIndex++;
+ work->input.buffers.clear();
+ work->input.buffers.emplace_back(new Buffer2D(
+ // TODO: fence
+ block->share(C2Rect(block->width(), block->height()), ::android::C2Fence())));
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ std::list<std::unique_ptr<C2Work>> items;
+ items.push_back(std::move(work));
+
+ err = channel->mComponent->queue_nb(&items);
+ if (err != OK) {
+ native_handle_delete(handle);
+ return UNKNOWN_ERROR;
+ }
+
+ mLastTimestamp = timestamp;
+ if (mHandles.count(bufferId) > 0) {
+ native_handle_delete(mHandles[bufferId]);
+ }
+ mHandles[bufferId] = handle;
+
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(channel->mInputBuffers);
+ ALOGV("releaseBufferIndex index = %d", bufferId);
+ (*buffers)->releaseBufferIndex(bufferId);
+
+ return OK;
+ }
+
+ status_t submitEos(int32_t bufferId) override {
+ std::shared_ptr<CCodecBufferChannel> channel = mChannel.lock();
+ if (!channel) {
+ return NO_INIT;
+ }
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+ work->input.ordinal.timestamp = mLastTimestamp;
+ work->input.ordinal.frameIndex = channel->mFrameIndex++;
+ work->input.buffers.clear();
+ work->input.buffers.push_back(nullptr);
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ std::list<std::unique_ptr<C2Work>> items;
+ items.push_back(std::move(work));
+
+ c2_status_t err = channel->mComponent->queue_nb(&items);
+
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(channel->mInputBuffers);
+ (*buffers)->releaseBufferIndex(bufferId);
+
+ return (err == C2_OK) ? OK : UNKNOWN_ERROR;
+ }
+
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ // TODO
+ (void)dataSpace;
+ (void)aspects;
+ (void)pixelFormat;
+ }
+
+private:
+ std::weak_ptr<CCodecBufferChannel> mChannel;
+ std::map<int32_t, C2Handle *> mHandles;
+ int64_t mLastTimestamp;
+ std::weak_ptr<C2Allocator> mAllocator;
+};
+
CCodecBufferChannel::QueueGuard::QueueGuard(
CCodecBufferChannel::QueueSync &sync) : mSync(sync) {
std::unique_lock<std::mutex> l(mSync.mMutex);
@@ -601,10 +972,14 @@
if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
mCrypto->unsetHeap(mHeapSeqNum);
}
+ // TODO: is this the right place?
+ mInputClient->onRelease();
}
void CCodecBufferChannel::setComponent(const std::shared_ptr<C2Component> &component) {
mComponent = component;
+ mInputClient.reset(new InputBufferClient(shared_from_this()));
+
C2StreamFormatConfig::input inputFormat(0u);
C2StreamFormatConfig::output outputFormat(0u);
c2_status_t err = mComponent->intf()->query_vb(
@@ -638,6 +1013,10 @@
} else {
// TODO: error
}
+ // TODO: remove once we switch to proper buffer pool.
+ if (!graphic) {
+ *buffers = (*buffers)->toArrayMode();
+ }
}
{
@@ -652,6 +1031,18 @@
}
}
+status_t CCodecBufferChannel::setGraphicBufferSource(
+ const sp<GraphicBufferSource> &source) {
+ ALOGV("setGraphicBufferSource");
+ mInputClient.reset(new BufferQueueClient(source));
+
+ // TODO: proper color aspect & dataspace
+ android_dataspace dataSpace = HAL_DATASPACE_BT709;
+ // TODO: read settings properly from the interface
+ return source->configure(new C2ComponentWrapper(
+ shared_from_this()), dataSpace, 16, 1080, 1920, GRALLOC_USAGE_SW_READ_OFTEN);
+}
+
status_t CCodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
QueueGuard guard(mSync);
if (!guard.isRunning()) {
@@ -708,9 +1099,24 @@
return -ENOSYS;
}
+void CCodecBufferChannel::feedInputBufferIfAvailable() {
+ sp<MediaCodecBuffer> inBuffer;
+ size_t index;
+ {
+ Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+ if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
+ ALOGW("no new buffer available");
+ inBuffer = nullptr;
+ }
+ }
+ ALOGV("new input index = %zu", index);
+ mInputClient->onInputBufferAvailable(index, inBuffer);
+}
+
status_t CCodecBufferChannel::renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
ALOGV("renderOutputBuffer");
+ feedInputBufferIfAvailable();
std::shared_ptr<C2Buffer> c2Buffer;
{
@@ -780,7 +1186,9 @@
}
{
Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- (void)(*buffers)->releaseBuffer(buffer);
+ if((*buffers)->releaseBuffer(buffer)) {
+ feedInputBufferIfAvailable();
+ }
}
return OK;
}
@@ -832,13 +1240,15 @@
return;
}
}
- mCallback->onInputBufferAvailable(index, buffer);
+ mInputClient->onInputBufferAdded(index, buffer);
}
+ mInputClient->onStart();
}
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load();
+ mInputClient->onStop();
}
void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
@@ -852,106 +1262,110 @@
}
}
-void CCodecBufferChannel::onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems) {
- for (const auto &work : workItems) {
- sp<MediaCodecBuffer> inBuffer;
- size_t index;
- {
- Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
- if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
- ALOGW("no new buffer available");
- inBuffer = nullptr;
- }
- }
- if (inBuffer != nullptr) {
- mCallback->onInputBufferAvailable(index, inBuffer);
- }
+void CCodecBufferChannel::onWorkDone(const std::unique_ptr<C2Work> &work) {
+ if (work->result != OK) {
+ ALOGE("work failed to complete: %d", work->result);
+ mOnError(work->result, ACTION_CODE_FATAL);
+ return;
+ }
- if (work->result != OK) {
- ALOGE("work failed to complete: %d", work->result);
- mOnError(work->result, ACTION_CODE_FATAL);
+ // NOTE: MediaCodec usage supposedly have only one worklet
+ if (work->worklets.size() != 1u) {
+ ALOGE("onWorkDone: incorrect number of worklets: %zu",
+ work->worklets.size());
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ return;
+ }
+
+ const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
+ if ((worklet->output.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
+ // Discard frames from previous generation.
+ return;
+ }
+ std::shared_ptr<C2Buffer> buffer;
+ // NOTE: MediaCodec usage supposedly have only one output stream.
+ if (worklet->output.buffers.size() > 1u) {
+ ALOGE("onWorkDone: incorrect number of output buffers: %zu",
+ worklet->output.buffers.size());
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ return;
+ } else if (worklet->output.buffers.size() == 1u) {
+ buffer = worklet->output.buffers[0];
+ if (!buffer) {
+ ALOGW("onWorkDone: nullptr found in buffers; ignored.");
+ }
+ }
+
+ const C2StreamCsdInfo::output *csdInfo = nullptr;
+ for (const std::unique_ptr<C2Param> &info : worklet->output.configUpdate) {
+ if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
+ ALOGV("onWorkDone: csd found");
+ csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
+ }
+ }
+
+ int32_t flags = 0;
+ if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ ALOGV("onWorkDone: output EOS");
+ }
+
+ sp<MediaCodecBuffer> outBuffer;
+ size_t index;
+ if (csdInfo != nullptr) {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
+ outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
+ outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ ALOGV("onWorkDone: csd index = %zu", index);
+
+ buffers.unlock();
+ mCallback->onOutputBufferAvailable(index, outBuffer);
+ buffers.lock();
+ } else {
+ ALOGE("onWorkDone: unable to register csd");
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
return;
}
-
- // NOTE: MediaCodec usage supposedly have only one worklet
- if (work->worklets.size() != 1u) {
- ALOGE("incorrect number of worklets: %zu", work->worklets.size());
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- continue;
- }
-
- const std::unique_ptr<C2Worklet> &worklet = work->worklets.front();
- if ((worklet->output.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
- // Discard frames from previous generation.
- continue;
- }
- // NOTE: MediaCodec usage supposedly have only one output stream.
- if (worklet->output.buffers.size() != 1u) {
- ALOGE("incorrect number of output buffers: %zu", worklet->output.buffers.size());
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- continue;
- }
-
- const std::shared_ptr<C2Buffer> &buffer = worklet->output.buffers[0];
- const C2StreamCsdInfo::output *csdInfo = nullptr;
- if (buffer) {
- // TODO: transfer infos() into buffer metadata
- }
- for (const auto &info : worklet->output.configUpdate) {
- if (info->coreIndex() == C2StreamCsdInfo::output::CORE_INDEX) {
- ALOGV("csd found");
- csdInfo = static_cast<const C2StreamCsdInfo::output *>(info.get());
- }
- }
-
- int32_t flags = 0;
- if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
- flags |= MediaCodec::BUFFER_FLAG_EOS;
- ALOGV("output EOS");
- }
-
- sp<MediaCodecBuffer> outBuffer;
- if (csdInfo != nullptr) {
- Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if ((*buffers)->registerCsd(csdInfo, &index, &outBuffer)) {
- outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
- outBuffer->meta()->setInt32("flags", flags | MediaCodec::BUFFER_FLAG_CODECCONFIG);
- ALOGV("csd index = %zu", index);
-
- buffers.unlock();
- mCallback->onOutputBufferAvailable(index, outBuffer);
- buffers.lock();
- } else {
- ALOGE("unable to register output buffer");
- buffers.unlock();
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- buffers.lock();
- continue;
- }
- }
-
- if (!buffer && !flags) {
- ALOGV("Not reporting output buffer");
- continue;
- }
-
- {
- Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
- if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
- ALOGE("unable to register output buffer");
-
- buffers.unlock();
- mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
- buffers.lock();
- continue;
- }
- }
-
- outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
- outBuffer->meta()->setInt32("flags", flags);
- ALOGV("index = %zu", index);
- mCallback->onOutputBufferAvailable(index, outBuffer);
}
+
+ if (!buffer && !flags) {
+ ALOGV("onWorkDone: Not reporting output buffer");
+ return;
+ }
+
+ if (buffer) {
+ for (const std::shared_ptr<const C2Info> &info : buffer->info()) {
+ // TODO: properly translate these to metadata
+ switch (info->coreIndex().coreIndex()) {
+ case C2StreamPictureTypeMaskInfo::CORE_INDEX:
+ if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2PictureTypeKeyFrame) {
+ flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if (!(*buffers)->registerBuffer(buffer, &index, &outBuffer)) {
+ ALOGE("onWorkDone: unable to register output buffer");
+ buffers.unlock();
+ mOnError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ buffers.lock();
+ return;
+ }
+ }
+
+ outBuffer->meta()->setInt64("timeUs", worklet->output.ordinal.timestamp.peek());
+ outBuffer->meta()->setInt32("flags", flags);
+ ALOGV("onWorkDone: out buffer index = %zu", index);
+ mCallback->onOutputBufferAvailable(index, outBuffer);
}
status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
diff --git a/media/libstagefright/codec2/include/C2Config.h b/media/libstagefright/codec2/include/C2Config.h
index 83cb72c..2a2b9de 100644
--- a/media/libstagefright/codec2/include/C2Config.h
+++ b/media/libstagefright/codec2/include/C2Config.h
@@ -67,6 +67,7 @@
kParamIndexVideoSizeTuning,
kParamIndexCsd,
+ kParamIndexPictureTypeMask,
// video info
@@ -133,6 +134,12 @@
typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexCsd> C2StreamCsdInfo;
+C2ENUM(C2PictureTypeMask, uint32_t,
+ C2PictureTypeKeyFrame = (1u << 0),
+)
+
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexPictureTypeMask> C2StreamPictureTypeMaskInfo;
+
/*
Component description fields:
diff --git a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
index 18db3e9..a5ea511 100644
--- a/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/libstagefright/codec2/vndk/C2AllocatorGralloc.cpp
@@ -38,6 +38,15 @@
using ::android::hardware::hidl_handle;
using ::android::hardware::hidl_vec;
+namespace {
+
+struct BufferDescriptorInfo {
+ IMapper::BufferDescriptorInfo mapperInfo;
+ uint32_t stride;
+};
+
+}
+
/* ===================================== GRALLOC ALLOCATION ==================================== */
static c2_status_t maperr2error(Error maperr) {
switch (maperr) {
@@ -73,6 +82,7 @@
uint32_t format;
uint32_t usage_lo;
uint32_t usage_hi;
+ uint32_t stride;
uint32_t magic;
};
@@ -109,13 +119,16 @@
static C2HandleGralloc* WrapNativeHandle(
const native_handle_t *const handle,
- uint32_t width, uint32_t height, uint32_t format, uint64_t usage) {
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride) {
//CHECK(handle != nullptr);
if (native_handle_is_invalid(handle) ||
handle->numInts > int((INT_MAX - handle->version) / sizeof(int)) - NUM_INTS - handle->numFds) {
return nullptr;
}
- ExtraData xd = { width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32), MAGIC };
+ ExtraData xd = {
+ width, height, format, uint32_t(usage & 0xFFFFFFFF), uint32_t(usage >> 32),
+ stride, MAGIC
+ };
native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
if (res != nullptr) {
memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
@@ -138,7 +151,8 @@
static const C2HandleGralloc* Import(
const C2Handle *const handle,
- uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage) {
+ uint32_t *width, uint32_t *height, uint32_t *format,
+ uint64_t *usage, uint32_t *stride) {
const ExtraData *xd = getExtraData(handle);
if (xd == nullptr) {
return nullptr;
@@ -147,15 +161,22 @@
*height = xd->height;
*format = xd->format;
*usage = xd->usage_lo | (uint64_t(xd->usage_hi) << 32);
+ *stride = xd->stride;
return reinterpret_cast<const C2HandleGralloc *>(handle);
}
};
-native_handle_t* UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
return C2HandleGralloc::UnwrapNativeHandle(handle);
}
+C2Handle *WrapNativeCodec2GrallocHandle(
+ const native_handle_t *const handle,
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride) {
+ return C2HandleGralloc::WrapNativeHandle(handle, width, height, format, usage, stride);
+}
+
class C2AllocationGralloc : public C2GraphicAllocation {
public:
virtual ~C2AllocationGralloc() override;
@@ -171,7 +192,7 @@
// internal methods
// |handle| will be moved.
C2AllocationGralloc(
- const IMapper::BufferDescriptorInfo &info,
+ const BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
hidl_handle &hidlHandle,
const C2HandleGralloc *const handle);
@@ -179,7 +200,7 @@
c2_status_t status() const;
private:
- const IMapper::BufferDescriptorInfo mInfo;
+ const BufferDescriptorInfo mInfo;
const sp<IMapper> mMapper;
const hidl_handle mHidlHandle;
const C2HandleGralloc *mHandle;
@@ -189,11 +210,11 @@
};
C2AllocationGralloc::C2AllocationGralloc(
- const IMapper::BufferDescriptorInfo &info,
+ const BufferDescriptorInfo &info,
const sp<IMapper> &mapper,
hidl_handle &hidlHandle,
const C2HandleGralloc *const handle)
- : C2GraphicAllocation(info.width, info.height),
+ : C2GraphicAllocation(info.mapperInfo.width, info.mapperInfo.height),
mInfo(info),
mMapper(mapper),
mHidlHandle(std::move(hidlHandle)),
@@ -241,83 +262,133 @@
return C2_CORRUPTED;
}
mLockedHandle = C2HandleGralloc::WrapNativeHandle(
- mBuffer, mInfo.width, mInfo.height, (uint32_t)mInfo.format, mInfo.usage);
+ mBuffer, mInfo.mapperInfo.width, mInfo.mapperInfo.height,
+ (uint32_t)mInfo.mapperInfo.format, mInfo.mapperInfo.usage, mInfo.stride);
}
- if (mInfo.format == PixelFormat::YCBCR_420_888 || mInfo.format == PixelFormat::YV12) {
- YCbCrLayout ycbcrLayout;
- mMapper->lockYCbCr(
- const_cast<native_handle_t *>(mBuffer),
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
- { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
- // TODO: fence
- hidl_handle(),
- [&err, &ycbcrLayout](const auto &maperr, const auto &mapLayout) {
- err = maperr2error(maperr);
- if (err == C2_OK) {
- ycbcrLayout = mapLayout;
- }
- });
- if (err != C2_OK) {
- return err;
+ switch (mInfo.mapperInfo.format) {
+ case PixelFormat::YCBCR_420_888:
+ case PixelFormat::YV12: {
+ YCbCrLayout ycbcrLayout;
+ mMapper->lockYCbCr(
+ const_cast<native_handle_t *>(mBuffer),
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &ycbcrLayout](const auto &maperr, const auto &mapLayout) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ ycbcrLayout = mapLayout;
+ }
+ });
+ if (err != C2_OK) {
+ return err;
+ }
+ addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
+ addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
+ addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
+ layout->type = C2PlanarLayout::TYPE_YUV;
+ layout->numPlanes = 3;
+ layout->planes[C2PlanarLayout::PLANE_Y] = {
+ C2PlaneInfo::CHANNEL_Y, // channel
+ 1, // colInc
+ (int32_t)ycbcrLayout.yStride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_U] = {
+ C2PlaneInfo::CHANNEL_CB, // channel
+ (int32_t)ycbcrLayout.chromaStep, // colInc
+ (int32_t)ycbcrLayout.cStride, // rowInc
+ 2, // mColSampling
+ 2, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_V] = {
+ C2PlaneInfo::CHANNEL_CR, // channel
+ (int32_t)ycbcrLayout.chromaStep, // colInc
+ (int32_t)ycbcrLayout.cStride, // rowInc
+ 2, // mColSampling
+ 2, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ break;
}
- addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
- addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
- addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
- layout->type = C2PlanarLayout::TYPE_YUV;
- layout->numPlanes = 3;
- layout->planes[C2PlanarLayout::PLANE_Y] = {
- C2PlaneInfo::CHANNEL_Y, // channel
- 1, // colInc
- (int32_t)ycbcrLayout.yStride, // rowInc
- 1, // mColSampling
- 1, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- layout->planes[C2PlanarLayout::PLANE_U] = {
- C2PlaneInfo::CHANNEL_CB, // channel
- (int32_t)ycbcrLayout.chromaStep, // colInc
- (int32_t)ycbcrLayout.cStride, // rowInc
- 2, // mColSampling
- 2, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- layout->planes[C2PlanarLayout::PLANE_V] = {
- C2PlaneInfo::CHANNEL_CR, // channel
- (int32_t)ycbcrLayout.chromaStep, // colInc
- (int32_t)ycbcrLayout.cStride, // rowInc
- 2, // mColSampling
- 2, // mRowSampling
- 8, // allocatedDepth
- 8, // bitDepth
- 0, // rightShift
- C2PlaneInfo::NATIVE, // endianness
- };
- } else {
- void *pointer = nullptr;
- mMapper->lock(
- const_cast<native_handle_t *>(mBuffer),
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
- { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
- // TODO: fence
- hidl_handle(),
- [&err, &pointer](const auto &maperr, const auto &mapPointer) {
- err = maperr2error(maperr);
- if (err == C2_OK) {
- pointer = mapPointer;
- }
- });
- if (err != C2_OK) {
- return err;
+
+ case PixelFormat::RGBA_8888:
+ // TODO: alpha channel
+ // fall-through
+ case PixelFormat::RGBX_8888: {
+ void *pointer = nullptr;
+ mMapper->lock(
+ const_cast<native_handle_t *>(mBuffer),
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ { (int32_t)rect.left, (int32_t)rect.top, (int32_t)rect.width, (int32_t)rect.height },
+ // TODO: fence
+ hidl_handle(),
+ [&err, &pointer](const auto &maperr, const auto &mapPointer) {
+ err = maperr2error(maperr);
+ if (err == C2_OK) {
+ pointer = mapPointer;
+ }
+ });
+ if (err != C2_OK) {
+ return err;
+ }
+ addr[C2PlanarLayout::PLANE_R] = (uint8_t *)pointer;
+ addr[C2PlanarLayout::PLANE_G] = (uint8_t *)pointer + 1;
+ addr[C2PlanarLayout::PLANE_B] = (uint8_t *)pointer + 2;
+ layout->type = C2PlanarLayout::TYPE_RGB;
+ layout->numPlanes = 3;
+ layout->planes[C2PlanarLayout::PLANE_R] = {
+ C2PlaneInfo::CHANNEL_R, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_G] = {
+ C2PlaneInfo::CHANNEL_G, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ layout->planes[C2PlanarLayout::PLANE_B] = {
+ C2PlaneInfo::CHANNEL_B, // channel
+ 4, // colInc
+ 4 * (int32_t)mInfo.stride, // rowInc
+ 1, // mColSampling
+ 1, // mRowSampling
+ 8, // allocatedDepth
+ 8, // bitDepth
+ 0, // rightShift
+ C2PlaneInfo::NATIVE, // endianness
+ };
+ break;
}
- // TODO
- return C2_OMITTED;
+ default: {
+ return C2_OMITTED;
+ }
}
mLocked = true;
@@ -396,17 +467,20 @@
// TODO: buffer usage should be determined according to |usage|
(void) usage;
- IMapper::BufferDescriptorInfo info = {
- width,
- height,
- 1u, // layerCount
- (PixelFormat)format,
- BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ BufferDescriptorInfo info = {
+ {
+ width,
+ height,
+ 1u, // layerCount
+ (PixelFormat)format,
+ BufferUsage::CPU_READ_OFTEN | BufferUsage::CPU_WRITE_OFTEN,
+ },
+ 0u, // stride placeholder
};
c2_status_t err = C2_OK;
BufferDescriptor desc;
mMapper->createDescriptor(
- info, [&err, &desc](const auto &maperr, const auto &descriptor) {
+ info.mapperInfo, [&err, &desc](const auto &maperr, const auto &descriptor) {
err = maperr2error(maperr);
if (err == C2_OK) {
desc = descriptor;
@@ -421,8 +495,7 @@
mAllocator->allocate(
desc,
1u,
- [&err, &buffer](const auto &maperr, const auto &stride, auto &buffers) {
- (void) stride;
+ [&err, &buffer, &info](const auto &maperr, const auto &stride, auto &buffers) {
err = maperr2error(maperr);
if (err != C2_OK) {
return;
@@ -431,6 +504,7 @@
err = C2_CORRUPTED;
return;
}
+ info.stride = stride;
buffer = std::move(buffers[0]);
});
if (err != C2_OK) {
@@ -442,18 +516,20 @@
info, mMapper, buffer,
C2HandleGralloc::WrapNativeHandle(
buffer.getNativeHandle(),
- info.width, info.height, (uint32_t)info.format, info.usage)));
+ info.mapperInfo.width, info.mapperInfo.height,
+ (uint32_t)info.mapperInfo.format, info.mapperInfo.usage, info.stride)));
return C2_OK;
}
c2_status_t C2AllocatorGralloc::Impl::priorGraphicAllocation(
const C2Handle *handle,
std::shared_ptr<C2GraphicAllocation> *allocation) {
- IMapper::BufferDescriptorInfo info;
- info.layerCount = 1u;
+ BufferDescriptorInfo info;
+ info.mapperInfo.layerCount = 1u;
const C2HandleGralloc *grallocHandle = C2HandleGralloc::Import(
handle,
- &info.width, &info.height, (uint32_t *)&info.format, (uint64_t *)&info.usage);
+ &info.mapperInfo.width, &info.mapperInfo.height,
+ (uint32_t *)&info.mapperInfo.format, (uint64_t *)&info.mapperInfo.usage, &info.stride);
if (grallocHandle == nullptr) {
return C2_BAD_VALUE;
}
@@ -461,7 +537,7 @@
hidl_handle hidlHandle = C2HandleGralloc::UnwrapNativeHandle(grallocHandle);
allocation->reset(new C2AllocationGralloc(info, mMapper, hidlHandle, grallocHandle));
- return C2_OMITTED;
+ return C2_OK;
}
C2AllocatorGralloc::C2AllocatorGralloc() : mImpl(new Impl) {}
diff --git a/media/libstagefright/codec2/vndk/C2Store.cpp b/media/libstagefright/codec2/vndk/C2Store.cpp
index 555e77b..496cbe1 100644
--- a/media/libstagefright/codec2/vndk/C2Store.cpp
+++ b/media/libstagefright/codec2/vndk/C2Store.cpp
@@ -404,6 +404,7 @@
C2PlatformComponentStore::C2PlatformComponentStore() {
// TODO: move this also into a .so so it can be updated
mComponents.emplace("c2.google.avc.decoder", "libstagefright_soft_c2avcdec.so");
+ mComponents.emplace("c2.google.avc.encoder", "libstagefright_soft_c2avcenc.so");
mComponents.emplace("c2.google.aac.decoder", "libstagefright_soft_c2aacdec.so");
mComponents.emplace("c2.google.aac.encoder", "libstagefright_soft_c2aacenc.so");
}
diff --git a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
index 5311747..56fa317 100644
--- a/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/libstagefright/codec2/vndk/include/C2AllocatorGralloc.h
@@ -1,4 +1,3 @@
-
/*
* Copyright (C) 2016 The Android Open Source Project
*
@@ -32,7 +31,17 @@
*
* @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
*/
-native_handle_t*UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
+native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle);
+
+/**
+ * Wrap the gralloc handle and metadata into Codec2 handle recognized by
+ * C2AllocatorGralloc.
+ *
+ * @return a new NON-OWNING C2Handle that must be deleted using native_handle_delete.
+ */
+C2Handle *WrapNativeCodec2GrallocHandle(
+ const native_handle_t *const handle,
+ uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride);
class C2AllocatorGralloc : public C2Allocator {
public:
diff --git a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
index 5ddfc14..b57c2aa 100644
--- a/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
+++ b/media/libstagefright/codecs/aacdec/C2SoftAac.cpp
@@ -375,7 +375,6 @@
work->worklets.front()->output.ordinal = work->input.ordinal;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(nullptr);
return;
}
@@ -602,7 +601,6 @@
auto fillEmptyWork = [](const std::unique_ptr<C2Work> &work) {
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(nullptr);
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
};
diff --git a/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp b/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
index 7bce21d..6f1b325 100644
--- a/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
+++ b/media/libstagefright/codecs/aacenc/C2SoftAacEnc.cpp
@@ -344,8 +344,6 @@
if (nOutputBytes) {
work->worklets.front()->output.buffers.push_back(
createLinearBuffer(block, 0, nOutputBytes));
- } else {
- work->worklets.front()->output.buffers.emplace_back(nullptr);
}
#if 0
diff --git a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
index cc12d3c..4f64f6e 100644
--- a/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
+++ b/media/libstagefright/codecs/avcdec/C2SoftAvcDec.cpp
@@ -211,7 +211,6 @@
}
work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.emplace_back(nullptr);
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
}
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index cefe77c..67a2fdc 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -1,4 +1,48 @@
cc_library_shared {
+ name: "libstagefright_soft_c2avcenc",
+// vendor_available: true,
+// vndk: {
+// enabled: true,
+// },
+
+ static_libs: [ "libavcenc" ],
+ srcs: ["C2SoftAvcEnc.cpp"],
+
+ include_dirs: [
+ "external/libavc/encoder",
+ "external/libavc/common",
+ "frameworks/av/media/libstagefright/include",
+ "frameworks/native/include/media/hardware",
+ ],
+
+ shared_libs: [
+ "libstagefright_codec2",
+ "libstagefright_codec2_vndk",
+ "libstagefright_foundation",
+ "libstagefright_simple_c2component",
+ "libutils",
+ "liblog",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ diag: {
+ cfi: true,
+ },
+ },
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-unused-variable",
+ ],
+ ldflags: ["-Wl,-Bsymbolic"],
+}
+
+cc_library_shared {
name: "libstagefright_soft_avcenc",
vendor_available: true,
vndk: {
diff --git a/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp
new file mode 100644
index 0000000..7c281e3
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.cpp
@@ -0,0 +1,1239 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcEncEnc"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "ih264_typedefs.h"
+#include "ih264e.h"
+#include "ih264e_error.h"
+#include "iv2.h"
+#include "ive2.h"
+#include "C2SoftAvcEnc.h"
+
+namespace android {
+
+#define ive_api_function ih264e_api_function
+
+namespace {
+
+// From external/libavc/encoder/ih264e_bitstream.h
+constexpr uint32_t MIN_STREAM_SIZE = 0x800;
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void ConvertRGBToPlanarYUV(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const C2GraphicView &src) {
+ CHECK((src.width() & 1) == 0);
+ CHECK((src.height() & 1) == 0);
+
+ uint8_t *dstU = dstY + dstStride * dstVStride;
+ uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);
+
+ const C2PlanarLayout &layout = src.layout();
+ const uint8_t *pRed = src.data()[C2PlanarLayout::PLANE_R];
+ const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
+ const uint8_t *pBlue = src.data()[C2PlanarLayout::PLANE_B];
+
+ for (size_t y = 0; y < src.height(); ++y) {
+ for (size_t x = 0; x < src.width(); ++x) {
+ unsigned red = *pRed;
+ unsigned green = *pGreen;
+ unsigned blue = *pBlue;
+
+ // using ITU-R BT.601 conversion matrix
+ unsigned luma =
+ ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+
+ dstY[x] = luma;
+
+ if ((x & 1) == 0 && (y & 1) == 0) {
+ unsigned U =
+ ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+
+ unsigned V =
+ ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+
+ dstU[x >> 1] = U;
+ dstV[x >> 1] = V;
+ }
+ pRed += layout.planes[C2PlanarLayout::PLANE_R].colInc;
+ pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
+ pBlue += layout.planes[C2PlanarLayout::PLANE_B].colInc;
+ }
+
+ if ((y & 1) == 0) {
+ dstU += dstStride >> 1;
+ dstV += dstStride >> 1;
+ }
+
+ pRed -= layout.planes[C2PlanarLayout::PLANE_R].colInc * src.width();
+ pGreen -= layout.planes[C2PlanarLayout::PLANE_G].colInc * src.width();
+ pBlue -= layout.planes[C2PlanarLayout::PLANE_B].colInc * src.width();
+ pRed += layout.planes[C2PlanarLayout::PLANE_R].rowInc;
+ pGreen += layout.planes[C2PlanarLayout::PLANE_G].rowInc;
+ pBlue += layout.planes[C2PlanarLayout::PLANE_B].rowInc;
+
+ dstY += dstStride;
+ }
+}
+
+} // namespace
+
+C2SoftAvcEnc::C2SoftAvcEnc(const char *name, c2_node_id_t id)
+ : SimpleC2Component(
+ SimpleC2Interface::Builder(name, id)
+ .inputFormat(C2FormatVideo)
+ .outputFormat(C2FormatCompressed)
+ .build()),
+ mUpdateFlag(0),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mAVCEncProfile(IV_PROFILE_BASE),
+ mAVCEncLevel(41),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSawOutputEOS(false),
+ mSignalledError(false),
+ mCodecCtx(NULL),
+ mWidth(1080),
+ mHeight(1920),
+ mFramerate(60),
+ mBitrate(20000),
+ // TODO: output buffer size
+ mOutBufferSize(524288) {
+
+ // If dump is enabled, then open create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ initEncParams();
+}
+
+C2SoftAvcEnc::~C2SoftAvcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onInit() {
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::onStop() {
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::onReset() {
+ // TODO: use IVE_CMD_CTL_RESET?
+ releaseEncoder();
+ initEncParams();
+}
+
+void C2SoftAvcEnc::onRelease() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onFlush_sm() {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::initEncParams() {
+ mCodecCtx = NULL;
+ mMemRecords = NULL;
+ mNumMemRecords = DEFAULT_MEM_REC_CNT;
+ mHeaderGenerated = 0;
+ mNumCores = GetCPUCoreCount();
+ mArch = DEFAULT_ARCH;
+ mSliceMode = DEFAULT_SLICE_MODE;
+ mSliceParam = DEFAULT_SLICE_PARAM;
+ mHalfPelEnable = DEFAULT_HPEL;
+ mIInterval = DEFAULT_I_INTERVAL;
+ mIDRInterval = DEFAULT_IDR_INTERVAL;
+ mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
+ mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
+ mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
+ mEncSpeed = DEFAULT_ENC_SPEED;
+ mIntra4x4 = DEFAULT_INTRA4x4;
+ mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
+ mAIRMode = DEFAULT_AIR;
+ mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;
+ mPSNREnable = DEFAULT_PSNR_ENABLE;
+ mReconEnable = DEFAULT_RECON_ENABLE;
+ mEntropyMode = DEFAULT_ENTROPY_MODE;
+ mBframes = DEFAULT_B_FRAMES;
+
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+}
+
+c2_status_t C2SoftAvcEnc::setDimensions() {
+ ive_ctl_set_dimensions_ip_t s_dimensions_ip;
+ ive_ctl_set_dimensions_op_t s_dimensions_op;
+ IV_STATUS_T status;
+
+ s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
+ s_dimensions_ip.u4_ht = mHeight;
+ s_dimensions_ip.u4_wd = mWidth;
+
+ s_dimensions_ip.u4_timestamp_high = -1;
+ s_dimensions_ip.u4_timestamp_low = -1;
+
+ s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
+ s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame dimensions = 0x%x\n",
+ s_dimensions_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setNumCores() {
+ IV_STATUS_T status;
+ ive_ctl_set_num_cores_ip_t s_num_cores_ip;
+ ive_ctl_set_num_cores_op_t s_num_cores_op;
+ s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
+ s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
+ s_num_cores_ip.u4_timestamp_high = -1;
+ s_num_cores_ip.u4_timestamp_low = -1;
+ s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
+
+ s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
+
+ status = ive_api_function(
+ mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set processor params = 0x%x\n",
+ s_num_cores_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+// Pushes the current frame rate (mFramerate) to the codec. Source and
+// target rates are configured identically.
+c2_status_t C2SoftAvcEnc::setFrameRate() {
+    ive_ctl_set_frame_rate_ip_t ctl_ip;
+    ive_ctl_set_frame_rate_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
+    ctl_ip.u4_src_frame_rate = mFramerate;
+    ctl_ip.u4_tgt_frame_rate = mFramerate;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set frame rate = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures intra-prediction estimation: 4x4 intra mode, encoder speed
+// preset and constrained intra prediction.
+c2_status_t C2SoftAvcEnc::setIpeParams() {
+    ive_ctl_set_ipe_params_ip_t ctl_ip;
+    ive_ctl_set_ipe_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
+    ctl_ip.u4_enable_intra_4x4 = mIntra4x4;
+    ctl_ip.u4_enc_speed_preset = mEncSpeed;
+    ctl_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set ipe params = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Pushes the current target bitrate (mBitrate) to the codec; also used at
+// runtime when a bitrate update is requested.
+c2_status_t C2SoftAvcEnc::setBitRate() {
+    ive_ctl_set_bitrate_ip_t ctl_ip;
+    ive_ctl_set_bitrate_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
+    ctl_ip.u4_target_bitrate = mBitrate;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set bit rate = 0x%x\n", ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Forces the coding type of the next frame (e.g. IV_IDR_FRAME for a
+// key-frame request).
+c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
+    ive_ctl_set_frame_type_ip_t ctl_ip;
+    ive_ctl_set_frame_type_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
+    ctl_ip.e_frame_type = e_frame_type;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set frame type = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures the default quantization parameters and QP bounds for I, P
+// and B frames.
+c2_status_t C2SoftAvcEnc::setQp() {
+    ive_ctl_set_qp_ip_t ctl_ip;
+    ive_ctl_set_qp_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
+
+    ctl_ip.u4_i_qp = DEFAULT_I_QP;
+    ctl_ip.u4_i_qp_max = DEFAULT_QP_MAX;
+    ctl_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+
+    ctl_ip.u4_p_qp = DEFAULT_P_QP;
+    ctl_ip.u4_p_qp_max = DEFAULT_QP_MAX;
+    ctl_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+
+    // B frames reuse the P-frame default QP.
+    ctl_ip.u4_b_qp = DEFAULT_P_QP;
+    ctl_ip.u4_b_qp_max = DEFAULT_QP_MAX;
+    ctl_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set qp 0x%x\n", ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Switches the codec between header-generation and picture-encoding mode
+// (IVE_ENC_MODE_HEADER / IVE_ENC_MODE_PICTURE).
+c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
+    ive_ctl_set_enc_mode_ip_t ctl_ip;
+    ive_ctl_set_enc_mode_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
+    ctl_ip.e_enc_mode = e_enc_mode;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set in header encode mode = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures VBV (hypothetical reference decoder) buffering: buffer size 0
+// with a 1000 ms buffer delay, as set up here.
+c2_status_t C2SoftAvcEnc::setVbvParams() {
+    ive_ctl_set_vbv_params_ip_t ctl_ip;
+    ive_ctl_set_vbv_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
+    ctl_ip.u4_vbv_buf_size = 0;
+    ctl_ip.u4_vbv_buffer_delay = 1000;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set VBV params = 0x%x\n", ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures adaptive intra refresh (AIR) mode and refresh period from the
+// current component settings.
+c2_status_t C2SoftAvcEnc::setAirParams() {
+    ive_ctl_set_air_params_ip_t ctl_ip;
+    ive_ctl_set_air_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
+    ctl_ip.e_air_mode = mAIRMode;
+    ctl_ip.u4_air_refresh_period = mAIRRefreshPeriod;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set air params = 0x%x\n", ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures motion-estimation behavior: fast SAD, alt-ref, half/quarter
+// pel refinement, ME speed preset and search ranges.
+c2_status_t C2SoftAvcEnc::setMeParams() {
+    ive_ctl_set_me_params_ip_t ctl_ip;
+    ive_ctl_set_me_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
+
+    ctl_ip.u4_enable_fast_sad = mEnableFastSad;
+    ctl_ip.u4_enable_alt_ref = mEnableAltRef;
+    ctl_ip.u4_enable_hpel = mHalfPelEnable;
+    ctl_ip.u4_enable_qpel = DEFAULT_QPEL;
+    ctl_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
+    ctl_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
+    ctl_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
+
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set me params = 0x%x\n", ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures GOP structure: I-frame interval and IDR-frame interval.
+c2_status_t C2SoftAvcEnc::setGopParams() {
+    ive_ctl_set_gop_params_ip_t ctl_ip;
+    ive_ctl_set_gop_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
+    ctl_ip.u4_i_frm_interval = mIInterval;
+    ctl_ip.u4_idr_frm_interval = mIDRInterval;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set GOP params = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures the AVC profile (DEFAULT_EPROFILE) and entropy coding mode
+// (CAVLC/CABAC selection via mEntropyMode).
+c2_status_t C2SoftAvcEnc::setProfileParams() {
+    ive_ctl_set_profile_params_ip_t ctl_ip;
+    ive_ctl_set_profile_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
+    ctl_ip.e_profile = DEFAULT_EPROFILE;
+    ctl_ip.u4_entropy_coding_mode = mEntropyMode;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to set profile params = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Configures the deblocking filter disable level (mDisableDeblkLevel).
+c2_status_t C2SoftAvcEnc::setDeblockParams() {
+    ive_ctl_set_deblock_params_ip_t ctl_ip;
+    ive_ctl_set_deblock_params_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
+    ctl_ip.u4_disable_deblock_level = mDisableDeblkLevel;
+    ctl_ip.u4_timestamp_high = -1;
+    ctl_ip.u4_timestamp_low = -1;
+
+    IV_STATUS_T ret = ive_api_function(mCodecCtx, &ctl_ip, &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Unable to enable/disable deblock params = 0x%x\n",
+                ctl_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
+// Queries the codec library for its version string and logs it (verbose on
+// success, error on failure). Purely informational; no state is changed.
+void C2SoftAvcEnc::logVersion() {
+    UWORD8 version_buf[512];
+    ive_ctl_getversioninfo_ip_t ctl_ip;
+    ive_ctl_getversioninfo_op_t ctl_op;
+
+    ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
+    ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
+    ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
+    ctl_ip.pu1_version = version_buf;
+    ctl_ip.u4_version_bufsize = sizeof(version_buf);
+
+    IV_STATUS_T ret = ive_api_function(
+            mCodecCtx, (void *) &ctl_ip, (void *) &ctl_op);
+    if (ret != IV_SUCCESS) {
+        ALOGE("Error in getting version: 0x%x", ctl_op.u4_error_code);
+    } else {
+        ALOGV("Ittiam encoder version: %s", (char *)ctl_ip.pu1_version);
+    }
+}
+
+/**
+ * Creates and configures the encoder instance.
+ *
+ * Sequence: derive an AVC level from the frame area, query and allocate the
+ * codec's memory records, initialize the codec instance, then push all
+ * encoding parameters and leave the codec in header-generation mode.
+ *
+ * Must not be called while already started (CHECK enforces this).
+ * Returns C2_OK on success, C2_CORRUPTED on any codec/allocation failure.
+ */
+c2_status_t C2SoftAvcEnc::initEncoder() {
+    IV_STATUS_T status;
+    WORD32 level;
+    uint32_t displaySizeY;
+
+    CHECK(!mStarted);
+
+    c2_status_t errType = C2_OK;  // NOTE(review): set but never used below
+
+    // Pick the minimum AVC level that covers the frame area, then never go
+    // below a level already configured on the component.
+    displaySizeY = mWidth * mHeight;
+    if (displaySizeY > (1920 * 1088)) {
+        level = 50;
+    } else if (displaySizeY > (1280 * 720)) {
+        level = 40;
+    } else if (displaySizeY > (720 * 576)) {
+        level = 31;
+    } else if (displaySizeY > (624 * 320)) {
+        level = 30;
+    } else if (displaySizeY > (352 * 288)) {
+        level = 21;
+    } else if (displaySizeY > (176 * 144)) {
+        level = 20;
+    } else {
+        level = 10;
+    }
+    mAVCEncLevel = MAX(level, mAVCEncLevel);
+
+    mStride = mWidth;
+
+    // TODO
+    mIvVideoColorFormat = IV_YUV_420P;
+
+    ALOGD("Params width %d height %d level %d colorFormat %d", mWidth,
+            mHeight, mAVCEncLevel, mIvVideoColorFormat);
+
+    /* Getting Number of MemRecords */
+    {
+        iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+        iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+        s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
+        s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
+
+        s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+        // Query-only command: called with a null codec handle.
+        status = ive_api_function(0, &s_num_mem_rec_ip, &s_num_mem_rec_op);
+
+        if (status != IV_SUCCESS) {
+            ALOGE("Get number of memory records failed = 0x%x\n",
+                    s_num_mem_rec_op.u4_error_code);
+            return C2_CORRUPTED;
+        }
+
+        mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+    }
+
+    /* Allocate array to hold memory records */
+    // Guards the multiplication below against size_t overflow.
+    if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
+        ALOGE("requested memory size is too big.");
+        return C2_CORRUPTED;
+    }
+    mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
+    if (NULL == mMemRecords) {
+        ALOGE("Unable to allocate memory for hold memory records: Size %zu",
+                mNumMemRecords * sizeof(iv_mem_rec_t));
+        mSignalledError = true;
+        // TODO: notify(error, C2_CORRUPTED, 0, 0);
+        return C2_CORRUPTED;
+    }
+
+    // Pre-initialize every record so the codec can fill in its requirements.
+    {
+        iv_mem_rec_t *ps_mem_rec;
+        ps_mem_rec = mMemRecords;
+        for (size_t i = 0; i < mNumMemRecords; i++) {
+            ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
+            ps_mem_rec->pv_base = NULL;
+            ps_mem_rec->u4_mem_size = 0;
+            ps_mem_rec->u4_mem_alignment = 0;
+            ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
+
+            ps_mem_rec++;
+        }
+    }
+
+    /* Getting MemRecords Attributes */
+    {
+        iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
+        iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+
+        s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
+        s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+
+        s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+        s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
+        s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
+        s_fill_mem_rec_ip.u4_max_wd = mWidth;
+        s_fill_mem_rec_ip.u4_max_ht = mHeight;
+        s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
+        s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
+        s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+
+        status = ive_api_function(0, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+
+        if (status != IV_SUCCESS) {
+            ALOGE("Fill memory records failed = 0x%x\n",
+                    s_fill_mem_rec_op.u4_error_code);
+            mSignalledError = true;
+            // TODO: notify(error, C2_CORRUPTED, 0, 0);
+            return C2_CORRUPTED;
+        }
+    }
+
+    /* Allocating Memory for Mem Records */
+    // NOTE(review): on a partial allocation failure this returns without
+    // freeing the records allocated so far or mMemRecords itself; the leak
+    // persists until releaseEncoder() runs only if mStarted is set, which it
+    // is not yet at this point — consider cleaning up here.
+    {
+        WORD32 total_size;
+        iv_mem_rec_t *ps_mem_rec;
+        total_size = 0;  // NOTE(review): accumulated but never read afterwards
+        ps_mem_rec = mMemRecords;
+
+        for (size_t i = 0; i < mNumMemRecords; i++) {
+            ps_mem_rec->pv_base = ive_aligned_malloc(
+                    ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+            if (ps_mem_rec->pv_base == NULL) {
+                ALOGE("Allocation failure for mem record id %zu size %u\n", i,
+                        ps_mem_rec->u4_mem_size);
+                mSignalledError = true;
+                // TODO: notify(error, C2_CORRUPTED, 0, 0);
+                return C2_CORRUPTED;
+
+            }
+            total_size += ps_mem_rec->u4_mem_size;
+
+            ps_mem_rec++;
+        }
+    }
+
+    /* Codec Instance Creation */
+    {
+        ive_init_ip_t s_init_ip;
+        ive_init_op_t s_init_op;
+
+        // The first memory record holds the codec object itself.
+        mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+        mCodecCtx->u4_size = sizeof(iv_obj_t);
+        mCodecCtx->pv_fxns = (void *)ive_api_function;
+
+        s_init_ip.u4_size = sizeof(ive_init_ip_t);
+        s_init_op.u4_size = sizeof(ive_init_op_t);
+
+        s_init_ip.e_cmd = IV_CMD_INIT;
+        s_init_ip.u4_num_mem_rec = mNumMemRecords;
+        s_init_ip.ps_mem_rec = mMemRecords;
+        s_init_ip.u4_max_wd = mWidth;
+        s_init_ip.u4_max_ht = mHeight;
+        s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        s_init_ip.u4_max_level = mAVCEncLevel;
+        s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+
+        // Reconstruction buffers are needed for either recon output or PSNR
+        // computation.
+        if (mReconEnable || mPSNREnable) {
+            s_init_ip.u4_enable_recon = 1;
+        } else {
+            s_init_ip.u4_enable_recon = 0;
+        }
+        s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+        s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
+        s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+        s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
+        s_init_ip.u4_num_bframes = mBframes;
+        s_init_ip.e_content_type = IV_PROGRESSIVE;
+        s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        s_init_ip.e_slice_mode = mSliceMode;
+        s_init_ip.u4_slice_param = mSliceParam;
+        s_init_ip.e_arch = mArch;
+        s_init_ip.e_soc = DEFAULT_SOC;
+
+        status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+
+        if (status != IV_SUCCESS) {
+            ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+            mSignalledError = true;
+            // TODO: notify(error, C2_CORRUPTED, 0 /* arg2 */, NULL /* data */);
+            return C2_CORRUPTED;
+        }
+    }
+
+    // NOTE(review): the return values of the control calls below are all
+    // ignored; each one logs on failure, but a failed setting is silently
+    // accepted here — consider propagating the first failure.
+
+    /* Get Codec Version */
+    logVersion();
+
+    /* set processor details */
+    setNumCores();
+
+    /* Video control Set Frame dimensions */
+    setDimensions();
+
+    /* Video control Set Frame rates */
+    setFrameRate();
+
+    /* Video control Set IPE Params */
+    setIpeParams();
+
+    /* Video control Set Bitrate */
+    setBitRate();
+
+    /* Video control Set QP */
+    setQp();
+
+    /* Video control Set AIR params */
+    setAirParams();
+
+    /* Video control Set VBV params */
+    setVbvParams();
+
+    /* Video control Set Motion estimation params */
+    setMeParams();
+
+    /* Video control Set GOP params */
+    setGopParams();
+
+    /* Video control Set Deblock params */
+    setDeblockParams();
+
+    /* Video control Set Profile params */
+    setProfileParams();
+
+    /* Video control Set in Encode header mode */
+    setEncMode(IVE_ENC_MODE_HEADER);
+
+    ALOGV("init_codec successfull");
+
+    mSpsPpsHeaderReceived = false;
+    mStarted = true;
+
+    return C2_OK;
+}
+
+/**
+ * Tears down the encoder: retrieves the memory records handed to the codec
+ * in initEncoder(), frees each one plus the record array itself, and clears
+ * the pointers into that memory.
+ *
+ * Safe to call when not started (returns C2_OK immediately).
+ * Returns C2_CORRUPTED if the codec refuses to hand back its records.
+ */
+c2_status_t C2SoftAvcEnc::releaseEncoder() {
+    IV_STATUS_T status = IV_SUCCESS;
+    iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+    iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+    iv_mem_rec_t *ps_mem_rec;
+
+    if (!mStarted) {
+        return C2_OK;
+    }
+
+    s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+    s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+    s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+    s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
+
+    status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
+
+    if (status != IV_SUCCESS) {
+        ALOGE("Unable to retrieve memory records = 0x%x\n",
+                s_retrieve_mem_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+
+    /* Free memory records */
+    ps_mem_rec = mMemRecords;
+    for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
+        ive_aligned_free(ps_mem_rec->pv_base);
+        ps_mem_rec++;
+    }
+
+    free(mMemRecords);
+
+    // Clear ALL pointers into the space being free()d — previously only
+    // mCodecCtx was reset, leaving mMemRecords dangling and exposed to a
+    // use-after-free/double-free on a later init/release cycle.
+    mMemRecords = NULL;
+    mCodecCtx = NULL;
+
+    mStarted = false;
+
+    return C2_OK;
+}
+
+/**
+ * Fills the codec's encode input/output descriptors for one frame.
+ *
+ * @param ps_encode_ip  codec input descriptor to populate
+ * @param ps_encode_op  codec output descriptor to populate
+ * @param input         mapped input graphic view; NULL signals a drain-only
+ *                      call (used for header generation / EOS)
+ * @param base          output bitstream buffer base pointer
+ * @param capacity      output bitstream buffer capacity in bytes
+ * @param timestamp     64-bit timestamp split across the 32-bit high/low
+ *                      codec fields
+ *
+ * RGB/RGBA inputs are converted into a recycled planar YUV scratch buffer;
+ * YUV inputs are passed through directly.
+ * Returns C2_BAD_VALUE for an unrecognized plane layout, else C2_OK.
+ */
+c2_status_t C2SoftAvcEnc::setEncodeArgs(
+        ive_video_encode_ip_t *ps_encode_ip,
+        ive_video_encode_op_t *ps_encode_op,
+        const C2GraphicView *const input,
+        uint8_t *base,
+        uint32_t capacity,
+        uint64_t timestamp) {
+    iv_raw_buf_t *ps_inp_raw_buf;
+
+    ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
+    ps_encode_ip->s_out_buf.pv_buf = base;
+    ps_encode_ip->s_out_buf.u4_bytes = 0;
+    ps_encode_ip->s_out_buf.u4_bufsize = capacity;
+    ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
+    ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
+
+    ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
+    ps_encode_ip->pv_bufs = NULL;
+    ps_encode_ip->pv_mb_info = NULL;
+    ps_encode_ip->pv_pic_info = NULL;
+    ps_encode_ip->u4_mb_info_type = 0;
+    ps_encode_ip->u4_pic_info_type = 0;
+    ps_encode_ip->u4_is_last = 0;
+    ps_encode_ip->u4_timestamp_high = timestamp >> 32;
+    ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+    ps_encode_op->s_out_buf.pv_buf = NULL;
+
+    /* Initialize color formats */
+    memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
+    ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
+    ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
+    // No input frame: only flag EOS (if seen) and leave the raw buffer empty.
+    if (input == nullptr) {
+        if (mSawInputEOS){
+            ps_encode_ip->u4_is_last = 1;
+        }
+        return C2_OK;
+    }
+
+    ALOGV("width = %d, height = %d", input->width(), input->height());
+    const C2PlanarLayout &layout = input->layout();
+    // Ittiam API takes non-const buffers, hence the const_casts.
+    uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+    uint8_t *vPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_RGB:
+            // fall-through
+        case C2PlanarLayout::TYPE_RGBA: {
+            // Convert into an I420 scratch buffer (width*height*3/2 bytes),
+            // recycled via mFreeConversionBuffers / mConversionBuffersInUse.
+            // NOTE(review): the 4:2:0 plane math presumably requires even
+            // width/height — confirm against the component's size limits.
+            size_t yPlaneSize = input->width() * input->height();
+            std::unique_ptr<uint8_t[]> freeBuffer;
+            if (mFreeConversionBuffers.empty()) {
+                freeBuffer.reset(new uint8_t[yPlaneSize * 3 / 2]);
+            } else {
+                freeBuffer.swap(mFreeConversionBuffers.front());
+                mFreeConversionBuffers.pop_front();
+            }
+            yPlane = freeBuffer.get();
+            mConversionBuffersInUse.push_back(std::move(freeBuffer));
+            uPlane = yPlane + yPlaneSize;
+            vPlane = uPlane + yPlaneSize / 4;
+            yStride = input->width();
+            uStride = vStride = input->width() / 2;
+            ConvertRGBToPlanarYUV(yPlane, yStride, input->height(), *input);
+            break;
+        }
+        case C2PlanarLayout::TYPE_YUV:
+            // fall-through
+        case C2PlanarLayout::TYPE_YUVA:
+            // Do nothing
+            break;
+        default:
+            ALOGE("Unrecognized plane type: %d", layout.type);
+            return C2_BAD_VALUE;
+    }
+
+    // Wire the (possibly converted) planes into the codec's raw buffer
+    // descriptor according to the configured input color format.
+    switch (mIvVideoColorFormat) {
+        case IV_YUV_420P:
+        {
+            // input buffer is supposed to be const but Ittiam API wants bare pointer.
+            ps_inp_raw_buf->apv_bufs[0] = yPlane;
+            ps_inp_raw_buf->apv_bufs[1] = uPlane;
+            ps_inp_raw_buf->apv_bufs[2] = vPlane;
+
+            ps_inp_raw_buf->au4_wd[0] = input->width();
+            ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
+            ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+
+            ps_inp_raw_buf->au4_ht[0] = input->height();
+            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+
+            ps_inp_raw_buf->au4_strd[0] = yStride;
+            ps_inp_raw_buf->au4_strd[1] = uStride;
+            ps_inp_raw_buf->au4_strd[2] = vStride;
+            break;
+        }
+
+        case IV_YUV_422ILE:
+        {
+            // TODO
+            // ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+            // ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
+            // ps_inp_raw_buf->au4_ht[0] = mHeight;
+            // ps_inp_raw_buf->au4_strd[0] = mStride * 2;
+            break;
+        }
+
+        case IV_YUV_420SP_UV:
+        case IV_YUV_420SP_VU:
+        default:
+        {
+            // Semi-planar: single interleaved chroma plane alongside luma.
+            ps_inp_raw_buf->apv_bufs[0] = yPlane;
+            ps_inp_raw_buf->apv_bufs[1] = uPlane;
+
+            ps_inp_raw_buf->au4_wd[0] = input->width();
+            ps_inp_raw_buf->au4_wd[1] = input->width();
+
+            ps_inp_raw_buf->au4_ht[0] = input->height();
+            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+
+            ps_inp_raw_buf->au4_strd[0] = yStride;
+            ps_inp_raw_buf->au4_strd[1] = uStride;
+            break;
+        }
+    }
+    return C2_OK;
+}
+
+/**
+ * Encodes one work item.
+ *
+ * Lazily initializes the encoder on first use; on the first call also runs
+ * the codec in header mode and attaches the SPS/PPS as codec-specific data
+ * (config update). Then encodes the input graphic buffer into a linear
+ * block fetched from the pool, growing the output buffer and retrying on
+ * bitstream overflow, and fills the front worklet with the result.
+ */
+void C2SoftAvcEnc::process(
+        const std::unique_ptr<C2Work> &work,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    work->workletsProcessed = 0u;
+
+    IV_STATUS_T status;
+    WORD32 timeDelay, timeTaken;
+    uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+    // Initialize encoder if not already initialized
+    if (mCodecCtx == NULL) {
+        if (C2_OK != initEncoder()) {
+            ALOGE("Failed to initialize encoder");
+            // TODO: notify(error, C2_CORRUPTED, 0 /* arg2 */, NULL /* data */);
+            return;
+        }
+    }
+    if (mSignalledError) {
+        return;
+    }
+
+    // while (!mSawOutputEOS && !outQueue.empty()) {
+    c2_status_t error;
+    ive_video_encode_ip_t s_encode_ip;
+    ive_video_encode_op_t s_encode_op;
+
+    // First call: generate SPS/PPS into a stack buffer and ship it to the
+    // framework as a CSD config update on this work item.
+    if (!mSpsPpsHeaderReceived) {
+        constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
+        uint8_t header[kHeaderLength];
+        error = setEncodeArgs(
+                &s_encode_ip, &s_encode_op, NULL, header, kHeaderLength, timestamp);
+        if (error != C2_OK) {
+            mSignalledError = true;
+            // TODO: notify(error, C2_CORRUPTED, 0, 0);
+            return;
+        }
+        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+        if (IV_SUCCESS != status) {
+            ALOGE("Encode header failed = 0x%x\n",
+                    s_encode_op.u4_error_code);
+            return;
+        } else {
+            ALOGV("Bytes Generated in header %d\n",
+                    s_encode_op.s_out_buf.u4_bytes);
+        }
+
+        mSpsPpsHeaderReceived = true;
+
+        std::unique_ptr<C2StreamCsdInfo::output> csd =
+            C2StreamCsdInfo::output::alloc_unique(s_encode_op.s_out_buf.u4_bytes, 0u);
+        memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+
+        DUMP_TO_FILE(
+                mOutFile, csd->m.value, csd->flexCount());
+    }
+
+    // Apply any pending runtime reconfiguration before encoding this frame.
+    if (mUpdateFlag) {
+        if (mUpdateFlag & kUpdateBitrate) {
+            setBitRate();
+        }
+        if (mUpdateFlag & kRequestKeyFrame) {
+            setFrameType(IV_IDR_FRAME);
+        }
+        if (mUpdateFlag & kUpdateAIRMode) {
+            setAirParams();
+            // notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+            //         OMX_IndexConfigAndroidIntraRefresh, NULL);
+        }
+        mUpdateFlag = 0;
+    }
+
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        mSawInputEOS = true;
+    }
+
+    /* In normal mode, store inputBufferInfo and this will be returned
+       when encoder consumes this input */
+    // if (!mInputDataIsMeta && (inputBufferInfo != NULL)) {
+    //     for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
+    //         if (NULL == mInputBufferInfo[i]) {
+    //             mInputBufferInfo[i] = inputBufferInfo;
+    //             break;
+    //         }
+    //     }
+    // }
+    // NOTE(review): work->input.buffers is indexed without checking for an
+    // empty vector; an EOS-only work item may carry no buffer — confirm the
+    // framework's contract before relying on buffers[0] here.
+    const C2GraphicView view =
+        work->input.buffers[0]->data().graphicBlocks().front().map().get();
+    if (view.error() != C2_OK) {
+        ALOGE("graphic view map err = %d", view.error());
+        return;
+    }
+
+    std::shared_ptr<C2LinearBlock> block;
+
+    // Encode loop: on bitstream-buffer overflow, double the output size and
+    // retry with a fresh linear block.
+    do {
+        C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+        // TODO: error handling, proper usage, etc.
+        c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+        if (err != C2_OK) {
+            ALOGE("fetch linear block err = %d", err);
+            return;
+        }
+        C2WriteView wView = block->map().get();
+        if (wView.error() != C2_OK) {
+            ALOGE("write view map err = %d", wView.error());
+            return;
+        }
+
+        error = setEncodeArgs(
+                &s_encode_ip, &s_encode_op, &view, wView.base(), wView.capacity(), timestamp);
+        if (error != C2_OK) {
+            mSignalledError = true;
+            ALOGE("setEncodeArgs failed : %d", error);
+            // TODO: notify(error, C2_CORRUPTED, 0, 0);
+            return;
+        }
+
+        // DUMP_TO_FILE(
+        //         mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
+        //         (mHeight * mStride * 3 / 2));
+
+        GETTIME(&mTimeStart, NULL);
+        /* Compute time elapsed between end of previous decode()
+         * to start of current decode() */
+        TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+        if (IV_SUCCESS != status) {
+            if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+                // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
+                mOutBufferSize *= 2;
+                continue;
+            }
+            ALOGE("Encode Frame failed = 0x%x\n",
+                    s_encode_op.u4_error_code);
+            mSignalledError = true;
+            // TODO: notify(error, C2_CORRUPTED, 0, 0);
+            return;
+        }
+    } while (IV_SUCCESS != status);
+
+    // Hold input buffer reference (keyed by the luma pointer the codec saw)
+    // until the codec reports it consumed.
+    mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = work->input.buffers[0];
+
+    GETTIME(&mTimeEnd, NULL);
+    /* Compute time taken for decode() */
+    TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+    ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+            s_encode_op.s_out_buf.u4_bytes);
+
+    void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+    /* If encoder frees up an input buffer, mark it as free */
+    if (freed != NULL) {
+        if (mBuffers.count(freed) == 0u) {
+            // TODO: error
+            return;
+        }
+        // Release input buffer reference
+        mBuffers.erase(freed);
+
+        // If the freed pointer was one of our RGB-conversion scratch
+        // buffers, recycle it to the free list.
+        auto it = std::find_if(
+                mConversionBuffersInUse.begin(), mConversionBuffersInUse.end(),
+                [freed](const auto &elem) { return elem.get() == freed; });
+        if (it != mConversionBuffersInUse.end()) {
+            mFreeConversionBuffers.push_back(std::move(*it));
+            mConversionBuffersInUse.erase(it);
+        }
+    }
+
+    // Populate the output worklet: flags/ordinal from the input, timestamp
+    // from the codec, and the encoded bytes as a linear buffer.
+    work->worklets.front()->output.flags = work->input.flags;
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->worklets.front()->output.ordinal.timestamp =
+        ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
+    work->worklets.front()->output.buffers.clear();
+    std::shared_ptr<C2Buffer> buffer =
+        createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
+    work->worklets.front()->output.buffers.push_back(buffer);
+    work->workletsProcessed = 1u;
+
+    // Tag IDR frames so the framework knows this buffer is a key frame.
+    if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
+        buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+                0u /* stream id */, C2PictureTypeKeyFrame));
+    }
+
+    if (s_encode_op.u4_is_last) {
+        // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+        mSawOutputEOS = true;
+    } else {
+        // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+    }
+}
+
+// Drain is currently a no-op: the encoder is not explicitly flushed here
+// (IVE_CMD_CTL_FLUSH is a TODO) and both arguments are ignored.
+c2_status_t C2SoftAvcEnc::drain(
+        uint32_t drainMode,
+        const std::shared_ptr<C2BlockPool> &pool) {
+    // TODO: use IVE_CMD_CTL_FLUSH?
+    (void)drainMode;
+    (void)pool;
+    return C2_OK;
+}
+
+
+// Factory exported to the Codec2 framework for creating the software AVC
+// encoder component ("avcenc") and its interface.
+class C2SoftAvcEncFactory : public C2ComponentFactory {
+public:
+    // Creates a full encoder component; lifetime is managed through the
+    // framework-supplied deleter.
+    virtual c2_status_t createComponent(
+            c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+            std::function<void(::android::C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(new C2SoftAvcEnc("avcenc", id), deleter);
+        return C2_OK;
+    }
+
+    // Creates only the component interface, for capability queries without
+    // instantiating the encoder itself.
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(::android::C2ComponentInterface*)> deleter) override {
+        *interface =
+            SimpleC2Interface::Builder("avcenc", id, deleter)
+            .inputFormat(C2FormatVideo)
+            .outputFormat(C2FormatCompressed)
+            .build();
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAvcEncFactory() override = default;
+};
+
+} // namespace android
+
+// Entry point the Codec2 framework dlopens to obtain this component
+// factory; ownership passes to the caller (released via DestroyCodec2Factory).
+extern "C" ::android::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAvcEncFactory();
+}
+
+// Matching destroy hook for CreateCodec2Factory(); deletes the factory.
+extern "C" void DestroyCodec2Factory(::android::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h
new file mode 100644
index 0000000..5b2a514
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/C2SoftAvcEnc.h
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef C2_SOFT_AVC_ENC_H__
+#define C2_SOFT_AVC_ENC_H__
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Vector.h>
+
+#include <SimpleC2Component.h>
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+#define LEN_STATUS_BUFFER (10 * 1024)
+#define MAX_VBV_BUFF_SIZE (120 * 16384)
+#define MAX_NUM_IO_BUFS 3
+
+#define DEFAULT_MAX_REF_FRM 2
+#define DEFAULT_MAX_REORDER_FRM 0
+#define DEFAULT_QP_MIN 10
+#define DEFAULT_QP_MAX 40
+#define DEFAULT_MAX_BITRATE 20000000
+#define DEFAULT_MAX_SRCH_RANGE_X 256
+#define DEFAULT_MAX_SRCH_RANGE_Y 256
+#define DEFAULT_MAX_FRAMERATE 120000
+#define DEFAULT_NUM_CORES 1
+#define DEFAULT_NUM_CORES_PRE_ENC 0
+#define DEFAULT_FPS 30
+#define DEFAULT_ENC_SPEED IVE_NORMAL
+
+#define DEFAULT_MEM_REC_CNT 0
+#define DEFAULT_RECON_ENABLE 0
+#define DEFAULT_CHKSUM_ENABLE 0
+#define DEFAULT_START_FRM 0
+#define DEFAULT_NUM_FRMS 0xFFFFFFFF
+#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU
+#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P
+#define DEFAULT_LOOPBACK 0
+#define DEFAULT_SRC_FRAME_RATE 30
+#define DEFAULT_TGT_FRAME_RATE 30
+#define DEFAULT_MAX_WD 1920
+#define DEFAULT_MAX_HT 1920
+#define DEFAULT_MAX_LEVEL 41
+#define DEFAULT_STRIDE 0
+#define DEFAULT_WD 1280
+#define DEFAULT_HT 720
+#define DEFAULT_PSNR_ENABLE 0
+#define DEFAULT_ME_SPEED 100
+#define DEFAULT_ENABLE_FAST_SAD 0
+#define DEFAULT_ENABLE_ALT_REF 0
+#define DEFAULT_RC_MODE IVE_RC_STORAGE
+#define DEFAULT_BITRATE 6000000
+#define DEFAULT_I_QP 22
+#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_P_QP 28
+#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_B_QP 22
+#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_AIR IVE_AIR_MODE_NONE
+#define DEFAULT_AIR_REFRESH_PERIOD 30
+#define DEFAULT_SRCH_RNG_X 64
+#define DEFAULT_SRCH_RNG_Y 48
+#define DEFAULT_I_INTERVAL 30
+#define DEFAULT_IDR_INTERVAL 1000
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_DISABLE_DEBLK_LEVEL 0
+#define DEFAULT_HPEL 1
+#define DEFAULT_QPEL 1
+#define DEFAULT_I4 1
+#define DEFAULT_EPROFILE IV_PROFILE_BASE
+#define DEFAULT_ENTROPY_MODE 0
+#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE
+#define DEFAULT_SLICE_PARAM 256
+#define DEFAULT_ARCH ARCH_ARM_A9Q
+#define DEFAULT_SOC SOC_GENERIC
+#define DEFAULT_INTRA4x4 0
+#define STRLENGTH 500
+#define DEFAULT_CONSTRAINED_INTRA 0
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
+#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ive_aligned_free(buf) free(buf)
+
+struct C2SoftAvcEnc : public SimpleC2Component {
+ C2SoftAvcEnc(const char *name, c2_node_id_t id);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+protected:
+ virtual ~C2SoftAvcEnc();
+
+private:
+ enum {
+ kUpdateBitrate = 1 << 0,
+ kRequestKeyFrame = 1 << 1,
+ kUpdateAIRMode = 1 << 2,
+ };
+
+ // OMX input buffer's timestamp and flags
+ typedef struct {
+ int64_t mTimeUs;
+ int32_t mFlags;
+ } InputBufferInfo;
+
+ int32_t mStride;
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ int mUpdateFlag;
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+
+ IV_PROFILE_T mAVCEncProfile;
+ WORD32 mAVCEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+
+ bool mSawInputEOS;
+ bool mSawOutputEOS;
+ bool mSignalledError;
+ bool mIntra4x4;
+ bool mEnableFastSad;
+ bool mEnableAltRef;
+ bool mReconEnable;
+ bool mPSNREnable;
+ bool mEntropyMode;
+ bool mConstrainedIntraFlag;
+ IVE_SPEED_CONFIG mEncSpeed;
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by codec
+ size_t mNumCores; // Number of cores used by the codec
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mFramerate;
+ uint32_t mBitrate;
+ uint32_t mOutBufferSize;
+ UWORD32 mHeaderGenerated;
+ UWORD32 mBframes;
+ IV_ARCH_T mArch;
+ IVE_SLICE_MODE_T mSliceMode;
+ UWORD32 mSliceParam;
+ bool mHalfPelEnable;
+ UWORD32 mIInterval;
+ UWORD32 mIDRInterval;
+ UWORD32 mDisableDeblkLevel;
+ IVE_AIR_MODE_T mAIRMode;
+ UWORD32 mAIRRefreshPeriod;
+ std::map<const void *, std::shared_ptr<C2Buffer>> mBuffers;
+ std::list<std::unique_ptr<uint8_t[]>> mFreeConversionBuffers;
+ std::list<std::unique_ptr<uint8_t[]>> mConversionBuffersInUse;
+
+ void initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+
+ c2_status_t setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type);
+ c2_status_t setQp();
+ c2_status_t setEncMode(IVE_ENC_MODE_T e_enc_mode);
+ c2_status_t setDimensions();
+ c2_status_t setNumCores();
+ c2_status_t setFrameRate();
+ c2_status_t setIpeParams();
+ c2_status_t setBitRate();
+ c2_status_t setAirParams();
+ c2_status_t setMeParams();
+ c2_status_t setGopParams();
+ c2_status_t setProfileParams();
+ c2_status_t setDeblockParams();
+ c2_status_t setVbvParams();
+ void logVersion();
+ c2_status_t setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp);
+
+ DISALLOW_EVIL_CONSTRUCTORS(C2SoftAvcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avce_input"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output"
+#define OUTPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ if (fp != NULL) \
+ fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // C2_SOFT_AVC_ENC_H__
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
index dc4125f..a434f81 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
@@ -22,8 +22,20 @@
namespace android {
-#define FOURCC(c1, c2, c3, c4) \
- ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4))
+constexpr int FOURCC(unsigned char c1, unsigned char c2, unsigned char c3, unsigned char c4) {
+ return ((c1) << 24 | (c2) << 16 | (c3) << 8 | (c4));
+}
+
+template <size_t N>
+constexpr int32_t FOURCC(const char (&s) [N]) {
+ static_assert(N == 5, "fourcc: wrong length");
+ return
+ (unsigned char) s[0] << 24 |
+ (unsigned char) s[1] << 16 |
+ (unsigned char) s[2] << 8 |
+ (unsigned char) s[3] << 0;
+}
+
uint16_t U16_AT(const uint8_t *ptr);
uint32_t U32_AT(const uint8_t *ptr);
diff --git a/media/libstagefright/include/CCodecBufferChannel.h b/media/libstagefright/include/CCodecBufferChannel.h
index c5062d6..57a1374 100644
--- a/media/libstagefright/include/CCodecBufferChannel.h
+++ b/media/libstagefright/include/CCodecBufferChannel.h
@@ -27,6 +27,7 @@
#include <C2Component.h>
#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/gbs/GraphicBufferSource.h>
#include <media/stagefright/CodecBase.h>
#include <media/ICrypto.h>
@@ -35,134 +36,9 @@
/**
* BufferChannelBase implementation for CCodec.
*/
-class CCodecBufferChannel : public BufferChannelBase {
+class CCodecBufferChannel
+ : public BufferChannelBase, public std::enable_shared_from_this<CCodecBufferChannel> {
public:
- /**
- * Base class for representation of buffers at one port.
- */
- class Buffers {
- public:
- Buffers() = default;
- virtual ~Buffers() = default;
-
- /**
- * Set format for MediaCodec-facing buffers.
- */
- inline void setFormat(const sp<AMessage> &format) { mFormat = format; }
-
- /**
- * Returns true if the buffers are operating under array mode.
- */
- virtual bool isArrayMode() { return false; }
-
- /**
- * Fills the vector with MediaCodecBuffer's if in array mode; otherwise,
- * no-op.
- */
- virtual void getArray(Vector<sp<MediaCodecBuffer>> *) {}
-
- protected:
- // Format to be used for creating MediaCodec-facing buffers.
- sp<AMessage> mFormat;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(Buffers);
- };
-
- class InputBuffers : public Buffers {
- public:
- using Buffers::Buffers;
- virtual ~InputBuffers() = default;
-
- /**
- * Set a block pool to obtain input memory blocks.
- */
- inline void setPool(const std::shared_ptr<C2BlockPool> &pool) { mPool = pool; }
-
- /**
- * Get a new MediaCodecBuffer for input and its corresponding index.
- * Returns false if no new buffer can be obtained at the moment.
- */
- virtual bool requestNewBuffer(size_t *index, sp<MediaCodecBuffer> *buffer) = 0;
-
- /**
- * Release the buffer obtained from requestNewBuffer() and get the
- * associated C2Buffer object back. Returns empty shared_ptr if the
- * buffer is not on file.
- */
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
-
- /**
- * Flush internal state. After this call, no index or buffer previously
- * returned from requestNewBuffer() is valid.
- */
- virtual void flush() = 0;
-
- /**
- * Return array-backed version of input buffers. The returned object
- * shall retain the internal state so that it will honor index and
- * buffer from previous calls of requestNewBuffer().
- */
- virtual std::unique_ptr<InputBuffers> toArrayMode() = 0;
-
- protected:
- // Pool to obtain blocks for input buffers.
- std::shared_ptr<C2BlockPool> mPool;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(InputBuffers);
- };
-
- class OutputBuffers : public Buffers {
- public:
- using Buffers::Buffers;
- virtual ~OutputBuffers() = default;
-
- /**
- * Register output C2Buffer from the component and obtain corresponding
- * index and MediaCodecBuffer object. Returns false if registration
- * fails.
- */
- virtual bool registerBuffer(
- const std::shared_ptr<C2Buffer> &buffer,
- size_t *index,
- sp<MediaCodecBuffer> *codecBuffer) = 0;
-
- /**
- * Register codec specific data as a buffer to be consistent with
- * MediaCodec behavior.
- */
- virtual bool registerCsd(
- const C2StreamCsdInfo::output * /* csd */,
- size_t * /* index */,
- sp<MediaCodecBuffer> * /* codecBuffer */) {
- return false;
- }
-
- /**
- * Release the buffer obtained from registerBuffer() and get the
- * associated C2Buffer object back. Returns empty shared_ptr if the
- * buffer is not on file.
- */
- virtual std::shared_ptr<C2Buffer> releaseBuffer(const sp<MediaCodecBuffer> &buffer) = 0;
-
- /**
- * Flush internal state. After this call, no index or buffer previously
- * returned from registerBuffer() is valid.
- */
- virtual void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) = 0;
-
- /**
- * Return array-backed version of output buffers. The returned object
- * shall retain the internal state so that it will honor index and
- * buffer from previous calls of registerBuffer().
- */
- virtual std::unique_ptr<OutputBuffers> toArrayMode() = 0;
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(OutputBuffers);
- };
-
CCodecBufferChannel(const std::function<void(status_t, enum ActionCode)> &onError);
virtual ~CCodecBufferChannel();
@@ -186,23 +62,21 @@
// Methods below are interface for CCodec to use.
+ /**
+ * Set the component object for buffer processing.
+ */
void setComponent(const std::shared_ptr<C2Component> &component);
+
+ /**
+ * Set output graphic surface for rendering.
+ */
status_t setSurface(const sp<Surface> &surface);
/**
- * Set C2BlockPool for input buffers.
- *
- * TODO: start timestamp?
+ * Set GraphicBufferSource object from which the component extracts input
+ * buffers.
*/
- void setInputBufferAllocator(const sp<C2BlockPool> &inAlloc);
-
- /**
- * Set C2BlockPool for output buffers. This object shall never use the
- * allocator itself; it's just passed
- *
- * TODO: start timestamp?
- */
- void setOutputBufferAllocator(const sp<C2BlockPool> &outAlloc);
+ status_t setGraphicBufferSource(const sp<GraphicBufferSource> &source);
/**
* Start queueing buffers to the component. This object should never queue
@@ -219,11 +93,17 @@
void flush(const std::list<std::unique_ptr<C2Work>> &flushedWork);
/**
- * Notify MediaCodec about work done.
+ * Notify input client about work done.
*
- * @param workItems finished work items.
+ * @param workItems finished work item.
*/
- void onWorkDone(std::vector<std::unique_ptr<C2Work>> workItems);
+ void onWorkDone(const std::unique_ptr<C2Work> &work);
+
+ // Internal classes
+ class Buffers;
+ class InputBuffers;
+ class OutputBuffers;
+ class InputBufferClient;
private:
class QueueGuard;
@@ -276,12 +156,17 @@
bool mRunning;
};
+ class C2ComponentWrapper;
+
+ void feedInputBufferIfAvailable();
+
QueueSync mSync;
sp<MemoryDealer> mDealer;
sp<IMemory> mDecryptDestination;
int32_t mHeapSeqNum;
std::shared_ptr<C2Component> mComponent;
+ std::shared_ptr<InputBufferClient> mInputClient;
std::function<void(status_t, enum ActionCode)> mOnError;
std::shared_ptr<C2BlockPool> mInputAllocator;
QueueSync mQueueSync;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 3196b10..fa22003 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -495,7 +495,7 @@
status_t verifySupportForProfileAndLevel(int32_t profile, int32_t level);
status_t configureBitrate(
- int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode);
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode, int32_t bitrate, int32_t quality = 0);
void configureEncoderLatency(const sp<AMessage> &msg);
status_t setupErrorCorrectionParameters();
diff --git a/media/libstagefright/include/media/stagefright/CCodec.h b/media/libstagefright/include/media/stagefright/CCodec.h
index 3e24bbe..c689761 100644
--- a/media/libstagefright/include/media/stagefright/CCodec.h
+++ b/media/libstagefright/include/media/stagefright/CCodec.h
@@ -24,6 +24,7 @@
#include <android/native_window.h>
#include <media/hardware/MetadataBufferType.h>
#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/gbs/GraphicBufferSource.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <media/stagefright/MediaDefs.h>
@@ -58,6 +59,7 @@
virtual void signalRequestIDRFrame() override;
void initiateReleaseIfStuck();
+ void onWorkDone(std::vector<std::unique_ptr<C2Work>> &workItems);
protected:
virtual ~CCodec();
@@ -77,6 +79,10 @@
void flush();
void release(bool sendCallback);
+ void createInputSurface();
+ void setInputSurface(const sp<PersistentSurface> &surface);
+ status_t setupInputSurface(const sp<GraphicBufferSource> &source);
+
void setDeadline(const TimePoint &deadline);
enum {
@@ -86,6 +92,9 @@
kWhatFlush,
kWhatStop,
kWhatRelease,
+ kWhatCreateInputSurface,
+ kWhatSetInputSurface,
+ kWhatWorkDone,
};
enum {
@@ -104,14 +113,17 @@
struct State {
inline State() : mState(RELEASED) {}
+ inline int get() const { return mState; }
+ inline void set(int newState) { mState = newState; }
+ std::shared_ptr<C2Component> comp;
+ private:
int mState;
- std::shared_ptr<C2Component> mComp;
};
struct Formats {
- sp<AMessage> mInputFormat;
- sp<AMessage> mOutputFormat;
+ sp<AMessage> inputFormat;
+ sp<AMessage> outputFormat;
};
Mutexed<State> mState;
@@ -119,6 +131,7 @@
std::shared_ptr<C2Component::Listener> mListener;
Mutexed<TimePoint> mDeadline;
Mutexed<Formats> mFormats;
+ Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
DISALLOW_EVIL_CONSTRUCTORS(CCodec);
};
diff --git a/packages/MediaComponents/src/com/android/media/MediaBrowser2Impl.java b/packages/MediaComponents/src/com/android/media/MediaBrowser2Impl.java
index 59d8366..511eed1 100644
--- a/packages/MediaComponents/src/com/android/media/MediaBrowser2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaBrowser2Impl.java
@@ -21,7 +21,7 @@
import android.media.MediaBrowser2;
import android.media.MediaBrowser2.BrowserCallback;
import android.media.MediaSession2.CommandButton;
-import android.media.SessionToken;
+import android.media.SessionToken2;
import android.media.update.MediaBrowser2Provider;
import android.os.Bundle;
import android.os.RemoteException;
@@ -37,9 +37,9 @@
private final MediaBrowser2 mInstance;
private final MediaBrowser2.BrowserCallback mCallback;
- public MediaBrowser2Impl(MediaBrowser2 instance, Context context, SessionToken token,
- BrowserCallback callback, Executor executor) {
- super(instance, context, token, callback, executor);
+ public MediaBrowser2Impl(Context context, MediaBrowser2 instance, SessionToken2 token,
+ Executor executor, BrowserCallback callback) {
+ super(context, instance, token, executor, callback);
mInstance = instance;
mCallback = callback;
}
diff --git a/packages/MediaComponents/src/com/android/media/MediaController2Impl.java b/packages/MediaComponents/src/com/android/media/MediaController2Impl.java
index ac3f311..43f8473 100644
--- a/packages/MediaComponents/src/com/android/media/MediaController2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaController2Impl.java
@@ -31,17 +31,14 @@
import android.media.MediaSession2.CommandGroup;
import android.media.MediaController2;
import android.media.MediaController2.ControllerCallback;
-import android.media.MediaPlayerBase;
-import android.media.MediaSession2.PlaylistParam;
+import android.media.MediaSession2.PlaylistParams;
import android.media.MediaSessionService2;
import android.media.PlaybackState2;
import android.media.Rating2;
-import android.media.SessionToken;
-import android.media.session.PlaybackState;
+import android.media.SessionToken2;
import android.media.update.MediaController2Provider;
import android.net.Uri;
import android.os.Bundle;
-import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.ResultReceiver;
@@ -70,7 +67,7 @@
private final Context mContext;
private final MediaSession2CallbackStub mSessionCallbackStub;
- private final SessionToken mToken;
+ private final SessionToken2 mToken;
private final ControllerCallback mCallback;
private final Executor mCallbackExecutor;
private final IBinder.DeathRecipient mDeathRecipient;
@@ -92,8 +89,8 @@
// TODO(jaewan): Require session activeness changed listener, because controller can be
// available when the session's player is null.
- public MediaController2Impl(MediaController2 instance, Context context, SessionToken token,
- ControllerCallback callback, Executor executor) {
+ public MediaController2Impl(Context context, MediaController2 instance, SessionToken2 token,
+ Executor executor, ControllerCallback callback) {
mInstance = instance;
if (context == null) {
@@ -213,7 +210,7 @@
}
@Override
- public SessionToken getSessionToken_impl() {
+ public SessionToken2 getSessionToken_impl() {
return mToken;
}
@@ -384,7 +381,7 @@
}
@Override
- public PlaylistParam getPlaylistParam_impl() {
+ public PlaylistParams getPlaylistParam_impl() {
// TODO(jaewan): Implement
return null;
}
@@ -405,7 +402,7 @@
}
}
- private void pushPlaybackStateChanges(final PlaybackState state) {
+ private void pushPlaybackStateChanges(final PlaybackState2 state) {
synchronized (mLock) {
for (int i = 0; i < mPlaybackListeners.size(); i++) {
mPlaybackListeners.get(i).postPlaybackChange(state);
@@ -500,9 +497,9 @@
}
@Override
- public void onPlaybackStateChanged(PlaybackState state) throws RuntimeException {
+ public void onPlaybackStateChanged(Bundle state) throws RuntimeException {
final MediaController2Impl controller = getController();
- controller.pushPlaybackStateChanges(state);
+ controller.pushPlaybackStateChanges(PlaybackState2.fromBundle(state));
}
@Override
diff --git a/packages/MediaComponents/src/com/android/media/MediaLibraryService2Impl.java b/packages/MediaComponents/src/com/android/media/MediaLibraryService2Impl.java
index b177dda..bbb3411 100644
--- a/packages/MediaComponents/src/com/android/media/MediaLibraryService2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaLibraryService2Impl.java
@@ -31,6 +31,8 @@
import android.media.update.MediaLibraryService2Provider;
import android.os.Bundle;
+import java.util.concurrent.Executor;
+
public class MediaLibraryService2Impl extends MediaSessionService2Impl implements
MediaLibraryService2Provider {
private final MediaSessionService2 mInstance;
@@ -65,12 +67,12 @@
private final MediaLibrarySession mInstance;
private final MediaLibrarySessionCallback mCallback;
- public MediaLibrarySessionImpl(MediaLibrarySession instance, Context context,
- MediaPlayerBase player, String id,
- MediaLibrarySessionCallback callback, VolumeProvider volumeProvider, int ratingType,
- PendingIntent sessionActivity) {
- super(instance, context, player, id, callback, volumeProvider, ratingType,
- sessionActivity);
+ public MediaLibrarySessionImpl(Context context, MediaLibrarySession instance,
+ MediaPlayerBase player, String id, VolumeProvider volumeProvider, int ratingType,
+ PendingIntent sessionActivity, Executor callbackExecutor,
+ MediaLibrarySessionCallback callback) {
+ super(context, instance, player, id, volumeProvider, ratingType, sessionActivity,
+ callbackExecutor, callback);
mInstance = instance;
mCallback = callback;
}
diff --git a/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java b/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
index 2e71641..22a3187 100644
--- a/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
@@ -31,12 +31,12 @@
import android.media.MediaSession2.CommandButton;
import android.media.MediaSession2.CommandGroup;
import android.media.MediaSession2.ControllerInfo;
-import android.media.MediaSession2.PlaylistParam;
+import android.media.MediaSession2.PlaylistParams;
import android.media.MediaSession2.SessionCallback;
-import android.media.SessionToken;
+import android.media.PlaybackState2;
+import android.media.SessionToken2;
import android.media.VolumeProvider;
import android.media.session.MediaSessionManager;
-import android.media.session.PlaybackState;
import android.media.update.MediaSession2Provider;
import android.os.Bundle;
import android.os.Handler;
@@ -47,6 +47,7 @@
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.Executor;
public class MediaSession2Impl implements MediaSession2Provider {
private static final String TAG = "MediaSession2";
@@ -57,8 +58,9 @@
private final Context mContext;
private final String mId;
private final Handler mHandler;
+ private final Executor mCallbackExecutor;
private final MediaSession2Stub mSessionStub;
- private final SessionToken mSessionToken;
+ private final SessionToken2 mSessionToken;
private MediaPlayerBase mPlayer;
@@ -78,9 +80,9 @@
* @param ratingType
* @param sessionActivity
*/
- public MediaSession2Impl(MediaSession2 instance, Context context, MediaPlayerBase player,
- String id, SessionCallback callback, VolumeProvider volumeProvider, int ratingType,
- PendingIntent sessionActivity) {
+ public MediaSession2Impl(Context context, MediaSession2 instance, MediaPlayerBase player,
+ String id, VolumeProvider volumeProvider, int ratingType, PendingIntent sessionActivity,
+ Executor callbackExecutor, SessionCallback callback) {
mInstance = instance;
// TODO(jaewan): Keep other params.
@@ -89,6 +91,7 @@
mContext = context;
mId = id;
mHandler = new Handler(Looper.myLooper());
+ mCallbackExecutor = callbackExecutor;
mSessionStub = new MediaSession2Stub(this, callback);
// Ask server to create session token for following reasons.
// 1. Make session ID unique per package.
@@ -128,13 +131,14 @@
// Player didn't changed. No-op.
return;
}
- mHandler.removeCallbacksAndMessages(null);
+ // TODO(jaewan): Find equivalent for the executor
+ //mHandler.removeCallbacksAndMessages(null);
if (mPlayer != null && mListener != null) {
// This might not work for a poorly implemented player.
mPlayer.removePlaybackListener(mListener);
}
mListener = new MyPlaybackListener(this, player);
- player.addPlaybackListener(mListener, mHandler);
+ player.addPlaybackListener(mCallbackExecutor, mListener);
notifyPlaybackStateChanged(player.getPlaybackState());
mPlayer = player;
}
@@ -159,7 +163,7 @@
// TODO(jaewan): Change this to @NonNull
@Override
- public SessionToken getToken_impl() {
+ public SessionToken2 getToken_impl() {
return mSessionToken;
}
@@ -255,7 +259,7 @@
}
@Override
- public void setPlaylist_impl(List<MediaItem2> playlist, PlaylistParam param) {
+ public void setPlaylist_impl(List<MediaItem2> playlist, PlaylistParams param) {
// TODO(jaewan): Implement
}
@@ -300,13 +304,15 @@
// 1. Allow calls from random threads for all methods.
// 2. Allow calls from random threads for all methods, except for the
// {@link #setPlayer()}.
- // TODO(jaewan): Should we pend command instead of exception?
private void ensureCallingThread() {
+ // TODO(jaewan): Uncomment or remove
+ /*
if (mHandler.getLooper() != Looper.myLooper()) {
throw new IllegalStateException("Run this on the given thread");
- }
+ }*/
}
+
private void ensurePlayer() {
// TODO(jaewan): Should we pend command instead? Follow the decision from MP2.
// Alternatively we can add a API like setAcceptsPendingCommands(boolean).
@@ -319,7 +325,7 @@
return mHandler;
}
- private void notifyPlaybackStateChanged(PlaybackState state) {
+ private void notifyPlaybackStateChanged(PlaybackState2 state) {
// Notify to listeners added directly to this session
for (int i = 0; i < mListeners.size(); i++) {
mListeners.get(i).postPlaybackChange(state);
@@ -350,10 +356,9 @@
}
@Override
- public void onPlaybackChanged(PlaybackState state) {
+ public void onPlaybackChanged(PlaybackState2 state) {
MediaSession2Impl session = mSession.get();
- if (session == null || session.getHandler().getLooper() != Looper.myLooper()
- || mPlayer != session.mInstance.getPlayer()) {
+ if (mPlayer != session.mInstance.getPlayer()) {
Log.w(TAG, "Unexpected playback state change notifications. Ignoring.",
new IllegalStateException());
return;
@@ -374,7 +379,7 @@
// {@link #CALLBACK_FLAG_SESSION_ACTIVENESS}
private int mFlag;
- public ControllerInfoImpl(ControllerInfo instance, Context context, int uid,
+ public ControllerInfoImpl(Context context, ControllerInfo instance, int uid,
int pid, String packageName, IMediaSession2Callback callback) {
mInstance = instance;
mUid = uid;
diff --git a/packages/MediaComponents/src/com/android/media/MediaSession2Stub.java b/packages/MediaComponents/src/com/android/media/MediaSession2Stub.java
index 3f7a1b1..2f75dfa 100644
--- a/packages/MediaComponents/src/com/android/media/MediaSession2Stub.java
+++ b/packages/MediaComponents/src/com/android/media/MediaSession2Stub.java
@@ -29,6 +29,7 @@
import android.media.MediaSession2.CommandGroup;
import android.media.MediaSession2.ControllerInfo;
import android.media.MediaSession2.SessionCallback;
+import android.media.PlaybackState2;
import android.media.session.PlaybackState;
import android.os.Binder;
import android.os.Bundle;
@@ -152,10 +153,10 @@
@Deprecated
@Override
- public PlaybackState getPlaybackState() throws RemoteException {
+ public Bundle getPlaybackState() throws RemoteException {
MediaSession2Impl session = getSession();
// TODO(jaewan): Check if mPlayer.getPlaybackState() is safe here.
- return session.getInstance().getPlayer().getPlaybackState();
+ return session.getInstance().getPlayer().getPlaybackState().toBundle();
}
@Deprecated
@@ -216,13 +217,13 @@
}
// Should be used without a lock to prevent potential deadlock.
- public void notifyPlaybackStateChangedNotLocked(PlaybackState state) {
+ public void notifyPlaybackStateChangedNotLocked(PlaybackState2 state) {
final List<ControllerInfo> list = getControllersWithFlag(CALLBACK_FLAG_PLAYBACK);
for (int i = 0; i < list.size(); i++) {
IMediaSession2Callback callbackBinder =
ControllerInfoImpl.from(list.get(i)).getControllerBinder();
try {
- callbackBinder.onPlaybackStateChanged(state);
+ callbackBinder.onPlaybackStateChanged(state.toBundle());
} catch (RemoteException e) {
Log.w(TAG, "Controller is gone", e);
// TODO(jaewan): What to do when the controller is gone?
diff --git a/packages/MediaComponents/src/com/android/media/MediaSessionService2Impl.java b/packages/MediaComponents/src/com/android/media/MediaSessionService2Impl.java
index 773a06f..9d24082 100644
--- a/packages/MediaComponents/src/com/android/media/MediaSessionService2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaSessionService2Impl.java
@@ -26,6 +26,7 @@
import android.media.MediaSession2;
import android.media.MediaSessionService2;
import android.media.MediaSessionService2.MediaNotification;
+import android.media.PlaybackState2;
import android.media.session.PlaybackState;
import android.media.update.MediaSessionService2Provider;
import android.os.IBinder;
@@ -70,7 +71,7 @@
}
@Override
- public MediaNotification onUpdateNotification_impl(PlaybackState state) {
+ public MediaNotification onUpdateNotification_impl(PlaybackState2 state) {
// Provide default notification UI later.
return null;
}
@@ -118,7 +119,7 @@
return null;
}
- private void updateNotification(PlaybackState state) {
+ private void updateNotification(PlaybackState2 state) {
MediaNotification mediaNotification = mInstance.onUpdateNotification(state);
if (mediaNotification == null) {
return;
@@ -145,7 +146,7 @@
private class SessionServicePlaybackListener implements PlaybackListener {
@Override
- public void onPlaybackChanged(PlaybackState state) {
+ public void onPlaybackChanged(PlaybackState2 state) {
if (state == null) {
Log.w(TAG, "Ignoring null playback state");
return;
diff --git a/packages/MediaComponents/src/com/android/media/PlaybackListenerHolder.java b/packages/MediaComponents/src/com/android/media/PlaybackListenerHolder.java
index 4d06463..7b336c4 100644
--- a/packages/MediaComponents/src/com/android/media/PlaybackListenerHolder.java
+++ b/packages/MediaComponents/src/com/android/media/PlaybackListenerHolder.java
@@ -16,39 +16,30 @@
package com.android.media;
-import android.media.MediaPlayerBase;
import android.media.MediaPlayerBase.PlaybackListener;
+import android.media.PlaybackState2;
import android.media.session.PlaybackState;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
+
import java.util.List;
+import java.util.concurrent.Executor;
/**
- * Holds {@link android.media.MediaPlayerBase.PlaybackListener} with the {@link Handler}.
+ * Holds {@link PlaybackListener} with the {@link Handler}.
*/
-public class PlaybackListenerHolder extends Handler {
- private static final int ON_PLAYBACK_CHANGED = 1;
+public class PlaybackListenerHolder {
+ public final Executor executor;
+ public final PlaybackListener listener;
- public final MediaPlayerBase.PlaybackListener listener;
-
- public PlaybackListenerHolder(
- @NonNull MediaPlayerBase.PlaybackListener listener, @NonNull Handler handler) {
- super(handler.getLooper());
+ public PlaybackListenerHolder(Executor executor, @NonNull PlaybackListener listener) {
+ this.executor = executor;
this.listener = listener;
}
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
- case ON_PLAYBACK_CHANGED:
- listener.onPlaybackChanged((PlaybackState) msg.obj);
- break;
- }
- }
-
- public void postPlaybackChange(PlaybackState state) {
- obtainMessage(ON_PLAYBACK_CHANGED, state).sendToTarget();
+ public void postPlaybackChange(final PlaybackState2 state) {
+ executor.execute(() -> listener.onPlaybackChanged(state));
}
/**
diff --git a/packages/MediaComponents/src/com/android/media/update/ApiFactory.java b/packages/MediaComponents/src/com/android/media/update/ApiFactory.java
index 43acaf9..8ce7bef 100644
--- a/packages/MediaComponents/src/com/android/media/update/ApiFactory.java
+++ b/packages/MediaComponents/src/com/android/media/update/ApiFactory.java
@@ -23,6 +23,7 @@
import android.media.MediaBrowser2;
import android.media.MediaBrowser2.BrowserCallback;
import android.media.MediaController2;
+import android.media.MediaController2.ControllerCallback;
import android.media.MediaLibraryService2;
import android.media.MediaLibraryService2.MediaLibrarySession;
import android.media.MediaLibraryService2.MediaLibrarySessionCallback;
@@ -32,7 +33,7 @@
import android.media.MediaSession2.SessionCallback;
import android.media.MediaSessionService2;
import android.media.IMediaSession2Callback;
-import android.media.SessionToken;
+import android.media.SessionToken2;
import android.media.VolumeProvider;
import android.media.update.MediaBrowser2Provider;
import android.media.update.MediaControlView2Provider;
@@ -43,6 +44,7 @@
import android.media.update.VideoView2Provider;
import android.media.update.StaticProvider;
import android.media.update.ViewProvider;
+import android.os.IInterface;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.widget.MediaControlView2;
@@ -68,32 +70,32 @@
@Override
public MediaController2Provider createMediaController2(
- MediaController2 instance, Context context, SessionToken token,
- MediaController2.ControllerCallback callback, Executor executor) {
- return new MediaController2Impl(instance, context, token, callback, executor);
+ Context context, MediaController2 instance, SessionToken2 token,
+ Executor executor, ControllerCallback callback) {
+ return new MediaController2Impl(context, instance, token, executor, callback);
}
@Override
- public MediaBrowser2Provider createMediaBrowser2(MediaBrowser2 instance, Context context,
- SessionToken token, BrowserCallback callback, Executor executor) {
- return new MediaBrowser2Impl(instance, context, token, callback, executor);
+ public MediaBrowser2Provider createMediaBrowser2(Context context, MediaBrowser2 instance,
+ SessionToken2 token, Executor executor, BrowserCallback callback) {
+ return new MediaBrowser2Impl(context, instance, token, executor, callback);
}
@Override
- public MediaSession2Provider createMediaSession2(MediaSession2 instance, Context context,
- MediaPlayerBase player, String id, SessionCallback callback,
- VolumeProvider volumeProvider, int ratingType,
- PendingIntent sessionActivity) {
- return new MediaSession2Impl(instance, context, player, id, callback,
- volumeProvider, ratingType, sessionActivity);
+ public MediaSession2Provider createMediaSession2(Context context, MediaSession2 instance,
+ MediaPlayerBase player, String id, VolumeProvider volumeProvider,
+ int ratingType, PendingIntent sessionActivity, Executor callbackExecutor,
+ SessionCallback callback) {
+ return new MediaSession2Impl(context, instance, player, id, volumeProvider, ratingType,
+ sessionActivity, callbackExecutor, callback);
}
@Override
public MediaSession2Provider.ControllerInfoProvider createMediaSession2ControllerInfoProvider(
- ControllerInfo instance, Context context, int uid, int pid, String packageName,
- IMediaSession2Callback callback) {
- return new MediaSession2Impl.ControllerInfoImpl(
- instance, context, uid, pid, packageName, callback);
+ Context context, ControllerInfo instance, int uid, int pid, String packageName,
+ IInterface callback) {
+ return new MediaSession2Impl.ControllerInfoImpl(context,
+ instance, uid, pid, packageName, (IMediaSession2Callback) callback);
}
@Override
@@ -110,11 +112,11 @@
@Override
public MediaLibrarySessionProvider createMediaLibraryService2MediaLibrarySession(
- MediaLibrarySession instance, Context context, MediaPlayerBase player, String id,
- MediaLibrarySessionCallback callback, VolumeProvider volumeProvider, int ratingType,
- PendingIntent sessionActivity) {
- return new MediaLibrarySessionImpl(instance, context, player, id, callback, volumeProvider,
- ratingType, sessionActivity);
+ Context context, MediaLibrarySession instance, MediaPlayerBase player,
+ String id, VolumeProvider volumeProvider, int ratingType, PendingIntent sessionActivity,
+ Executor callbackExecutor, MediaLibrarySessionCallback callback) {
+ return new MediaLibrarySessionImpl(context, instance, player, id, volumeProvider,
+ ratingType, sessionActivity, callbackExecutor, callback);
}
@Override
diff --git a/packages/MediaComponents/test/src/android/media/MediaBrowser2Test.java b/packages/MediaComponents/test/src/android/media/MediaBrowser2Test.java
index fe8aeb9..ef75060 100644
--- a/packages/MediaComponents/test/src/android/media/MediaBrowser2Test.java
+++ b/packages/MediaComponents/test/src/android/media/MediaBrowser2Test.java
@@ -48,7 +48,7 @@
private static final String TAG = "MediaBrowser2Test";
@Override
- TestControllerInterface onCreateController(@NonNull SessionToken token,
+ TestControllerInterface onCreateController(@NonNull SessionToken2 token,
@NonNull TestControllerCallbackInterface callback) {
return new TestMediaBrowser(mContext, token, new TestBrowserCallback(callback));
}
@@ -69,7 +69,7 @@
}
};
- final SessionToken token = MockMediaLibraryService2.getToken(mContext);
+ final SessionToken2 token = MockMediaLibraryService2.getToken(mContext);
MediaBrowser2 browser =
(MediaBrowser2) createController(token,true, callback);
browser.getBrowserRoot(param);
@@ -127,9 +127,9 @@
public class TestMediaBrowser extends MediaBrowser2 implements TestControllerInterface {
private final BrowserCallback mCallback;
- public TestMediaBrowser(@NonNull Context context, @NonNull SessionToken token,
+ public TestMediaBrowser(@NonNull Context context, @NonNull SessionToken2 token,
@NonNull ControllerCallback callback) {
- super(context, token, (BrowserCallback) callback, sHandlerExecutor);
+ super(context, token, sHandlerExecutor, (BrowserCallback) callback);
mCallback = (BrowserCallback) callback;
}
diff --git a/packages/MediaComponents/test/src/android/media/MediaController2Test.java b/packages/MediaComponents/test/src/android/media/MediaController2Test.java
index f99935a..ae67a95 100644
--- a/packages/MediaComponents/test/src/android/media/MediaController2Test.java
+++ b/packages/MediaComponents/test/src/android/media/MediaController2Test.java
@@ -218,7 +218,7 @@
sHandler.postAndSync(() -> {
mSession.close();
mSession = new MediaSession2.Builder(mContext, mPlayer)
- .setSessionCallback(sessionCallback).build();
+ .setSessionCallback(sHandlerExecutor, sessionCallback).build();
});
MediaController2 controller =
createController(mSession.getToken(), false, null);
@@ -279,7 +279,7 @@
});
final MediaController2 controller = createController(mSession.getToken());
testHandler.post(() -> {
- final PlaybackState state = createPlaybackState(PlaybackState.STATE_ERROR);
+ final PlaybackState2 state = createPlaybackState(PlaybackState.STATE_ERROR);
for (int i = 0; i < 100; i++) {
// triggers call from session to controller.
player.notifyPlaybackState(state);
@@ -320,15 +320,15 @@
@Ignore
@Test
public void testGetServiceToken() {
- SessionToken token = TestUtils.getServiceToken(mContext, MockMediaSessionService2.ID);
+ SessionToken2 token = TestUtils.getServiceToken(mContext, MockMediaSessionService2.ID);
assertNotNull(token);
assertEquals(mContext.getPackageName(), token.getPackageName());
assertEquals(MockMediaSessionService2.ID, token.getId());
assertNull(token.getSessionBinder());
- assertEquals(SessionToken.TYPE_SESSION_SERVICE, token.getType());
+ assertEquals(SessionToken2.TYPE_SESSION_SERVICE, token.getType());
}
- private void connectToService(SessionToken token) throws InterruptedException {
+ private void connectToService(SessionToken2 token) throws InterruptedException {
mController = createController(token);
mSession = TestServiceRegistry.getInstance().getServiceInstance().getSession();
mPlayer = (MockPlayer) mSession.getPlayer();
diff --git a/packages/MediaComponents/test/src/android/media/MediaSession2Test.java b/packages/MediaComponents/test/src/android/media/MediaSession2Test.java
index 7b58f0e..045dcd5 100644
--- a/packages/MediaComponents/test/src/android/media/MediaSession2Test.java
+++ b/packages/MediaComponents/test/src/android/media/MediaSession2Test.java
@@ -208,7 +208,7 @@
mSession.close();
mPlayer = new MockPlayer(1);
mSession = new MediaSession2.Builder(mContext, mPlayer)
- .setSessionCallback(callback).build();
+ .setSessionCallback(sHandlerExecutor, callback).build();
});
MediaController2 controller = createController(mSession.getToken());
controller.pause();
@@ -232,7 +232,7 @@
sHandler.postAndSync(() -> {
mSession.close();
mSession = new MediaSession2.Builder(mContext, mPlayer)
- .setSessionCallback(sessionCallback).build();
+ .setSessionCallback(sHandlerExecutor, sessionCallback).build();
});
MediaController2 controller =
createController(mSession.getToken(), false, null);
diff --git a/packages/MediaComponents/test/src/android/media/MediaSession2TestBase.java b/packages/MediaComponents/test/src/android/media/MediaSession2TestBase.java
index 2965c82..7834a42 100644
--- a/packages/MediaComponents/test/src/android/media/MediaSession2TestBase.java
+++ b/packages/MediaComponents/test/src/android/media/MediaSession2TestBase.java
@@ -101,11 +101,11 @@
}
}
- final MediaController2 createController(SessionToken token) throws InterruptedException {
+ final MediaController2 createController(SessionToken2 token) throws InterruptedException {
return createController(token, true, null);
}
- final MediaController2 createController(@NonNull SessionToken token,
+ final MediaController2 createController(@NonNull SessionToken2 token,
boolean waitForConnect, @Nullable TestControllerCallbackInterface callback)
throws InterruptedException {
TestControllerInterface instance = onCreateController(token, callback);
@@ -145,7 +145,7 @@
getWaitForConnectionInterface(controller).waitForDisconnect(expected);
}
- TestControllerInterface onCreateController(@NonNull SessionToken token,
+ TestControllerInterface onCreateController(@NonNull SessionToken2 token,
@NonNull TestControllerCallbackInterface callback) {
return new TestMediaController(mContext, token, new TestControllerCallback(callback));
}
@@ -196,9 +196,9 @@
public class TestMediaController extends MediaController2 implements TestControllerInterface {
private final ControllerCallback mCallback;
- public TestMediaController(@NonNull Context context, @NonNull SessionToken token,
+ public TestMediaController(@NonNull Context context, @NonNull SessionToken2 token,
@NonNull ControllerCallback callback) {
- super(context, token, callback, sHandlerExecutor);
+ super(context, token, sHandlerExecutor, callback);
mCallback = callback;
}
diff --git a/packages/MediaComponents/test/src/android/media/MediaSessionManager_MediaSession2.java b/packages/MediaComponents/test/src/android/media/MediaSessionManager_MediaSession2.java
index bfed7d0..192cbc2 100644
--- a/packages/MediaComponents/test/src/android/media/MediaSessionManager_MediaSession2.java
+++ b/packages/MediaComponents/test/src/android/media/MediaSessionManager_MediaSession2.java
@@ -82,10 +82,10 @@
player.notifyPlaybackState(createPlaybackState(PlaybackState.STATE_STOPPED));
MediaController2 controller = null;
- List<SessionToken> tokens = mManager.getActiveSessionTokens();
+ List<SessionToken2> tokens = mManager.getActiveSessionTokens();
assertNotNull(tokens);
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
if (mContext.getPackageName().equals(token.getPackageName())
&& TAG.equals(token.getId())) {
assertNotNull(token.getSessionBinder());
@@ -113,7 +113,7 @@
sHandler.postAndSync(() -> {
mSession.close();
mSession = new MediaSession2.Builder(mContext, new MockPlayer(0)).setId(TAG)
- .setSessionCallback(new SessionCallback() {
+ .setSessionCallback(sHandlerExecutor, new SessionCallback() {
@Override
public MediaSession2.CommandGroup onConnect(ControllerInfo controller) {
// Reject all connection request.
@@ -124,10 +124,10 @@
ensureChangeInSession();
boolean foundSession = false;
- List<SessionToken> tokens = mManager.getActiveSessionTokens();
+ List<SessionToken2> tokens = mManager.getActiveSessionTokens();
assertNotNull(tokens);
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
if (mContext.getPackageName().equals(token.getPackageName())
&& TAG.equals(token.getId())) {
assertFalse(foundSession);
@@ -147,9 +147,9 @@
// When the mSession's player becomes null, it should lose binder connection between server.
// So server will forget the session.
- List<SessionToken> tokens = mManager.getActiveSessionTokens();
+ List<SessionToken2> tokens = mManager.getActiveSessionTokens();
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
assertFalse(mContext.getPackageName().equals(token.getPackageName())
&& TAG.equals(token.getId()));
}
@@ -159,19 +159,19 @@
public void testGetMediaSessionService2Token() throws InterruptedException {
boolean foundTestSessionService = false;
boolean foundTestLibraryService = false;
- List<SessionToken> tokens = mManager.getSessionServiceTokens();
+ List<SessionToken2> tokens = mManager.getSessionServiceTokens();
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
if (mContext.getPackageName().equals(token.getPackageName())
&& MockMediaSessionService2.ID.equals(token.getId())) {
assertFalse(foundTestSessionService);
- assertEquals(SessionToken.TYPE_SESSION_SERVICE, token.getType());
+ assertEquals(SessionToken2.TYPE_SESSION_SERVICE, token.getType());
assertNull(token.getSessionBinder());
foundTestSessionService = true;
} else if (mContext.getPackageName().equals(token.getPackageName())
&& MockMediaLibraryService2.ID.equals(token.getId())) {
assertFalse(foundTestLibraryService);
- assertEquals(SessionToken.TYPE_LIBRARY_SERVICE, token.getType());
+ assertEquals(SessionToken2.TYPE_LIBRARY_SERVICE, token.getType());
assertNull(token.getSessionBinder());
foundTestLibraryService = true;
}
@@ -185,9 +185,9 @@
boolean foundTestSession = false;
boolean foundTestSessionService = false;
boolean foundTestLibraryService = false;
- List<SessionToken> tokens = mManager.getAllSessionTokens();
+ List<SessionToken2> tokens = mManager.getAllSessionTokens();
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
if (!mContext.getPackageName().equals(token.getPackageName())) {
continue;
}
@@ -199,11 +199,11 @@
case MockMediaSessionService2.ID:
assertFalse(foundTestSessionService);
foundTestSessionService = true;
- assertEquals(SessionToken.TYPE_SESSION_SERVICE, token.getType());
+ assertEquals(SessionToken2.TYPE_SESSION_SERVICE, token.getType());
break;
case MockMediaLibraryService2.ID:
assertFalse(foundTestLibraryService);
- assertEquals(SessionToken.TYPE_LIBRARY_SERVICE, token.getType());
+ assertEquals(SessionToken2.TYPE_LIBRARY_SERVICE, token.getType());
foundTestLibraryService = true;
break;
default:
diff --git a/packages/MediaComponents/test/src/android/media/MockMediaLibraryService2.java b/packages/MediaComponents/test/src/android/media/MockMediaLibraryService2.java
index acf733f..14cf257 100644
--- a/packages/MediaComponents/test/src/android/media/MockMediaLibraryService2.java
+++ b/packages/MediaComponents/test/src/android/media/MockMediaLibraryService2.java
@@ -40,19 +40,19 @@
EXTRA.putString(ROOT_ID, ROOT_ID);
}
@GuardedBy("MockMediaLibraryService2.class")
- private static SessionToken sToken;
+ private static SessionToken2 sToken;
private MediaLibrarySession mSession;
@Override
public MediaLibrarySession onCreateSession(String sessionId) {
final MockPlayer player = new MockPlayer(1);
- SyncHandler handler = (SyncHandler) TestServiceRegistry.getInstance().getHandler();
+ final SyncHandler handler = (SyncHandler) TestServiceRegistry.getInstance().getHandler();
try {
handler.postAndSync(() -> {
TestLibrarySessionCallback callback = new TestLibrarySessionCallback();
- mSession = new MediaLibrarySessionBuilder(
- MockMediaLibraryService2.this, player, callback)
+ mSession = new MediaLibrarySessionBuilder(MockMediaLibraryService2.this,
+ player, (runnable) -> handler.post(runnable), callback)
.setId(sessionId).build();
});
} catch (InterruptedException e) {
@@ -67,10 +67,10 @@
super.onDestroy();
}
- public static SessionToken getToken(Context context) {
+ public static SessionToken2 getToken(Context context) {
synchronized (MockMediaLibraryService2.class) {
if (sToken == null) {
- sToken = new SessionToken(SessionToken.TYPE_LIBRARY_SERVICE,
+ sToken = new SessionToken2(SessionToken2.TYPE_LIBRARY_SERVICE,
context.getPackageName(), ID,
MockMediaLibraryService2.class.getName(), null);
}
diff --git a/packages/MediaComponents/test/src/android/media/MockMediaSessionService2.java b/packages/MediaComponents/test/src/android/media/MockMediaSessionService2.java
index 9cf4911..b058117 100644
--- a/packages/MediaComponents/test/src/android/media/MockMediaSessionService2.java
+++ b/packages/MediaComponents/test/src/android/media/MockMediaSessionService2.java
@@ -45,11 +45,12 @@
@Override
public MediaSession2 onCreateSession(String sessionId) {
final MockPlayer player = new MockPlayer(1);
- SyncHandler handler = (SyncHandler) TestServiceRegistry.getInstance().getHandler();
+ final SyncHandler handler = (SyncHandler) TestServiceRegistry.getInstance().getHandler();
try {
handler.postAndSync(() -> {
mSession = new MediaSession2.Builder(MockMediaSessionService2.this, player)
- .setId(sessionId).setSessionCallback(new MySessionCallback()).build();
+ .setId(sessionId).setSessionCallback((runnable)->handler.post(runnable),
+ new MySessionCallback()).build();
});
} catch (InterruptedException e) {
fail(e.toString());
@@ -70,7 +71,7 @@
}
@Override
- public MediaNotification onUpdateNotification(PlaybackState state) {
+ public MediaNotification onUpdateNotification(PlaybackState2 state) {
if (mDefaultNotificationChannel == null) {
mDefaultNotificationChannel = new NotificationChannel(
DEFAULT_MEDIA_NOTIFICATION_CHANNEL_ID,
diff --git a/packages/MediaComponents/test/src/android/media/MockPlayer.java b/packages/MediaComponents/test/src/android/media/MockPlayer.java
index b0d7a69..ad7ba2f 100644
--- a/packages/MediaComponents/test/src/android/media/MockPlayer.java
+++ b/packages/MediaComponents/test/src/android/media/MockPlayer.java
@@ -16,14 +16,14 @@
package android.media;
-import android.media.session.PlaybackState;
-import android.os.Handler;
+import android.media.MediaSession2.PlaylistParams;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executor;
/**
* A mock implementation of {@link MediaPlayerBase} for testing.
@@ -37,7 +37,7 @@
public boolean mSkipToPreviousCalled;
public boolean mSkipToNextCalled;
public List<PlaybackListenerHolder> mListeners = new ArrayList<>();
- private PlaybackState mLastPlaybackState;
+ private PlaybackState2 mLastPlaybackState;
public MockPlayer(int count) {
mCountDownLatch = (count > 0) ? new CountDownLatch(count) : null;
@@ -83,16 +83,18 @@
}
}
+
+
@Nullable
@Override
- public PlaybackState getPlaybackState() {
+ public PlaybackState2 getPlaybackState() {
return mLastPlaybackState;
}
@Override
- public void addPlaybackListener(
- @NonNull PlaybackListener listener, @NonNull Handler handler) {
- mListeners.add(new PlaybackListenerHolder(listener, handler));
+ public void addPlaybackListener(@NonNull Executor executor,
+ @NonNull PlaybackListener listener) {
+ mListeners.add(new PlaybackListenerHolder(executor, listener));
}
@Override
@@ -103,10 +105,40 @@
}
}
- public void notifyPlaybackState(final PlaybackState state) {
+ public void notifyPlaybackState(final PlaybackState2 state) {
mLastPlaybackState = state;
for (int i = 0; i < mListeners.size(); i++) {
mListeners.get(i).postPlaybackChange(state);
}
}
+
+ // No-op. Should be added for test later.
+ @Override
+ public void prepare() {
+ }
+
+ @Override
+ public void seekTo(long pos) {
+ }
+
+ @Override
+ public void fastFoward() {
+ }
+
+ @Override
+ public void rewind() {
+ }
+
+ @Override
+ public AudioAttributes getAudioAttributes() {
+ return null;
+ }
+
+ @Override
+ public void setPlaylist(List<MediaItem2> item, PlaylistParams param) {
+ }
+
+ @Override
+ public void setCurrentPlaylistItem(int index) {
+ }
}
diff --git a/packages/MediaComponents/test/src/android/media/PlaybackListenerHolder.java b/packages/MediaComponents/test/src/android/media/PlaybackListenerHolder.java
index b0b87de..4e19d4d 100644
--- a/packages/MediaComponents/test/src/android/media/PlaybackListenerHolder.java
+++ b/packages/MediaComponents/test/src/android/media/PlaybackListenerHolder.java
@@ -23,32 +23,22 @@
import android.support.annotation.NonNull;
import java.util.List;
+import java.util.concurrent.Executor;
/**
* Holds {@link PlaybackListener} with the {@link Handler}.
*/
-public class PlaybackListenerHolder extends Handler {
- private static final int ON_PLAYBACK_CHANGED = 1;
-
+public class PlaybackListenerHolder {
+ public final Executor executor;
public final PlaybackListener listener;
- public PlaybackListenerHolder(
- @NonNull PlaybackListener listener, @NonNull Handler handler) {
- super(handler.getLooper());
+ public PlaybackListenerHolder(Executor executor, @NonNull PlaybackListener listener) {
+ this.executor = executor;
this.listener = listener;
}
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
- case ON_PLAYBACK_CHANGED:
- listener.onPlaybackChanged((PlaybackState) msg.obj);
- break;
- }
- }
-
- public void postPlaybackChange(PlaybackState state) {
- obtainMessage(ON_PLAYBACK_CHANGED, state).sendToTarget();
+ public void postPlaybackChange(final PlaybackState2 state) {
+ executor.execute(() -> listener.onPlaybackChanged(state));
}
/**
diff --git a/packages/MediaComponents/test/src/android/media/TestUtils.java b/packages/MediaComponents/test/src/android/media/TestUtils.java
index 1372f01..9a1fa10 100644
--- a/packages/MediaComponents/test/src/android/media/TestUtils.java
+++ b/packages/MediaComponents/test/src/android/media/TestUtils.java
@@ -45,8 +45,9 @@
* @param state one of the PlaybackState.STATE_xxx.
* @return a PlaybackState
*/
- public static PlaybackState createPlaybackState(int state) {
- return new PlaybackState.Builder().setState(state, 0, 1.0f).build();
+ public static PlaybackState2 createPlaybackState(int state) {
+ return new PlaybackState2(state, 0, 0, 1.0f,
+ 0, 0, null);
}
/**
@@ -57,12 +58,12 @@
* @return
*/
// TODO(jaewan): Currently not working.
- public static SessionToken getServiceToken(Context context, String id) {
+ public static SessionToken2 getServiceToken(Context context, String id) {
MediaSessionManager manager =
(MediaSessionManager) context.getSystemService(Context.MEDIA_SESSION_SERVICE);
- List<SessionToken> tokens = manager.getSessionServiceTokens();
+ List<SessionToken2> tokens = manager.getSessionServiceTokens();
for (int i = 0; i < tokens.size(); i++) {
- SessionToken token = tokens.get(i);
+ SessionToken2 token = tokens.get(i);
if (context.getPackageName().equals(token.getPackageName())
&& id.equals(token.getId())) {
return token;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e362530..113744f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -321,6 +321,7 @@
actualSessionId,
client.clientPid,
client.clientUid,
+ client.packageName,
config,
AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
}
@@ -340,7 +341,7 @@
if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
AudioSystem::releaseOutput(io, streamType, actualSessionId);
} else {
- AudioSystem::releaseInput(io, actualSessionId);
+ AudioSystem::releaseInput(portId);
}
ret = NO_INIT;
}
@@ -1621,12 +1622,6 @@
clientPid = callingPid;
}
- // check calling permissions
- if (!recordingAllowed(input.opPackageName, input.clientInfo.clientTid, clientUid)) {
- ALOGE("createRecord() permission denied: recording not allowed");
- lStatus = PERMISSION_DENIED;
- goto Exit;
- }
// we don't yet support anything other than linear PCM
if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -1663,7 +1658,7 @@
// release previously opened input if retrying.
if (output.inputId != AUDIO_IO_HANDLE_NONE) {
recordTrack.clear();
- AudioSystem::releaseInput(output.inputId, sessionId);
+ AudioSystem::releaseInput(portId);
output.inputId = AUDIO_IO_HANDLE_NONE;
}
lStatus = AudioSystem::getInputForAttr(&input.attr, &output.inputId,
@@ -1671,6 +1666,7 @@
// FIXME compare to AudioTrack
clientPid,
clientUid,
+ input.opPackageName,
&input.config,
output.flags, &output.selectedDeviceId, &portId);
@@ -1739,7 +1735,7 @@
}
recordTrack.clear();
if (output.inputId != AUDIO_IO_HANDLE_NONE) {
- AudioSystem::releaseInput(output.inputId, sessionId);
+ AudioSystem::releaseInput(portId);
}
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 70ac32c..7bfe802 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4874,7 +4874,7 @@
{
PlaybackThread::dumpInternals(fd, args);
dprintf(fd, " Thread throttle time (msecs): %u\n", mThreadThrottleTimeMs);
- dprintf(fd, " AudioMixer tracks: 0x%08x\n", mAudioMixer->trackNames());
+ dprintf(fd, " AudioMixer tracks: %s\n", mAudioMixer->trackNames().c_str());
dprintf(fd, " Master mono: %s\n", mMasterMono ? "on" : "off");
if (hasFastMixer()) {
@@ -6936,8 +6936,7 @@
if (recordTrack->isExternalTrack()) {
mLock.unlock();
bool silenced;
- status = AudioSystem::startInput(mId, recordTrack->sessionId(),
- mInDevice, recordTrack->uid(), &silenced);
+ status = AudioSystem::startInput(recordTrack->portId(), &silenced);
mLock.lock();
// FIXME should verify that recordTrack is still in mActiveTracks
if (status != NO_ERROR) {
@@ -6969,7 +6968,7 @@
startError:
if (recordTrack->isExternalTrack()) {
- AudioSystem::stopInput(mId, recordTrack->sessionId());
+ AudioSystem::stopInput(recordTrack->portId());
}
recordTrack->clearSyncStartEvent();
// FIXME I wonder why we do not reset the state here?
@@ -7742,7 +7741,7 @@
if (isOutput()) {
AudioSystem::releaseOutput(mId, streamType(), mSessionId);
} else {
- AudioSystem::releaseInput(mId, mSessionId);
+ AudioSystem::releaseInput(mPortId);
}
}
@@ -7802,10 +7801,6 @@
return NO_ERROR;
}
- if (!isOutput() && !recordingAllowed(client.packageName, client.clientPid, client.clientUid)) {
- return PERMISSION_DENIED;
- }
-
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t io = mId;
@@ -7837,6 +7832,7 @@
mSessionId,
client.clientPid,
client.clientUid,
+ client.packageName,
&config,
AUDIO_INPUT_FLAG_MMAP_NOIRQ,
&deviceId,
@@ -7855,7 +7851,7 @@
} else {
// TODO: Block recording for idle UIDs (b/72134552)
bool silenced;
- ret = AudioSystem::startInput(mId, mSessionId, mInDevice, client.clientUid, &silenced);
+ ret = AudioSystem::startInput(portId, &silenced);
}
// abort if start is rejected by audio policy manager
@@ -7865,7 +7861,7 @@
if (isOutput()) {
AudioSystem::releaseOutput(mId, streamType(), mSessionId);
} else {
- AudioSystem::releaseInput(mId, mSessionId);
+ AudioSystem::releaseInput(portId);
}
} else {
mHalStream->stop();
@@ -7922,8 +7918,8 @@
AudioSystem::stopOutput(mId, streamType(), track->sessionId());
AudioSystem::releaseOutput(mId, streamType(), track->sessionId());
} else {
- AudioSystem::stopInput(mId, track->sessionId());
- AudioSystem::releaseInput(mId, track->sessionId());
+ AudioSystem::stopInput(track->portId());
+ AudioSystem::releaseInput(track->portId());
}
sp<EffectChain> chain = getEffectChain_l(track->sessionId());
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 41d87a4..eb29497 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -623,8 +623,7 @@
static const int8_t kMaxTrackRetriesOffload = 20;
static const int8_t kMaxTrackStartupRetriesOffload = 100;
static const int8_t kMaxTrackStopRetriesOffload = 2;
- // 14 tracks max per client allows for 2 misbehaving application leaving 4 available tracks.
- static const uint32_t kMaxTracksPerUid = 14;
+ static constexpr uint32_t kMaxTracksPerUid = 40;
// Maximum delay (in nanoseconds) for upcoming buffers in suspend mode, otherwise
// if delay is greater, the estimated time for timeLoopNextNs is reset.
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index cdd8ca0..06bbf1e 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1687,7 +1687,7 @@
if (thread != 0) {
RecordThread *recordThread = (RecordThread *)thread.get();
if (recordThread->stop(this) && isExternalTrack()) {
- AudioSystem::stopInput(mThreadIoHandle, mSessionId);
+ AudioSystem::stopInput(mPortId);
}
}
}
@@ -1699,9 +1699,9 @@
{
if (isExternalTrack()) {
if (mState == ACTIVE || mState == RESUMING) {
- AudioSystem::stopInput(mThreadIoHandle, mSessionId);
+ AudioSystem::stopInput(mPortId);
}
- AudioSystem::releaseInput(mThreadIoHandle, mSessionId);
+ AudioSystem::releaseInput(mPortId);
}
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index f1d7d86..306de3f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -287,6 +287,7 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
@@ -295,6 +296,7 @@
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+
// already checked by client, but double-check in case the client wrapper is bypassed
if (attr->source < AUDIO_SOURCE_DEFAULT && attr->source >= AUDIO_SOURCE_CNT &&
attr->source != AUDIO_SOURCE_HOTWORD && attr->source != AUDIO_SOURCE_FM_TUNER) {
@@ -318,6 +320,13 @@
pid = callingPid;
}
+ // check calling permissions
+ if (!recordingAllowed(opPackageName, pid, uid)) {
+ ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
+ __func__, uid, pid);
+ return PERMISSION_DENIED;
+ }
+
if ((attr->source == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed(pid, uid)) {
return BAD_VALUE;
}
@@ -367,6 +376,13 @@
}
return status;
}
+
+ sp<AudioRecordClient> client =
+ new AudioRecordClient(*attr, *input, uid, pid, opPackageName, session);
+ client->active = false;
+ client->isConcurrent = false;
+ client->isVirtualDevice = false; //TODO : update from APM->getInputForAttr()
+ mAudioRecordClients.add(*portId, client);
}
if (audioPolicyEffects != 0) {
@@ -379,23 +395,38 @@
return NO_ERROR;
}
-status_t AudioPolicyService::startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
- bool *silenced)
+status_t AudioPolicyService::startInput(audio_port_handle_t portId, bool *silenced)
{
- // If UID inactive it records silence until becoming active
- *silenced = !mUidPolicy->isUidActive(uid) && !is_virtual_input_device(device);
-
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
+ sp<AudioRecordClient> client;
+ {
+ Mutex::Autolock _l(mLock);
+
+ ssize_t index = mAudioRecordClients.indexOfKey(portId);
+ if (index < 0) {
+ return INVALID_OPERATION;
+ }
+ client = mAudioRecordClients.valueAt(index);
+ }
+
+ // check calling permissions
+ if (!recordingAllowed(client->opPackageName, client->pid, client->uid)) {
+ ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
+ __func__, client->uid, client->pid);
+ return PERMISSION_DENIED;
+ }
+
+ // If UID inactive it records silence until becoming active
+ *silenced = !mUidPolicy->isUidActive(client->uid) && !client->isVirtualDevice;
Mutex::Autolock _l(mLock);
AudioPolicyInterface::concurrency_type__mask_t concurrency =
AudioPolicyInterface::API_INPUT_CONCURRENCY_NONE;
- status_t status = mAudioPolicyManager->startInput(input, session, *silenced, &concurrency);
+
+ status_t status = mAudioPolicyManager->startInput(
+ client->input, client->session, *silenced, &concurrency);
if (status == NO_ERROR) {
LOG_ALWAYS_FATAL_IF(concurrency & ~AudioPolicyInterface::API_INPUT_CONCURRENCY_ALL,
@@ -413,38 +444,52 @@
return status;
}
-status_t AudioPolicyService::stopInput(audio_io_handle_t input,
- audio_session_t session)
+status_t AudioPolicyService::stopInput(audio_port_handle_t portId)
{
if (mAudioPolicyManager == NULL) {
return NO_INIT;
}
Mutex::Autolock _l(mLock);
- return mAudioPolicyManager->stopInput(input, session);
+ ssize_t index = mAudioRecordClients.indexOfKey(portId);
+ if (index < 0) {
+ return INVALID_OPERATION;
+ }
+ sp<AudioRecordClient> client = mAudioRecordClients.valueAt(index);
+
+ return mAudioPolicyManager->stopInput(client->input, client->session);
}
-void AudioPolicyService::releaseInput(audio_io_handle_t input,
- audio_session_t session)
+void AudioPolicyService::releaseInput(audio_port_handle_t portId)
{
if (mAudioPolicyManager == NULL) {
return;
}
sp<AudioPolicyEffects>audioPolicyEffects;
+ sp<AudioRecordClient> client;
{
Mutex::Autolock _l(mLock);
audioPolicyEffects = mAudioPolicyEffects;
+ ssize_t index = mAudioRecordClients.indexOfKey(portId);
+ if (index < 0) {
+ return;
+ }
+ client = mAudioRecordClients.valueAt(index);
+ mAudioRecordClients.removeItem(portId);
+ }
+ if (client == 0) {
+ return;
}
if (audioPolicyEffects != 0) {
// release audio processors from the input
- status_t status = audioPolicyEffects->releaseInputEffects(input, session);
+ status_t status = audioPolicyEffects->releaseInputEffects(client->input, client->session);
if(status != NO_ERROR) {
- ALOGW("Failed to release effects on input %d", input);
+ ALOGW("Failed to release effects on input %d", client->input);
}
}
{
Mutex::Autolock _l(mLock);
- mAudioPolicyManager->releaseInput(input, session);
+ mAudioPolicyManager->releaseInput(client->input, client->session);
}
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index c21aa58..bfa3ef4 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -98,19 +98,15 @@
audio_session_t session,
pid_t pid,
uid_t uid,
+ const String16& opPackageName,
const audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId = NULL,
audio_port_handle_t *portId = NULL);
- virtual status_t startInput(audio_io_handle_t input,
- audio_session_t session,
- audio_devices_t device,
- uid_t uid,
+ virtual status_t startInput(audio_port_handle_t portId,
bool *silenced);
- virtual status_t stopInput(audio_io_handle_t input,
- audio_session_t session);
- virtual void releaseInput(audio_io_handle_t input,
- audio_session_t session);
+ virtual status_t stopInput(audio_port_handle_t portId);
+ virtual void releaseInput(audio_port_handle_t portId);
virtual status_t initStreamVolume(audio_stream_type_t stream,
int indexMin,
int indexMax);
@@ -611,6 +607,31 @@
bool mAudioPortCallbacksEnabled;
};
+ // --- AudioRecordClient ---
+ // Information about each registered AudioRecord client
+ // (between calls to getInputForAttr() and releaseInput())
+ class AudioRecordClient : public RefBase {
+ public:
+ AudioRecordClient(const audio_attributes_t attributes,
+ const audio_io_handle_t input, uid_t uid, pid_t pid,
+ const String16& opPackageName, const audio_session_t session) :
+ attributes(attributes),
+ input(input), uid(uid), pid(pid),
+ opPackageName(opPackageName), session(session),
+ active(false), isConcurrent(false), isVirtualDevice(false) {}
+ virtual ~AudioRecordClient() {}
+
+ const audio_attributes_t attributes; // source, flags ...
+ const audio_io_handle_t input; // audio HAL input IO handle
+ const uid_t uid; // client UID
+ const pid_t pid; // client PID
+ const String16 opPackageName; // client package name
+ const audio_session_t session; // audio session ID
+ bool active; // Capture is active or inactive
+ bool isConcurrent; // is allowed to concurrent capture
+ bool isVirtualDevice; // uses vitual device: updated by APM::getInputForAttr()
+ };
+
// Internal dump utilities.
status_t dumpPermissionDenial(int fd);
@@ -636,6 +657,7 @@
audio_mode_t mPhoneState;
sp<UidPolicy> mUidPolicy;
+ DefaultKeyedVector< audio_port_handle_t, sp<AudioRecordClient> > mAudioRecordClients;
};
} // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c36c7cf..1ebaea9 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1697,7 +1697,8 @@
// Thread-safe. No lock necessary.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
if (remoteCb != NULL) {
- remoteCb->onResultReceived(result.mMetadata, result.mResultExtras);
+ remoteCb->onResultReceived(result.mMetadata, result.mResultExtras,
+ result.mPhysicalMetadatas);
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 999e258..b868fa6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -39,6 +39,8 @@
#include <inttypes.h>
+#include <utility>
+
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Timers.h>
@@ -874,11 +876,9 @@
return res;
}
-// Only one processCaptureResult should be called at a time, so
-// the locks won't block. The locks are present here simply to enforce this.
-hardware::Return<void> Camera3Device::processCaptureResult(
+hardware::Return<void> Camera3Device::processCaptureResult_3_4(
const hardware::hidl_vec<
- hardware::camera::device::V3_2::CaptureResult>& results) {
+ hardware::camera::device::V3_4::CaptureResult>& results) {
// Ideally we should grab mLock, but that can lead to deadlock, and
// it's not super important to get up to date value of mStatus for this
// warning print, hence skipping the lock here
@@ -902,45 +902,121 @@
}
}
for (const auto& result : results) {
- processOneCaptureResultLocked(result);
+ processOneCaptureResultLocked(result.v3_2, result.physicalCameraMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
}
+// Only one processCaptureResult should be called at a time, so
+// the locks won't block. The locks are present here simply to enforce this.
+hardware::Return<void> Camera3Device::processCaptureResult(
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_2::CaptureResult>& results) {
+ hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+ // Ideally we should grab mLock, but that can lead to deadlock, and
+ // it's not super important to get up to date value of mStatus for this
+ // warning print, hence skipping the lock here
+ if (mStatus == STATUS_ERROR) {
+ // Per API contract, HAL should act as closed after device error
+ // But mStatus can be set to error by framework as well, so just log
+ // a warning here.
+ ALOGW("%s: received capture result in error state.", __FUNCTION__);
+ }
+
+ if (mProcessCaptureResultLock.tryLock() != OK) {
+ // This should never happen; it indicates a wrong client implementation
+ // that doesn't follow the contract. But, we can be tolerant here.
+ ALOGE("%s: callback overlapped! waiting 1s...",
+ __FUNCTION__);
+ if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
+ ALOGE("%s: cannot acquire lock in 1s, dropping results",
+ __FUNCTION__);
+ // really don't know what to do, so bail out.
+ return hardware::Void();
+ }
+ }
+ for (const auto& result : results) {
+ processOneCaptureResultLocked(result, noPhysMetadata);
+ }
+ mProcessCaptureResultLock.unlock();
+ return hardware::Void();
+}
+
+status_t Camera3Device::readOneCameraMetadataLocked(
+ uint64_t fmqResultSize, hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
+ const hardware::camera::device::V3_2::CameraMetadata& result) {
+ if (fmqResultSize > 0) {
+ resultMetadata.resize(fmqResultSize);
+ if (mResultMetadataQueue == nullptr) {
+ return NO_MEMORY; // logged in initialize()
+ }
+ if (!mResultMetadataQueue->read(resultMetadata.data(), fmqResultSize)) {
+ ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
+ __FUNCTION__, fmqResultSize);
+ return INVALID_OPERATION;
+ }
+ } else {
+ resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
+ result.size());
+ }
+
+ if (resultMetadata.size() != 0) {
+ status_t res;
+ const camera_metadata_t* metadata =
+ reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+ size_t expected_metadata_size = resultMetadata.size();
+ if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
+ ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
+
+ return OK;
+}
+
void Camera3Device::processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& result) {
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas) {
camera3_capture_result r;
status_t res;
r.frame_number = result.frameNumber;
+ // Read and validate the result metadata.
hardware::camera::device::V3_2::CameraMetadata resultMetadata;
- if (result.fmqResultSize > 0) {
- resultMetadata.resize(result.fmqResultSize);
- if (mResultMetadataQueue == nullptr) {
- return; // logged in initialize()
- }
- if (!mResultMetadataQueue->read(resultMetadata.data(), result.fmqResultSize)) {
- ALOGE("%s: Frame %d: Cannot read camera metadata from fmq, size = %" PRIu64,
- __FUNCTION__, result.frameNumber, result.fmqResultSize);
- return;
- }
- } else {
- resultMetadata.setToExternal(const_cast<uint8_t *>(result.result.data()),
- result.result.size());
+ res = readOneCameraMetadataLocked(result.fmqResultSize, resultMetadata, result.result);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata",
+ __FUNCTION__, result.frameNumber);
+ return;
}
+ r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
- if (resultMetadata.size() != 0) {
- r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
- size_t expected_metadata_size = resultMetadata.size();
- if ((res = validate_camera_metadata_structure(r.result, &expected_metadata_size)) != OK) {
- ALOGE("%s: Frame %d: Invalid camera metadata received by camera service from HAL: %s (%d)",
- __FUNCTION__, result.frameNumber, strerror(-res), res);
+ // Read and validate physical camera metadata
+ size_t physResultCount = physicalCameraMetadatas.size();
+ std::vector<const char*> physCamIds(physResultCount);
+ std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
+ std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
+ physResultMetadata.resize(physResultCount);
+ for (size_t i = 0; i < physicalCameraMetadatas.size(); i++) {
+ res = readOneCameraMetadataLocked(physicalCameraMetadatas[i].fmqMetadataSize,
+ physResultMetadata[i], physicalCameraMetadatas[i].metadata);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
+ __FUNCTION__, result.frameNumber,
+ physicalCameraMetadatas[i].physicalCameraId.c_str());
return;
}
- } else {
- r.result = nullptr;
+ physCamIds[i] = physicalCameraMetadatas[i].physicalCameraId.c_str();
+ phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
+ physResultMetadata[i].data());
}
+ r.num_physcam_metadata = physResultCount;
+ r.physcam_ids = physCamIds.data();
+ r.physcam_metadata = phyCamMetadatas.data();
std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
@@ -1806,6 +1882,7 @@
CaptureResult &result = *(mResultQueue.begin());
frame->mResultExtras = result.mResultExtras;
frame->mMetadata.acquire(result.mMetadata);
+ frame->mPhysicalMetadatas = std::move(result.mPhysicalMetadatas);
mResultQueue.erase(mResultQueue.begin());
return OK;
@@ -2579,13 +2656,14 @@
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- bool hasAppCallback, nsecs_t maxExpectedDuration) {
+ bool hasAppCallback, nsecs_t maxExpectedDuration,
+ std::set<String8>& physicalCameraIds) {
ATRACE_CALL();
Mutex::Autolock l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
- hasAppCallback, maxExpectedDuration));
+ hasAppCallback, maxExpectedDuration, physicalCameraIds));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2825,7 +2903,8 @@
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
- bool reprocess) {
+ bool reprocess,
+ const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
ATRACE_CALL();
if (pendingMetadata.isEmpty())
return;
@@ -2854,6 +2933,7 @@
CaptureResult captureResult;
captureResult.mResultExtras = resultExtras;
captureResult.mMetadata = pendingMetadata;
+ captureResult.mPhysicalMetadatas = physicalMetadatas;
// Append any previous partials to form a complete result
if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
@@ -2869,6 +2949,15 @@
frameNumber);
return;
}
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ camera_metadata_entry timestamp =
+ physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ if (timestamp.count == 0) {
+ SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
+ String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
+ return;
+ }
+ }
mTagMonitor.monitorMetadata(TagMonitor::RESULT,
frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
@@ -2904,7 +2993,6 @@
bool isPartialResult = false;
CameraMetadata collectedPartialResult;
- CaptureResultExtras resultExtras;
bool hasInputBufferInRequest = false;
// Get shutter timestamp and resultExtras from list of in-flight requests,
@@ -2945,6 +3033,11 @@
return;
}
isPartialResult = (result->partial_result < mNumPartialResults);
+ if (isPartialResult && result->num_physcam_metadata) {
+ SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
+ " physical camera result", frameNumber);
+ return;
+ }
if (isPartialResult) {
request.collectedPartialResult.append(result->result);
}
@@ -2961,11 +3054,28 @@
// Did we get the (final) result metadata for this capture?
if (result->result != NULL && !isPartialResult) {
+ if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
+ SET_ERR("Requested physical Camera Ids %d not equal to number of metadata %d",
+ request.physicalCameraIds.size(), result->num_physcam_metadata);
+ return;
+ }
if (request.haveResultMetadata) {
SET_ERR("Called multiple times with metadata for frame %d",
frameNumber);
return;
}
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ String8 physicalId(result->physcam_ids[i]);
+ std::set<String8>::iterator cameraIdIter =
+ request.physicalCameraIds.find(physicalId);
+ if (cameraIdIter != request.physicalCameraIds.end()) {
+ request.physicalCameraIds.erase(cameraIdIter);
+ } else {
+ SET_ERR("Total result for frame %d has already returned for camera %s",
+ frameNumber, physicalId.c_str());
+ return;
+ }
+ }
if (mUsePartialResult &&
!request.collectedPartialResult.isEmpty()) {
collectedPartialResult.acquire(
@@ -3010,15 +3120,21 @@
}
if (result->result != NULL && !isPartialResult) {
+ for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
+ CameraMetadata physicalMetadata;
+ physicalMetadata.append(result->physcam_metadata[i]);
+ request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
+ physicalMetadata});
+ }
if (shutterTimestamp == 0) {
request.pendingMetadata = result->result;
request.collectedPartialResult = collectedPartialResult;
- } else if (request.hasCallback) {
+ } else if (request.hasCallback) {
CameraMetadata metadata;
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
collectedPartialResult, frameNumber,
- hasInputBufferInRequest);
+ hasInputBufferInRequest, request.physicalMetadatas);
}
}
@@ -3204,7 +3320,7 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
- r.hasInputBuffer);
+ r.hasInputBuffer, r.physicalMetadatas);
}
returnOutputBuffers(r.pendingOutputBuffers.array(),
r.pendingOutputBuffers.size(), r.shutterTimestamp);
@@ -3219,7 +3335,6 @@
}
}
-
CameraMetadata Camera3Device::getLatestRequestLocked() {
ALOGV("%s", __FUNCTION__);
@@ -4657,6 +4772,7 @@
outputBuffers->insertAt(camera3_stream_buffer_t(), 0,
captureRequest->mOutputStreams.size());
halRequest->output_buffers = outputBuffers->array();
+ std::set<String8> requestedPhysicalCameras;
for (size_t j = 0; j < captureRequest->mOutputStreams.size(); j++) {
sp<Camera3OutputStreamInterface> outputStream = captureRequest->mOutputStreams.editItemAt(j);
@@ -4687,8 +4803,18 @@
return TIMED_OUT;
}
- halRequest->num_output_buffers++;
+ String8 physicalCameraId = outputStream->getPhysicalCameraId();
+
+ if (!physicalCameraId.isEmpty()) {
+ // Physical stream isn't supported for input request.
+ if (halRequest->input_buffer) {
+ CLOGE("Physical stream is not supported for input request");
+ return INVALID_OPERATION;
+ }
+ requestedPhysicalCameras.insert(physicalCameraId);
+ }
+ halRequest->num_output_buffers++;
}
totalNumBuffers += halRequest->num_output_buffers;
@@ -4711,7 +4837,8 @@
totalNumBuffers, captureRequest->mResultExtras,
/*hasInput*/halRequest->input_buffer != NULL,
hasCallback,
- calculateMaxExpectedDuration(halRequest->settings));
+ calculateMaxExpectedDuration(halRequest->settings),
+ requestedPhysicalCameras);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
", burstId = %" PRId32 ".",
__FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index bf2a577..bc97510 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -19,6 +19,7 @@
#include <utility>
#include <unordered_map>
+#include <set>
#include <utils/Condition.h>
#include <utils/Errors.h>
@@ -33,6 +34,7 @@
#include <android/hardware/camera/device/3.3/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
#include <fmq/MessageQueue.h>
#include <hardware/camera3.h>
@@ -77,7 +79,7 @@
*/
class Camera3Device :
public CameraDeviceBase,
- virtual public hardware::camera::device::V3_2::ICameraDeviceCallback,
+ virtual public hardware::camera::device::V3_4::ICameraDeviceCallback,
private camera3_callback_ops {
public:
@@ -472,9 +474,11 @@
/**
- * Implementation of android::hardware::camera::device::V3_2::ICameraDeviceCallback
+ * Implementation of android::hardware::camera::device::V3_4::ICameraDeviceCallback
*/
-
+ hardware::Return<void> processCaptureResult_3_4(
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::CaptureResult>& results) override;
hardware::Return<void> processCaptureResult(
const hardware::hidl_vec<
hardware::camera::device::V3_2::CaptureResult>& results) override;
@@ -484,7 +488,13 @@
// Handle one capture result. Assume that mProcessCaptureResultLock is held.
void processOneCaptureResultLocked(
- const hardware::camera::device::V3_2::CaptureResult& results);
+ const hardware::camera::device::V3_2::CaptureResult& result,
+ const hardware::hidl_vec<
+ hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas);
+ status_t readOneCameraMetadataLocked(uint64_t fmqResultSize,
+ hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
+ const hardware::camera::device::V3_2::CameraMetadata& result);
+
// Handle one notify message
void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
@@ -965,6 +975,12 @@
// REQUEST/RESULT error.
bool skipResultMetadata;
+ // The physical camera ids being requested.
+ std::set<String8> physicalCameraIds;
+
+ // Map of physicalCameraId <-> Metadata
+ std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
+
// Default constructor needed by KeyedVector
InFlightRequest() :
shutterTimestamp(0),
@@ -979,7 +995,8 @@
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- bool hasAppCallback, nsecs_t maxDuration) :
+ bool hasAppCallback, nsecs_t maxDuration,
+ const std::set<String8>& physicalCameraIdSet) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -989,7 +1006,8 @@
hasInputBuffer(hasInput),
hasCallback(hasAppCallback),
maxExpectedDuration(maxDuration),
- skipResultMetadata(false) {
+ skipResultMetadata(false),
+ physicalCameraIds(physicalCameraIdSet) {
}
};
@@ -1006,7 +1024,7 @@
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- bool callback, nsecs_t maxExpectedDuration);
+ bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds);
/**
* Returns the maximum expected time it'll take for all currently in-flight
@@ -1127,7 +1145,9 @@
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
- bool reprocess);
+ bool reprocess, const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas);
+
+ bool isLastFullResult(const InFlightRequest& inFlightRequest);
// Insert the result to the result queue after updating frame number and overriding AE
// trigger cancel.
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 44eb68a..fb1ff77 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -115,6 +115,10 @@
return OK;
}
+const String8& Camera3DummyStream::getPhysicalCameraId() const {
+ return DUMMY_ID;
+}
+
status_t Camera3DummyStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface!",
__FUNCTION__, mId);
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index dcf9160..4627548 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -63,6 +63,11 @@
virtual status_t dropBuffers(bool /*dropping*/) override;
/**
+ * Query the physical camera id for the output stream.
+ */
+ virtual const String8& getPhysicalCameraId() const override;
+
+ /**
* Return if this output stream is for video encoding.
*/
bool isVideoStream() const;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index b73e256..b3c3717 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -806,6 +806,11 @@
return OK;
}
+const String8& Camera3OutputStream::getPhysicalCameraId() const {
+ Mutex::Autolock l(mLock);
+ return physicalCameraId();
+}
+
status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 824aef7..6f36f92 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -174,6 +174,11 @@
virtual status_t dropBuffers(bool dropping) override;
/**
+ * Query the physical camera id for the output stream.
+ */
+ virtual const String8& getPhysicalCameraId() const override;
+
+ /**
* Set the graphic buffer manager to get/return the stream buffers.
*
* It is only legal to call this method when stream is in STATE_CONSTRUCTED state.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 08fcf38..a711a6d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -78,6 +78,11 @@
* Drop buffers if dropping is true. If dropping is false, do not drop buffers.
*/
virtual status_t dropBuffers(bool /*dropping*/) = 0;
+
+ /**
+ * Query the physical camera id for the output stream.
+ */
+ virtual const String8& getPhysicalCameraId() const = 0;
};
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 0d91620..1105b75 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -143,6 +143,10 @@
return mOriginalDataSpace;
}
+const String8& Camera3Stream::physicalCameraId() const {
+ return mPhysicalCameraId;
+}
+
status_t Camera3Stream::forceToIdle() {
ATRACE_CALL();
Mutex::Autolock l(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f85dff2..a60cb56 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -165,6 +165,7 @@
void setDataSpaceOverride(bool dataSpaceOverriden);
bool isDataSpaceOverridden() const;
android_dataspace getOriginalDataSpace() const;
+ const String8& physicalCameraId() const;
camera3_stream* asHalStream() override {
return this;