Merge "libeffects: Fixed bit mismatch seen with 3 channel input"
diff --git a/apex/Android.bp b/apex/Android.bp
index 7085d59..c077a77 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -42,9 +42,6 @@
},
},
key: "com.android.media.key",
- prefer_sanitize: {
- cfi: true,
- },
}
apex {
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
index d4b973f..d3b37d7 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
@@ -272,7 +272,7 @@
}
// Set Default config param.
-void setupConfigParam(
+bool setupConfigParam(
const std::shared_ptr<android::Codec2Client::Component>& component,
int32_t* bitStreamInfo) {
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -282,8 +282,8 @@
std::vector<C2Param*> configParam{&sampleRateInfo, &channelCountInfo};
c2_status_t status =
component->config(configParam, C2_DONT_BLOCK, &failures);
- ASSERT_EQ(failures.size(), 0u);
- ASSERT_EQ(status, C2_OK);
+ if (status == C2_OK && failures.size() == 0u) return true;
+ return false;
}
// In decoder components, often the input parameters get updated upon
@@ -557,7 +557,11 @@
ASSERT_NO_FATAL_FAILURE(
getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
}
- setupConfigParam(mComponent, bitStreamInfo);
+ if (!setupConfigParam(mComponent, bitStreamInfo)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
ALOGV("mURL : %s", mURL);
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
@@ -613,7 +617,6 @@
description("Test Request for thumbnail");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
@@ -642,7 +645,11 @@
ASSERT_NO_FATAL_FAILURE(
getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
}
- setupConfigParam(mComponent, bitStreamInfo);
+ if (!setupConfigParam(mComponent, bitStreamInfo)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
ALOGV("mURL : %s", mURL);
// request EOS for thumbnail
@@ -711,7 +718,6 @@
description("Tests Flush calls");
if (mDisableTest) return;
typedef std::unique_lock<std::mutex> ULock;
- ASSERT_EQ(mComponent->start(), C2_OK);
char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
@@ -741,7 +747,11 @@
ASSERT_NO_FATAL_FAILURE(
getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
}
- setupConfigParam(mComponent, bitStreamInfo);
+ if (!setupConfigParam(mComponent, bitStreamInfo)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
ALOGV("mURL : %s", mURL);
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
@@ -833,8 +843,6 @@
description("Decode with multiple empty input frames");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
-
char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
@@ -868,7 +876,19 @@
frameId++;
}
eleInfo.close();
-
+ int32_t bitStreamInfo[2] = {0};
+ if (mCompName == raw) {
+ bitStreamInfo[0] = 8000;
+ bitStreamInfo[1] = 1;
+ } else {
+ ASSERT_NO_FATAL_FAILURE(
+ getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ }
+ if (!setupConfigParam(mComponent, bitStreamInfo)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
ALOGV("mURL : %s", mURL);
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
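[editor note] The change above converts setupConfigParam() from an ASSERT-based void helper into one that reports success, so a component that rejects the sample rate / channel count makes the test skip instead of fail, and start() now runs only after a successful config. A minimal standalone sketch of that control flow; the names and the boolean stand-in for component->config() are illustrative, not the VTS code itself:

    #include <iostream>

    bool setupConfigParam(bool configAccepted) {        // stand-in for component->config(...)
        return configAccepted;                          // true only when status is OK and there are no failures
    }

    void runDecodeTest(bool configAccepted) {
        if (!setupConfigParam(configAccepted)) {
            std::cout << "[ WARN ] Test Skipped \n";    // unsupported parameters: skip, don't fail
            return;
        }
        // start the component only after configuration succeeded
        // (the real test then does ASSERT_EQ(mComponent->start(), C2_OK))
    }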
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
index 5d66ee5..a74d43e 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHidlC2V1_0TargetAudioEncTest.cpp
@@ -219,7 +219,7 @@
}
// Set Default config param.
-void setupConfigParam(
+bool setupConfigParam(
const std::shared_ptr<android::Codec2Client::Component>& component,
int32_t nChannels, int32_t nSampleRate) {
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -229,8 +229,8 @@
std::vector<C2Param*> configParam{&sampleRateInfo, &channelCountInfo};
c2_status_t status =
component->config(configParam, C2_DONT_BLOCK, &failures);
- ASSERT_EQ(failures.size(), 0u);
- ASSERT_EQ(status, C2_OK);
+ if (status == C2_OK && failures.size() == 0u) return true;
+ return false;
}
// LookUpTable of clips and metadata for component testing
@@ -358,7 +358,6 @@
TEST_F(Codec2AudioEncHidlTest, EncodeTest) {
ALOGV("EncodeTest");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
char mURL[512];
strcpy(mURL, gEnv->getRes().c_str());
GetURLForComponent(mCompName, mURL);
@@ -396,7 +395,11 @@
default:
ASSERT_TRUE(false);
}
- setupConfigParam(mComponent, nChannels, nSampleRate);
+ if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFrames = 128;
eleStream.open(mURL, std::ifstream::binary);
@@ -469,7 +472,6 @@
TEST_F(Codec2AudioEncHidlTest, FlushTest) {
description("Test Request for flush");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
typedef std::unique_lock<std::mutex> ULock;
char mURL[512];
@@ -510,7 +512,13 @@
default:
ASSERT_TRUE(false);
}
- setupConfigParam(mComponent, nChannels, nSampleRate);
+
+ if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
+
std::ifstream eleStream;
uint32_t numFramesFlushed = 30;
uint32_t numFrames = 128;
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 64a458c..31da111 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -107,22 +107,24 @@
component->config(configParam, C2_DONT_BLOCK, &failures);
ASSERT_EQ(failures.size(), 0u);
}
- framesReceived++;
- eos = (work->worklets.front()->output.flags &
- C2FrameData::FLAG_END_OF_STREAM) != 0;
- auto frameIndexIt = std::find(flushedIndices.begin(), flushedIndices.end(),
- work->input.ordinal.frameIndex.peeku());
- ALOGV("WorkDone: frameID received %d",
- (int)work->worklets.front()->output.ordinal.frameIndex.peeku());
- work->input.buffers.clear();
- work->worklets.clear();
- {
- typedef std::unique_lock<std::mutex> ULock;
- ULock l(queueLock);
- workQueue.push_back(std::move(work));
- if (!flushedIndices.empty()) {
- flushedIndices.erase(frameIndexIt);
+ if (work->worklets.front()->output.flags != C2FrameData::FLAG_INCOMPLETE) {
+ framesReceived++;
+ eos = (work->worklets.front()->output.flags &
+ C2FrameData::FLAG_END_OF_STREAM) != 0;
+ auto frameIndexIt = std::find(flushedIndices.begin(), flushedIndices.end(),
+ work->input.ordinal.frameIndex.peeku());
+ ALOGV("WorkDone: frameID received %d",
+ (int)work->worklets.front()->output.ordinal.frameIndex.peeku());
+ work->input.buffers.clear();
+ work->worklets.clear();
+ {
+ typedef std::unique_lock<std::mutex> ULock;
+ ULock l(queueLock);
+ workQueue.push_back(std::move(work));
+ if (!flushedIndices.empty()) {
+ flushedIndices.erase(frameIndexIt);
+ }
+ queueCondition.notify_all();
}
- queueCondition.notify_all();
}
}
\ No newline at end of file
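[editor note] The reworked handler above only counts a returned work item as a received frame when its output is not flagged incomplete, so work that carries only a config update no longer bumps framesReceived. A small sketch of that filter, testing the flag as a bit; the flag values are placeholders standing in for C2FrameData::flags_t:

    #include <cstdint>

    // Placeholder bit values; the real ones come from C2FrameData.
    constexpr uint32_t FLAG_END_OF_STREAM = 1u << 0;
    constexpr uint32_t FLAG_INCOMPLETE    = 1u << 1;

    // Returns true when the work item should be counted as a delivered frame.
    bool countWorkDone(uint32_t outputFlags, uint32_t *framesReceived, bool *eos) {
        if (outputFlags & FLAG_INCOMPLETE) {
            return false;                               // settings-only work: nothing decoded yet
        }
        ++*framesReceived;
        *eos = (outputFlags & FLAG_END_OF_STREAM) != 0;
        return true;
    }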
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
index 8585c87..95d1b72 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
@@ -118,7 +118,6 @@
}
mEos = false;
mCsd = false;
- mConfig = false;
mFramesReceived = 0;
mFailedWorkReceived = 0;
if (mCompName == unknown_comp) mDisableTest = true;
@@ -134,7 +133,7 @@
Super::TearDown();
}
- void setupConfigParam(int32_t nWidth, int32_t nHeight);
+ bool setupConfigParam(int32_t nWidth, int32_t nHeight);
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -228,14 +227,14 @@
}
// Set Default config param.
-void Codec2VideoEncHidlTest::setupConfigParam(int32_t nWidth, int32_t nHeight) {
+bool Codec2VideoEncHidlTest::setupConfigParam(int32_t nWidth, int32_t nHeight) {
std::vector<std::unique_ptr<C2SettingResult>> failures;
C2VideoSizeStreamTuning::input inputSize(0u, nWidth, nHeight);
std::vector<C2Param*> configParam{&inputSize};
c2_status_t status =
mComponent->config(configParam, C2_DONT_BLOCK, &failures);
- if (failures.size() == 0u ) mConfig = true;
- ASSERT_EQ(status, C2_OK);
+ if (status == C2_OK && failures.size() == 0u) return true;
+ return false;
}
// LookUpTable of clips for component testing
@@ -360,8 +359,7 @@
ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
ALOGV("mURL : %s", mURL);
- setupConfigParam(nWidth, nHeight);
- if (!mConfig) {
+ if (!setupConfigParam(nWidth, nHeight)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -439,7 +437,6 @@
TEST_F(Codec2VideoEncHidlTest, FlushTest) {
description("Test Request for flush");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
typedef std::unique_lock<std::mutex> ULock;
char mURL[512];
@@ -447,7 +444,12 @@
int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
strcpy(mURL, gEnv->getRes().c_str());
GetURLForComponent(mURL);
- setupConfigParam(nWidth, nHeight);
+
+ if (!setupConfigParam(nWidth, nHeight)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
// Setting default configuration
mFlushedIndices.clear();
@@ -522,12 +524,16 @@
TEST_F(Codec2VideoEncHidlTest, InvalidBufferTest) {
description("Tests feeding larger/smaller input buffer");
if (mDisableTest) return;
- ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH / 2;
int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT / 2;
- setupConfigParam(nWidth, nHeight);
+
+ if (!setupConfigParam(nWidth, nHeight)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
+ ASSERT_EQ(mComponent->start(), C2_OK);
ASSERT_NO_FATAL_FAILURE(
encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -579,10 +585,12 @@
int32_t nWidth = GetParam().first;
int32_t nHeight = GetParam().second;
ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
- mConfig = false;
mEos = false;
- setupConfigParam(nWidth, nHeight);
- if (!mConfig) return;
+
+ if (!setupConfigParam(nWidth, nHeight)) {
+ std::cout << "[ WARN ] Test Skipped \n";
+ return;
+ }
ASSERT_EQ(mComponent->start(), C2_OK);
ASSERT_NO_FATAL_FAILURE(
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index bc22045..dce3222 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -772,8 +772,16 @@
}
std::vector<std::unique_ptr<C2Param>> configUpdate;
+ // NOTE: We used to ignore "video-bitrate" at configure; replicate
+ // the behavior here.
+ sp<AMessage> sdkParams = msg;
+ int32_t videoBitrate;
+ if (sdkParams->findInt32(PARAMETER_KEY_VIDEO_BITRATE, &videoBitrate)) {
+ sdkParams = msg->dup();
+ sdkParams->removeEntryAt(sdkParams->findEntryByName(PARAMETER_KEY_VIDEO_BITRATE));
+ }
status_t err = config->getConfigUpdateFromSdkParams(
- comp, msg, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
+ comp, sdkParams, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
if (err != OK) {
ALOGW("failed to convert configuration to c2 params");
}
@@ -1415,11 +1423,7 @@
(void)mChannel->requestInitialInputBuffers();
}
-void CCodec::signalSetParameters(const sp<AMessage> &params) {
- setParameters(params);
-}
-
-void CCodec::setParameters(const sp<AMessage> &params) {
+void CCodec::signalSetParameters(const sp<AMessage> &msg) {
std::shared_ptr<Codec2Client::Component> comp;
auto checkState = [this, &comp] {
Mutexed<State>::Locked state(mState);
@@ -1433,6 +1437,15 @@
return;
}
+ // NOTE: We used to ignore "bitrate" at setParameters; replicate
+ // the behavior here.
+ sp<AMessage> params = msg;
+ int32_t bitrate;
+ if (params->findInt32(KEY_BIT_RATE, &bitrate)) {
+ params = msg->dup();
+ params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
+ }
+
Mutexed<Config>::Locked config(mConfig);
/**
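[editor note] Both CCodec.cpp hunks above use the same copy-on-write trick: the incoming AMessage is duplicated only when the key to be ignored is actually present, and the stripped copy is what gets converted to C2 params. A sketch of that pattern using the same AMessage calls the diff uses (findInt32, dup, removeEntryAt, findEntryByName); the helper name is illustrative:

    #include <media/stagefright/foundation/AMessage.h>

    using android::AMessage;
    using android::sp;

    // Return msg itself when 'key' is absent; otherwise a duplicate with 'key' removed.
    sp<AMessage> withoutInt32Key(const sp<AMessage> &msg, const char *key) {
        int32_t unused;
        if (!msg->findInt32(key, &unused)) {
            return msg;                                  // nothing to strip, no copy made
        }
        sp<AMessage> filtered = msg->dup();              // leave the caller's message untouched
        filtered->removeEntryAt(filtered->findEntryByName(key));
        return filtered;
    }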
diff --git a/media/codec2/sfplugin/CCodec.h b/media/codec2/sfplugin/CCodec.h
index ba5f5f3..b0b3c4f 100644
--- a/media/codec2/sfplugin/CCodec.h
+++ b/media/codec2/sfplugin/CCodec.h
@@ -102,7 +102,6 @@
void createInputSurface();
void setInputSurface(const sp<PersistentSurface> &surface);
status_t setupInputSurface(const std::shared_ptr<InputSurfaceWrapper> &surface);
- void setParameters(const sp<AMessage> &params);
void setDeadline(
const TimePoint &now,
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6842fa5..ff2419d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -255,6 +255,34 @@
mSkipCutBuffer = scb;
}
+ void handleImageData(const sp<Codec2Buffer> &buffer) {
+ sp<ABuffer> imageDataCandidate = buffer->getImageData();
+ if (imageDataCandidate == nullptr) {
+ return;
+ }
+ sp<ABuffer> imageData;
+ if (!mFormat->findBuffer("image-data", &imageData)
+ || imageDataCandidate->size() != imageData->size()
+ || memcmp(imageDataCandidate->data(), imageData->data(), imageData->size()) != 0) {
+ ALOGD("[%s] updating image-data", mName);
+ sp<AMessage> newFormat = dupFormat();
+ newFormat->setBuffer("image-data", imageDataCandidate);
+ MediaImage2 *img = (MediaImage2*)imageDataCandidate->data();
+ if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+ int32_t stride = img->mPlane[0].mRowInc;
+ newFormat->setInt32(KEY_STRIDE, stride);
+ ALOGD("[%s] updating stride = %d", mName, stride);
+ if (img->mNumPlanes > 1 && stride > 0) {
+ int32_t vstride = (img->mPlane[1].mOffset - img->mPlane[0].mOffset) / stride;
+ newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+ ALOGD("[%s] updating vstride = %d", mName, vstride);
+ }
+ }
+ setFormat(newFormat);
+ buffer->setFormat(newFormat);
+ }
+ }
+
protected:
sp<SkipCutBuffer> mSkipCutBuffer;
@@ -1152,6 +1180,7 @@
return WOULD_BLOCK;
}
submit(c2Buffer);
+ handleImageData(c2Buffer);
*clientBuffer = c2Buffer;
ALOGV("[%s] grabbed buffer %zu", mName, *index);
return OK;
@@ -1250,6 +1279,7 @@
}
newBuffer->setFormat(mFormat);
*index = mImpl.assignSlot(newBuffer);
+ handleImageData(newBuffer);
*clientBuffer = newBuffer;
ALOGV("[%s] registered buffer %zu", mName, *index);
return OK;
@@ -1557,6 +1587,7 @@
mCCodecCallback(callback),
mNumInputSlots(kSmoothnessFactor),
mNumOutputSlots(kSmoothnessFactor),
+ mDelay(0),
mFrameIndex(0u),
mFirstValidFrameIndex(0u),
mMetaMode(MODE_NONE),
@@ -2104,11 +2135,13 @@
}
}
- mNumInputSlots =
- (inputDelay ? inputDelay.value : 0) +
- (pipelineDelay ? pipelineDelay.value : 0) +
- kSmoothnessFactor;
- mNumOutputSlots = (outputDelay ? outputDelay.value : 0) + kSmoothnessFactor;
+ uint32_t inputDelayValue = inputDelay ? inputDelay.value : 0;
+ uint32_t pipelineDelayValue = pipelineDelay ? pipelineDelay.value : 0;
+ uint32_t outputDelayValue = outputDelay ? outputDelay.value : 0;
+
+ mNumInputSlots = inputDelayValue + pipelineDelayValue + kSmoothnessFactor;
+ mNumOutputSlots = outputDelayValue + kSmoothnessFactor;
+ mDelay = inputDelayValue + pipelineDelayValue + outputDelayValue;
// TODO: get this from input format
bool secure = mComponent->getName().find(".secure") != std::string::npos;
@@ -2397,9 +2430,9 @@
{
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
- watcher->inputDelay(inputDelay ? inputDelay.value : 0)
- .pipelineDelay(pipelineDelay ? pipelineDelay.value : 0)
- .outputDelay(outputDelay ? outputDelay.value : 0)
+ watcher->inputDelay(inputDelayValue)
+ .pipelineDelay(pipelineDelayValue)
+ .outputDelay(outputDelayValue)
.smoothnessFactor(kSmoothnessFactor);
watcher->flush();
}
@@ -2816,7 +2849,11 @@
}
PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
- return mPipelineWatcher.lock()->elapsed(PipelineWatcher::Clock::now());
+ // When client pushed EOS, we want all the work to be done quickly.
+ // Otherwise, component may have stalled work due to input starvation up to
+ // the sum of the delay in the pipeline.
+ size_t n = mInputMetEos ? 0 : mDelay;
+ return mPipelineWatcher.lock()->elapsed(PipelineWatcher::Clock::now(), n);
}
void CCodecBufferChannel::setMetaMode(MetaMode mode) {
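[editor note] handleImageData() above re-derives KEY_STRIDE and KEY_SLICE_HEIGHT from the buffer's MediaImage2 plane layout whenever the image-data blob changes. The arithmetic in isolation, using a minimal stand-in struct instead of the real MediaImage2:

    #include <cstdint>

    struct PlaneInfo { uint32_t mOffset; int32_t mRowInc; };
    struct ImageInfo {                    // stand-in for the MediaImage2 fields used here
        uint32_t  mNumPlanes;
        PlaneInfo mPlane[4];
    };

    // Derive stride (bytes per row of plane 0) and slice height (rows between plane 0 and 1).
    bool deriveStrideAndSliceHeight(const ImageInfo &img, int32_t *stride, int32_t *sliceHeight) {
        if (img.mNumPlanes == 0) return false;
        *stride = img.mPlane[0].mRowInc;
        if (img.mNumPlanes > 1 && *stride > 0) {
            *sliceHeight = static_cast<int32_t>(img.mPlane[1].mOffset - img.mPlane[0].mOffset) / *stride;
        }
        return true;
    }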
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 9dccab8..9ce886a 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -236,6 +236,7 @@
size_t mNumInputSlots;
size_t mNumOutputSlots;
+ size_t mDelay;
Mutexed<std::unique_ptr<InputBuffers>> mInputBuffers;
Mutexed<std::list<sp<ABuffer>>> mFlushedConfigs;
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2dec42e..13b63c9 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -84,17 +84,7 @@
}
void Codec2Buffer::setImageData(const sp<ABuffer> &imageData) {
- meta()->setBuffer("image-data", imageData);
- format()->setBuffer("image-data", imageData);
- MediaImage2 *img = (MediaImage2*)imageData->data();
- if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
- int32_t stride = img->mPlane[0].mRowInc;
- format()->setInt32(KEY_STRIDE, stride);
- if (img->mNumPlanes > 1 && stride > 0) {
- int32_t vstride = (img->mPlane[1].mOffset - img->mPlane[0].mOffset) / stride;
- format()->setInt32(KEY_SLICE_HEIGHT, vstride);
- }
- }
+ mImageData = imageData;
}
// LocalLinearBuffer
@@ -546,7 +536,6 @@
: Codec2Buffer(format, buffer),
mView(view),
mBlock(block),
- mImageData(imageData),
mWrapped(wrapped) {
setImageData(imageData);
}
@@ -683,9 +672,7 @@
mView(std::move(view)),
mBufferRef(buffer),
mWrapped(wrapped) {
- if (imageData != nullptr) {
- setImageData(imageData);
- }
+ setImageData(imageData);
}
std::shared_ptr<C2Buffer> ConstGraphicBlockBuffer::asC2Buffer() {
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 481975f..dd618aa 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -23,6 +23,7 @@
#include <android/hardware/cas/native/1.0/types.h>
#include <binder/IMemory.h>
#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/MediaCodecBuffer.h>
#include <media/ICrypto.h>
@@ -85,6 +86,8 @@
return false;
}
+ sp<ABuffer> getImageData() const { return mImageData; }
+
protected:
/**
* canCopy() implementation for linear buffers.
@@ -100,6 +103,8 @@
* sets MediaImage data for flexible graphic buffers
*/
void setImageData(const sp<ABuffer> &imageData);
+
+ sp<ABuffer> mImageData;
};
/**
@@ -239,7 +244,6 @@
C2GraphicView mView;
std::shared_ptr<C2GraphicBlock> mBlock;
- sp<ABuffer> mImageData;
const bool mWrapped;
};
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index fe0a2c8..cdcc41b 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -127,19 +127,21 @@
}
PipelineWatcher::Clock::duration PipelineWatcher::elapsed(
- const PipelineWatcher::Clock::time_point &now) const {
- return std::accumulate(
- mFramesInPipeline.begin(),
- mFramesInPipeline.end(),
- Clock::duration::zero(),
- [&now](const Clock::duration &current,
- const decltype(mFramesInPipeline)::value_type &value) {
- Clock::duration elapsed = now - value.second.queuedAt;
- ALOGV("elapsed: frameIndex = %llu elapsed = %lldms",
- (unsigned long long)value.first,
- std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
- return current > elapsed ? current : elapsed;
- });
+ const PipelineWatcher::Clock::time_point &now, size_t n) const {
+ if (mFramesInPipeline.size() <= n) {
+ return Clock::duration::zero();
+ }
+ std::vector<Clock::duration> durations;
+ for (const decltype(mFramesInPipeline)::value_type &value : mFramesInPipeline) {
+ Clock::duration elapsed = now - value.second.queuedAt;
+ ALOGV("elapsed: frameIndex = %llu elapsed = %lldms",
+ (unsigned long long)value.first,
+ std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
+ durations.push_back(elapsed);
+ }
+ std::nth_element(durations.begin(), durations.begin() + n, durations.end(),
+ std::greater<Clock::duration>());
+ return durations[n];
}
} // namespace android
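[editor note] elapsed() now takes a tolerance n: instead of returning the single oldest frame's age, it returns the age of the (n+1)-th oldest, so up to n frames stalled by input starvation do not trigger a timeout. The core of that selection as a standalone sketch:

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <vector>

    using Duration = std::chrono::steady_clock::duration;

    // Age of the (n+1)-th oldest frame; zero when fewer than n+1 frames are pending.
    Duration elapsedWithTolerance(std::vector<Duration> ages, size_t n) {
        if (ages.size() <= n) return Duration::zero();
        std::nth_element(ages.begin(), ages.begin() + n, ages.end(),
                         std::greater<Duration>());      // partial sort, descending around index n
        return ages[n];                                  // n == 0 reproduces the old max() behavior
    }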
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
index ce82298..1c127e4 100644
--- a/media/codec2/sfplugin/PipelineWatcher.h
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -54,7 +54,7 @@
void flush();
bool pipelineFull() const;
- Clock::duration elapsed(const Clock::time_point &now) const;
+ Clock::duration elapsed(const Clock::time_point &now, size_t n) const;
private:
uint32_t mInputDelay;
diff --git a/media/extractors/aac/AACExtractor.cpp b/media/extractors/aac/AACExtractor.cpp
index beddad0..9d183d4 100644
--- a/media/extractors/aac/AACExtractor.cpp
+++ b/media/extractors/aac/AACExtractor.cpp
@@ -150,6 +150,7 @@
mMeta = AMediaFormat_new();
MakeAACCodecSpecificData(mMeta, profile, sf_index, channel);
+ AMediaFormat_setInt32(mMeta, AMEDIAFORMAT_KEY_AAC_PROFILE, profile + 1);
off64_t streamSize, numFrames = 0;
size_t frameSize = 0;
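[editor note] The new AACExtractor line stores profile + 1 because the 2-bit profile field of an ADTS header encodes the MPEG-4 Audio Object Type minus one (0 -> Main, 1 -> LC, 2 -> SSR, 3 -> LTP). As a one-line sketch:

    #include <cstdint>

    // ADTS 'profile' field (0..3) -> MPEG-4 Audio Object Type (1..4), e.g. 1 -> 2 (AAC LC).
    int32_t adtsProfileToAot(uint8_t adtsProfile) {
        return static_cast<int32_t>(adtsProfile) + 1;
    }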
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 0243c03..4b4d767 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -2236,7 +2236,29 @@
*offset += chunk_size;
break;
}
+ case FOURCC("av1C"):
+ {
+ auto buffer = heapbuffer<uint8_t>(chunk_data_size);
+ if (buffer.get() == NULL) {
+ ALOGE("b/28471206");
+ return NO_MEMORY;
+ }
+
+ if (mDataSource->readAt(
+ data_offset, buffer.get(), chunk_data_size) < chunk_data_size) {
+ return ERROR_IO;
+ }
+
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
+ AMediaFormat_setBuffer(mLastTrack->meta,
+ AMEDIAFORMAT_KEY_CSD_0, buffer.get(), chunk_data_size);
+
+ *offset += chunk_size;
+ break;
+ }
case FOURCC("d263"):
{
*offset += chunk_size;
@@ -2663,19 +2685,40 @@
case FOURCC("ac-3"):
{
*offset += chunk_size;
- return parseAC3SpecificBox(data_offset);
+ // bypass ac-3 if parse fail
+ if (parseAC3SpecificBox(data_offset) != OK) {
+ if (mLastTrack != NULL) {
+ ALOGW("Fail to parse ac-3");
+ mLastTrack->skipTrack = true;
+ }
+ }
+ return OK;
}
case FOURCC("ec-3"):
{
*offset += chunk_size;
- return parseEAC3SpecificBox(data_offset);
+ // bypass ec-3 if parse fail
+ if (parseEAC3SpecificBox(data_offset) != OK) {
+ if (mLastTrack != NULL) {
+ ALOGW("Fail to parse ec-3");
+ mLastTrack->skipTrack = true;
+ }
+ }
+ return OK;
}
case FOURCC("ac-4"):
{
*offset += chunk_size;
- return parseAC4SpecificBox(data_offset);
+ // bypass ac-4 if parse fail
+ if (parseAC4SpecificBox(data_offset) != OK) {
+ if (mLastTrack != NULL) {
+ ALOGW("Fail to parse ac-4");
+ mLastTrack->skipTrack = true;
+ }
+ }
+ return OK;
}
case FOURCC("ftyp"):
@@ -3972,6 +4015,18 @@
if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
itemTable = mItemTable;
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+ void *data;
+ size_t size;
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+ return NULL;
+ }
+
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ if (size < 5 || ptr[0] != 0x81) { // configurationVersion == 1
+ return NULL;
+ }
}
if (track->has_elst and !strncasecmp("video/", mime, 6) and track->elst_media_time > 0) {
@@ -4005,6 +4060,10 @@
if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_HEVC, &data, &size)) {
return ERROR_MALFORMED;
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+ return ERROR_MALFORMED;
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
|| !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
@@ -5621,10 +5680,10 @@
}
if (isMalFormed) {
- ALOGE("Video is malformed");
- mBuffer->release();
- mBuffer = NULL;
- return AMEDIA_ERROR_MALFORMED;
+ // if NAL length is abnormal, ignore it.
+ ALOGW("abnormal nallength, ignore this NAL");
+ srcOffset = size;
+ break;
}
if (nalLength == 0) {
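[editor note] The ptr[0] != 0x81 guard added above checks the first byte of the av1C box (AV1CodecConfigurationRecord), which packs a 1-bit marker that must be 1 and a 7-bit configurationVersion that is currently 1. Decomposed:

    #include <cstdint>

    // True when byte 0 of an AV1CodecConfigurationRecord is well formed (marker = 1, version = 1).
    bool isValidAv1cFirstByte(uint8_t b) {
        const uint8_t marker  = b >> 7;        // required to be 1
        const uint8_t version = b & 0x7f;      // configurationVersion, currently 1
        return marker == 1 && version == 1;    // i.e. b == 0x81
    }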
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index cb5f173..d99493d 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -1388,6 +1388,7 @@
static const char *extensions[] = {
"oga",
"ogg",
+ "opus",
NULL
};
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index bd10d67..b111b78 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -74,6 +74,7 @@
"libnblog",
"libprocessgroup",
"libutils",
+ "libvibrator",
],
export_shared_lib_headers: ["libbinder"],
diff --git a/media/libaudioclient/include/media/AudioMixer.h b/media/libaudioclient/include/media/AudioMixer.h
index fbbbd11..41b425f 100644
--- a/media/libaudioclient/include/media/AudioMixer.h
+++ b/media/libaudioclient/include/media/AudioMixer.h
@@ -26,6 +26,7 @@
#include <unordered_map>
#include <vector>
+#include <android/os/IExternalVibratorService.h>
#include <media/AudioBufferProvider.h>
#include <media/AudioResampler.h>
#include <media/AudioResamplerPublic.h>
@@ -103,20 +104,21 @@
// parameter 'value' is a pointer to the new playback rate.
};
- enum { // Haptic intensity, should keep consistent with VibratorService
- HAPTIC_SCALE_VERY_LOW = -2,
- HAPTIC_SCALE_LOW = -1,
- HAPTIC_SCALE_NONE = 0,
- HAPTIC_SCALE_HIGH = 1,
- HAPTIC_SCALE_VERY_HIGH = 2,
- };
- typedef int32_t haptic_intensity_t;
- static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2 / 3;
- static constexpr float HAPTIC_SCALE_LOW_RATIO = 3 / 4;
- static const CONSTEXPR float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
+ typedef enum { // Haptic intensity, should keep consistent with VibratorService
+ HAPTIC_SCALE_MUTE = os::IExternalVibratorService::SCALE_MUTE,
+ HAPTIC_SCALE_VERY_LOW = os::IExternalVibratorService::SCALE_VERY_LOW,
+ HAPTIC_SCALE_LOW = os::IExternalVibratorService::SCALE_LOW,
+ HAPTIC_SCALE_NONE = os::IExternalVibratorService::SCALE_NONE,
+ HAPTIC_SCALE_HIGH = os::IExternalVibratorService::SCALE_HIGH,
+ HAPTIC_SCALE_VERY_HIGH = os::IExternalVibratorService::SCALE_VERY_HIGH,
+ } haptic_intensity_t;
+ static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2.0f / 3.0f;
+ static constexpr float HAPTIC_SCALE_LOW_RATIO = 3.0f / 4.0f;
+ static const constexpr float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
static inline bool isValidHapticIntensity(haptic_intensity_t hapticIntensity) {
switch (hapticIntensity) {
+ case HAPTIC_SCALE_MUTE:
case HAPTIC_SCALE_VERY_LOW:
case HAPTIC_SCALE_LOW:
case HAPTIC_SCALE_NONE:
@@ -428,8 +430,9 @@
case HAPTIC_SCALE_NONE:
case HAPTIC_SCALE_HIGH:
case HAPTIC_SCALE_VERY_HIGH:
- default:
return 1.0f;
+ default:
+ return 0.0f;
}
}
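[editor note] With the new HAPTIC_SCALE_MUTE value, the scale-factor switch above now silences haptic samples for muted or unrecognized intensities instead of passing them through at unity gain. A sketch of the assumed overall mapping; the enum values are placeholders for the IExternalVibratorService constants, and the VERY_LOW/LOW branches are assumed to use the HAPTIC_SCALE_*_RATIO constants declared earlier in the header:

    enum HapticIntensity {                    // placeholder values, not the AIDL constants
        HAPTIC_SCALE_MUTE = -100,
        HAPTIC_SCALE_VERY_LOW = -2,
        HAPTIC_SCALE_LOW = -1,
        HAPTIC_SCALE_NONE = 0,
        HAPTIC_SCALE_HIGH = 1,
        HAPTIC_SCALE_VERY_HIGH = 2,
    };

    float hapticScaleGain(HapticIntensity intensity) {
        switch (intensity) {
            case HAPTIC_SCALE_VERY_LOW:  return 2.0f / 3.0f;   // HAPTIC_SCALE_VERY_LOW_RATIO
            case HAPTIC_SCALE_LOW:       return 3.0f / 4.0f;   // HAPTIC_SCALE_LOW_RATIO
            case HAPTIC_SCALE_NONE:
            case HAPTIC_SCALE_HIGH:
            case HAPTIC_SCALE_VERY_HIGH: return 1.0f;
            case HAPTIC_SCALE_MUTE:
            default:                     return 0.0f;          // mute or unknown: drop haptics
        }
    }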
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 817fb0b..cb78063 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -12,6 +12,11 @@
"libnblog",
"libsonic",
"libutils",
+ "libvibrator",
+ ],
+
+ header_libs: [
+ "libbase_headers",
],
cflags: [
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index 811c16b..0c8e5bb 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -10,6 +10,7 @@
"libcutils",
"liblog",
"libutils",
+ "libvibrator",
],
cflags: [
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index a0daab3..8dac91a 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -228,6 +228,9 @@
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1),
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT0POINT2),
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1POINT2),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI_BACK),
+ MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1),
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT0POINT2),
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1POINT2),
MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
index 5da6e24..d608d4a 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
@@ -1289,6 +1289,7 @@
} else if (what == DecoderBase::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
+ Mutex::Autolock autoLock(mDecoderLock);
mAudioDecoder.clear();
mAudioDecoderError = false;
++mAudioDecoderGeneration;
@@ -1296,6 +1297,7 @@
CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
mFlushingAudio = SHUT_DOWN;
} else {
+ Mutex::Autolock autoLock(mDecoderLock);
mVideoDecoder.clear();
mVideoDecoderError = false;
++mVideoDecoderGeneration;
@@ -1967,6 +1969,7 @@
int64_t currentPositionUs, bool forceNonOffload, bool needsToCreateAudioDecoder) {
if (mAudioDecoder != NULL) {
mAudioDecoder->pause();
+ Mutex::Autolock autoLock(mDecoderLock);
mAudioDecoder.clear();
mAudioDecoderError = false;
++mAudioDecoderGeneration;
@@ -1988,11 +1991,21 @@
closeAudioSink();
mRenderer->flush(true /* audio */, false /* notifyComplete */);
if (mVideoDecoder != NULL) {
- mRenderer->flush(false /* audio */, false /* notifyComplete */);
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_NONE /* audio */,
+ FLUSH_CMD_FLUSH /* video */));
+ mDeferredActions.push_back(
+ new SeekAction(currentPositionUs,
+ MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */));
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(new ResumeDecoderAction(false));
+ processDeferredActions();
+ } else {
+ performSeek(currentPositionUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */);
}
- performSeek(currentPositionUs, MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC /* mode */);
-
if (forceNonOffload) {
mRenderer->signalDisableOffloadAudio();
mOffloadAudio = false;
@@ -2085,6 +2098,8 @@
}
}
+ Mutex::Autolock autoLock(mDecoderLock);
+
if (audio) {
sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
++mAudioDecoderGeneration;
@@ -2395,6 +2410,8 @@
CHECK(mTrackStats != NULL);
mTrackStats->clear();
+
+ Mutex::Autolock autoLock(mDecoderLock);
if (mVideoDecoder != NULL) {
mTrackStats->push_back(mVideoDecoder->getStats());
}
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.h b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
index 798c725..b8fb988 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
@@ -197,6 +197,7 @@
sp<DecoderBase> mVideoDecoder;
bool mOffloadAudio;
sp<DecoderBase> mAudioDecoder;
+ Mutex mDecoderLock; // guard |mAudioDecoder| and |mVideoDecoder|.
sp<CCDecoder> mCCDecoder;
sp<Renderer> mRenderer;
sp<ALooper> mRendererLooper;
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
index 9729d86..66bfae5 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
@@ -109,7 +109,10 @@
mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped);
mStats->setFloat("frame-rate-total", mFrameRateTotal);
- return mStats;
+ // i'm mutexed right now.
+ // make our own copy, so we aren't victim to any later changes.
+ sp<AMessage> copiedStats = mStats->dup();
+ return copiedStats;
}
status_t NuPlayer2::Decoder::setVideoSurface(const sp<ANativeWindowWrapper> &nww) {
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
index 1b661f2..1876496 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
@@ -81,7 +81,7 @@
};
// key for media statistics
-static const char *kKeyPlayer = "nuplayer";
+static const char *kKeyPlayer = "nuplayer2";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaPlayer2.java
@@ -108,6 +108,8 @@
static const char *kPlayerRebufferingCount = "android.media.mediaplayer.rebuffers";
static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
+static const char *kPlayerVersion = "android.media.mediaplayer.version";
+
NuPlayer2Driver::NuPlayer2Driver(pid_t pid, uid_t uid, const sp<JObjectHolder> &context)
: mState(STATE_IDLE),
@@ -127,6 +129,7 @@
mPlayer(new NuPlayer2(pid, uid, mMediaClock, context)),
mPlayerFlags(0),
mMetricsHandle(0),
+ mPlayerVersion(0),
mClientUid(uid),
mAtEOS(false),
mLooping(false),
@@ -137,9 +140,13 @@
mMediaClock->init();
+ // XXX: what version are we?
+ // Ideally, this ticks with the apk version info for the APEX packaging
+
// set up media metrics record
mMetricsHandle = mediametrics_create(kKeyPlayer);
mediametrics_setUid(mMetricsHandle, mClientUid);
+ mediametrics_setInt64(mMetricsHandle, kPlayerVersion, mPlayerVersion);
mNuPlayer2Looper->start(
false, /* runOnCallingThread */
@@ -473,7 +480,7 @@
float frameRate = 0;
if (stats->findFloat("frame-rate-output", &frameRate)) {
mediametrics_setInt64(mMetricsHandle, kPlayerFrameRate, frameRate);
- }
+ }
} else if (mime.startsWith("audio/")) {
mediametrics_setCString(mMetricsHandle, kPlayerAMime, mime.c_str());
@@ -524,6 +531,7 @@
mediametrics_delete(mMetricsHandle);
mMetricsHandle = mediametrics_create(kKeyPlayer);
mediametrics_setUid(mMetricsHandle, mClientUid);
+ mediametrics_setInt64(mMetricsHandle, kPlayerVersion, mPlayerVersion);
} else {
ALOGV("did not have anything to record");
}
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
index 3d299f3..c97e247 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
@@ -134,6 +134,7 @@
uint32_t mPlayerFlags;
mediametrics_handle_t mMetricsHandle;
+ int64_t mPlayerVersion;
uid_t mClientUid;
bool mAtEOS;
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
index 3be7e36..a8c9932 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
@@ -1148,8 +1148,7 @@
ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
return 0;
}
- // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
- return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+ return (int64_t)(numFrames * 1000000LL / sampleRate);
}
// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 55867a5..22fa495 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -12,6 +12,7 @@
shared_libs: [
"android.hardware.media.omx@1.0",
+ "libbase",
"libaudioclient",
"libbinder",
"libcamera_client",
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 37b13f0..d111313 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -24,6 +24,7 @@
#include <algorithm>
+#include <android-base/properties.h>
#include <android/hardware/ICamera.h>
#include <binder/IPCThreadState.h>
@@ -1761,13 +1762,26 @@
}
}
+ // Enable temporal layering if the expected (max) playback frame rate is greater than ~11% of
+ // the minimum display refresh rate on a typical device. Add layers until the base layer falls
+ // under this limit. Allow device manufacturers to override this limit.
+
+ // TODO: make this configurable by the application
+ std::string maxBaseLayerFpsProperty =
+ ::android::base::GetProperty("ro.media.recorder-max-base-layer-fps", "");
+ float maxBaseLayerFps = (float)::atof(maxBaseLayerFpsProperty.c_str());
+ // TRICKY: use !> to fix up any NaN values
+ if (!(maxBaseLayerFps >= kMinTypicalDisplayRefreshingRate / 0.9)) {
+ maxBaseLayerFps = kMinTypicalDisplayRefreshingRate / 0.9;
+ }
+
for (uint32_t tryLayers = 1; tryLayers <= kMaxNumVideoTemporalLayers; ++tryLayers) {
if (tryLayers > tsLayers) {
tsLayers = tryLayers;
}
// keep going until the base layer fps falls below the typical display refresh rate
float baseLayerFps = maxPlaybackFps / (1 << (tryLayers - 1));
- if (baseLayerFps < kMinTypicalDisplayRefreshingRate / 0.9) {
+ if (baseLayerFps < maxBaseLayerFps) {
break;
}
}
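[editor note] The loop above now stops adding temporal layers once the base layer falls below a configurable cap (ro.media.recorder-max-base-layer-fps), and the !(x >= limit) comparison deliberately folds NaN from a bad property string back to the default. The layer-count logic on its own, with illustrative parameter names:

    #include <cstdint>

    // Raise the layer count (starting from minLayers, capped at maxLayers) until the
    // base layer, maxPlaybackFps / 2^(layers-1), drops below maxBaseLayerFps.
    uint32_t pickTemporalLayerCount(float maxPlaybackFps, float maxBaseLayerFps,
                                    uint32_t minLayers, uint32_t maxLayers) {
        uint32_t layers = minLayers;
        for (uint32_t tryLayers = 1; tryLayers <= maxLayers; ++tryLayers) {
            if (tryLayers > layers) layers = tryLayers;
            const float baseLayerFps = maxPlaybackFps / static_cast<float>(1u << (tryLayers - 1));
            if (baseLayerFps < maxBaseLayerFps) break;   // base layer is light enough, stop here
        }
        return layers;
    }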
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 1e85804..5a58aa0 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -94,6 +94,7 @@
mDisconnected = false;
mUri.clear();
mUriHeaders.clear();
+ mSources.clear();
if (mFd >= 0) {
close(mFd);
mFd = -1;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 5cf6bbd..3388097 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1829,11 +1829,21 @@
closeAudioSink();
mRenderer->flush(true /* audio */, false /* notifyComplete */);
if (mVideoDecoder != NULL) {
- mRenderer->flush(false /* audio */, false /* notifyComplete */);
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_NONE /* audio */,
+ FLUSH_CMD_FLUSH /* video */));
+ mDeferredActions.push_back(
+ new SeekAction(currentPositionUs,
+ MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */));
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(new ResumeDecoderAction(false));
+ processDeferredActions();
+ } else {
+ performSeek(currentPositionUs, MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */);
}
- performSeek(currentPositionUs, MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */);
-
if (forceNonOffload) {
mRenderer->signalDisableOffloadAudio();
mOffloadAudio = false;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 6d69d50..2f0da2d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -107,11 +107,16 @@
}
sp<AMessage> NuPlayer::Decoder::getStats() const {
+
mStats->setInt64("frames-total", mNumFramesTotal);
mStats->setInt64("frames-dropped-input", mNumInputFramesDropped);
mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped);
mStats->setFloat("frame-rate-total", mFrameRateTotal);
- return mStats;
+
+ // i'm mutexed right now.
+ // make our own copy, so we aren't victim to any later changes.
+ sp<AMessage> copiedStats = mStats->dup();
+ return copiedStats;
}
status_t NuPlayer::Decoder::setVideoSurface(const sp<Surface> &surface) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 1b396c0..2b813e7 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -778,7 +778,7 @@
status_t NuPlayerDriver::getParameter(int key, Parcel *reply) {
- if (key == FOURCC('m','t','r','X')) {
+ if (key == FOURCC('m','t','r','X') && mAnalyticsItem != NULL) {
// mtrX -- a play on 'metrics' (not matrix)
// gather current info all together, parcel it, and send it back
updateMetrics("api");
@@ -1006,7 +1006,7 @@
// when we have an error, add it to the analytics for this playback.
// ext1 is our primary 'error type' value. Only add ext2 when non-zero.
// [test against msg is due to fall through from previous switch value]
- if (msg == MEDIA_ERROR) {
+ if (msg == MEDIA_ERROR && mAnalyticsItem != NULL) {
mAnalyticsItem->setInt32(kPlayerError, ext1);
if (ext2 != 0) {
mAnalyticsItem->setInt32(kPlayerErrorCode, ext2);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index c990b2a..65d6d61 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -1161,8 +1161,8 @@
ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
return 0;
}
- // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
- return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+
+ return (int64_t)(numFrames * 1000000LL / sampleRate);
}
// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
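[editor note] Both renderer hunks drop an (int32_t) cast on numFrames. Keeping the math in 64 bits matters because INT32_MAX frames at 48 kHz is only 2147483647 / 48000 ≈ 44739 s ≈ 12.4 hours, after which the old expression wrapped. The fixed computation in isolation:

    #include <cstdint>

    // Duration of numFrames PCM frames in microseconds, computed entirely in 64-bit.
    int64_t framesToDurationUs(int64_t numFrames, uint32_t sampleRate) {
        return numFrames * 1000000LL / sampleRate;
    }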
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 26797bb..5d2291f 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -765,8 +765,8 @@
}
status_t MediaCodecSource::onStart(MetaData *params) {
- if (mStopping) {
- ALOGE("Failed to start while we're stopping");
+ if (mStopping || mOutput.lock()->mEncoderReachedEOS) {
+ ALOGE("Failed to start while we're stopping or encoder already stopped due to EOS error");
return INVALID_OPERATION;
}
int64_t startTimeUs;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 82f7026..09424b8 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1187,6 +1187,16 @@
}
parseHevcProfileLevelFromHvcc((const uint8_t *)data, dataSize, msg);
+ } else if (meta->findData(kKeyAV1C, &type, &data, &size)) {
+ sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == NULL || buffer->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
if (esds.InitCheck() != (status_t)OK) {
@@ -1693,6 +1703,11 @@
meta->setInt32(kKeyIsADTS, isADTS);
}
+ int32_t aacProfile = -1;
+ if (msg->findInt32("aac-profile", &aacProfile)) {
+ meta->setInt32(kKeyAACAOT, aacProfile);
+ }
+
int32_t pcmEncoding;
if (msg->findInt32("pcm-encoding", &pcmEncoding)) {
meta->setInt32(kKeyPcmEncoding, pcmEncoding);
@@ -1746,6 +1761,8 @@
std::vector<uint8_t> hvcc(csd0size + 1024);
size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
+ } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+ meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_AUDIO_OPUS) {
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index f664395..47a9715 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -79,28 +79,28 @@
</Decoders>
<Encoders>
<MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
- <Alias name="OMX.google.aac.decoder" />
+ <Alias name="OMX.google.aac.encoder" />
<Limit name="channel-count" max="6" />
<Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
<!-- also may support 64000, 88200 and 96000 Hz -->
<Limit name="bitrate" range="8000-960000" />
</MediaCodec>
<MediaCodec name="c2.android.amrnb.encoder" type="audio/3gpp">
- <Alias name="OMX.google.amrnb.decoder" />
+ <Alias name="OMX.google.amrnb.encoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000" />
<Limit name="bitrate" range="4750-12200" />
<Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
<MediaCodec name="c2.android.amrwb.encoder" type="audio/amr-wb">
- <Alias name="OMX.google.amrwb.decoder" />
+ <Alias name="OMX.google.amrwb.encoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="16000" />
<Limit name="bitrate" range="6600-23850" />
<Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
<MediaCodec name="c2.android.flac.encoder" type="audio/flac">
- <Alias name="OMX.google.flac.decoder" />
+ <Alias name="OMX.google.flac.encoder" />
<Limit name="channel-count" max="2" />
<Limit name="sample-rate" ranges="1-655350" />
<Limit name="bitrate" range="1-21000000" />
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 2910bd3..437bdb7 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -59,6 +59,7 @@
kKeyAACProfile = 'aacp', // int32_t
kKeyAVCC = 'avcc', // raw data
kKeyHVCC = 'hvcc', // raw data
+ kKeyAV1C = 'av1c', // raw data
kKeyThumbnailHVCC = 'thvc', // raw data
kKeyD263 = 'd263', // raw data
kKeyVorbisInfo = 'vinf', // raw data
@@ -236,6 +237,7 @@
kTypeESDS = 'esds',
kTypeAVCC = 'avcc',
kTypeHVCC = 'hvcc',
+ kTypeAV1C = 'av1c',
kTypeD263 = 'd263',
};
diff --git a/media/mtp/IMtpDatabase.h b/media/mtp/IMtpDatabase.h
index 1245092..81fa60c 100644
--- a/media/mtp/IMtpDatabase.h
+++ b/media/mtp/IMtpDatabase.h
@@ -112,8 +112,8 @@
MtpObjectHandle handle, bool succeeded) = 0;
virtual MtpResponseCode beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
- MtpStorageID newStorage);
- virtual void endCopyObject(MtpObjectHandle handle, bool succeeded);
+ MtpStorageID newStorage) = 0;
+ virtual void endCopyObject(MtpObjectHandle handle, bool succeeded) = 0;
};
}; // namespace android
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 2d80bd8..4033247 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -347,7 +347,7 @@
return ret;
}
}
- return AudioMixer::HAPTIC_SCALE_NONE;
+ return AudioMixer::HAPTIC_SCALE_MUTE;
}
/* static */
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index c27f2b7..396c797 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -49,7 +49,7 @@
audio_format_t mFormat; // track format
int mGeneration; // increment when any field is assigned
bool mHapticPlaybackEnabled = false; // haptic playback is enabled or not
- AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE; // intensity of
+ AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE; // intensity of
// haptic data
};
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 3381e4d..676a575 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -211,8 +211,8 @@
((patch->sinks[0].type == AUDIO_PORT_TYPE_DEVICE) &&
((patch->sinks[0].ext.device.hw_module != srcModule) ||
!audioHwDevice->supportsAudioPatches()))) {
- audio_devices_t outputDevice = AUDIO_DEVICE_NONE;
- String8 outputDeviceAddress;
+ audio_devices_t outputDevice = patch->sinks[0].ext.device.type;
+ String8 outputDeviceAddress = String8(patch->sinks[0].ext.device.address);
if (patch->num_sources == 2) {
if (patch->sources[1].type != AUDIO_PORT_TYPE_MIX ||
(patch->num_sinks != 0 && patch->sinks[0].ext.device.hw_module !=
@@ -234,8 +234,6 @@
reinterpret_cast<PlaybackThread*>(thread.get()), false /*closeThread*/);
} else {
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
- audio_devices_t device = patch->sinks[0].ext.device.type;
- String8 address = String8(patch->sinks[0].ext.device.address);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
@@ -254,8 +252,8 @@
patch->sinks[0].ext.device.hw_module,
&output,
&config,
- device,
- address,
+ outputDevice,
+ outputDeviceAddress,
flags);
ALOGV("mAudioFlinger.openOutput_l() returned %p", thread.get());
if (thread == 0) {
@@ -263,8 +261,6 @@
goto exit;
}
newPatch.mPlayback.setThread(reinterpret_cast<PlaybackThread*>(thread.get()));
- outputDevice = device;
- outputDeviceAddress = address;
}
audio_devices_t device = patch->sources[0].ext.device.type;
String8 address = String8(patch->sources[0].ext.device.address);
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 94ea042..357370e 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -124,6 +124,7 @@
void setHapticIntensity(AudioMixer::haptic_intensity_t hapticIntensity) {
if (AudioMixer::isValidHapticIntensity(hapticIntensity)) {
mHapticIntensity = hapticIntensity;
+ setHapticPlaybackEnabled(mHapticIntensity != AudioMixer::HAPTIC_SCALE_MUTE);
}
}
sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
@@ -208,7 +209,7 @@
bool mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
// intensity to play haptic data
- AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE;
+ AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE;
class AudioVibrationController : public os::BnExternalVibrationController {
public:
explicit AudioVibrationController(Track* track) : mTrack(track) {}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 6dd9cab..a8c4bd1 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2372,6 +2372,7 @@
const int intensity = AudioFlinger::onExternalVibrationStart(
track->getExternalVibration());
mLock.lock();
+ track->setHapticIntensity(static_cast<AudioMixer::haptic_intensity_t>(intensity));
// Haptic playback should be enabled by vibrator service.
if (track->getHapticPlaybackEnabled()) {
// Disable haptic playback of all active track to ensure only
@@ -2380,7 +2381,6 @@
t->setHapticPlaybackEnabled(false);
}
}
- track->setHapticIntensity(intensity);
}
track->mResetDone = false;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5c22727..32cc380 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -560,6 +560,10 @@
muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
} else { // create RX path audio patch
mCallRxPatch = createTelephonyPatch(true /*isRx*/, rxDevices.itemAt(0), delayMs);
+
+ // If the TX device is on the primary HW module but RX device is
+ // on other HW module, SinkMetaData of telephony input should handle it
+ // assuming the device uses audio HAL V5.0 and above
}
if (createTxPatch) { // create TX path audio patch
mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index c9c216b..e002e18 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1773,6 +1773,8 @@
break;
}
+ mCaptureSequencer->notifyError(errorCode, resultExtras);
+
ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
resultExtras.requestId);
@@ -1927,9 +1929,6 @@
void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp) {
- (void)resultExtras;
- (void)timestamp;
-
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
mCaptureSequencer->notifyShutter(resultExtras, timestamp);
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 5029d4b..88799f9 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -117,6 +117,31 @@
}
}
+void CaptureSequencer::notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) {
+ ATRACE_CALL();
+ bool jpegBufferLost = false;
+ if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER) {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == nullptr) {
+ return;
+ }
+ int captureStreamId = client->getCaptureStreamId();
+ if (captureStreamId == resultExtras.errorStreamId) {
+ jpegBufferLost = true;
+ }
+ } else if (errorCode ==
+ hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST) {
+ if (resultExtras.requestId == mShutterCaptureId) {
+ jpegBufferLost = true;
+ }
+ }
+
+ if (jpegBufferLost) {
+ sp<MemoryBase> emptyBuffer;
+ onCaptureAvailable(/*timestamp*/0, emptyBuffer, /*captureError*/true);
+ }
+}
+
void CaptureSequencer::onResultAvailable(const CaptureResult &result) {
ATRACE_CALL();
ALOGV("%s: New result available.", __FUNCTION__);
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index c23b12d..727dd53 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -65,6 +65,9 @@
void notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp);
+ // Notifications about capture errors (lost buffers or failed requests)
+ void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras);
+
// Notification from the frame processor
virtual void onResultAvailable(const CaptureResult &result);
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 36395f3..ddfe5e3 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -62,31 +62,6 @@
}
}
-void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/,
- int /*streamId*/, const CameraMetadata& /*settings*/) {
- // Intentionally left empty
-}
-
-void JpegProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
- // Intentionally left empty
-}
-
-void JpegProcessor::onBufferReleased(const BufferInfo& bufferInfo) {
- ALOGV("%s", __FUNCTION__);
- if (bufferInfo.mError) {
- // Only lock in case of error, since we get one of these for each
- // onFrameAvailable as well, and scheduling may delay this call late
- // enough to run into later preview restart operations, for non-error
- // cases.
- // b/29524651
- ALOGV("%s: JPEG buffer lost", __FUNCTION__);
- Mutex::Autolock l(mInputMutex);
- mCaptureDone = true;
- mCaptureSuccess = false;
- mCaptureDoneSignal.signal();
- }
-}
-
status_t JpegProcessor::updateStream(const Parameters &params) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
@@ -181,13 +156,6 @@
strerror(-res), res);
return res;
}
-
- res = device->addBufferListenerForStream(mCaptureStreamId, this);
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't add buffer listeneri: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- return res;
- }
}
return OK;
}
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index 53e6836..977f11d 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -42,8 +42,7 @@
* Still image capture output image processing
*/
class JpegProcessor:
- public Thread, public CpuConsumer::FrameAvailableListener,
- public camera3::Camera3StreamBufferListener {
+ public Thread, public CpuConsumer::FrameAvailableListener {
public:
JpegProcessor(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
~JpegProcessor();
@@ -51,12 +50,6 @@
// CpuConsumer listener implementation
void onFrameAvailable(const BufferItem& item);
- // Camera3StreamBufferListener implementation
- void onBufferAcquired(const BufferInfo& bufferInfo) override;
- void onBufferReleased(const BufferInfo& bufferInfo) override;
- void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
- const CameraMetadata& settings) override;
-
status_t updateStream(const Parameters ¶ms);
status_t deleteStream();
int getStreamId() const;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 918dcf7..f9ef996 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -885,14 +885,14 @@
return OK;
}
-status_t Camera3Device::capture(CameraMetadata &request, int64_t* /*lastFrameNumber*/) {
+status_t Camera3Device::capture(CameraMetadata &request, int64_t* lastFrameNumber) {
ATRACE_CALL();
List<const PhysicalCameraSettingsList> requestsList;
std::list<const SurfaceMap> surfaceMaps;
convertToRequestList(requestsList, surfaceMaps, request);
- return captureList(requestsList, surfaceMaps, /*lastFrameNumber*/NULL);
+ return captureList(requestsList, surfaceMaps, lastFrameNumber);
}
void Camera3Device::convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
@@ -1027,11 +1027,22 @@
return hardware::Void();
}
+ if (outputStream->isAbandoned()) {
+ bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+ allReqsSucceeds = false;
+ continue;
+ }
+
bufRet.streamId = streamId;
+ size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
uint32_t numBuffersRequested = bufReq.numBuffersRequested;
- size_t totalHandout = outputStream->getOutstandingBuffersCount() + numBuffersRequested;
- if (totalHandout > outputStream->asHalStream()->max_buffers) {
+ size_t totalHandout = handOutBufferCount + numBuffersRequested;
+ uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
+ if (totalHandout > maxBuffers) {
// Not able to allocate enough buffer. Exit early for this stream
+ ALOGE("%s: request too much buffers for stream %d: at HAL: %zu + requesting: %d"
+ " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
+ numBuffersRequested, maxBuffers);
bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
allReqsSucceeds = false;
continue;
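
The rejection criterion introduced above is simple arithmetic; a standalone sketch of the same check, with plain integer types and assumed names rather than the Camera3OutputStream API:

    #include <cstddef>
    #include <cstdint>

    // Reject a HAL buffer request when granting it would exceed the stream's
    // max_buffers budget; the real code then reports MAX_BUFFER_EXCEEDED.
    static bool exceedsMaxBuffers(size_t outstandingAtHal,
                                  uint32_t numBuffersRequested,
                                  uint32_t maxBuffers) {
        size_t totalHandout = outstandingAtHal + numBuffersRequested;
        return totalHandout > maxBuffers;
    }
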
@@ -2186,12 +2197,11 @@
mStatusWaiters++;
- // Notify HAL to start draining. We need to notify the HalInterface layer
- // even when the device is already IDLE, so HalInterface can reject incoming
- // requestStreamBuffers call.
if (!active && mUseHalBufManager) {
auto streamIds = mOutputStreams.getStreamIds();
- mRequestThread->signalPipelineDrain(streamIds);
+ if (mStatus == STATUS_ACTIVE) {
+ mRequestThread->signalPipelineDrain(streamIds);
+ }
mRequestBufferSM.onWaitUntilIdle();
}
@@ -3880,7 +3890,8 @@
bool useHalBufManager) :
mHidlSession(session),
mRequestMetadataQueue(queue),
- mUseHalBufManager(useHalBufManager) {
+ mUseHalBufManager(useHalBufManager),
+ mIsReconfigurationQuerySupported(true) {
// Check with hardware service manager if we can downcast these interfaces
// Somewhat expensive, so cache the results at startup
auto castResult_3_5 = device::V3_5::ICameraDeviceSession::castFrom(mHidlSession);
@@ -3986,6 +3997,52 @@
return res;
}
+bool Camera3Device::HalInterface::isReconfigurationRequired(CameraMetadata& oldSessionParams,
+ CameraMetadata& newSessionParams) {
+ // Assume reconfiguration is required unless the HAL says otherwise.
+ bool ret = true;
+ if ((mHidlSession_3_5 != nullptr) && mIsReconfigurationQuerySupported) {
+ android::hardware::hidl_vec<uint8_t> oldParams, newParams;
+ camera_metadata_t* oldSessionMeta = const_cast<camera_metadata_t*>(
+ oldSessionParams.getAndLock());
+ camera_metadata_t* newSessionMeta = const_cast<camera_metadata_t*>(
+ newSessionParams.getAndLock());
+ oldParams.setToExternal(reinterpret_cast<uint8_t*>(oldSessionMeta),
+ get_camera_metadata_size(oldSessionMeta));
+ newParams.setToExternal(reinterpret_cast<uint8_t*>(newSessionMeta),
+ get_camera_metadata_size(newSessionMeta));
+ hardware::camera::common::V1_0::Status callStatus;
+ bool required;
+ auto hidlCb = [&callStatus, &required] (hardware::camera::common::V1_0::Status s,
+ bool requiredFlag) {
+ callStatus = s;
+ required = requiredFlag;
+ };
+ auto err = mHidlSession_3_5->isReconfigurationRequired(oldParams, newParams, hidlCb);
+ oldSessionParams.unlock(oldSessionMeta);
+ newSessionParams.unlock(newSessionMeta);
+ if (err.isOk()) {
+ switch (callStatus) {
+ case hardware::camera::common::V1_0::Status::OK:
+ ret = required;
+ break;
+ case hardware::camera::common::V1_0::Status::METHOD_NOT_SUPPORTED:
+ mIsReconfigurationQuerySupported = false;
+ ret = true;
+ break;
+ default:
+ ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+ ret = true;
+ }
+ } else {
+ ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, err.description().c_str());
+ ret = true;
+ }
+ }
+
+ return ret;
+}
+
status_t Camera3Device::HalInterface::configureStreams(const camera_metadata_t *sessionParams,
camera3_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
ATRACE_NAME("CameraHal::configureStreams");
@@ -5097,9 +5154,10 @@
ATRACE_CALL();
bool updatesDetected = false;
+ CameraMetadata updatedParams(mLatestSessionParams);
for (auto tag : mSessionParamKeys) {
camera_metadata_ro_entry entry = settings.find(tag);
- camera_metadata_entry lastEntry = mLatestSessionParams.find(tag);
+ camera_metadata_entry lastEntry = updatedParams.find(tag);
if (entry.count > 0) {
bool isDifferent = false;
@@ -5128,17 +5186,26 @@
if (!skipHFRTargetFPSUpdate(tag, entry, lastEntry)) {
updatesDetected = true;
}
- mLatestSessionParams.update(entry);
+ updatedParams.update(entry);
}
} else if (lastEntry.count > 0) {
// Value has been removed
ALOGV("%s: Session parameter tag id %d removed", __FUNCTION__, tag);
- mLatestSessionParams.erase(tag);
+ updatedParams.erase(tag);
updatesDetected = true;
}
}
- return updatesDetected;
+ bool reconfigureRequired;
+ if (updatesDetected) {
+ reconfigureRequired = mInterface->isReconfigurationRequired(mLatestSessionParams,
+ updatedParams);
+ mLatestSessionParams = updatedParams;
+ } else {
+ reconfigureRequired = false;
+ }
+
+ return reconfigureRequired;
}
bool Camera3Device::RequestThread::threadLoop() {
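
Behaviorally, the hunk above changes the reconfiguration decision from "any session parameter changed" to "a session parameter changed and the HAL confirms a new stream configuration is needed (or the query is unavailable)". A condensed sketch of that gate, with the HAL query abstracted into a callable of assumed shape:

    #include <functional>

    // Reconfigure only when both conditions hold; queryHal() stands in for
    // HalInterface::isReconfigurationRequired(), which itself defaults to true
    // when the query is unsupported or fails.
    static bool needsStreamReconfiguration(bool updatesDetected,
                                           const std::function<bool()>& queryHal) {
        if (!updatesDetected) {
            return false;
        }
        return queryHal();
    }
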
@@ -5251,6 +5318,11 @@
ALOGVV("%s: %d: submitting %zu requests in a batch.", __FUNCTION__, __LINE__,
mNextRequests.size());
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent != nullptr) {
+ parent->mRequestBufferSM.onSubmittingRequest();
+ }
+
bool submitRequestSuccess = false;
nsecs_t tRequestStart = systemTime(SYSTEM_TIME_MONOTONIC);
if (mInterface->supportBatchRequest()) {
@@ -5261,13 +5333,6 @@
nsecs_t tRequestEnd = systemTime(SYSTEM_TIME_MONOTONIC);
mRequestLatency.add(tRequestStart, tRequestEnd);
- if (submitRequestSuccess) {
- sp<Camera3Device> parent = mParent.promote();
- if (parent != nullptr) {
- parent->mRequestBufferSM.onRequestSubmitted();
- }
- }
-
if (useFlushLock) {
mFlushLock.unlock();
}
@@ -6429,9 +6494,11 @@
return;
}
-void Camera3Device::RequestBufferStateMachine::onRequestSubmitted() {
+void Camera3Device::RequestBufferStateMachine::onSubmittingRequest() {
std::lock_guard<std::mutex> lock(mLock);
mRequestThreadPaused = false;
+ // Registration in the in-flight map actually happens in prepareHalRequest now,
+ // but this is a close enough approximation.
mInflightMapEmpty = false;
if (mStatus == RB_STATUS_STOPPED) {
mStatus = RB_STATUS_READY;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index e5a38bb..b25d89d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -309,6 +309,8 @@
status_t close();
void signalPipelineDrain(const std::vector<int>& streamIds);
+ bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
+ CameraMetadata& newSessionParams);
// method to extract buffer's unique ID
// return pair of (newlySeenBuffer?, bufferId)
@@ -401,6 +403,7 @@
uint32_t mNextStreamConfigCounter = 1;
const bool mUseHalBufManager;
+ bool mIsReconfigurationQuerySupported;
};
sp<HalInterface> mInterface;
@@ -1317,7 +1320,7 @@
void onInflightMapEmpty();
// Events triggered by RequestThread
- void onRequestSubmitted();
+ void onSubmittingRequest();
void onRequestThreadPaused();
private:
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index d29e5c0..0571741 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -588,7 +588,11 @@
if (mState != STATE_CONFIGURED) {
ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
__FUNCTION__, mId, mState);
- return INVALID_OPERATION;
+ if (mState == STATE_ABANDONED) {
+ return DEAD_OBJECT;
+ } else {
+ return INVALID_OPERATION;
+ }
}
// Wait for new buffer returned back if we are running into the limit.
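
With the change above, an abandoned stream is distinguishable from other failures at getBuffer() time. A self-contained sketch of how a HAL-buffer-manager caller could fold that into the buffer-request error it reports; the enum names mirror this patch, while the status values are stand-ins rather than the AOSP status_t constants:

    // Stand-ins for AOSP status_t results from Camera3Stream::getBuffer().
    enum class StreamStatus { Ok, DeadObject, InvalidOperation };

    // Subset of the HAL-visible error codes used earlier in this patch.
    enum class StreamBufferRequestError { None, StreamDisconnected, UnknownError };

    static StreamBufferRequestError mapGetBufferResult(StreamStatus res) {
        switch (res) {
            case StreamStatus::Ok:
                return StreamBufferRequestError::None;
            case StreamStatus::DeadObject:
                // Consumer went away: report a disconnect, matching the
                // isAbandoned() check in the requestStreamBuffers hunk above.
                return StreamBufferRequestError::StreamDisconnected;
            default:
                return StreamBufferRequestError::UnknownError;
        }
    }
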
diff --git a/services/mediacodec/seccomp_policy/mediacodec-arm.policy b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
index 9bdd4c8..3870a11 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
@@ -58,8 +58,4 @@
getdents64: 1
getrandom: 1
-# Used by UBSan diagnostic messages
-readlink: 1
-open: 1
-
@include /system/etc/seccomp_policy/crash_dump.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86.policy b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
index a1ef16f..845f84b 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-x86.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
@@ -67,8 +67,4 @@
getpid: 1
gettid: 1
-# Used by UBSan diagnostic messages
-readlink: 1
-open: 1
-
@include /system/etc/seccomp_policy/crash_dump.x86.policy