Merge "Fix to usage of upper bound value in volume curve" into nyc-mr1-dev
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index a6901a8..8f0eaa7 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -65,6 +65,7 @@
status_t setGeoData(int latitudex10000, int longitudex10000);
status_t setCaptureRate(float captureFps);
+ status_t setTemporalLayerCount(uint32_t layerCount);
virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
@@ -187,6 +188,7 @@
// Acquire lock before calling these methods
off64_t addSample_l(MediaBuffer *buffer);
off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
+ off64_t addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
bool exceedsFileSizeLimit();
bool use32BitFileOffset() const;
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index be7e5c1..6ba7b32 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -204,6 +204,8 @@
// transfer Function, value defined by ColorAspects.Transfer.
kKeyColorMatrix = 'cMtx', // int32_t,
// color Matrix, value defined by ColorAspects.MatrixCoeffs.
+ kKeyTemporalLayerId = 'iLyr', // int32_t, temporal layer-id. 0-based (0 => base layer)
+ kKeyTemporalLayerCount = 'cLyr', // int32_t, number of temporal layers encoded
};
enum {
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index 34445e0..f352d5b 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -877,6 +877,7 @@
ALOGV("Delete Track: %p", mpAudioTrack.get());
mpAudioTrack.clear();
}
+ clearWaveGens();
}
////////////////////////////////////////////////////////////////////////////////
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 6a67fcf..58448010 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -44,6 +44,7 @@
#include <byteswap.h>
#include "include/ID3.h"
+#include "include/avc_utils.h"
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
@@ -2471,6 +2472,15 @@
if (!strcasecmp(mMetaKeyMap[index].c_str(), "com.android.capture.fps")) {
mFileMetaData->setFloat(kKeyCaptureFramerate, *(float *)&val);
}
+ } else if (dataType == 67 && dataSize >= 4) {
+ // BE signed int32
+ uint32_t val;
+ if (!mDataSource->getUInt32(offset, &val)) {
+ return ERROR_MALFORMED;
+ }
+ if (!strcasecmp(mMetaKeyMap[index].c_str(), "com.android.video.temporal_layers_count")) {
+ mFileMetaData->setInt32(kKeyTemporalLayerCount, val);
+ }
} else {
// add more keys if needed
ALOGV("ignoring key: type %d, size %d", dataType, dataSize);
@@ -4464,6 +4474,12 @@
kKeyTargetTime, targetSampleTimeUs);
}
+ if (mIsAVC) {
+ uint32_t layerId = FindAVCLayerId(
+ (const uint8_t *)mBuffer->data(), mBuffer->range_length());
+ mBuffer->meta_data()->setInt32(kKeyTemporalLayerId, layerId);
+ }
+
if (isSyncSample) {
mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
}
@@ -4627,6 +4643,12 @@
kKeyTargetTime, targetSampleTimeUs);
}
+ if (mIsAVC) {
+ uint32_t layerId = FindAVCLayerId(
+ (const uint8_t *)mBuffer->data(), mBuffer->range_length());
+ mBuffer->meta_data()->setInt32(kKeyTemporalLayerId, layerId);
+ }
+
if (isSyncSample) {
mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
}
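
The per-sample kKeyTemporalLayerId tag, together with the container-level kKeyTemporalLayerCount, lets a consumer thin an AVC stream to a lower frame rate by discarding the highest temporal layers, since layer n only references layers 0..n-1. The helper below is a hypothetical illustration of that filtering decision; it is not part of this change, and its names and parameters are made up:

    #include <cstdint>

    // Hypothetical helper: decide whether a sample may be dropped when keeping
    // only `layersToKeep` of the `layerCount` temporal layers. Dropping the
    // highest layers keeps the remaining stream decodable at a reduced frame
    // rate because each layer only references lower layers.
    static bool canDropSample(uint32_t sampleLayerId,   // from kKeyTemporalLayerId
                              uint32_t layerCount,      // from kKeyTemporalLayerCount
                              uint32_t layersToKeep) {
        if (layerCount == 0 || layersToKeep >= layerCount) {
            return false;  // no layering info, or keeping every layer
        }
        return sampleLayerId >= layersToKeep;
    }
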
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 24fb987..b5eb50b 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -43,6 +43,7 @@
#include "include/ESDS.h"
#include "include/HevcUtils.h"
+#include "include/avc_utils.h"
#ifndef __predict_false
#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
@@ -70,6 +71,7 @@
static const char kMetaKey_Build[] = "com.android.build";
#endif
static const char kMetaKey_CaptureFps[] = "com.android.capture.fps";
+static const char kMetaKey_TemporalLayerCount[] = "com.android.video.temporal_layers_count";
static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
kHevcNalUnitTypeVps,
@@ -1162,6 +1164,37 @@
}
}
+off64_t MPEG4Writer::addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer) {
+ off64_t old_offset = mOffset;
+
+ const size_t kExtensionNALSearchRange = 64; // bytes to look for non-VCL NALUs
+
+ const uint8_t *dataStart = (const uint8_t *)buffer->data() + buffer->range_offset();
+ const uint8_t *currentNalStart = dataStart;
+ const uint8_t *nextNalStart;
+ const uint8_t *data = dataStart;
+ size_t nextNalSize;
+ size_t searchSize = buffer->range_length() > kExtensionNALSearchRange ?
+ kExtensionNALSearchRange : buffer->range_length();
+
+ while (getNextNALUnit(&data, &searchSize, &nextNalStart,
+ &nextNalSize, true) == OK) {
+ size_t currentNalSize = nextNalStart - currentNalStart - 4 /* strip 4-byte start-code */;
+ MediaBuffer *nalBuf = new MediaBuffer((void *)currentNalStart, currentNalSize);
+ addLengthPrefixedSample_l(nalBuf);
+ nalBuf->release();
+
+ currentNalStart = nextNalStart;
+ }
+
+ size_t currentNalOffset = currentNalStart - dataStart;
+ buffer->set_range(buffer->range_offset() + currentNalOffset,
+ buffer->range_length() - currentNalOffset);
+ addLengthPrefixedSample_l(buffer);
+
+ return old_offset;
+}
+
off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
off64_t old_offset = mOffset;
@@ -1381,6 +1414,19 @@
return OK;
}
+status_t MPEG4Writer::setTemporalLayerCount(uint32_t layerCount) {
+ if (layerCount > 9) {
+ return BAD_VALUE;
+ }
+
+ if (layerCount > 0) {
+ mMetaKeys->setInt32(kMetaKey_TemporalLayerCount, layerCount);
+ mMoovExtraSize += sizeof(kMetaKey_TemporalLayerCount) + 4 + 32;
+ }
+
+ return OK;
+}
+
void MPEG4Writer::write(const void *data, size_t size) {
write(data, 1, size);
}
@@ -1684,7 +1730,7 @@
List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
off64_t offset = (chunk->mTrack->isAvc() || chunk->mTrack->isHevc())
- ? addLengthPrefixedSample_l(*it)
+ ? addMultipleLengthPrefixedSamples_l(*it)
: addSample_l(*it);
if (isFirstSample) {
@@ -2593,7 +2639,7 @@
trackProgressStatus(timestampUs);
}
if (!hasMultipleTracks) {
- off64_t offset = (mIsAvc || mIsHevc) ? mOwner->addLengthPrefixedSample_l(copy)
+ off64_t offset = (mIsAvc || mIsHevc) ? mOwner->addMultipleLengthPrefixedSamples_l(copy)
: mOwner->addSample_l(copy);
uint32_t count = (mOwner->use32BitFileOffset()
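
addMultipleLengthPrefixedSamples_l splits an access unit that may carry extra non-VCL NAL units (separated by Annex B start codes) into individual length-prefixed samples before they are written out. The sketch below shows the same start-code-to-length-prefix conversion in isolation, without the MediaBuffer plumbing, assuming every NAL unit in the buffer is preceded by a 4-byte 00 00 00 01 start code:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Illustrative sketch: convert NAL units delimited by 4-byte Annex B start
    // codes (00 00 00 01) into 4-byte big-endian length-prefixed form, the
    // layout MPEG4Writer stores for AVC/HEVC samples.
    static std::vector<uint8_t> annexBToLengthPrefixed(const uint8_t *data, size_t size) {
        std::vector<uint8_t> out;
        size_t i = 0;
        while (i + 4 <= size) {
            // Find the start of the next NAL unit (or the end of the buffer).
            size_t next = size;
            for (size_t j = i + 4; j + 4 <= size; ++j) {
                if (data[j] == 0 && data[j + 1] == 0 && data[j + 2] == 0 && data[j + 3] == 1) {
                    next = j;
                    break;
                }
            }
            const uint8_t *nal = data + i + 4;          // skip the start code
            uint32_t nalSize = (uint32_t)(next - i - 4);
            out.push_back((nalSize >> 24) & 0xFF);      // big-endian length prefix
            out.push_back((nalSize >> 16) & 0xFF);
            out.push_back((nalSize >> 8) & 0xFF);
            out.push_back(nalSize & 0xFF);
            out.insert(out.end(), nal, nal + nalSize);
            i = next;
        }
        return out;
    }
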
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index ccf3440..0396dc6 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -491,6 +491,28 @@
return true;
}
+uint32_t FindAVCLayerId(const uint8_t *data, size_t size) {
+ CHECK(data != NULL);
+
+ const unsigned kSvcNalType = 0xE;
+ const unsigned kSvcNalSearchRange = 32;
+ // SVC NAL
+ // |---0 1110|1--- ----|---- ----|iii- ----|
+ //       ^                        ^
+ //   NAL-type = 0xE               layer-Id
+ //
+ // layer_id 0 is for base layer, while 1, 2, ... are enhancement layers.
+ // Layer n uses reference frames from layer 0, 1, ..., n-1.
+
+ uint32_t layerId = 0;
+ sp<ABuffer> svcNAL = FindNAL(
+ data, size > kSvcNalSearchRange ? kSvcNalSearchRange : size, kSvcNalType);
+ if (svcNAL != NULL && svcNAL->size() >= 4) {
+ layerId = (*(svcNAL->data() + 3) >> 5) & 0x7;
+ }
+ return layerId;
+}
+
sp<MetaData> MakeAACCodecSpecificData(
unsigned profile, unsigned sampling_freq_index,
unsigned channel_configuration) {
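
FindAVCLayerId only needs the fourth byte of the SVC prefix NAL unit (nal_unit_type 14): the temporal layer id sits in its top three bits. A standalone sketch of that extraction, using plain pointers instead of the ABuffer/FindNAL helpers and made-up sample bytes, is:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Illustrative only: mirrors the shift/mask in FindAVCLayerId,
    // i.e. (*(svcNAL->data() + 3) >> 5) & 0x7.
    static uint32_t layerIdFromSvcNal(const uint8_t *nal, size_t size) {
        if (size < 4 || (nal[0] & 0x1F) != 0x0E) {
            return 0;  // not an SVC prefix NAL; treat as base layer
        }
        return (nal[3] >> 5) & 0x7;  // layer id is the top 3 bits of byte 3
    }

    int main() {
        // Made-up example: nal_ref_idc=3, nal_unit_type=14, followed by three
        // extension bytes whose last byte (0x40) carries layer id 2 in its top bits.
        const uint8_t nal[] = { 0x6E, 0x80, 0x00, 0x40 };
        std::printf("layer id = %u\n", layerIdFromSvcNal(nal, sizeof(nal)));  // prints 2
        return 0;
    }
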
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 61b9bfd..7fc5ffc 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -381,6 +381,48 @@
resetPlugin();
}
+bool SoftAVC::getVUIParams() {
+ IV_API_CALL_STATUS_T status;
+ ih264d_ctl_get_vui_params_ip_t s_ctl_get_vui_params_ip;
+ ih264d_ctl_get_vui_params_op_t s_ctl_get_vui_params_op;
+
+ s_ctl_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_get_vui_params_ip.e_sub_cmd =
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_GET_VUI_PARAMS;
+
+ s_ctl_get_vui_params_ip.u4_size =
+ sizeof(ih264d_ctl_get_vui_params_ip_t);
+
+ s_ctl_get_vui_params_op.u4_size = sizeof(ih264d_ctl_get_vui_params_op_t);
+
+ status = ivdec_api_function(
+ (iv_obj_t *)mCodecCtx, (void *)&s_ctl_get_vui_params_ip,
+ (void *)&s_ctl_get_vui_params_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGW("Error in getting VUI params: 0x%x",
+ s_ctl_get_vui_params_op.u4_error_code);
+ return false;
+ }
+
+ int32_t primaries = s_ctl_get_vui_params_op.u1_colour_primaries;
+ int32_t transfer = s_ctl_get_vui_params_op.u1_tfr_chars;
+ int32_t coeffs = s_ctl_get_vui_params_op.u1_matrix_coeffs;
+ bool fullRange = s_ctl_get_vui_params_op.u1_video_full_range_flag;
+
+ ColorAspects colorAspects;
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ primaries, transfer, coeffs, fullRange, colorAspects);
+
+ // Update color aspects if necessary.
+ if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
+ mBitstreamColorAspects = colorAspects;
+ status_t err = handleColorAspectsChange();
+ CHECK(err == OK);
+ }
+ return true;
+}
+
bool SoftAVC::setDecodeArgs(
ivd_video_decode_ip_t *ps_dec_ip,
ivd_video_decode_op_t *ps_dec_op,
@@ -606,6 +648,8 @@
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+ getVUIParams();
+
GETTIME(&mTimeEnd, NULL);
/* Compute time taken for decode() */
TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
@@ -641,16 +685,22 @@
continue;
}
+ // Combine the resolution change and the color aspects change into one
+ // PortSettingsChanged event if necessary.
if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
uint32_t width = s_dec_op.u4_pic_wd;
uint32_t height = s_dec_op.u4_pic_ht;
bool portWillReset = false;
handlePortSettingsChange(&portWillReset, width, height);
-
if (portWillReset) {
resetDecoder();
return;
}
+ } else if (mUpdateColorAspects) {
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+ kDescribeColorAspectsIndex, NULL);
+ mUpdateColorAspects = false;
+ return;
}
if (s_dec_op.u4_output_present) {
@@ -705,6 +755,10 @@
}
}
+int SoftAVC::getColorAspectPreference() {
+ return kPreferBitstream;
+}
+
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
index c710c76..154ca38 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -59,6 +59,7 @@
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onReset();
+ virtual int getColorAspectPreference();
private:
// Number of input and output buffers
enum {
@@ -116,6 +117,8 @@
OMX_BUFFERHEADERTYPE *outHeader,
size_t timeStampIx);
+ bool getVUIParams();
+
DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
};
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
index 4529007..2781fe3 100644
--- a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
@@ -21,6 +21,7 @@
#include "SimpleSoftOMXComponent.h"
#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/stagefright/foundation/ColorUtils.h>
#include <media/IOMX.h>
#include <utils/RefBase.h>
@@ -43,6 +44,16 @@
OMX_COMPONENTTYPE **component);
protected:
+ enum {
+ kDescribeColorAspectsIndex = kPrepareForAdaptivePlaybackIndex + 1,
+ };
+
+ enum {
+ kNotSupported,
+ kPreferBitstream,
+ kPreferContainer,
+ };
+
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
virtual void onReset();
@@ -55,9 +66,16 @@
virtual OMX_ERRORTYPE getConfig(
OMX_INDEXTYPE index, OMX_PTR params);
+ virtual OMX_ERRORTYPE setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
virtual OMX_ERRORTYPE getExtensionIndex(
const char *name, OMX_INDEXTYPE *index);
+ virtual bool supportsDescribeColorAspects();
+
+ virtual int getColorAspectPreference();
+
void initPorts(OMX_U32 numInputBuffers,
OMX_U32 inputBufferSize,
OMX_U32 numOutputBuffers,
@@ -74,6 +92,10 @@
kCropSet,
kCropChanged,
};
+
+ // This function handles several port change events, including size, crop,
+ // stride and color aspects changes. It triggers an OMX_EventPortSettingsChanged
+ // event if necessary.
void handlePortSettingsChange(
bool *portWillReset, uint32_t width, uint32_t height,
CropSettingsMode cropSettingsMode = kCropUnSet, bool fakeStride = false);
@@ -99,6 +121,29 @@
AWAITING_ENABLED
} mOutputPortSettingsChange;
+ bool mUpdateColorAspects;
+
+ Mutex mColorAspectsLock;
+ // color aspects passed from the framework.
+ ColorAspects mDefaultColorAspects;
+ // color aspects parsed from the bitstream.
+ ColorAspects mBitstreamColorAspects;
+ // final color aspects after combining the above two aspects.
+ ColorAspects mFinalColorAspects;
+
+ bool colorAspectsDiffer(const ColorAspects &a, const ColorAspects &b);
+
+ // This function takes two sets of color aspects and updates mFinalColorAspects
+ // based on |preferredAspects|.
+ void updateFinalColorAspects(
+ const ColorAspects &otherAspects, const ColorAspects &preferredAspects);
+
+ // This function updates mFinalColorAspects based on the codec's preference.
+ status_t handleColorAspectsChange();
+
+ // Helper function to dump the ColorAspects.
+ void dumpColorAspects(const ColorAspects &colorAspects);
+
private:
uint32_t mMinInputBufferSize;
uint32_t mMinCompressionRatio;
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index 7465b35..235ee63 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -85,6 +85,7 @@
bool IsIDR(const sp<ABuffer> &accessUnit);
bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit);
+uint32_t FindAVCLayerId(const uint8_t *data, size_t size);
const char *AVCProfileToString(uint8_t profile);
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index d3553bd..2db8996 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -68,6 +68,11 @@
mCodingType(codingType),
mProfileLevels(profileLevels),
mNumProfileLevels(numProfileLevels) {
+
+ // init all the color aspects to be Unspecified.
+ memset(&mDefaultColorAspects, 0, sizeof(ColorAspects));
+ memset(&mBitstreamColorAspects, 0, sizeof(ColorAspects));
+ memset(&mFinalColorAspects, 0, sizeof(ColorAspects));
}
void SoftVideoDecoderOMXComponent::initPorts(
@@ -224,9 +229,66 @@
notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
OMX_IndexConfigCommonOutputCrop, NULL);
}
+ } else if (mUpdateColorAspects) {
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+ kDescribeColorAspectsIndex, NULL);
+ mUpdateColorAspects = false;
}
}
+void SoftVideoDecoderOMXComponent::dumpColorAspects(const ColorAspects &colorAspects) {
+ ALOGD("dumpColorAspects: (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) ",
+ colorAspects.mRange, asString(colorAspects.mRange),
+ colorAspects.mPrimaries, asString(colorAspects.mPrimaries),
+ colorAspects.mMatrixCoeffs, asString(colorAspects.mMatrixCoeffs),
+ colorAspects.mTransfer, asString(colorAspects.mTransfer));
+}
+
+bool SoftVideoDecoderOMXComponent::colorAspectsDiffer(
+ const ColorAspects &a, const ColorAspects &b) {
+ if (a.mRange != b.mRange
+ || a.mPrimaries != b.mPrimaries
+ || a.mTransfer != b.mTransfer
+ || a.mMatrixCoeffs != b.mMatrixCoeffs) {
+ return true;
+ }
+ return false;
+}
+
+void SoftVideoDecoderOMXComponent::updateFinalColorAspects(
+ const ColorAspects &otherAspects, const ColorAspects &preferredAspects) {
+ Mutex::Autolock autoLock(mColorAspectsLock);
+ ColorAspects newAspects;
+ newAspects.mRange = preferredAspects.mRange != ColorAspects::RangeUnspecified ?
+ preferredAspects.mRange : otherAspects.mRange;
+ newAspects.mPrimaries = preferredAspects.mPrimaries != ColorAspects::PrimariesUnspecified ?
+ preferredAspects.mPrimaries : otherAspects.mPrimaries;
+ newAspects.mTransfer = preferredAspects.mTransfer != ColorAspects::TransferUnspecified ?
+ preferredAspects.mTransfer : otherAspects.mTransfer;
+ newAspects.mMatrixCoeffs = preferredAspects.mMatrixCoeffs != ColorAspects::MatrixUnspecified ?
+ preferredAspects.mMatrixCoeffs : otherAspects.mMatrixCoeffs;
+
+ // Check whether mFinalColorAspects needs to be updated.
+ if (colorAspectsDiffer(mFinalColorAspects, newAspects)) {
+ mFinalColorAspects = newAspects;
+ mUpdateColorAspects = true;
+ }
+}
+
+status_t SoftVideoDecoderOMXComponent::handleColorAspectsChange() {
+ int preference = getColorAspectPreference();
+ ALOGD("Color Aspects preference: %d ", preference);
+
+ if (preference == kPreferBitstream) {
+ updateFinalColorAspects(mDefaultColorAspects, mBitstreamColorAspects);
+ } else if (preference == kPreferContainer) {
+ updateFinalColorAspects(mBitstreamColorAspects, mDefaultColorAspects);
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ return OK;
+}
+
void SoftVideoDecoderOMXComponent::copyYV12FrameToOutputBuffer(
uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride) {
@@ -450,7 +512,7 @@
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getConfig(
OMX_INDEXTYPE index, OMX_PTR params) {
- switch (index) {
+ switch ((int)index) {
case OMX_IndexConfigCommonOutputCrop:
{
OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
@@ -470,22 +532,88 @@
return OMX_ErrorNone;
}
+ case kDescribeColorAspectsIndex:
+ {
+ if (!supportsDescribeColorAspects()) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ DescribeColorAspectsParams* colorAspectsParams =
+ (DescribeColorAspectsParams *)params;
+
+ if (colorAspectsParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorBadParameter;
+ }
+
+ colorAspectsParams->sAspects = mFinalColorAspects;
+ if (colorAspectsParams->bRequestingDataSpace || colorAspectsParams->bDataSpaceChanged) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ return OMX_ErrorNone;
+ }
default:
return OMX_ErrorUnsupportedIndex;
}
}
+OMX_ERRORTYPE SoftVideoDecoderOMXComponent::setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch ((int)index) {
+ case kDescribeColorAspectsIndex:
+ {
+ if (!supportsDescribeColorAspects()) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+ const DescribeColorAspectsParams* colorAspectsParams =
+ (const DescribeColorAspectsParams *)params;
+
+ if (!isValidOMXParam(colorAspectsParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (colorAspectsParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorBadParameter;
+ }
+
+ // Update color aspects if necessary.
+ if (colorAspectsDiffer(colorAspectsParams->sAspects, mDefaultColorAspects)) {
+ mDefaultColorAspects = colorAspectsParams->sAspects;
+ status_t err = handleColorAspectsChange();
+ CHECK(err == OK);
+ }
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
+
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getExtensionIndex(
const char *name, OMX_INDEXTYPE *index) {
if (!strcmp(name, "OMX.google.android.index.prepareForAdaptivePlayback")) {
*(int32_t*)index = kPrepareForAdaptivePlaybackIndex;
return OMX_ErrorNone;
+ } else if (!strcmp(name, "OMX.google.android.index.describeColorAspects")
+ && supportsDescribeColorAspects()) {
+ *(int32_t*)index = kDescribeColorAspectsIndex;
+ return OMX_ErrorNone;
}
return SimpleSoftOMXComponent::getExtensionIndex(name, index);
}
+bool SoftVideoDecoderOMXComponent::supportsDescribeColorAspects() {
+ return getColorAspectPreference() != kNotSupported;
+}
+
+int SoftVideoDecoderOMXComponent::getColorAspectPreference() {
+ return kNotSupported;
+}
+
void SoftVideoDecoderOMXComponent::onReset() {
mOutputPortSettingsChange = NONE;
}
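
The field-by-field merge in updateFinalColorAspects is what implements the bitstream-versus-container preference: the preferred source wins for every field it actually specifies, and the other source fills the gaps. A self-contained sketch of that rule, using a simplified struct in place of the framework's ColorAspects (with 0 standing in for each field's Unspecified value), might look like:

    #include <cstdint>

    // Simplified stand-in for ColorAspects; 0 means "Unspecified" for every field.
    struct Aspects {
        uint32_t range, primaries, transfer, matrixCoeffs;
    };

    // Keep the preferred source's value per field unless it is Unspecified, in
    // which case fall back to the other source -- the rule updateFinalColorAspects
    // applies field by field.
    static Aspects mergeAspects(const Aspects &preferred, const Aspects &other) {
        Aspects out;
        out.range        = preferred.range        ? preferred.range        : other.range;
        out.primaries    = preferred.primaries    ? preferred.primaries    : other.primaries;
        out.transfer     = preferred.transfer     ? preferred.transfer     : other.transfer;
        out.matrixCoeffs = preferred.matrixCoeffs ? preferred.matrixCoeffs : other.matrixCoeffs;
        return out;
    }

    // A decoder preferring bitstream values (kPreferBitstream) would effectively
    // compute mergeAspects(bitstreamAspects, frameworkDefaults); one preferring
    // the container (kPreferContainer) would swap the two arguments.
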
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 72be301..1f1e36b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2299,6 +2299,12 @@
mCallbackThread->resetDraining();
}
+void AudioFlinger::PlaybackThread::errorCallback()
+{
+ ALOG_ASSERT(mCallbackThread != 0);
+ mCallbackThread->setAsyncError();
+}
+
void AudioFlinger::PlaybackThread::resetWriteBlocked(uint32_t sequence)
{
Mutex::Autolock _l(mLock);
@@ -2333,6 +2339,9 @@
case STREAM_CBK_EVENT_DRAIN_READY:
me->drainCallback();
break;
+ case STREAM_CBK_EVENT_ERROR:
+ me->errorCallback();
+ break;
default:
ALOGW("asyncCallback() unknown event %d", event);
break;
@@ -3904,6 +3913,13 @@
broadcast_l();
}
+void AudioFlinger::PlaybackThread::onAsyncError()
+{
+ for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
+ invalidateTracks((audio_stream_type_t)i);
+ }
+}
+
void AudioFlinger::MixerThread::threadLoop_mix()
{
// mix buffers...
@@ -5220,7 +5236,8 @@
: Thread(false /*canCallJava*/),
mPlaybackThread(playbackThread),
mWriteAckSequence(0),
- mDrainSequence(0)
+ mDrainSequence(0),
+ mAsyncError(false)
{
}
@@ -5238,11 +5255,13 @@
while (!exitPending()) {
uint32_t writeAckSequence;
uint32_t drainSequence;
+ bool asyncError;
{
Mutex::Autolock _l(mLock);
while (!((mWriteAckSequence & 1) ||
(mDrainSequence & 1) ||
+ mAsyncError ||
exitPending())) {
mWaitWorkCV.wait(mLock);
}
@@ -5256,6 +5275,8 @@
mWriteAckSequence &= ~1;
drainSequence = mDrainSequence;
mDrainSequence &= ~1;
+ asyncError = mAsyncError;
+ mAsyncError = false;
}
{
sp<AudioFlinger::PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -5266,6 +5287,9 @@
if (drainSequence & 1) {
playbackThread->resetDraining(drainSequence >> 1);
}
+ if (asyncError) {
+ playbackThread->onAsyncError();
+ }
}
}
}
@@ -5314,6 +5338,13 @@
}
}
+void AudioFlinger::AsyncCallbackThread::setAsyncError()
+{
+ Mutex::Autolock _l(mLock);
+ mAsyncError = true;
+ mWaitWorkCV.signal();
+}
+
// ----------------------------------------------------------------------------
AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
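
The new error path reuses the hand-off pattern already used for the write and drain callbacks: the stream callback just records the error and signals the AsyncCallbackThread, which consumes the flag under its lock and then calls back into the playback thread (here, to invalidate all tracks) outside of it. A minimal standalone sketch of that pattern with standard C++ primitives, not the AudioFlinger classes, is:

    #include <condition_variable>
    #include <functional>
    #include <mutex>
    #include <thread>
    #include <utility>

    // Sketch of the flag-plus-condition-variable hand-off behind the new
    // STREAM_CBK_EVENT_ERROR path: the callback only records the error and wakes
    // the worker; the worker clears the flag under the lock and reacts outside it.
    class AsyncErrorRelay {
    public:
        explicit AsyncErrorRelay(std::function<void()> onError)
            : mOnError(std::move(onError)),
              mWorker([this] { loop(); }) {}

        ~AsyncErrorRelay() {
            {
                std::lock_guard<std::mutex> l(mLock);
                mExit = true;
            }
            mCond.notify_one();
            mWorker.join();
        }

        // Called from the stream callback context (like setAsyncError()).
        void setAsyncError() {
            {
                std::lock_guard<std::mutex> l(mLock);
                mAsyncError = true;
            }
            mCond.notify_one();
        }

    private:
        void loop() {
            for (;;) {
                bool asyncError;
                {
                    std::unique_lock<std::mutex> l(mLock);
                    mCond.wait(l, [this] { return mAsyncError || mExit; });
                    if (mExit) return;
                    asyncError = mAsyncError;
                    mAsyncError = false;  // consume the flag while holding the lock
                }
                if (asyncError) {
                    mOnError();           // e.g. invalidate all playback tracks
                }
            }
        }

        std::function<void()> mOnError;
        std::mutex mLock;
        std::condition_variable mCond;
        bool mAsyncError = false;
        bool mExit = false;
        std::thread mWorker;  // declared last so it starts after the flags exist
    };
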
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 1bfbca9..2fd7eeb 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -543,6 +543,7 @@
void resetWriteBlocked(uint32_t sequence);
void drainCallback();
void resetDraining(uint32_t sequence);
+ void errorCallback();
static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
@@ -550,6 +551,7 @@
virtual bool waitingAsyncCallback_l();
virtual bool shouldStandby_l();
virtual void onAddNewTrack_l();
+ void onAsyncError(); // error reported by AsyncCallbackThread
// ThreadBase virtuals
virtual void preExit();
@@ -1044,6 +1046,7 @@
void resetWriteBlocked();
void setDraining(uint32_t sequence);
void resetDraining();
+ void setAsyncError();
private:
const wp<PlaybackThread> mPlaybackThread;
@@ -1057,6 +1060,7 @@
uint32_t mDrainSequence;
Condition mWaitWorkCV;
Mutex mLock;
+ bool mAsyncError;
};
class DuplicatingThread : public MixerThread {