Merge changes from topic "mediametrics_stable_0118"
* changes:
MediaPlayer2 using the new mediametrics stable interface
Further work on libmediametrics stable API
diff --git a/apex/Android.bp b/apex/Android.bp
index 422880a..39997d2 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -36,7 +36,6 @@
apex {
name: "com.android.media.swcodec",
- compile_multilib: "32",
manifest: "manifest_codec.json",
native_shared_libs: [
"libmedia_codecserviceregistrant",
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4bb74cb..641816f 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -5561,12 +5561,12 @@
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
- * <p>For a logical camera, this is concatenation of all underlying physical camera ids.
- * The null terminator for physical camera id must be preserved so that the whole string
- * can be tokenized using '\0' to generate list of physical camera ids.</p>
- * <p>For example, if the physical camera ids of the logical camera are "2" and "3", the
+ * <p>For a logical camera, this is concatenation of all underlying physical camera IDs.
+ * The null terminator for each physical camera ID must be preserved so that the whole string
+ * can be tokenized using '\0' to generate a list of physical camera IDs.</p>
+ * <p>For example, if the physical camera IDs of the logical camera are "2" and "3", the
* value of this tag will be ['2', '\0', '3', '\0'].</p>
- * <p>The number of physical camera ids must be no less than 2.</p>
+ * <p>The number of physical camera IDs must be no less than 2.</p>
*/
ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = // byte[n]
ACAMERA_LOGICAL_MULTI_CAMERA_START,
@@ -5591,6 +5591,28 @@
*/
ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE = // byte (acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t)
ACAMERA_LOGICAL_MULTI_CAMERA_START + 1,
+ /**
+ * <p>String containing the ID of the underlying active physical camera.</p>
+ *
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul></p>
+ *
+ * <p>The ID of the active physical camera that's backing the logical camera. All camera
+ * streams and metadata that are not physical-camera specific will originate from this
+ * physical camera. This must be one of the valid physical IDs advertised in the
+ * physicalIds static tag.</p>
+ * <p>For a logical camera made up of physical cameras whose lenses have different
+ * characteristics, the camera device may choose to switch between the physical cameras
+ * when the application changes FOCAL_LENGTH or SCALER_CROP_REGION.
+ * At the time of the lens switch, this result metadata reflects the new active physical
+ * camera ID.</p>
+ */
+ ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID = // byte
+ ACAMERA_LOGICAL_MULTI_CAMERA_START + 2,
ACAMERA_LOGICAL_MULTI_CAMERA_END,
/**
@@ -7162,6 +7184,10 @@
* <p>If this is supported, android.scaler.streamConfigurationMap will
* additionally return a min frame duration that is greater than
* zero for each supported size-format combination.</p>
+ * <p>For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active
+ * physical camera switches, exposureTime, sensitivity, and lens properties may change
+ * even if AE/AF is locked. However, the overall auto exposure and auto focus experience
+ * for users will be consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.</p>
*
* @see ACAMERA_BLACK_LEVEL_LOCK
* @see ACAMERA_CONTROL_AE_LOCK
@@ -7217,6 +7243,10 @@
* will accurately report the values applied by AWB in the result.</p>
* <p>A given camera device may also support additional post-processing
* controls, but this capability only covers the above list of controls.</p>
+ * <p>For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active
+ * physical camera switches, the tonemap, white balance, and shading map may change even
+ * if AWB is locked. However, the overall post-processing experience for users will be
+ * consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.</p>
*
* @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
* @see ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
@@ -7396,7 +7426,7 @@
* </li>
* <li>The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
* the same.</li>
- * <li>The logical camera device must be LIMITED or higher device.</li>
+ * <li>The logical camera must be LIMITED or higher device.</li>
* </ul>
* <p>Both the logical camera device and its underlying physical devices support the
* mandatory stream combinations required for their device levels.</p>
@@ -7416,13 +7446,84 @@
* <p>Using physical streams in place of a logical stream of the same size and format will
* not slow down the frame rate of the capture, as long as the minimum frame duration
* of the physical and logical streams are the same.</p>
+ * <p>A logical camera device's dynamic metadata may contain
+ * ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID to notify the application of the current
+ * active physical camera ID. The active physical camera is the physical camera from which
+ * the logical camera's main image data outputs (YUV or RAW) and metadata originate.
+ * In addition, this serves as an indication of which physical camera is used to output to
+ * a RAW stream, or, in case only physical cameras support RAW, which physical RAW stream
+ * the application should request.</p>
+ * <p>The logical camera's static metadata tags below describe the default active physical
+ * camera. A physical camera is the default active camera if it is used when the application
+ * directly submits requests built from a template. All templates will default to the same
+ * active physical camera.</p>
+ * <ul>
+ * <li>ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE</li>
+ * <li>ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT</li>
+ * <li>ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE</li>
+ * <li>ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION</li>
+ * <li>ACAMERA_SENSOR_INFO_PHYSICAL_SIZE</li>
+ * <li>ACAMERA_SENSOR_INFO_WHITE_LEVEL</li>
+ * <li>ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED</li>
+ * <li>ACAMERA_SENSOR_REFERENCE_ILLUMINANT1</li>
+ * <li>ACAMERA_SENSOR_REFERENCE_ILLUMINANT2</li>
+ * <li>ACAMERA_SENSOR_CALIBRATION_TRANSFORM1</li>
+ * <li>ACAMERA_SENSOR_CALIBRATION_TRANSFORM2</li>
+ * <li>ACAMERA_SENSOR_COLOR_TRANSFORM1</li>
+ * <li>ACAMERA_SENSOR_COLOR_TRANSFORM2</li>
+ * <li>ACAMERA_SENSOR_FORWARD_MATRIX1</li>
+ * <li>ACAMERA_SENSOR_FORWARD_MATRIX2</li>
+ * <li>ACAMERA_SENSOR_BLACK_LEVEL_PATTERN</li>
+ * <li>ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY</li>
+ * <li>ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS</li>
+ * <li>ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES</li>
+ * <li>ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE</li>
+ * <li>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE</li>
+ * <li>ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+ * <li>ACAMERA_LENS_POSE_REFERENCE</li>
+ * <li>ACAMERA_LENS_DISTORTION</li>
+ * </ul>
+ * <p>To maintain backward compatibility, the capture request and result metadata tags
+ * required for basic camera functionality will be based solely on the
+ * logical camera capability. Other request and result metadata tags, on the other
+ * hand, will be based on the current active physical camera. For example, the physical
+ * cameras' sensor sensitivities and lens capabilities could differ from each other.
+ * So when the application manually controls sensor exposure time/gain, or does manual
+ * focus control, it must check the current active physical camera's exposure, gain,
+ * and focus distance range.</p>
*
* @see ACAMERA_LENS_DISTORTION
+ * @see ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
* @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID
* @see ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+ * @see ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM1
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM2
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM1
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM2
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX1
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX2
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * @see ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
+ * @see ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+ * @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
+ * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * @see ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY
+ * @see ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA = 11,
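
The documentation added above specifies that ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS is a '\0'-separated concatenation of physical camera IDs, and that ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is read from result metadata like any other entry. A minimal NDK-side sketch of tokenizing the static tag, not part of this change; the helper name getPhysicalCameraIds and the error handling are illustrative only:

// Sketch only: split ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS on '\0'.
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>
#include <string>
#include <vector>

static std::vector<std::string> getPhysicalCameraIds(const ACameraMetadata *chars) {
    std::vector<std::string> ids;
    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(
            chars, ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, &entry) != ACAMERA_OK) {
        return ids;  // not a logical multi-camera
    }
    const char *p = reinterpret_cast<const char *>(entry.data.u8);
    const char *end = p + entry.count;
    while (p < end) {
        std::string id(p);        // consumes bytes up to the next '\0'
        p += id.size() + 1;       // skip the terminator
        if (!id.empty()) ids.push_back(id);
    }
    return ids;  // e.g. {"2", "3"} for the ['2','\0','3','\0'] example above
}
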
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index 7193006..5d2d47c 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -70,7 +70,6 @@
return;
}
size_t size = get_camera_metadata_size(src);
- ALOGE("Converting metadata size: %d", (int)size);
dst->setToExternal((uint8_t *) src, size);
return;
}
diff --git a/media/codec2/components/opus/Android.bp b/media/codec2/components/opus/Android.bp
index 240cdb9..0ed141b 100644
--- a/media/codec2/components/opus/Android.bp
+++ b/media/codec2/components/opus/Android.bp
@@ -9,3 +9,14 @@
shared_libs: ["libopus"],
}
+cc_library_shared {
+ name: "libcodec2_soft_opusenc",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftOpusEnc.cpp"],
+
+ shared_libs: ["libopus"],
+}
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
index 2439c3c..3ce1fd6 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.cpp
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -19,10 +19,9 @@
#include <log/log.h>
#include <media/stagefright/foundation/MediaDefs.h>
-
+#include <media/stagefright/foundation/OpusHeader.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
-
#include "C2SoftOpusDec.h"
extern "C" {
@@ -188,16 +187,6 @@
work->workletsProcessed = 1u;
}
-static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
- uint32_t read_offset) {
- if (read_offset + 1 > data_size)
- return 0;
- uint16_t val;
- val = data[read_offset];
- val |= data[read_offset + 1] << 8;
- return val;
-}
-
static const int kRate = 48000;
// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
@@ -216,81 +205,6 @@
static const int kMaxChannelsWithDefaultLayout = 2;
static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
-// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
-static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
- OpusHeader* header) {
- // Size of the Opus header excluding optional mapping information.
- const size_t kOpusHeaderSize = 19;
-
- // Offset to the channel count byte in the Opus header.
- const size_t kOpusHeaderChannelsOffset = 9;
-
- // Offset to the pre-skip value in the Opus header.
- const size_t kOpusHeaderSkipSamplesOffset = 10;
-
- // Offset to the gain value in the Opus header.
- const size_t kOpusHeaderGainOffset = 16;
-
- // Offset to the channel mapping byte in the Opus header.
- const size_t kOpusHeaderChannelMappingOffset = 18;
-
- // Opus Header contains a stream map. The mapping values are in the header
- // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
- // data contains stream count, coupling information, and per channel mapping
- // values:
- // - Byte 0: Number of streams.
- // - Byte 1: Number coupled.
- // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
- // values.
- const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
- const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
- const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
-
- if (data_size < kOpusHeaderSize) {
- ALOGE("Header size is too small.");
- return false;
- }
- header->channels = *(data + kOpusHeaderChannelsOffset);
- if (header->channels <= 0 || header->channels > kMaxChannels) {
- ALOGE("Invalid Header, wrong channel count: %d", header->channels);
- return false;
- }
-
- header->skip_samples = ReadLE16(data,
- data_size,
- kOpusHeaderSkipSamplesOffset);
-
- header->gain_db = static_cast<int16_t>(ReadLE16(data,
- data_size,
- kOpusHeaderGainOffset));
-
- header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
- if (!header->channel_mapping) {
- if (header->channels > kMaxChannelsWithDefaultLayout) {
- ALOGE("Invalid Header, missing stream map.");
- return false;
- }
- header->num_streams = 1;
- header->num_coupled = header->channels > 1;
- header->stream_map[0] = 0;
- header->stream_map[1] = 1;
- return true;
- }
- if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
- ALOGE("Invalid stream map; insufficient data for current channel "
- "count: %d", header->channels);
- return false;
- }
- header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
- header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
- if (header->num_streams + header->num_coupled != header->channels) {
- ALOGE("Inconsistent channel mapping.");
- return false;
- }
- for (int i = 0; i < header->channels; ++i)
- header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
- return true;
-}
// Convert nanoseconds to number of samples.
static uint64_t ns_to_samples(uint64_t ns, int rate) {
@@ -338,7 +252,19 @@
const uint8_t *data = rView.data() + inOffset;
if (mInputBufferCount < 3) {
if (mInputBufferCount == 0) {
- if (!ParseOpusHeader(data, inSize, &mHeader)) {
+ size_t opusHeadSize = inSize;
+ size_t codecDelayBufSize = 0;
+ size_t seekPreRollBufSize = 0;
+ void *opusHeadBuf = (void *)data;
+ void *codecDelayBuf = NULL;
+ void *seekPreRollBuf = NULL;
+
+ GetOpusHeaderBuffers(data, inSize, &opusHeadBuf,
+ &opusHeadSize, &codecDelayBuf,
+ &codecDelayBufSize, &seekPreRollBuf,
+ &seekPreRollBufSize);
+
+ if (!ParseOpusHeader((uint8_t *)opusHeadBuf, opusHeadSize, &mHeader)) {
ALOGE("Encountered error while Parsing Opus Header.");
mSignalledError = true;
work->result = C2_CORRUPTED;
@@ -377,6 +303,20 @@
work->result = C2_CORRUPTED;
return;
}
+
+ if (codecDelayBuf && codecDelayBufSize == 8) {
+ uint64_t value;
+ memcpy(&value, codecDelayBuf, sizeof(uint64_t));
+ mCodecDelay = ns_to_samples(value, kRate);
+ mSamplesToDiscard = mCodecDelay;
+ ++mInputBufferCount;
+ }
+ if (seekPreRollBuf && seekPreRollBufSize == 8) {
+ uint64_t value;
+ memcpy(&value, seekPreRollBuf, sizeof(uint64_t));
+ mSeekPreRoll = ns_to_samples(value, kRate);
+ ++mInputBufferCount;
+ }
} else {
if (inSize < 8) {
ALOGE("Input sample size is too small.");
@@ -392,29 +332,30 @@
}
else {
mSeekPreRoll = samples;
-
- ALOGI("Configuring decoder: %d Hz, %d channels",
- kRate, mHeader.channels);
- C2StreamSampleRateInfo::output sampleRateInfo(0u, kRate);
- C2StreamChannelCountInfo::output channelCountInfo(0u, mHeader.channels);
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- c2_status_t err = mIntf->config(
- { &sampleRateInfo, &channelCountInfo },
- C2_MAY_BLOCK,
- &failures);
- if (err == OK) {
- work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
- work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
- } else {
- ALOGE("Config Update failed");
- mSignalledError = true;
- work->result = C2_CORRUPTED;
- return;
- }
}
}
++mInputBufferCount;
+ if (mInputBufferCount == 3) {
+ ALOGI("Configuring decoder: %d Hz, %d channels",
+ kRate, mHeader.channels);
+ C2StreamSampleRateInfo::output sampleRateInfo(0u, kRate);
+ C2StreamChannelCountInfo::output channelCountInfo(0u, mHeader.channels);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config(
+ { &sampleRateInfo, &channelCountInfo },
+ C2_MAY_BLOCK,
+ &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
+ work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ } else {
+ ALOGE("Config Update failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
fillEmptyWork(work);
if (eos) {
mSignalledOutputEos = true;
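
The decoder now reads the codec delay and seek pre-roll as 8-byte nanosecond values from the unified CSD and converts them to sample counts with ns_to_samples() at kRate (48000). A small sketch of that conversion, assuming ns_to_samples() is plain integer scaling (its body sits outside this hunk):

#include <cstdint>

// Assumed equivalent of ns_to_samples(): integer scaling of nanoseconds to
// samples at `rate` Hz.
static uint64_t ns_to_samples_sketch(uint64_t ns, int rate) {
    return ns * static_cast<uint64_t>(rate) / 1000000000ull;
}

// Worked example: an 80 ms seek pre-roll is stored as 80000000 ns in the CSD;
// ns_to_samples_sketch(80000000, 48000) == 3840 samples at kRate = 48000.
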
diff --git a/media/codec2/components/opus/C2SoftOpusDec.h b/media/codec2/components/opus/C2SoftOpusDec.h
index 92b7426..b0715ac 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.h
+++ b/media/codec2/components/opus/C2SoftOpusDec.h
@@ -24,16 +24,6 @@
namespace android {
-struct OpusHeader {
- int channels;
- int skip_samples;
- int channel_mapping;
- int num_streams;
- int num_coupled;
- int16_t gain_db;
- uint8_t stream_map[8];
-};
-
struct C2SoftOpusDec : public SimpleC2Component {
class IntfImpl;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
new file mode 100644
index 0000000..d6ed5ff
--- /dev/null
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -0,0 +1,638 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftOpusEnc"
+#include <utils/Log.h>
+
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/OpusHeader.h>
+#include "C2SoftOpusEnc.h"
+
+extern "C" {
+ #include <opus.h>
+ #include <opus_multistream.h>
+}
+
+#define DEFAULT_FRAME_DURATION_MS 20
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.opus.encoder";
+
+class C2SoftOpusEnc::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_AUDIO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_AUDIO_OPUS))
+ .build());
+
+ addParameter(
+ DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ .withDefault(new C2StreamSampleRateInfo::input(0u, 48000))
+ .withFields({C2F(mSampleRate, value).oneOf({
+ 8000, 12000, 16000, 24000, 48000})})
+ .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
+ .withFields({C2F(mChannelCount, value).inRange(1, 8)})
+ .withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 128000))
+ .withFields({C2F(mBitrate, value).inRange(500, 512000)})
+ .withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mComplexity, C2_PARAMKEY_COMPLEXITY)
+ .withDefault(new C2StreamComplexityTuning::output(0u, 10))
+ .withFields({C2F(mComplexity, value).inRange(1, 10)})
+ .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 3840))
+ .build());
+ }
+
+ uint32_t getSampleRate() const { return mSampleRate->value; }
+ uint32_t getChannelCount() const { return mChannelCount->value; }
+ uint32_t getBitrate() const { return mBitrate->value; }
+ uint32_t getComplexity() const { return mComplexity->value; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
+ std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+};
+
+C2SoftOpusEnc::C2SoftOpusEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mOutputBlock(nullptr),
+ mEncoder(nullptr),
+ mInputBufferPcm16(nullptr),
+ mOutIndex(0u) {
+}
+
+C2SoftOpusEnc::~C2SoftOpusEnc() {
+ onRelease();
+}
+
+c2_status_t C2SoftOpusEnc::onInit() {
+ return initEncoder();
+}
+
+c2_status_t C2SoftOpusEnc::configureEncoder() {
+ unsigned char mono_mapping[256] = {0};
+ unsigned char stereo_mapping[256] = {0, 1};
+ unsigned char surround_mapping[256] = {0, 1, 255};
+ mSampleRate = mIntf->getSampleRate();
+ mChannelCount = mIntf->getChannelCount();
+ uint32_t bitrate = mIntf->getBitrate();
+ int complexity = mIntf->getComplexity();
+ mNumSamplesPerFrame = mSampleRate / (1000 / mFrameDurationMs);
+ mNumPcmBytesPerInputFrame =
+ mChannelCount * mNumSamplesPerFrame * sizeof(int16_t);
+ int err = C2_OK;
+
+ unsigned char* mapping;
+ if (mChannelCount < 2) {
+ mapping = mono_mapping;
+ } else if (mChannelCount == 2) {
+ mapping = stereo_mapping;
+ } else {
+ mapping = surround_mapping;
+ }
+
+ if (mEncoder != nullptr) {
+ opus_multistream_encoder_destroy(mEncoder);
+ }
+
+ mEncoder = opus_multistream_encoder_create(mSampleRate, mChannelCount,
+ 1, 1, mapping, OPUS_APPLICATION_AUDIO, &err);
+ if (err) {
+ ALOGE("Could not create libopus encoder. Error code: %i", err);
+ return C2_CORRUPTED;
+ }
+
+ // Complexity
+ if (opus_multistream_encoder_ctl(
+ mEncoder, OPUS_SET_COMPLEXITY(complexity)) != OPUS_OK) {
+ ALOGE("failed to set complexity");
+ return C2_BAD_VALUE;
+ }
+
+ // DTX
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_DTX(0)) != OPUS_OK) {
+ ALOGE("failed to set dtx");
+ return C2_BAD_VALUE;
+ }
+
+ // Application
+ if (opus_multistream_encoder_ctl(mEncoder,
+ OPUS_SET_APPLICATION(OPUS_APPLICATION_AUDIO)) != OPUS_OK) {
+ ALOGE("failed to set application");
+ return C2_BAD_VALUE;
+ }
+
+ // Signal type
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_SIGNAL(OPUS_AUTO)) !=
+ OPUS_OK) {
+ ALOGE("failed to set signal");
+ return C2_BAD_VALUE;
+ }
+
+ // Unconstrained VBR
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(0)) != OPUS_OK) {
+ ALOGE("failed to set vbr type");
+ return C2_BAD_VALUE;
+ }
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(0)) !=
+ OPUS_OK) {
+ ALOGE("failed to set vbr constraint");
+ return C2_BAD_VALUE;
+ }
+
+ // Bitrate
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_BITRATE(bitrate)) !=
+ OPUS_OK) {
+ ALOGE("failed to set bitrate");
+ return C2_BAD_VALUE;
+ }
+
+ // Get codecDelay
+ int32_t lookahead;
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_GET_LOOKAHEAD(&lookahead)) !=
+ OPUS_OK) {
+ ALOGE("failed to get lookahead");
+ return C2_BAD_VALUE;
+ }
+ mCodecDelay = lookahead * 1000000000ll / mSampleRate;
+
+ // Set seek preroll to 80 ms
+ mSeekPreRoll = 80000000;
+ return C2_OK;
+}
+
+c2_status_t C2SoftOpusEnc::initEncoder() {
+ mSignalledEos = false;
+ mSignalledError = false;
+ mHeaderGenerated = false;
+ mIsFirstFrame = true;
+ mEncoderFlushed = false;
+ mBufferAvailable = false;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0;
+ mFilledLen = 0;
+ mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+ if (!mInputBufferPcm16) {
+ mInputBufferPcm16 =
+ (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+ }
+ if (!mInputBufferPcm16) return C2_NO_MEMORY;
+
+ /* Default Configurations */
+ c2_status_t status = configureEncoder();
+ return status;
+}
+
+c2_status_t C2SoftOpusEnc::onStop() {
+ mSignalledEos = false;
+ mSignalledError = false;
+ mIsFirstFrame = true;
+ mEncoderFlushed = false;
+ mBufferAvailable = false;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0u;
+ mFilledLen = 0;
+ if (mEncoder) {
+ int status = opus_multistream_encoder_ctl(mEncoder, OPUS_RESET_STATE);
+ if (status != OPUS_OK) {
+ ALOGE("OPUS_RESET_STATE failed status = %s", opus_strerror(status));
+ mSignalledError = true;
+ return C2_CORRUPTED;
+ }
+ }
+ if (mOutputBlock) mOutputBlock.reset();
+ mOutputBlock = nullptr;
+
+ return C2_OK;
+}
+
+void C2SoftOpusEnc::onReset() {
+ (void)onStop();
+}
+
+void C2SoftOpusEnc::onRelease() {
+ (void)onStop();
+ if (mInputBufferPcm16) {
+ free(mInputBufferPcm16);
+ mInputBufferPcm16 = nullptr;
+ }
+ if (mEncoder) {
+ opus_multistream_encoder_destroy(mEncoder);
+ mEncoder = nullptr;
+ }
+}
+
+c2_status_t C2SoftOpusEnc::onFlush_sm() {
+ return onStop();
+}
+
+// Drain the encoder to get last frames (if any)
+int C2SoftOpusEnc::drainEncoder(uint8_t* outPtr) {
+ memset((uint8_t *)mInputBufferPcm16 + mFilledLen, 0,
+ (mNumPcmBytesPerInputFrame - mFilledLen));
+ int encodedBytes = opus_multistream_encode(
+ mEncoder, mInputBufferPcm16, mNumSamplesPerFrame, outPtr, kMaxPayload);
+ if (encodedBytes > mOutputBlock->capacity()) {
+ ALOGE("not enough space left to write encoded data, dropping %d bytes",
+ mBytesEncoded);
+ // a fatal error would stop the encoding
+ return -1;
+ }
+ ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
+ mNumPcmBytesPerInputFrame);
+ mEncoderFlushed = true;
+ mFilledLen = 0;
+ return encodedBytes;
+}
+
+void C2SoftOpusEnc::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+ C2ReadView rView = mDummyReadView;
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ c2_status_t err = C2_OK;
+ if (!work->input.buffers.empty()) {
+ rView =
+ work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+ if (!mEncoder) {
+ if ((err = initEncoder()) != C2_OK) {
+ ALOGE("initEncoder failed with status %d", err);
+ work->result = err;
+ mSignalledError = true;
+ return;
+ }
+ }
+ if (mIsFirstFrame) {
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ mIsFirstFrame = false;
+ }
+
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+
+ C2WriteView wView = mOutputBlock->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ mOutputBlock.reset();
+ return;
+ }
+
+ size_t inPos = 0;
+ size_t processSize = 0;
+ mBytesEncoded = 0;
+ uint64_t outTimeStamp = 0u;
+ std::shared_ptr<C2Buffer> buffer;
+ uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
+ const uint8_t* inPtr = rView.data() + inOffset;
+
+ class FillWork {
+ public:
+ FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
+ const std::shared_ptr<C2Buffer> &buffer)
+ : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {
+ }
+ ~FillWork() = default;
+
+ void operator()(const std::unique_ptr<C2Work>& work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = mOrdinal;
+ work->workletsProcessed = 1u;
+ work->result = C2_OK;
+ if (mBuffer) {
+ work->worklets.front()->output.buffers.push_back(mBuffer);
+ }
+ ALOGV("timestamp = %lld, index = %lld, w/%s buffer",
+ mOrdinal.timestamp.peekll(),
+ mOrdinal.frameIndex.peekll(),
+ mBuffer ? "" : "o");
+ }
+
+ private:
+ const uint32_t mFlags;
+ const C2WorkOrdinalStruct mOrdinal;
+ const std::shared_ptr<C2Buffer> mBuffer;
+ };
+
+ C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+
+ if (!mHeaderGenerated) {
+ uint8_t header[AOPUS_UNIFIED_CSD_MAXSIZE];
+ memset(header, 0, sizeof(header));
+ OpusHeader opusHeader;
+ opusHeader.channels = mChannelCount;
+ opusHeader.num_streams = mChannelCount;
+ opusHeader.num_coupled = 0;
+ opusHeader.channel_mapping = ((mChannelCount > 8) ? 255 : (mChannelCount > 2));
+ opusHeader.gain_db = 0;
+ opusHeader.skip_samples = 0;
+ int headerLen = WriteOpusHeaders(opusHeader, mSampleRate, header,
+ sizeof(header), mCodecDelay, mSeekPreRoll);
+
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(headerLen, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ ALOGV("put csd, %d bytes", headerLen);
+ memcpy(csd->m.value, header, headerLen);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+ mHeaderGenerated = true;
+ }
+
+ /*
+ * For an input buffer whose size is not a multiple of mNumPcmBytesPerInputFrame, we
+ * accumulate the input and keep it. Once the accumulated input reaches the expected
+ * number of bytes, we send it to the encoder. mFilledLen tracks the accumulated input
+ * bytes not yet encoded. The next call fills mNumPcmBytesPerInputFrame - mFilledLen
+ * bytes of input and sends it to the encoder.
+ */
+ while (inPos < inSize) {
+ const uint8_t* pcmBytes = inPtr + inPos;
+ int filledSamples = mFilledLen / sizeof(int16_t);
+ if ((inPos + (mNumPcmBytesPerInputFrame - mFilledLen)) <= inSize) {
+ processSize = mNumPcmBytesPerInputFrame - mFilledLen;
+ mBufferAvailable = true;
+ } else {
+ processSize = inSize - inPos;
+ mBufferAvailable = false;
+ if (eos) {
+ memset(mInputBufferPcm16 + filledSamples, 0,
+ (mNumPcmBytesPerInputFrame - mFilledLen));
+ mBufferAvailable = true;
+ }
+ }
+ const unsigned nInputSamples = processSize / sizeof(int16_t);
+
+ for (unsigned i = 0; i < nInputSamples; i++) {
+ int32_t data = pcmBytes[2 * i + 1] << 8 | pcmBytes[2 * i];
+ data = ((data & 0xFFFF) ^ 0x8000) - 0x8000;
+ mInputBufferPcm16[i + filledSamples] = data;
+ }
+ inPos += processSize;
+ mFilledLen += processSize;
+ if (!mBufferAvailable) break;
+ uint8_t* outPtr = wView.data() + mBytesEncoded;
+ int encodedBytes =
+ opus_multistream_encode(mEncoder, mInputBufferPcm16,
+ mNumSamplesPerFrame, outPtr, kMaxPayload);
+ ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
+ processSize);
+
+ if (encodedBytes < 0 || encodedBytes > kMaxPayload) {
+ ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ if (buffer) {
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ cloneAndSend(
+ inputIndex, work,
+ FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+ buffer.reset();
+ }
+ if (encodedBytes > 0) {
+ buffer =
+ createLinearBuffer(mOutputBlock, mBytesEncoded, encodedBytes);
+ }
+ mBytesEncoded += encodedBytes;
+ mProcessedSamples += (filledSamples + nInputSamples);
+ outTimeStamp =
+ mProcessedSamples * 1000000ll / mChannelCount / mSampleRate;
+ if ((processSize + mFilledLen) < mNumPcmBytesPerInputFrame)
+ mEncoderFlushed = true;
+ mFilledLen = 0;
+ }
+
+ uint32_t flags = 0;
+ if (eos) {
+ ALOGV("signalled eos");
+ mSignalledEos = true;
+ if (!mEncoderFlushed) {
+ if (buffer) {
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ cloneAndSend(
+ inputIndex, work,
+ FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
+ buffer.reset();
+ }
+ // drain the encoder for last buffer
+ drainInternal(pool, work);
+ }
+ flags = C2FrameData::FLAG_END_OF_STREAM;
+ }
+ if (buffer) {
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ FillWork((C2FrameData::flags_t)(flags), outOrdinal, buffer)(work);
+ buffer.reset();
+ }
+ mOutputBlock = nullptr;
+}
+
+c2_status_t C2SoftOpusEnc::drainInternal(
+ const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work) {
+ mBytesEncoded = 0;
+ std::shared_ptr<C2Buffer> buffer = nullptr;
+ C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+ bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ c2_status_t err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchLinearBlock for Output failed with status %d", err);
+ return C2_NO_MEMORY;
+ }
+
+ C2WriteView wView = mOutputBlock->map().get();
+ if (wView.error()) {
+ ALOGE("write view map failed %d", wView.error());
+ mOutputBlock.reset();
+ return C2_CORRUPTED;
+ }
+
+ int encBytes = drainEncoder(wView.data());
+ if (encBytes > 0) mBytesEncoded += encBytes;
+ if (mBytesEncoded > 0) {
+ buffer = createLinearBuffer(mOutputBlock, 0, mBytesEncoded);
+ mOutputBlock.reset();
+ }
+ mProcessedSamples += (mNumPcmBytesPerInputFrame / sizeof(int16_t));
+ uint64_t outTimeStamp =
+ mProcessedSamples * 1000000ll / mChannelCount / mSampleRate;
+ outOrdinal.frameIndex = mOutIndex++;
+ outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+ work->worklets.front()->output.flags =
+ (C2FrameData::flags_t)(eos ? C2FrameData::FLAG_END_OF_STREAM : 0);
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = outOrdinal;
+ work->workletsProcessed = 1u;
+ work->result = C2_OK;
+ if (buffer) {
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+ mOutputBlock = nullptr;
+ return C2_OK;
+}
+
+c2_status_t C2SoftOpusEnc::drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+ mIsFirstFrame = true;
+ mAnchorTimeStamp = 0ull;
+ mProcessedSamples = 0u;
+ return drainInternal(pool, nullptr);
+}
+
+class C2SoftOpusEncFactory : public C2ComponentFactory {
+public:
+ C2SoftOpusEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftOpusEnc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftOpusEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftOpusEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftOpusEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftOpusEncFactory() override = default;
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftOpusEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
new file mode 100644
index 0000000..69e5240
--- /dev/null
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_OPUS_ENC_H_
+#define ANDROID_C2_SOFT_OPUS_ENC_H_
+
+#include <atomic>
+#include <SimpleC2Component.h>
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+struct OpusMSEncoder;
+
+namespace android {
+
+struct C2SoftOpusEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftOpusEnc(const char *name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftOpusEnc();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+private:
+ /* OPUS_FRAMESIZE_20_MS */
+ const int kFrameSize = 960;
+ const int kMaxPayload = 4000;
+ const int kMaxNumChannels = 8;
+
+ std::shared_ptr<IntfImpl> mIntf;
+ std::shared_ptr<C2LinearBlock> mOutputBlock;
+
+ OpusMSEncoder* mEncoder;
+ int16_t* mInputBufferPcm16;
+
+ bool mHeaderGenerated;
+ bool mIsFirstFrame;
+ bool mEncoderFlushed;
+ bool mBufferAvailable;
+ bool mSignalledEos;
+ bool mSignalledError;
+ uint32_t mSampleRate;
+ uint32_t mChannelCount;
+ uint32_t mFrameDurationMs;
+ uint64_t mAnchorTimeStamp;
+ uint64_t mProcessedSamples;
+ // Codec delay in ns
+ uint64_t mCodecDelay;
+ // Seek pre-roll in ns
+ uint64_t mSeekPreRoll;
+ int mNumSamplesPerFrame;
+ int mBytesEncoded;
+ int32_t mFilledLen;
+ size_t mNumPcmBytesPerInputFrame;
+ std::atomic_uint64_t mOutIndex;
+ c2_status_t initEncoder();
+ c2_status_t configureEncoder();
+ int drainEncoder(uint8_t* outPtr);
+ c2_status_t drainInternal(const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+
+ C2_DO_NOT_COPY(C2SoftOpusEnc);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_OPUS_ENC_H_
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index cf1f6cf..23939b5 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -638,7 +638,7 @@
LEVEL_VP9_6_1, ///< VP9 Level 6.1
LEVEL_VP9_6_2, ///< VP9 Level 6.2
- // Dolby Vision level
+ // Dolby Vision levels
LEVEL_DV_MAIN_HD_24 = _C2_PL_DV_BASE, ///< Dolby Vision main tier hd24
LEVEL_DV_MAIN_HD_30, ///< Dolby Vision main tier hd30
LEVEL_DV_MAIN_FHD_24, ///< Dolby Vision main tier fhd24
@@ -659,6 +659,7 @@
LEVEL_DV_HIGH_UHD_48, ///< Dolby Vision high tier uhd48
LEVEL_DV_HIGH_UHD_60, ///< Dolby Vision high tier uhd60
+ // AV1 levels
LEVEL_AV1_2 = _C2_PL_AV1_BASE , ///< AV1 Level 2
LEVEL_AV1_2_1, ///< AV1 Level 2.1
LEVEL_AV1_2_2, ///< AV1 Level 2.2
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 749fd7a..9500aed 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -225,14 +225,18 @@
if (omxBuf.mBufferType == OMXBuffer::kBufferTypeANWBuffer
&& omxBuf.mGraphicBuffer != nullptr) {
std::shared_ptr<C2GraphicAllocation> alloc;
+ native_handle_t *clonedHandle = native_handle_clone(omxBuf.mGraphicBuffer->handle);
handle = WrapNativeCodec2GrallocHandle(
- native_handle_clone(omxBuf.mGraphicBuffer->handle),
+ clonedHandle,
omxBuf.mGraphicBuffer->width,
omxBuf.mGraphicBuffer->height,
omxBuf.mGraphicBuffer->format,
omxBuf.mGraphicBuffer->usage,
omxBuf.mGraphicBuffer->stride);
c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
+ if (clonedHandle) {
+ native_handle_delete(clonedHandle);
+ }
if (err != OK) {
return UNKNOWN_ERROR;
}
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 4878974..18f2430 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -304,17 +304,23 @@
}
C2AllocationGralloc::~C2AllocationGralloc() {
- if (!mBuffer) {
- return;
- }
- if (mLocked) {
+ if (mBuffer && mLocked) {
// implementation ignores addresss and rect
uint8_t* addr[C2PlanarLayout::MAX_NUM_PLANES] = {};
unmap(addr, C2Rect(), nullptr);
}
- mMapper->freeBuffer(const_cast<native_handle_t *>(mBuffer));
- native_handle_delete(const_cast<native_handle_t*>(
- reinterpret_cast<const native_handle_t*>(mHandle)));
+ if (mBuffer) {
+ mMapper->freeBuffer(const_cast<native_handle_t *>(mBuffer));
+ }
+ if (mHandle) {
+ native_handle_delete(
+ const_cast<native_handle_t *>(reinterpret_cast<const native_handle_t *>(mHandle)));
+ }
+ if (mLockedHandle) {
+ native_handle_delete(
+ const_cast<native_handle_t *>(
+ reinterpret_cast<const native_handle_t *>(mLockedHandle)));
+ }
}
c2_status_t C2AllocationGralloc::map(
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index 782bec5..8a27088 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -221,6 +221,30 @@
{ "vp9-6", C2Config::LEVEL_VP9_6 },
{ "vp9-6.1", C2Config::LEVEL_VP9_6_1 },
{ "vp9-6.2", C2Config::LEVEL_VP9_6_2 },
+ { "av1-2", C2Config::LEVEL_AV1_2 },
+ { "av1-2.1", C2Config::LEVEL_AV1_2_1 },
+ { "av1-2.2", C2Config::LEVEL_AV1_2_2 },
+ { "av1-2.3", C2Config::LEVEL_AV1_2_3 },
+ { "av1-3", C2Config::LEVEL_AV1_3 },
+ { "av1-3.1", C2Config::LEVEL_AV1_3_1 },
+ { "av1-3.2", C2Config::LEVEL_AV1_3_2 },
+ { "av1-3.3", C2Config::LEVEL_AV1_3_3 },
+ { "av1-4", C2Config::LEVEL_AV1_4 },
+ { "av1-4.1", C2Config::LEVEL_AV1_4_1 },
+ { "av1-4.2", C2Config::LEVEL_AV1_4_2 },
+ { "av1-4.3", C2Config::LEVEL_AV1_4_3 },
+ { "av1-5", C2Config::LEVEL_AV1_5 },
+ { "av1-5.1", C2Config::LEVEL_AV1_5_1 },
+ { "av1-5.2", C2Config::LEVEL_AV1_5_2 },
+ { "av1-5.3", C2Config::LEVEL_AV1_5_3 },
+ { "av1-6", C2Config::LEVEL_AV1_6 },
+ { "av1-6.1", C2Config::LEVEL_AV1_6_1 },
+ { "av1-6.2", C2Config::LEVEL_AV1_6_2 },
+ { "av1-6.3", C2Config::LEVEL_AV1_6_3 },
+ { "av1-7", C2Config::LEVEL_AV1_7 },
+ { "av1-7.1", C2Config::LEVEL_AV1_7_1 },
+ { "av1-7.2", C2Config::LEVEL_AV1_7_2 },
+ { "av1-7.3", C2Config::LEVEL_AV1_7_3 },
}))
DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2BufferData::type_t, ({
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index a5dd203..dc7e89c 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -817,6 +817,7 @@
emplace("c2.android.mp3.decoder", "libcodec2_soft_mp3dec.so");
emplace("c2.android.vorbis.decoder", "libcodec2_soft_vorbisdec.so");
emplace("c2.android.opus.decoder", "libcodec2_soft_opusdec.so");
+ emplace("c2.android.opus.encoder", "libcodec2_soft_opusenc.so");
emplace("c2.android.vp8.decoder", "libcodec2_soft_vp8dec.so");
emplace("c2.android.vp9.decoder", "libcodec2_soft_vp9dec.so");
emplace("c2.android.vp8.encoder", "libcodec2_soft_vp8enc.so");
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index 55a0c47..eb6602c 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -48,7 +48,7 @@
offset(0), size(0), nextTileIndex(0) {}
bool isGrid() const {
- return type == FOURCC('g', 'r', 'i', 'd');
+ return type == FOURCC("grid");
}
status_t getNextTileItemId(uint32_t *nextTileItemId, bool reset) {
@@ -223,7 +223,7 @@
struct PitmBox : public FullBox {
PitmBox(DataSourceHelper *source) :
- FullBox(source, FOURCC('p', 'i', 't', 'm')) {}
+ FullBox(source, FOURCC("pitm")) {}
status_t parse(off64_t offset, size_t size, uint32_t *primaryItemId);
};
@@ -303,7 +303,7 @@
struct IlocBox : public FullBox {
IlocBox(DataSourceHelper *source, KeyedVector<uint32_t, ItemLoc> *itemLocs) :
- FullBox(source, FOURCC('i', 'l', 'o', 'c')),
+ FullBox(source, FOURCC("iloc")),
mItemLocs(itemLocs), mHasConstructMethod1(false) {}
status_t parse(off64_t offset, size_t size);
@@ -497,7 +497,7 @@
ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
switch(type()) {
- case FOURCC('d', 'i', 'm', 'g'): {
+ case FOURCC("dimg"): {
ssize_t itemIndex = itemIdToItemMap.indexOfKey(mItemId);
// ignore non-image items
@@ -525,7 +525,7 @@
}
break;
}
- case FOURCC('t', 'h', 'm', 'b'): {
+ case FOURCC("thmb"): {
ssize_t itemIndex = itemIdToItemMap.indexOfKey(mItemId);
// ignore non-image items
@@ -554,7 +554,7 @@
}
break;
}
- case FOURCC('c', 'd', 's', 'c'): {
+ case FOURCC("cdsc"): {
ssize_t itemIndex = itemIdToExifMap.indexOfKey(mItemId);
// ignore non-exif block items
@@ -575,7 +575,7 @@
}
break;
}
- case FOURCC('a', 'u', 'x', 'l'): {
+ case FOURCC("auxl"): {
ssize_t itemIndex = itemIdToItemMap.indexOfKey(mItemId);
// ignore non-image items
@@ -628,7 +628,7 @@
struct IrefBox : public FullBox {
IrefBox(DataSourceHelper *source, Vector<sp<ItemReference> > *itemRefs) :
- FullBox(source, FOURCC('i', 'r', 'e', 'f')), mRefIdSize(0), mItemRefs(itemRefs) {}
+ FullBox(source, FOURCC("iref")), mRefIdSize(0), mItemRefs(itemRefs) {}
status_t parse(off64_t offset, size_t size);
@@ -690,7 +690,7 @@
struct IspeBox : public FullBox, public ItemProperty {
IspeBox(DataSourceHelper *source) :
- FullBox(source, FOURCC('i', 's', 'p', 'e')), mWidth(0), mHeight(0) {}
+ FullBox(source, FOURCC("ispe")), mWidth(0), mHeight(0) {}
status_t parse(off64_t offset, size_t size) override;
@@ -726,7 +726,7 @@
struct HvccBox : public Box, public ItemProperty {
HvccBox(DataSourceHelper *source) :
- Box(source, FOURCC('h', 'v', 'c', 'C')) {}
+ Box(source, FOURCC("hvcC")) {}
status_t parse(off64_t offset, size_t size) override;
@@ -759,7 +759,7 @@
struct IrotBox : public Box, public ItemProperty {
IrotBox(DataSourceHelper *source) :
- Box(source, FOURCC('i', 'r', 'o', 't')), mAngle(0) {}
+ Box(source, FOURCC("irot")), mAngle(0) {}
status_t parse(off64_t offset, size_t size) override;
@@ -788,7 +788,7 @@
struct ColrBox : public Box, public ItemProperty {
ColrBox(DataSourceHelper *source) :
- Box(source, FOURCC('c', 'o', 'l', 'r')) {}
+ Box(source, FOURCC("colr")) {}
status_t parse(off64_t offset, size_t size) override;
@@ -812,11 +812,11 @@
}
offset += 4;
size -= 4;
- if (colour_type == FOURCC('n', 'c', 'l', 'x')) {
+ if (colour_type == FOURCC("nclx")) {
return OK;
}
- if ((colour_type != FOURCC('r', 'I', 'C', 'C')) &&
- (colour_type != FOURCC('p', 'r', 'o', 'f'))) {
+ if ((colour_type != FOURCC("rICC")) &&
+ (colour_type != FOURCC("prof"))) {
return ERROR_MALFORMED;
}
@@ -836,7 +836,7 @@
struct IpmaBox : public FullBox {
IpmaBox(DataSourceHelper *source, Vector<AssociationEntry> *associations) :
- FullBox(source, FOURCC('i', 'p', 'm', 'a')), mAssociations(associations) {}
+ FullBox(source, FOURCC("ipma")), mAssociations(associations) {}
status_t parse(off64_t offset, size_t size);
private:
@@ -910,7 +910,7 @@
struct IpcoBox : public Box {
IpcoBox(DataSourceHelper *source, Vector<sp<ItemProperty> > *properties) :
- Box(source, FOURCC('i', 'p', 'c', 'o')), mItemProperties(properties) {}
+ Box(source, FOURCC("ipco")), mItemProperties(properties) {}
status_t parse(off64_t offset, size_t size);
protected:
@@ -930,22 +930,22 @@
status_t IpcoBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
sp<ItemProperty> itemProperty;
switch(type) {
- case FOURCC('h', 'v', 'c', 'C'):
+ case FOURCC("hvcC"):
{
itemProperty = new HvccBox(source());
break;
}
- case FOURCC('i', 's', 'p', 'e'):
+ case FOURCC("ispe"):
{
itemProperty = new IspeBox(source());
break;
}
- case FOURCC('i', 'r', 'o', 't'):
+ case FOURCC("irot"):
{
itemProperty = new IrotBox(source());
break;
}
- case FOURCC('c', 'o', 'l', 'r'):
+ case FOURCC("colr"):
{
itemProperty = new ColrBox(source());
break;
@@ -969,7 +969,7 @@
IprpBox(DataSourceHelper *source,
Vector<sp<ItemProperty> > *properties,
Vector<AssociationEntry> *associations) :
- Box(source, FOURCC('i', 'p', 'r', 'p')),
+ Box(source, FOURCC("iprp")),
mProperties(properties), mAssociations(associations) {}
status_t parse(off64_t offset, size_t size);
@@ -993,12 +993,12 @@
status_t IprpBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
switch(type) {
- case FOURCC('i', 'p', 'c', 'o'):
+ case FOURCC("ipco"):
{
IpcoBox ipcoBox(source(), mProperties);
return ipcoBox.parse(offset, size);
}
- case FOURCC('i', 'p', 'm', 'a'):
+ case FOURCC("ipma"):
{
IpmaBox ipmaBox(source(), mAssociations);
return ipmaBox.parse(offset, size);
@@ -1024,7 +1024,7 @@
struct InfeBox : public FullBox {
InfeBox(DataSourceHelper *source) :
- FullBox(source, FOURCC('i', 'n', 'f', 'e')) {}
+ FullBox(source, FOURCC("infe")) {}
status_t parse(off64_t offset, size_t size, ItemInfo *itemInfo);
@@ -1104,7 +1104,7 @@
}
ALOGV("item_name %s", item_name.c_str());
- if (item_type == FOURCC('m', 'i', 'm', 'e')) {
+ if (item_type == FOURCC("mime")) {
String8 content_type;
if (!parseNullTerminatedString(&offset, &size, &content_type)) {
return ERROR_MALFORMED;
@@ -1117,7 +1117,7 @@
return ERROR_MALFORMED;
}
}
- } else if (item_type == FOURCC('u', 'r', 'i', ' ')) {
+ } else if (item_type == FOURCC("uri ")) {
String8 item_uri_type;
if (!parseNullTerminatedString(&offset, &size, &item_uri_type)) {
return ERROR_MALFORMED;
@@ -1129,7 +1129,7 @@
struct IinfBox : public FullBox {
IinfBox(DataSourceHelper *source, Vector<ItemInfo> *itemInfos) :
- FullBox(source, FOURCC('i', 'i', 'n', 'f')),
+ FullBox(source, FOURCC("iinf")),
mItemInfos(itemInfos), mHasGrids(false) {}
status_t parse(off64_t offset, size_t size);
@@ -1179,7 +1179,7 @@
}
status_t IinfBox::onChunkData(uint32_t type, off64_t offset, size_t size) {
- if (type != FOURCC('i', 'n', 'f', 'e')) {
+ if (type != FOURCC("infe")) {
return OK;
}
@@ -1188,7 +1188,7 @@
status_t err = infeBox.parse(offset, size, &itemInfo);
if (err == OK) {
mItemInfos->push_back(itemInfo);
- mHasGrids |= (itemInfo.itemType == FOURCC('g', 'r', 'i', 'd'));
+ mHasGrids |= (itemInfo.itemType == FOURCC("grid"));
}
// InfeBox parse returns ERROR_UNSUPPORTED if the box if an unsupported
// version. Ignore this error as it's not fatal.
@@ -1214,31 +1214,31 @@
status_t ItemTable::parse(uint32_t type, off64_t data_offset, size_t chunk_data_size) {
switch(type) {
- case FOURCC('i', 'l', 'o', 'c'):
+ case FOURCC("iloc"):
{
return parseIlocBox(data_offset, chunk_data_size);
}
- case FOURCC('i', 'i', 'n', 'f'):
+ case FOURCC("iinf"):
{
return parseIinfBox(data_offset, chunk_data_size);
}
- case FOURCC('i', 'p', 'r', 'p'):
+ case FOURCC("iprp"):
{
return parseIprpBox(data_offset, chunk_data_size);
}
- case FOURCC('p', 'i', 't', 'm'):
+ case FOURCC("pitm"):
{
return parsePitmBox(data_offset, chunk_data_size);
}
- case FOURCC('i', 'd', 'a', 't'):
+ case FOURCC("idat"):
{
return parseIdatBox(data_offset, chunk_data_size);
}
- case FOURCC('i', 'r', 'e', 'f'):
+ case FOURCC("iref"):
{
return parseIrefBox(data_offset, chunk_data_size);
}
- case FOURCC('i', 'p', 'r', 'o'):
+ case FOURCC("ipro"):
{
ALOGW("ipro box not supported!");
break;
@@ -1355,9 +1355,9 @@
// 'grid': derived image from tiles
// 'hvc1': coded image (or tile)
// 'Exif': EXIF metadata
- if (info.itemType != FOURCC('g', 'r', 'i', 'd') &&
- info.itemType != FOURCC('h', 'v', 'c', '1') &&
- info.itemType != FOURCC('E', 'x', 'i', 'f')) {
+ if (info.itemType != FOURCC("grid") &&
+ info.itemType != FOURCC("hvc1") &&
+ info.itemType != FOURCC("Exif")) {
continue;
}
@@ -1380,7 +1380,7 @@
return ERROR_MALFORMED;
}
- if (info.itemType == FOURCC('E', 'x', 'i', 'f')) {
+ if (info.itemType == FOURCC("Exif")) {
// Only add if the Exif data is non-empty. The first 4 bytes contain
// the offset to TIFF header, which the Exif parser doesn't use.
if (size > 4) {
@@ -1687,8 +1687,31 @@
}
// skip the first 4-byte of the offset to TIFF header
- *offset = mItemIdToExifMap[exifIndex].offset + 4;
- *size = mItemIdToExifMap[exifIndex].size - 4;
+ uint32_t tiffOffset;
+ if (!mDataSource->readAt(
+ mItemIdToExifMap[exifIndex].offset, &tiffOffset, 4)) {
+ return ERROR_IO;
+ }
+
+ // We need 'Exif\0\0' before the tiff header
+ tiffOffset = ntohl(tiffOffset);
+ if (tiffOffset < 6) {
+ return ERROR_MALFORMED;
+ }
+ // The first 4 bytes of the item are the offset of the TIFF header within the
+ // Exif data. The size of the item should be > 4 for non-empty Exif data (this
+ // was already checked when the item was added). Also check that the TIFF
+ // header offset is valid.
+ if (mItemIdToExifMap[exifIndex].size <= 4 ||
+ tiffOffset > mItemIdToExifMap[exifIndex].size - 4) {
+ return ERROR_MALFORMED;
+ }
+
+ // Offset of 'Exif\0\0' relative to the beginning of the 'Exif' item
+ // (the first 4 bytes are the TIFF header offset)
+ uint32_t exifOffset = 4 + tiffOffset - 6;
+ *offset = mItemIdToExifMap[exifIndex].offset + exifOffset;
+ *size = mItemIdToExifMap[exifIndex].size - exifOffset;
return OK;
}
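
The replacement code above derives the location of the 'Exif\0\0' marker from the 4-byte TIFF-header offset stored at the start of the 'Exif' item. A small sketch of the same arithmetic with its validity checks, not part of this change; the function name exifRange is illustrative only:

#include <cstdint>

// itemOffset/itemSize describe the 'Exif' item; tiffOffset is the big-endian
// uint32 read from its first 4 bytes, already byte-swapped with ntohl().
static bool exifRange(uint64_t itemOffset, uint64_t itemSize, uint32_t tiffOffset,
                      uint64_t *outOffset, uint64_t *outSize) {
    if (tiffOffset < 6 || itemSize <= 4 || tiffOffset > itemSize - 4) {
        return false;  // malformed: no room for 'Exif\0\0' before the TIFF header
    }
    // 'Exif\0\0' starts 6 bytes before the TIFF header; the TIFF header itself
    // starts 4 (length field) + tiffOffset bytes into the item.
    uint64_t exifOffset = 4 + tiffOffset - 6;
    *outOffset = itemOffset + exifOffset;
    *outSize = itemSize - exifOffset;
    return true;
}

// Example: tiffOffset == 6 (marker right after the length field) gives
// exifOffset == 4, i.e. offset = itemOffset + 4 and size = itemSize - 4,
// matching what the removed code hard-coded.
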
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 2013575..d0efddd 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -308,42 +308,42 @@
static const char *FourCC2MIME(uint32_t fourcc) {
switch (fourcc) {
- case FOURCC('m', 'p', '4', 'a'):
+ case FOURCC("mp4a"):
return MEDIA_MIMETYPE_AUDIO_AAC;
- case FOURCC('s', 'a', 'm', 'r'):
+ case FOURCC("samr"):
return MEDIA_MIMETYPE_AUDIO_AMR_NB;
- case FOURCC('s', 'a', 'w', 'b'):
+ case FOURCC("sawb"):
return MEDIA_MIMETYPE_AUDIO_AMR_WB;
- case FOURCC('e', 'c', '-', '3'):
+ case FOURCC("ec-3"):
return MEDIA_MIMETYPE_AUDIO_EAC3;
- case FOURCC('m', 'p', '4', 'v'):
+ case FOURCC("mp4v"):
return MEDIA_MIMETYPE_VIDEO_MPEG4;
- case FOURCC('s', '2', '6', '3'):
- case FOURCC('h', '2', '6', '3'):
- case FOURCC('H', '2', '6', '3'):
+ case FOURCC("s263"):
+ case FOURCC("h263"):
+ case FOURCC("H263"):
return MEDIA_MIMETYPE_VIDEO_H263;
- case FOURCC('a', 'v', 'c', '1'):
+ case FOURCC("avc1"):
return MEDIA_MIMETYPE_VIDEO_AVC;
- case FOURCC('h', 'v', 'c', '1'):
- case FOURCC('h', 'e', 'v', '1'):
+ case FOURCC("hvc1"):
+ case FOURCC("hev1"):
return MEDIA_MIMETYPE_VIDEO_HEVC;
- case FOURCC('a', 'c', '-', '4'):
+ case FOURCC("ac-4"):
return MEDIA_MIMETYPE_AUDIO_AC4;
- case FOURCC('t', 'w', 'o', 's'):
- case FOURCC('s', 'o', 'w', 't'):
+ case FOURCC("twos"):
+ case FOURCC("sowt"):
return MEDIA_MIMETYPE_AUDIO_RAW;
- case FOURCC('a', 'l', 'a', 'c'):
+ case FOURCC("alac"):
return MEDIA_MIMETYPE_AUDIO_ALAC;
- case FOURCC('a', 'v', '0', '1'):
+ case FOURCC("av01"):
return MEDIA_MIMETYPE_VIDEO_AV1;
default:
ALOGW("Unknown fourcc: %c%c%c%c",
@@ -594,7 +594,7 @@
}
} else {
uint32_t sampleIndex;
- uint32_t sampleTime;
+ uint64_t sampleTime;
if (track->timescale != 0 &&
track->sampleTable->findThumbnailSample(&sampleIndex) == OK
&& track->sampleTable->getMetaDataForSample(
@@ -749,21 +749,21 @@
static bool underMetaDataPath(const Vector<uint32_t> &path) {
return path.size() >= 5
- && path[0] == FOURCC('m', 'o', 'o', 'v')
- && path[1] == FOURCC('u', 'd', 't', 'a')
- && path[2] == FOURCC('m', 'e', 't', 'a')
- && path[3] == FOURCC('i', 'l', 's', 't');
+ && path[0] == FOURCC("moov")
+ && path[1] == FOURCC("udta")
+ && path[2] == FOURCC("meta")
+ && path[3] == FOURCC("ilst");
}
static bool underQTMetaPath(const Vector<uint32_t> &path, int32_t depth) {
return path.size() >= 2
- && path[0] == FOURCC('m', 'o', 'o', 'v')
- && path[1] == FOURCC('m', 'e', 't', 'a')
+ && path[0] == FOURCC("moov")
+ && path[1] == FOURCC("meta")
&& (depth == 2
|| (depth == 3
- && (path[2] == FOURCC('h', 'd', 'l', 'r')
- || path[2] == FOURCC('i', 'l', 's', 't')
- || path[2] == FOURCC('k', 'e', 'y', 's'))));
+ && (path[2] == FOURCC("hdlr")
+ || path[2] == FOURCC("ilst")
+ || path[2] == FOURCC("keys"))));
}
// Given a time in seconds since Jan 1 1904, produce a human-readable string.
@@ -867,7 +867,7 @@
ALOGE("b/23540914");
return ERROR_MALFORMED;
}
- if (chunk_type != FOURCC('m', 'd', 'a', 't') && chunk_data_size > kMaxAtomSize) {
+ if (chunk_type != FOURCC("mdat") && chunk_data_size > kMaxAtomSize) {
char errMsg[100];
sprintf(errMsg, "%s atom has size %" PRId64, chunk, chunk_data_size);
ALOGE("%s (b/28615448)", errMsg);
@@ -875,8 +875,8 @@
return ERROR_MALFORMED;
}
- if (chunk_type != FOURCC('c', 'p', 'r', 't')
- && chunk_type != FOURCC('c', 'o', 'v', 'r')
+ if (chunk_type != FOURCC("cprt")
+ && chunk_type != FOURCC("covr")
&& mPath.size() == 5 && underMetaDataPath(mPath)) {
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
@@ -895,40 +895,40 @@
}
switch(chunk_type) {
- case FOURCC('m', 'o', 'o', 'v'):
- case FOURCC('t', 'r', 'a', 'k'):
- case FOURCC('m', 'd', 'i', 'a'):
- case FOURCC('m', 'i', 'n', 'f'):
- case FOURCC('d', 'i', 'n', 'f'):
- case FOURCC('s', 't', 'b', 'l'):
- case FOURCC('m', 'v', 'e', 'x'):
- case FOURCC('m', 'o', 'o', 'f'):
- case FOURCC('t', 'r', 'a', 'f'):
- case FOURCC('m', 'f', 'r', 'a'):
- case FOURCC('u', 'd', 't', 'a'):
- case FOURCC('i', 'l', 's', 't'):
- case FOURCC('s', 'i', 'n', 'f'):
- case FOURCC('s', 'c', 'h', 'i'):
- case FOURCC('e', 'd', 't', 's'):
- case FOURCC('w', 'a', 'v', 'e'):
+ case FOURCC("moov"):
+ case FOURCC("trak"):
+ case FOURCC("mdia"):
+ case FOURCC("minf"):
+ case FOURCC("dinf"):
+ case FOURCC("stbl"):
+ case FOURCC("mvex"):
+ case FOURCC("moof"):
+ case FOURCC("traf"):
+ case FOURCC("mfra"):
+ case FOURCC("udta"):
+ case FOURCC("ilst"):
+ case FOURCC("sinf"):
+ case FOURCC("schi"):
+ case FOURCC("edts"):
+ case FOURCC("wave"):
{
- if (chunk_type == FOURCC('m', 'o', 'o', 'v') && depth != 0) {
+ if (chunk_type == FOURCC("moov") && depth != 0) {
ALOGE("moov: depth %d", depth);
return ERROR_MALFORMED;
}
- if (chunk_type == FOURCC('m', 'o', 'o', 'v') && mInitCheck == OK) {
+ if (chunk_type == FOURCC("moov") && mInitCheck == OK) {
ALOGE("duplicate moov");
return ERROR_MALFORMED;
}
- if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) {
+ if (chunk_type == FOURCC("moof") && !mMoofFound) {
// store the offset of the first segment
mMoofFound = true;
mMoofOffset = *offset;
}
- if (chunk_type == FOURCC('s', 't', 'b', 'l')) {
+ if (chunk_type == FOURCC("stbl")) {
ALOGV("sampleTable chunk is %" PRIu64 " bytes long.", chunk_size);
if (mDataSource->flags()
@@ -954,7 +954,7 @@
}
bool isTrack = false;
- if (chunk_type == FOURCC('t', 'r', 'a', 'k')) {
+ if (chunk_type == FOURCC("trak")) {
if (depth != 1) {
ALOGE("trak: depth %d", depth);
return ERROR_MALFORMED;
@@ -1049,7 +1049,7 @@
return OK;
}
- } else if (chunk_type == FOURCC('m', 'o', 'o', 'v')) {
+ } else if (chunk_type == FOURCC("moov")) {
mInitCheck = OK;
return UNKNOWN_ERROR; // Return a dummy error.
@@ -1057,7 +1057,7 @@
break;
}
- case FOURCC('s', 'c', 'h', 'm'):
+ case FOURCC("schm"):
{
*offset += chunk_size;
@@ -1072,23 +1072,23 @@
scheme_type = ntohl(scheme_type);
int32_t mode = kCryptoModeUnencrypted;
switch(scheme_type) {
- case FOURCC('c', 'b', 'c', '1'):
+ case FOURCC("cbc1"):
{
mode = kCryptoModeAesCbc;
break;
}
- case FOURCC('c', 'b', 'c', 's'):
+ case FOURCC("cbcs"):
{
mode = kCryptoModeAesCbc;
mLastTrack->subsample_encryption = true;
break;
}
- case FOURCC('c', 'e', 'n', 'c'):
+ case FOURCC("cenc"):
{
mode = kCryptoModeAesCtr;
break;
}
- case FOURCC('c', 'e', 'n', 's'):
+ case FOURCC("cens"):
{
mode = kCryptoModeAesCtr;
mLastTrack->subsample_encryption = true;
@@ -1102,7 +1102,7 @@
}
- case FOURCC('e', 'l', 's', 't'):
+ case FOURCC("elst"):
{
*offset += chunk_size;
@@ -1158,7 +1158,7 @@
break;
}
- case FOURCC('f', 'r', 'm', 'a'):
+ case FOURCC("frma"):
{
*offset += chunk_size;
@@ -1187,7 +1187,7 @@
// If the format type is 'alac', it is necessary to get the parameters
// from an alac atom that follows the frma atom.
// See 'external/alac/ALACMagicCookieDescription.txt'.
- if (original_fourcc == FOURCC('a', 'l', 'a', 'c')) {
+ if (original_fourcc == FOURCC("alac")) {
// Store ALAC magic cookie (decoder needs it).
uint8_t alacInfo[12];
data_offset = *offset;
@@ -1197,7 +1197,7 @@
}
uint32_t size = U32_AT(&alacInfo[0]);
if ((size != ALAC_SPECIFIC_INFO_SIZE) ||
- (U32_AT(&alacInfo[4]) != FOURCC('a', 'l', 'a', 'c')) ||
+ (U32_AT(&alacInfo[4]) != FOURCC("alac")) ||
(U32_AT(&alacInfo[8]) != 0)) {
return ERROR_MALFORMED;
}
@@ -1226,7 +1226,7 @@
break;
}
- case FOURCC('t', 'e', 'n', 'c'):
+ case FOURCC("tenc"):
{
*offset += chunk_size;
@@ -1339,7 +1339,7 @@
break;
}
- case FOURCC('t', 'k', 'h', 'd'):
+ case FOURCC("tkhd"):
{
*offset += chunk_size;
@@ -1351,7 +1351,7 @@
break;
}
- case FOURCC('t', 'r', 'e', 'f'):
+ case FOURCC("tref"):
{
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
@@ -1367,7 +1367,7 @@
break;
}
- case FOURCC('t', 'h', 'm', 'b'):
+ case FOURCC("thmb"):
{
*offset += chunk_size;
@@ -1384,7 +1384,7 @@
break;
}
- case FOURCC('p', 's', 's', 'h'):
+ case FOURCC("pssh"):
{
*offset += chunk_size;
@@ -1420,7 +1420,7 @@
break;
}
- case FOURCC('m', 'd', 'h', 'd'):
+ case FOURCC("mdhd"):
{
*offset += chunk_size;
@@ -1516,7 +1516,7 @@
break;
}
- case FOURCC('s', 't', 's', 'd'):
+ case FOURCC("stsd"):
{
uint8_t buffer[8];
if (chunk_data_size < (off64_t)sizeof(buffer)) {
@@ -1568,7 +1568,7 @@
}
break;
}
- case FOURCC('m', 'e', 't', 't'):
+ case FOURCC("mett"):
{
*offset += chunk_size;
@@ -1622,16 +1622,16 @@
break;
}
- case FOURCC('m', 'p', '4', 'a'):
- case FOURCC('e', 'n', 'c', 'a'):
- case FOURCC('s', 'a', 'm', 'r'):
- case FOURCC('s', 'a', 'w', 'b'):
- case FOURCC('t', 'w', 'o', 's'):
- case FOURCC('s', 'o', 'w', 't'):
- case FOURCC('a', 'l', 'a', 'c'):
+ case FOURCC("mp4a"):
+ case FOURCC("enca"):
+ case FOURCC("samr"):
+ case FOURCC("sawb"):
+ case FOURCC("twos"):
+ case FOURCC("sowt"):
+ case FOURCC("alac"):
{
- if (mIsQT && chunk_type == FOURCC('m', 'p', '4', 'a')
- && depth >= 1 && mPath[depth - 1] == FOURCC('w', 'a', 'v', 'e')) {
+ if (mIsQT && chunk_type == FOURCC("mp4a")
+ && depth >= 1 && mPath[depth - 1] == FOURCC("wave")) {
// Ignore mp4a embedded in QT wave atom
*offset += chunk_size;
break;
@@ -1661,7 +1661,7 @@
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
- if (mIsQT && chunk_type == FOURCC('m', 'p', '4', 'a')) {
+ if (mIsQT && chunk_type == FOURCC("mp4a")) {
if (version == 1) {
if (mDataSource->readAt(*offset, buffer, 16) < 16) {
return ERROR_IO;
@@ -1694,7 +1694,7 @@
}
}
- if (chunk_type != FOURCC('e', 'n', 'c', 'a')) {
+ if (chunk_type != FOURCC("enca")) {
// if the chunk type is enca, we'll get the type from the frma box later
AMediaFormat_setString(mLastTrack->meta,
AMEDIAFORMAT_KEY_MIME, FourCC2MIME(chunk_type));
@@ -1703,7 +1703,7 @@
if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, FourCC2MIME(chunk_type))) {
AMediaFormat_setInt32(mLastTrack->meta,
AMEDIAFORMAT_KEY_BITS_PER_SAMPLE, sample_size);
- if (chunk_type == FOURCC('t', 'w', 'o', 's')) {
+ if (chunk_type == FOURCC("twos")) {
AMediaFormat_setInt32(mLastTrack->meta,
AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN, 1);
}
@@ -1714,7 +1714,7 @@
AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_CHANNEL_COUNT, num_channels);
AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, sample_rate);
- if (chunk_type == FOURCC('a', 'l', 'a', 'c')) {
+ if (chunk_type == FOURCC("alac")) {
// See 'external/alac/ALACMagicCookieDescription.txt' for the details.
// Store ALAC magic cookie (decoder needs it).
@@ -1726,7 +1726,7 @@
}
uint32_t size = U32_AT(&alacInfo[0]);
if ((size != ALAC_SPECIFIC_INFO_SIZE) ||
- (U32_AT(&alacInfo[4]) != FOURCC('a', 'l', 'a', 'c')) ||
+ (U32_AT(&alacInfo[4]) != FOURCC("alac")) ||
(U32_AT(&alacInfo[8]) != 0)) {
return ERROR_MALFORMED;
}
@@ -1764,15 +1764,15 @@
break;
}
- case FOURCC('m', 'p', '4', 'v'):
- case FOURCC('e', 'n', 'c', 'v'):
- case FOURCC('s', '2', '6', '3'):
- case FOURCC('H', '2', '6', '3'):
- case FOURCC('h', '2', '6', '3'):
- case FOURCC('a', 'v', 'c', '1'):
- case FOURCC('h', 'v', 'c', '1'):
- case FOURCC('h', 'e', 'v', '1'):
- case FOURCC('a', 'v', '0', '1'):
+ case FOURCC("mp4v"):
+ case FOURCC("encv"):
+ case FOURCC("s263"):
+ case FOURCC("H263"):
+ case FOURCC("h263"):
+ case FOURCC("avc1"):
+ case FOURCC("hvc1"):
+ case FOURCC("hev1"):
+ case FOURCC("av01"):
{
uint8_t buffer[78];
if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -1802,7 +1802,7 @@
if (mLastTrack == NULL)
return ERROR_MALFORMED;
- if (chunk_type != FOURCC('e', 'n', 'c', 'v')) {
+ if (chunk_type != FOURCC("encv")) {
// if the chunk type is encv, we'll get the type from the frma box later
AMediaFormat_setString(mLastTrack->meta,
AMEDIAFORMAT_KEY_MIME, FourCC2MIME(chunk_type));
@@ -1825,8 +1825,8 @@
break;
}
- case FOURCC('s', 't', 'c', 'o'):
- case FOURCC('c', 'o', '6', '4'):
+ case FOURCC("stco"):
+ case FOURCC("co64"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
return ERROR_MALFORMED;
@@ -1845,7 +1845,7 @@
break;
}
- case FOURCC('s', 't', 's', 'c'):
+ case FOURCC("stsc"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
return ERROR_MALFORMED;
@@ -1863,8 +1863,8 @@
break;
}
- case FOURCC('s', 't', 's', 'z'):
- case FOURCC('s', 't', 'z', '2'):
+ case FOURCC("stsz"):
+ case FOURCC("stz2"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) {
return ERROR_MALFORMED;
@@ -1983,7 +1983,7 @@
break;
}
- case FOURCC('s', 't', 't', 's'):
+ case FOURCC("stts"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
return ERROR_MALFORMED;
@@ -2001,7 +2001,7 @@
break;
}
- case FOURCC('c', 't', 't', 's'):
+ case FOURCC("ctts"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
return ERROR_MALFORMED;
@@ -2019,7 +2019,7 @@
break;
}
- case FOURCC('s', 't', 's', 's'):
+ case FOURCC("stss"):
{
if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL))
return ERROR_MALFORMED;
@@ -2038,7 +2038,7 @@
}
// \xA9xyz
- case FOURCC(0xA9, 'x', 'y', 'z'):
+ case FOURCC("\251xyz"):
{
*offset += chunk_size;
@@ -2088,7 +2088,7 @@
break;
}
- case FOURCC('e', 's', 'd', 's'):
+ case FOURCC("esds"):
{
*offset += chunk_size;
@@ -2118,7 +2118,7 @@
AMEDIAFORMAT_KEY_ESDS, &buffer[4], chunk_data_size - 4);
if (mPath.size() >= 2
- && mPath[mPath.size() - 2] == FOURCC('m', 'p', '4', 'a')) {
+ && mPath[mPath.size() - 2] == FOURCC("mp4a")) {
// Information from the ESDS must be relied on for proper
// setup of sample rate and channel count for MPEG4 Audio.
// The generic header appears to only contain generic
@@ -2132,7 +2132,7 @@
}
}
if (mPath.size() >= 2
- && mPath[mPath.size() - 2] == FOURCC('m', 'p', '4', 'v')) {
+ && mPath[mPath.size() - 2] == FOURCC("mp4v")) {
// Check if the video is MPEG2
ESDS esds(&buffer[4], chunk_data_size - 4);
@@ -2147,7 +2147,7 @@
break;
}
- case FOURCC('b', 't', 'r', 't'):
+ case FOURCC("btrt"):
{
*offset += chunk_size;
if (mLastTrack == NULL) {
@@ -2177,7 +2177,7 @@
break;
}
- case FOURCC('a', 'v', 'c', 'C'):
+ case FOURCC("avcC"):
{
*offset += chunk_size;
@@ -2201,7 +2201,7 @@
break;
}
- case FOURCC('h', 'v', 'c', 'C'):
+ case FOURCC("hvcC"):
{
auto buffer = heapbuffer<uint8_t>(chunk_data_size);
@@ -2225,7 +2225,7 @@
break;
}
- case FOURCC('d', '2', '6', '3'):
+ case FOURCC("d263"):
{
*offset += chunk_size;
/*
@@ -2260,7 +2260,7 @@
break;
}
- case FOURCC('m', 'e', 't', 'a'):
+ case FOURCC("meta"):
{
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
@@ -2304,13 +2304,13 @@
break;
}
- case FOURCC('i', 'l', 'o', 'c'):
- case FOURCC('i', 'i', 'n', 'f'):
- case FOURCC('i', 'p', 'r', 'p'):
- case FOURCC('p', 'i', 't', 'm'):
- case FOURCC('i', 'd', 'a', 't'):
- case FOURCC('i', 'r', 'e', 'f'):
- case FOURCC('i', 'p', 'r', 'o'):
+ case FOURCC("iloc"):
+ case FOURCC("iinf"):
+ case FOURCC("iprp"):
+ case FOURCC("pitm"):
+ case FOURCC("idat"):
+ case FOURCC("iref"):
+ case FOURCC("ipro"):
{
if (mIsHeif) {
if (mItemTable == NULL) {
@@ -2326,9 +2326,9 @@
break;
}
- case FOURCC('m', 'e', 'a', 'n'):
- case FOURCC('n', 'a', 'm', 'e'):
- case FOURCC('d', 'a', 't', 'a'):
+ case FOURCC("mean"):
+ case FOURCC("name"):
+ case FOURCC("data"):
{
*offset += chunk_size;
@@ -2343,7 +2343,7 @@
break;
}
- case FOURCC('m', 'v', 'h', 'd'):
+ case FOURCC("mvhd"):
{
*offset += chunk_size;
@@ -2395,7 +2395,7 @@
break;
}
- case FOURCC('m', 'e', 'h', 'd'):
+ case FOURCC("mehd"):
{
*offset += chunk_size;
@@ -2440,7 +2440,7 @@
break;
}
- case FOURCC('m', 'd', 'a', 't'):
+ case FOURCC("mdat"):
{
mMdatFound = true;
@@ -2448,7 +2448,7 @@
break;
}
- case FOURCC('h', 'd', 'l', 'r'):
+ case FOURCC("hdlr"):
{
*offset += chunk_size;
@@ -2466,7 +2466,7 @@
// For the 3GPP file format, the handler-type within the 'hdlr' box
// shall be 'text'. We also want to support 'sbtl' handler type
// for a practical reason as various MPEG4 containers use it.
- if (type == FOURCC('t', 'e', 'x', 't') || type == FOURCC('s', 'b', 't', 'l')) {
+ if (type == FOURCC("text") || type == FOURCC("sbtl")) {
if (mLastTrack != NULL) {
AMediaFormat_setString(mLastTrack->meta,
AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_TEXT_3GPP);
@@ -2476,7 +2476,7 @@
break;
}
- case FOURCC('k', 'e', 'y', 's'):
+ case FOURCC("keys"):
{
*offset += chunk_size;
@@ -2489,7 +2489,7 @@
break;
}
- case FOURCC('t', 'r', 'e', 'x'):
+ case FOURCC("trex"):
{
*offset += chunk_size;
@@ -2508,7 +2508,7 @@
break;
}
- case FOURCC('t', 'x', '3', 'g'):
+ case FOURCC("tx3g"):
{
if (mLastTrack == NULL)
return ERROR_MALFORMED;
@@ -2552,7 +2552,7 @@
break;
}
- case FOURCC('c', 'o', 'v', 'r'):
+ case FOURCC("covr"):
{
*offset += chunk_size;
@@ -2583,12 +2583,12 @@
break;
}
- case FOURCC('c', 'o', 'l', 'r'):
+ case FOURCC("colr"):
{
*offset += chunk_size;
// this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
// ignore otherwise
- if (depth >= 2 && mPath[depth - 2] == FOURCC('s', 't', 's', 'd')) {
+ if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
status_t err = parseColorInfo(data_offset, chunk_data_size);
if (err != OK) {
return err;
@@ -2598,12 +2598,12 @@
break;
}
- case FOURCC('t', 'i', 't', 'l'):
- case FOURCC('p', 'e', 'r', 'f'):
- case FOURCC('a', 'u', 't', 'h'):
- case FOURCC('g', 'n', 'r', 'e'):
- case FOURCC('a', 'l', 'b', 'm'):
- case FOURCC('y', 'r', 'r', 'c'):
+ case FOURCC("titl"):
+ case FOURCC("perf"):
+ case FOURCC("auth"):
+ case FOURCC("gnre"):
+ case FOURCC("albm"):
+ case FOURCC("yrrc"):
{
*offset += chunk_size;
@@ -2616,7 +2616,7 @@
break;
}
- case FOURCC('I', 'D', '3', '2'):
+ case FOURCC("ID32"):
{
*offset += chunk_size;
@@ -2629,7 +2629,7 @@
break;
}
- case FOURCC('-', '-', '-', '-'):
+ case FOURCC("----"):
{
mLastCommentMean.clear();
mLastCommentName.clear();
@@ -2638,7 +2638,7 @@
break;
}
- case FOURCC('s', 'i', 'd', 'x'):
+ case FOURCC("sidx"):
{
status_t err = parseSegmentIndex(data_offset, chunk_data_size);
if (err != OK) {
@@ -2648,25 +2648,25 @@
return UNKNOWN_ERROR; // stop parsing after sidx
}
- case FOURCC('a', 'c', '-', '3'):
+ case FOURCC("ac-3"):
{
*offset += chunk_size;
return parseAC3SpecificBox(data_offset);
}
- case FOURCC('e', 'c', '-', '3'):
+ case FOURCC("ec-3"):
{
*offset += chunk_size;
return parseEAC3SpecificBox(data_offset);
}
- case FOURCC('a', 'c', '-', '4'):
+ case FOURCC("ac-4"):
{
*offset += chunk_size;
return parseAC4SpecificBox(data_offset);
}
- case FOURCC('f', 't', 'y', 'p'):
+ case FOURCC("ftyp"):
{
if (chunk_data_size < 8 || depth != 0) {
return ERROR_MALFORMED;
@@ -2691,16 +2691,16 @@
brandSet.insert(brand);
}
- if (brandSet.count(FOURCC('q', 't', ' ', ' ')) > 0) {
+ if (brandSet.count(FOURCC("qt ")) > 0) {
mIsQT = true;
} else {
- if (brandSet.count(FOURCC('m', 'i', 'f', '1')) > 0
- && brandSet.count(FOURCC('h', 'e', 'i', 'c')) > 0) {
+ if (brandSet.count(FOURCC("mif1")) > 0
+ && brandSet.count(FOURCC("heic")) > 0) {
ALOGV("identified HEIF image");
mIsHeif = true;
- brandSet.erase(FOURCC('m', 'i', 'f', '1'));
- brandSet.erase(FOURCC('h', 'e', 'i', 'c'));
+ brandSet.erase(FOURCC("mif1"));
+ brandSet.erase(FOURCC("heic"));
}
if (!brandSet.empty()) {
@@ -2787,7 +2787,7 @@
// + 4-byte size
offset += 4;
uint32_t type;
- if (!mDataSource->getUInt32(offset, &type) || type != FOURCC('d', 'a', 'c', '4')) {
+ if (!mDataSource->getUInt32(offset, &type) || type != FOURCC("dac4")) {
ALOGE("MPEG4Extractor: error while reading ac-4 specific block: header not dac4");
return ERROR_MALFORMED;
}
@@ -2914,7 +2914,7 @@
offset += 4;
uint32_t type;
- if (!mDataSource->getUInt32(offset, &type) || type != FOURCC('d', 'e', 'c', '3')) {
+ if (!mDataSource->getUInt32(offset, &type) || type != FOURCC("dec3")) {
ALOGE("MPEG4Extractor: error while reading eac-3 specific block: header not dec3");
return ERROR_MALFORMED;
}
@@ -3071,7 +3071,7 @@
offset += 4;
uint32_t type;
- if (!mDataSource->getUInt32(offset, &type) || type != FOURCC('d', 'a', 'c', '3')) {
+ if (!mDataSource->getUInt32(offset, &type) || type != FOURCC("dac3")) {
ALOGE("MPEG4Extractor: error while reading ac-3 specific block: header not dac3");
return ERROR_MALFORMED;
}
@@ -3273,7 +3273,7 @@
uint32_t type;
if (!mDataSource->getUInt32(keyOffset + 4, &type)
- || type != FOURCC('m', 'd', 't', 'a')) {
+ || type != FOURCC("mdta")) {
return ERROR_MALFORMED;
}
@@ -3315,7 +3315,7 @@
}
uint32_t atomFourCC;
if (!mDataSource->getUInt32(offset + 4, &atomFourCC)
- || atomFourCC != FOURCC('d', 'a', 't', 'a')) {
+ || atomFourCC != FOURCC("data")) {
return ERROR_MALFORMED;
}
uint32_t dataType;
@@ -3476,48 +3476,48 @@
MakeFourCCString(mPath[4], chunk);
ALOGV("meta: %s @ %lld", chunk, (long long)offset);
switch ((int32_t)mPath[4]) {
- case FOURCC(0xa9, 'a', 'l', 'b'):
+ case FOURCC("\251alb"):
{
metadataKey = "album";
break;
}
- case FOURCC(0xa9, 'A', 'R', 'T'):
+ case FOURCC("\251ART"):
{
metadataKey = "artist";
break;
}
- case FOURCC('a', 'A', 'R', 'T'):
+ case FOURCC("aART"):
{
metadataKey = "albumartist";
break;
}
- case FOURCC(0xa9, 'd', 'a', 'y'):
+ case FOURCC("\251day"):
{
metadataKey = "year";
break;
}
- case FOURCC(0xa9, 'n', 'a', 'm'):
+ case FOURCC("\251nam"):
{
metadataKey = "title";
break;
}
- case FOURCC(0xa9, 'w', 'r', 't'):
+ case FOURCC("\251wrt"):
{
metadataKey = "writer";
break;
}
- case FOURCC('c', 'o', 'v', 'r'):
+ case FOURCC("covr"):
{
metadataKey = "albumart";
break;
}
- case FOURCC('g', 'n', 'r', 'e'):
- case FOURCC(0xa9, 'g', 'e', 'n'):
+ case FOURCC("gnre"):
+ case FOURCC("\251gen"):
{
metadataKey = "genre";
break;
}
- case FOURCC('c', 'p', 'i', 'l'):
+ case FOURCC("cpil"):
{
if (size == 9 && flags == 21) {
char tmp[16];
@@ -3528,7 +3528,7 @@
}
break;
}
- case FOURCC('t', 'r', 'k', 'n'):
+ case FOURCC("trkn"):
{
if (size == 16 && flags == 0) {
char tmp[16];
@@ -3540,7 +3540,7 @@
}
break;
}
- case FOURCC('d', 'i', 's', 'k'):
+ case FOURCC("disk"):
{
if ((size == 14 || size == 16) && flags == 0) {
char tmp[16];
@@ -3552,17 +3552,17 @@
}
break;
}
- case FOURCC('-', '-', '-', '-'):
+ case FOURCC("----"):
{
buffer[size] = '\0';
switch (mPath[5]) {
- case FOURCC('m', 'e', 'a', 'n'):
+ case FOURCC("mean"):
mLastCommentMean.setTo((const char *)buffer + 4);
break;
- case FOURCC('n', 'a', 'm', 'e'):
+ case FOURCC("name"):
mLastCommentName.setTo((const char *)buffer + 4);
break;
- case FOURCC('d', 'a', 't', 'a'):
+ case FOURCC("data"):
if (size < 8) {
delete[] buffer;
buffer = NULL;
@@ -3670,8 +3670,8 @@
}
int32_t type = U32_AT(&buffer[0]);
- if ((type == FOURCC('n', 'c', 'l', 'x') && size >= 11)
- || (type == FOURCC('n', 'c', 'l', 'c') && size >= 10)) {
+ if ((type == FOURCC("nclx") && size >= 11)
+ || (type == FOURCC("nclc") && size >= 10)) {
// only store the first color specification
int32_t existingColor;
if (!AMediaFormat_getInt32(mLastTrack->meta,
@@ -3679,7 +3679,7 @@
int32_t primaries = U16_AT(&buffer[4]);
int32_t isotransfer = U16_AT(&buffer[6]);
int32_t coeffs = U16_AT(&buffer[8]);
- bool fullRange = (type == FOURCC('n', 'c', 'l', 'x')) && (buffer[10] & 128);
+ bool fullRange = (type == FOURCC("nclx")) && (buffer[10] & 128);
int32_t range = 0;
int32_t standard = 0;
@@ -3725,27 +3725,27 @@
const char *metadataKey = nullptr;
switch (mPath[depth]) {
- case FOURCC('t', 'i', 't', 'l'):
+ case FOURCC("titl"):
{
metadataKey = "title";
break;
}
- case FOURCC('p', 'e', 'r', 'f'):
+ case FOURCC("perf"):
{
metadataKey = "artist";
break;
}
- case FOURCC('a', 'u', 't', 'h'):
+ case FOURCC("auth"):
{
metadataKey = "writer";
break;
}
- case FOURCC('g', 'n', 'r', 'e'):
+ case FOURCC("gnre"):
{
metadataKey = "genre";
break;
}
- case FOURCC('a', 'l', 'b', 'm'):
+ case FOURCC("albm"):
{
if (buffer[size - 1] != '\0') {
char tmp[4];
@@ -3757,7 +3757,7 @@
metadataKey = "album";
break;
}
- case FOURCC('y', 'r', 'r', 'c'):
+ case FOURCC("yrrc"):
{
if (size < 6) {
delete[] buffer;
@@ -4503,7 +4503,7 @@
}
if (!strncasecmp("video/", mime, 6)) {
- uint32_t firstSampleCTS = 0;
+ uint64_t firstSampleCTS = 0;
err = mSampleTable->getMetaDataForSample(0, NULL, NULL, &firstSampleCTS);
// Start offset should be less or equal to composition time of first sample.
// Composition time stamp of first sample cannot be negative.
@@ -4610,8 +4610,8 @@
switch(chunk_type) {
- case FOURCC('t', 'r', 'a', 'f'):
- case FOURCC('m', 'o', 'o', 'f'): {
+ case FOURCC("traf"):
+ case FOURCC("moof"): {
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
while (*offset < stop_offset) {
@@ -4620,7 +4620,7 @@
return err;
}
}
- if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ if (chunk_type == FOURCC("moof")) {
// *offset points to the box following this moof. Find the next moof from there.
while (true) {
@@ -4649,7 +4649,7 @@
return ERROR_MALFORMED;
}
- if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ if (chunk_type == FOURCC("moof")) {
mNextMoofOffset = *offset;
break;
} else if (chunk_size == 0) {
@@ -4661,7 +4661,7 @@
break;
}
- case FOURCC('t', 'f', 'h', 'd'): {
+ case FOURCC("tfhd"): {
status_t err;
if ((err = parseTrackFragmentHeader(data_offset, chunk_data_size)) != OK) {
return err;
@@ -4670,7 +4670,7 @@
break;
}
- case FOURCC('t', 'r', 'u', 'n'): {
+ case FOURCC("trun"): {
status_t err;
if (mLastParsedTrackId == mTrackId) {
if ((err = parseTrackFragmentRun(data_offset, chunk_data_size)) != OK) {
@@ -4682,7 +4682,7 @@
break;
}
- case FOURCC('s', 'a', 'i', 'z'): {
+ case FOURCC("saiz"): {
status_t err;
if ((err = parseSampleAuxiliaryInformationSizes(data_offset, chunk_data_size)) != OK) {
return err;
@@ -4690,7 +4690,7 @@
*offset += chunk_size;
break;
}
- case FOURCC('s', 'a', 'i', 'o'): {
+ case FOURCC("saio"): {
status_t err;
if ((err = parseSampleAuxiliaryInformationOffsets(data_offset, chunk_data_size))
!= OK) {
@@ -4700,7 +4700,7 @@
break;
}
- case FOURCC('s', 'e', 'n', 'c'): {
+ case FOURCC("senc"): {
status_t err;
if ((err = parseSampleEncryption(data_offset)) != OK) {
return err;
@@ -4709,7 +4709,7 @@
break;
}
- case FOURCC('m', 'd', 'a', 't'): {
+ case FOURCC("mdat"): {
// parse DRM info if present
ALOGV("MPEG4Source::parseChunk mdat");
// if saiz/saoi was previously observed, do something with the sampleinfos
@@ -5351,7 +5351,7 @@
sampleIndex, &syncSampleIndex, findFlags);
}
- uint32_t sampleTime;
+ uint64_t sampleTime;
if (err == OK) {
err = mSampleTable->getMetaDataForSample(
sampleIndex, NULL, NULL, &sampleTime);
@@ -5401,7 +5401,7 @@
off64_t offset = 0;
size_t size = 0;
- uint32_t cts, stts;
+ uint64_t cts, stts;
bool isSyncSample;
bool newBuffer = false;
if (mBuffer == NULL) {
@@ -6033,28 +6033,29 @@
static bool isCompatibleBrand(uint32_t fourcc) {
static const uint32_t kCompatibleBrands[] = {
- FOURCC('i', 's', 'o', 'm'),
- FOURCC('i', 's', 'o', '2'),
- FOURCC('a', 'v', 'c', '1'),
- FOURCC('h', 'v', 'c', '1'),
- FOURCC('h', 'e', 'v', '1'),
- FOURCC('a', 'v', '0', '1'),
- FOURCC('3', 'g', 'p', '4'),
- FOURCC('m', 'p', '4', '1'),
- FOURCC('m', 'p', '4', '2'),
- FOURCC('d', 'a', 's', 'h'),
+ FOURCC("isom"),
+ FOURCC("iso2"),
+ FOURCC("avc1"),
+ FOURCC("hvc1"),
+ FOURCC("hev1"),
+ FOURCC("av01"),
+ FOURCC("3gp4"),
+ FOURCC("mp41"),
+ FOURCC("mp42"),
+ FOURCC("dash"),
// Won't promise that the following file types can be played.
// Just give these file types a chance.
- FOURCC('q', 't', ' ', ' '), // Apple's QuickTime
- FOURCC('M', 'S', 'N', 'V'), // Sony's PSP
+ FOURCC("qt "), // Apple's QuickTime
+ FOURCC("MSNV"), // Sony's PSP
+ FOURCC("wmf "),
- FOURCC('3', 'g', '2', 'a'), // 3GPP2
- FOURCC('3', 'g', '2', 'b'),
- FOURCC('m', 'i', 'f', '1'), // HEIF image
- FOURCC('h', 'e', 'i', 'c'), // HEIF image
- FOURCC('m', 's', 'f', '1'), // HEIF image sequence
- FOURCC('h', 'e', 'v', 'c'), // HEIF image sequence
+ FOURCC("3g2a"), // 3GPP2
+ FOURCC("3g2b"),
+ FOURCC("mif1"), // HEIF image
+ FOURCC("heic"), // HEIF image
+ FOURCC("msf1"), // HEIF image sequence
+ FOURCC("hevc"), // HEIF image sequence
};
for (size_t i = 0;
@@ -6122,7 +6123,7 @@
ALOGV("saw chunk type %s, size %" PRIu64 " @ %lld",
chunkstring, chunkSize, (long long)offset);
switch (chunkType) {
- case FOURCC('f', 't', 'y', 'p'):
+ case FOURCC("ftyp"):
{
if (chunkDataSize < 8) {
return false;
@@ -6157,7 +6158,7 @@
break;
}
- case FOURCC('m', 'o', 'o', 'v'):
+ case FOURCC("moov"):
{
moovAtomEndOffset = offset + chunkSize;
diff --git a/media/extractors/mp4/SampleIterator.cpp b/media/extractors/mp4/SampleIterator.cpp
index 1a6d306..ec12130 100644
--- a/media/extractors/mp4/SampleIterator.cpp
+++ b/media/extractors/mp4/SampleIterator.cpp
@@ -301,7 +301,7 @@
}
status_t SampleIterator::findSampleTimeAndDuration(
- uint32_t sampleIndex, uint32_t *time, uint32_t *duration) {
+ uint32_t sampleIndex, uint64_t *time, uint64_t *duration) {
if (sampleIndex >= mTable->mNumSampleSizes) {
return ERROR_OUT_OF_RANGE;
}
@@ -314,8 +314,8 @@
break;
}
if (mTimeToSampleIndex == mTable->mTimeToSampleCount ||
- (mTTSDuration != 0 && mTTSCount > UINT32_MAX / mTTSDuration) ||
- mTTSSampleTime > UINT32_MAX - (mTTSCount * mTTSDuration)) {
+ (mTTSDuration != 0 && mTTSCount > UINT64_MAX / mTTSDuration) ||
+ mTTSSampleTime > UINT64_MAX - (mTTSCount * mTTSDuration)) {
return ERROR_OUT_OF_RANGE;
}
@@ -330,7 +330,7 @@
// below is equivalent to:
// *time = mTTSSampleTime + mTTSDuration * (sampleIndex - mTTSSampleIndex);
- uint32_t tmp;
+ uint64_t tmp;
if (__builtin_sub_overflow(sampleIndex, mTTSSampleIndex, &tmp) ||
__builtin_mul_overflow(mTTSDuration, tmp, &tmp) ||
__builtin_add_overflow(mTTSSampleTime, tmp, &tmp)) {
@@ -340,15 +340,15 @@
int32_t offset = mTable->getCompositionTimeOffset(sampleIndex);
if ((offset < 0 && *time < (offset == INT32_MIN ?
- INT32_MAX : uint32_t(-offset))) ||
- (offset > 0 && *time > UINT32_MAX - offset)) {
- ALOGE("%u + %d would overflow", *time, offset);
+ INT64_MAX : uint64_t(-offset))) ||
+ (offset > 0 && *time > UINT64_MAX - offset)) {
+ ALOGE("%llu + %d would overflow", (unsigned long long) *time, offset);
return ERROR_OUT_OF_RANGE;
}
if (offset > 0) {
*time += offset;
} else {
- *time -= (offset == INT32_MIN ? INT32_MAX : (-offset));
+ *time -= (offset == INT64_MIN ? INT64_MAX : (-offset));
}
*duration = mTTSDuration;
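
The widened 64-bit time path keeps the same overflow discipline as before, now checked against UINT64_MAX and via the __builtin_*_overflow intrinsics; a standalone sketch of the checked accumulation (names are illustrative):

    #include <cstdint>
    #include <optional>

    // time = base + duration * (sampleIndex - firstIndex), refusing any step
    // that would wrap, as findSampleTimeAndDuration() does above.
    std::optional<uint64_t> sampleTimeFor(uint64_t base, uint64_t duration,
                                          uint32_t sampleIndex, uint32_t firstIndex) {
        uint64_t tmp;
        if (__builtin_sub_overflow(sampleIndex, firstIndex, &tmp) ||  // also catches sampleIndex < firstIndex
            __builtin_mul_overflow(duration, tmp, &tmp) ||
            __builtin_add_overflow(base, tmp, &tmp)) {
            return std::nullopt;
        }
        return tmp;
    }
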
diff --git a/media/extractors/mp4/SampleIterator.h b/media/extractors/mp4/SampleIterator.h
index 6e4f60e..5a0ea76 100644
--- a/media/extractors/mp4/SampleIterator.h
+++ b/media/extractors/mp4/SampleIterator.h
@@ -33,8 +33,8 @@
uint32_t getDescIndex() const { return mChunkDesc; }
off64_t getSampleOffset() const { return mCurrentSampleOffset; }
size_t getSampleSize() const { return mCurrentSampleSize; }
- uint32_t getSampleTime() const { return mCurrentSampleTime; }
- uint32_t getSampleDuration() const { return mCurrentSampleDuration; }
+ uint64_t getSampleTime() const { return mCurrentSampleTime; }
+ uint64_t getSampleDuration() const { return mCurrentSampleDuration; }
uint32_t getLastSampleIndexInChunk() const {
return mCurrentSampleIndex + mSamplesPerChunk -
@@ -63,20 +63,20 @@
uint32_t mTimeToSampleIndex;
uint32_t mTTSSampleIndex;
- uint32_t mTTSSampleTime;
+ uint64_t mTTSSampleTime;
uint32_t mTTSCount;
- uint32_t mTTSDuration;
+ uint64_t mTTSDuration;
uint32_t mCurrentSampleIndex;
off64_t mCurrentSampleOffset;
size_t mCurrentSampleSize;
- uint32_t mCurrentSampleTime;
- uint32_t mCurrentSampleDuration;
+ uint64_t mCurrentSampleTime;
+ uint64_t mCurrentSampleDuration;
void reset();
status_t findChunkRange(uint32_t sampleIndex);
status_t getChunkOffset(uint32_t chunk, off64_t *offset);
- status_t findSampleTimeAndDuration(uint32_t sampleIndex, uint32_t *time, uint32_t *duration);
+ status_t findSampleTimeAndDuration(uint32_t sampleIndex, uint64_t *time, uint64_t *duration);
SampleIterator(const SampleIterator &);
SampleIterator &operator=(const SampleIterator &);
diff --git a/media/extractors/mp4/SampleTable.cpp b/media/extractors/mp4/SampleTable.cpp
index d242798..bf29bf1 100644
--- a/media/extractors/mp4/SampleTable.cpp
+++ b/media/extractors/mp4/SampleTable.cpp
@@ -37,13 +37,13 @@
namespace android {
// static
-const uint32_t SampleTable::kChunkOffsetType32 = FOURCC('s', 't', 'c', 'o');
+const uint32_t SampleTable::kChunkOffsetType32 = FOURCC("stco");
// static
-const uint32_t SampleTable::kChunkOffsetType64 = FOURCC('c', 'o', '6', '4');
+const uint32_t SampleTable::kChunkOffsetType64 = FOURCC("co64");
// static
-const uint32_t SampleTable::kSampleSizeType32 = FOURCC('s', 't', 's', 'z');
+const uint32_t SampleTable::kSampleSizeType32 = FOURCC("stsz");
// static
-const uint32_t SampleTable::kSampleSizeTypeCompact = FOURCC('s', 't', 'z', '2');
+const uint32_t SampleTable::kSampleSizeTypeCompact = FOURCC("stz2");
////////////////////////////////////////////////////////////////////////////////
@@ -614,7 +614,7 @@
return OK;
}
-uint32_t abs_difference(uint32_t time1, uint32_t time2) {
+uint64_t abs_difference(uint64_t time1, uint64_t time2) {
return time1 > time2 ? time1 - time2 : time2 - time1;
}
@@ -662,7 +662,7 @@
}
uint32_t sampleIndex = 0;
- uint32_t sampleTime = 0;
+ uint64_t sampleTime = 0;
for (uint32_t i = 0; i < mTimeToSampleCount; ++i) {
uint32_t n = mTimeToSample[2 * i];
@@ -684,13 +684,13 @@
(compTimeDelta == INT32_MIN ?
INT32_MAX : uint32_t(-compTimeDelta)))
|| (compTimeDelta > 0 &&
- sampleTime > UINT32_MAX - compTimeDelta)) {
- ALOGE("%u + %d would overflow, clamping",
- sampleTime, compTimeDelta);
+ sampleTime > UINT64_MAX - compTimeDelta)) {
+ ALOGE("%llu + %d would overflow, clamping",
+ (unsigned long long) sampleTime, compTimeDelta);
if (compTimeDelta < 0) {
sampleTime = 0;
} else {
- sampleTime = UINT32_MAX;
+ sampleTime = UINT64_MAX;
}
compTimeDelta = 0;
}
@@ -701,10 +701,10 @@
}
++sampleIndex;
- if (sampleTime > UINT32_MAX - delta) {
- ALOGE("%u + %u would overflow, clamping",
- sampleTime, delta);
- sampleTime = UINT32_MAX;
+ if (sampleTime > UINT64_MAX - delta) {
+ ALOGE("%llu + %u would overflow, clamping",
+ (unsigned long long) sampleTime, delta);
+ sampleTime = UINT64_MAX;
} else {
sampleTime += delta;
}
@@ -870,19 +870,19 @@
if (err != OK) {
return err;
}
- uint32_t sample_time = mSampleIterator->getSampleTime();
+ uint64_t sample_time = mSampleIterator->getSampleTime();
err = mSampleIterator->seekTo(mSyncSamples[left]);
if (err != OK) {
return err;
}
- uint32_t upper_time = mSampleIterator->getSampleTime();
+ uint64_t upper_time = mSampleIterator->getSampleTime();
err = mSampleIterator->seekTo(mSyncSamples[left - 1]);
if (err != OK) {
return err;
}
- uint32_t lower_time = mSampleIterator->getSampleTime();
+ uint64_t lower_time = mSampleIterator->getSampleTime();
// use abs_difference for safety
if (abs_difference(upper_time, sample_time) >
@@ -955,9 +955,9 @@
uint32_t sampleIndex,
off64_t *offset,
size_t *size,
- uint32_t *compositionTime,
+ uint64_t *compositionTime,
bool *isSyncSample,
- uint32_t *sampleDuration) {
+ uint64_t *sampleDuration) {
Mutex::Autolock autoLock(mLock);
status_t err;
diff --git a/media/extractors/mp4/SampleTable.h b/media/extractors/mp4/SampleTable.h
index d4b5dc8..57f6e62 100644
--- a/media/extractors/mp4/SampleTable.h
+++ b/media/extractors/mp4/SampleTable.h
@@ -66,9 +66,9 @@
uint32_t sampleIndex,
off64_t *offset,
size_t *size,
- uint32_t *compositionTime,
+ uint64_t *compositionTime,
bool *isSyncSample = NULL,
- uint32_t *sampleDuration = NULL);
+ uint64_t *sampleDuration = NULL);
// call only after getMetaDataForSample has been called successfully.
uint32_t getLastSampleIndexInChunk();
@@ -124,7 +124,7 @@
struct SampleTimeEntry {
uint32_t mSampleIndex;
- uint32_t mCompositionTime;
+ uint64_t mCompositionTime;
};
SampleTimeEntry *mSampleTimeEntries;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.c b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.c
index 0669a81..c57498e 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.c
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.c
@@ -61,6 +61,72 @@
/* */
/****************************************************************************************/
+/*
+ * 4 Types of Memory Regions of LVM
+ * TODO: Allocate on the fly.
+ * i) LVM_MEMREGION_PERSISTENT_SLOW_DATA - For Instance Handles
+ * ii) LVM_MEMREGION_PERSISTENT_FAST_DATA - Persistent Buffers
+ * iii) LVM_MEMREGION_PERSISTENT_FAST_COEF - For Holding Structure values
+ * iv) LVM_MEMREGION_TEMPORARY_FAST - For scratch buffers
+ *
+ * LVM_MEMREGION_PERSISTENT_SLOW_DATA:
+ * Total Memory size:
+ * sizeof(LVM_Instance_t) + \
+ * sizeof(LVM_Buffer_t) + \
+ * sizeof(LVPSA_InstancePr_t) + \
+ * sizeof(LVM_Buffer_t) - needed if buffer mode is LVM_MANAGED_BUFFER
+ *
+ * LVM_MEMREGION_PERSISTENT_FAST_DATA:
+ * Total Memory size:
+ * sizeof(LVM_TE_Data_t) + \
+ * 2 * pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t) + \
+ * sizeof(LVCS_Data_t) + \
+ * sizeof(LVDBE_Data_FLOAT_t) + \
+ * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
+ * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
+ * pInstParams->EQNB_NumBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
+ * pInstParams->EQNB_NumBands * sizeof(LVEQNB_BandDef_t) + \
+ * pInstParams->EQNB_NumBands * sizeof(LVEQNB_BiquadType_en) + \
+ * 2 * LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t) + \
+ * PSA_InitParams.nBands * sizeof(Biquad_1I_Order2_Taps_t) + \
+ * PSA_InitParams.nBands * sizeof(QPD_Taps_t)
+ *
+ * LVM_MEMREGION_PERSISTENT_FAST_COEF:
+ * Total Memory size:
+ * sizeof(LVM_TE_Coefs_t) + \
+ * sizeof(LVCS_Coefficient_t) + \
+ * sizeof(LVDBE_Coef_FLOAT_t) + \
+ * sizeof(Biquad_FLOAT_Instance_t) + \
+ * sizeof(Biquad_FLOAT_Instance_t) + \
+ * pInstParams->EQNB_NumBands * sizeof(Biquad_FLOAT_Instance_t) + \
+ * PSA_InitParams.nBands * sizeof(Biquad_Instance_t) + \
+ * PSA_InitParams.nBands * sizeof(QPD_State_t)
+ *
+ * LVM_MEMREGION_TEMPORARY_FAST (Scratch):
+ * Total Memory Size:
+ * BundleScratchSize + \
+ * MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT) + \
+ * MaxScratchOf (CS, EQNB, DBE, PSA)
+ *
+ * a)BundleScratchSize:
+ * 3 * LVM_MAX_CHANNELS \
+ * * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_FLOAT)
+ * This Memory is allocated only when Buffer mode is LVM_MANAGED_BUFFER.
+ * b)MaxScratchOf (CS, EQNB, DBE, PSA)
+ * This Memory is needed for scratch usage for CS, EQNB, DBE, PSA.
+ * CS = (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
+ * * pCapabilities->MaxBlockSize)
+ * EQNB = (LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
+ * * pCapabilities->MaxBlockSize)
+ * DBE = (LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT)
+ * * pCapabilities->MaxBlockSize)
+ * PSA = (2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT))
+ * one MaxInputBlockSize for input and another for filter output
+ * c)MAX_INTERNAL_BLOCKSIZE
+ * This memory is needed for PSAInput - a temporary buffer that stores the
+ * output of the McToMono block and is given as input to the PSA block
+ */
+
LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t hInstance,
LVM_MemTab_t *pMemoryTable,
LVM_InstParams_t *pInstParams)
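
For the managed-buffer case, the scratch contribution labelled BundleScratchSize above reduces to a single product; a minimal sketch, with placeholder values standing in for the real LVM constants (the actual values come from the LVM headers):

    #include <cstddef>

    constexpr std::size_t kLvmMaxChannels = 8;         // placeholder for LVM_MAX_CHANNELS
    constexpr std::size_t kMinInternalBlockSize = 16;  // placeholder for MIN_INTERNAL_BLOCKSIZE
    using LvmFloat = float;                            // stands in for LVM_FLOAT

    constexpr std::size_t bundleScratchSize(std::size_t internalBlockSize) {
        // Three buffers per channel, each spanning one internal block plus the
        // minimum block carried over between calls.
        return 3 * kLvmMaxChannels
                 * (kMinInternalBlockSize + internalBlockSize)
                 * sizeof(LvmFloat);
    }
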
@@ -168,7 +234,13 @@
AlgScratchSize = 0;
if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
{
+#ifdef BUILD_FLOAT
+ BundleScratchSize = 3 * LVM_MAX_CHANNELS \
+ * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
+ * sizeof(LVM_FLOAT);
+#else
BundleScratchSize = 6 * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_INT16);
+#endif
InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch buffer */
BundleScratchSize);
InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
@@ -369,8 +441,13 @@
PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
/* Fast Temporary */
+#ifdef BUILD_FLOAT
+ InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
+ MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT));
+#else
InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_INT16));
+#endif
if (PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size > AlgScratchSize)
{
@@ -559,13 +636,20 @@
*/
pInstance->pBufferManagement = InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
sizeof(LVM_Buffer_t));
+#ifdef BUILD_FLOAT
+ BundleScratchSize = (LVM_INT32)
+ (3 * LVM_MAX_CHANNELS \
+ * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
+ * sizeof(LVM_FLOAT));
+#else
BundleScratchSize = (LVM_INT32)(6 * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_INT16));
+#endif
pInstance->pBufferManagement->pScratch = InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch 1 buffer */
(LVM_UINT32)BundleScratchSize);
#ifdef BUILD_FLOAT
LoadConst_Float(0, /* Clear the input delay buffer */
(LVM_FLOAT *)&pInstance->pBufferManagement->InDelayBuffer,
- (LVM_INT16)(2 * MIN_INTERNAL_BLOCKSIZE));
+ (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
#else
LoadConst_16(0, /* Clear the input delay buffer */
(LVM_INT16 *)&pInstance->pBufferManagement->InDelayBuffer,
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.c b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.c
index 48f5d54..9d3ee88 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.c
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.c
@@ -51,7 +51,7 @@
LVM_INT16 NumChannels)
{
#ifdef HIGHER_FS
- LVM_FLOAT DeltaTable[11] = {0.500000f,/*8000*/
+ LVM_FLOAT DeltaTable[13] = {0.500000f,/*8000*/
0.362812f,/*11025*/
0.333333f,/*12000*/
0.250000f,/*16000*/
@@ -60,7 +60,9 @@
0.125000f,/*32000*/
0.090703f,/*44100*/
0.083333f,/*48000*/
+ 0.045352f,/*88200*/
0.041667f,/*96000*/
+ 0.022676f,/*176400*/
0.020833f};/*192000*/
#else
LVM_FLOAT DeltaTable[9] = {0.500000f,/*8000*/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.c b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.c
index 9dc7d21..0e0acf1 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.c
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.c
@@ -52,7 +52,7 @@
LVM_INT16 NumChannels)
{
#ifdef HIGHER_FS
- LVM_FLOAT DeltaTable[11] = {0.500000f,/*8000*/
+ LVM_FLOAT DeltaTable[13] = {0.500000f,/*8000*/
0.362812f,/*11025*/
0.333333f,/*12000*/
0.250000f,/*16000*/
@@ -61,7 +61,9 @@
0.125000f,/*32000*/
0.090703f,/*44100*/
0.083333f,/*48000*/
+ 0.045352f,/*88200*/
0.041666f,/*96000*/
+ 0.022676f,/*176400*/
0.020833f};/*192000*/
#else
LVM_FLOAT DeltaTable[9] = {0.500000f,/*8000*/
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.c b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.c
index 7846ca0..6307e68 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.c
+++ b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.c
@@ -53,7 +53,9 @@
#define LVVDL_2PiBy_48000_f 0.000130900f
#ifdef HIGHER_FS
+#define LVVDL_2PiBy_88200_f 0.000071238f
#define LVVDL_2PiBy_96000_f 0.000065450f
+#define LVVDL_2PiBy_176400_f 0.000035619f
#define LVVDL_2PiBy_192000_f 0.000032725f
#endif
const LVM_FLOAT LVVDL_2PiOnFsTable[] = {LVVDL_2PiBy_8000_f,
@@ -66,7 +68,9 @@
LVVDL_2PiBy_44100_f,
LVVDL_2PiBy_48000_f
#ifdef HIGHER_FS
+ ,LVVDL_2PiBy_88200_f
,LVVDL_2PiBy_96000_f
+ ,LVVDL_2PiBy_176400_f
,LVVDL_2PiBy_192000_f
#endif
};
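
The two added omega constants are simply 2*pi/Fs for the new rates, and, as a side observation not stated in the patch, the added DeltaTable entries above also track the existing 4000/Fs pattern. A quick check:

    #include <cmath>
    #include <cstdio>

    int main() {
        const double pi = std::acos(-1.0);
        for (double fs : {88200.0, 176400.0}) {
            // expect 0.000071238 / 0.000035619 and 0.045352 / 0.022676 respectively
            std::printf("Fs=%.0f  2*pi/Fs=%.9f  4000/Fs=%.6f\n",
                        fs, 2.0 * pi / fs, 4000.0 / fs);
        }
        return 0;
    }
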
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index e45d81f..ba05577 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -239,13 +239,12 @@
#define LVCS_STEREODELAY_CS_24KHZ 279 /* Sample rate 24kS/s */
#define LVCS_STEREODELAY_CS_32KHZ 372 /* Sample rate 32kS/s */
#define LVCS_STEREODELAY_CS_44KHZ 512 /* Sample rate 44kS/s */
-// TODO: this should linearly scale by frequency but is limited to 512 frames until
-// we ensure enough buffer size has been allocated.
-#define LVCS_STEREODELAY_CS_48KHZ 512 /* Sample rate 48kS/s */
-#define LVCS_STEREODELAY_CS_88KHZ 512 /* Sample rate 88.2kS/s */
-#define LVCS_STEREODELAY_CS_96KHZ 512 /* Sample rate 96kS/s */
-#define LVCS_STEREODELAY_CS_176KHZ 512 /* Sample rate 176.4kS/s */
-#define LVCS_STEREODELAY_CS_192KHZ 512 /* Sample rate 196kS/s */
+#define LVCS_STEREODELAY_CS_48KHZ 557 /* Sample rate 48kS/s */
+#define LVCS_STEREODELAY_CS_88KHZ 1024 /* Sample rate 88.2kS/s */
+#define LVCS_STEREODELAY_CS_96KHZ 1115 /* Sample rate 96kS/s */
+#define LVCS_STEREODELAY_CS_176KHZ 2048 /* Sample rate 176.4kS/s */
+#define LVCS_STEREODELAY_CS_192KHZ 2229 /* Sample rate 192kS/s */
+#define LVCS_STEREODELAY_CS_MAX_VAL LVCS_STEREODELAY_CS_192KHZ
/* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
#define CS_REVERB_8000_A0 0.667271
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
index 69892b6..f94d4e4 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
@@ -65,7 +65,7 @@
/* Filter */
void (*pBiquadCallBack) (Biquad_Instance_t*, LVM_INT16*, LVM_INT16*, LVM_INT16);
#else
- LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_48KHZ];
+ LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
/* Reverb Level */
LVM_FLOAT ReverbLevel;
/* Filter */
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 8ee807c..003ce9e 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -44,3 +44,16 @@
"-Wextra",
],
}
+
+cc_test {
+ name: "snr",
+ host_supported: false,
+
+ srcs: ["snr.cpp"],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index 861ee64..41a4f04 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -25,16 +25,17 @@
adb shell mkdir -p $testdir
adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw $testdir
adb push $OUT/testcases/lvmtest/arm64/lvmtest $testdir
+adb push $OUT/testcases/snr/arm64/snr $testdir
flags_arr=(
"-csE"
"-eqE"
"-tE"
"-csE -tE -eqE"
- "-bE"
+ "-bE -M"
"-csE -tE"
"-csE -eqE" "-tE -eqE"
- "-csE -tE -bE -eqE"
+ "-csE -tE -bE -M -eqE"
)
fs_arr=(
@@ -79,6 +80,10 @@
then
adb shell cmp $testdir/sinesweep_2_$((fs)).raw \
$testdir/sinesweep_$((ch))_$((fs)).raw
+ elif [[ $flags == *"-bE"* ]] && [ "$ch" -gt 2 ]
+ then
+ adb shell $testdir/snr $testdir/sinesweep_2_$((fs)).raw \
+ $testdir/sinesweep_$((ch))_$((fs)).raw -thr:90.308998
fi
done
diff --git a/media/libeffects/lvm/tests/snr.cpp b/media/libeffects/lvm/tests/snr.cpp
new file mode 100644
index 0000000..88110c0
--- /dev/null
+++ b/media/libeffects/lvm/tests/snr.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <inttypes.h>
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <algorithm>
+#include <limits>
+#include <utility>
+#include <vector>
+
+template <typename T, typename A = float>
+std::pair<A, A> getSignalNoise(FILE *finp, FILE *fref) {
+ constexpr size_t framesize = 256;
+ std::vector<T> in(framesize);
+ std::vector<T> ref(framesize);
+ A signal{};
+ A noise{};
+
+ for (;;) {
+ size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
+ const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
+ if (read_samples_in != read_samples_ref) {
+ printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
+ read_samples_in = std::min(read_samples_in, read_samples_ref);
+ }
+ if (read_samples_in == 0) {
+ return { signal, noise };
+ }
+ for (size_t i = 0; i < read_samples_in; ++i) {
+ const A value(ref[i]);
+ const A diff(A(in[i]) - value);
+ signal += value * value;
+ noise += diff * diff;
+ }
+ }
+}
+
+void printUsage() {
+ printf("\nUsage: ");
+ printf("\n snr <ref_file> <test_file> [options]\n");
+ printf("\nwhere, \n <ref_file> is the reference file name");
+ printf("\n on which will be taken as pure signal");
+ printf("\n <test_file> is test file for snr calculation");
+ printf("\n and options are mentioned below");
+ printf("\n");
+ printf("\n -pcm_format:<pcm format of input files>");
+ printf("\n 0 - 16 bit pcm");
+ printf("\n 1 - 32 bit float");
+ printf("\n default 0");
+ printf("\n -thr:<threshold value>");
+ printf("\n default - negative infinity\n\n");
+}
+
+int main(int argc, const char *argv[]) {
+ if (argc < 3) {
+ printUsage();
+ return -1;
+ }
+ int pcm_format = 0;
+ float thr = - std::numeric_limits<float>::infinity();
+ FILE *fref = fopen(argv[1], "rb");
+ FILE *finp = fopen(argv[2], "rb");
+ for (int i = 3; i < argc; i++) {
+ if (!strncmp(argv[i], "-pcm_format:", 12)) {
+ pcm_format = atoi(argv[i] + 12);
+ } else if (!strncmp(argv[i], "-thr:", 5)) {
+ thr = atof(argv[i] + 5);
+ }
+ }
+ if (finp == nullptr || fref == nullptr) {
+ printf("\nError: missing input/reference files\n");
+ return -1;
+ }
+ auto sn = pcm_format == 0
+ ? getSignalNoise<short>(finp, fref)
+ : getSignalNoise<float>(finp, fref);
+ if (sn.first > 0.f && sn.second > 0.f) {
+ float snr = 10.f * log10(sn.first / sn.second); // SNR in dB
+ // compare the measured snr value with threshold
+ if (snr < thr) {
+ printf("%.6f less than threshold %.6f\n", snr, thr);
+ } else {
+ printf("%.6f\n", snr);
+ }
+ }
+ fclose(finp);
+ fclose(fref);
+
+ return 0;
+}
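
The -thr:90.308998 value passed by build_and_run_all_unit_tests.sh appears to be 10*log10(2^30), i.e. the SNR of a signal carrying roughly 15 effective bits; this is an assumption about the intent, not something the change states. A quick check of that reading:

    #include <cmath>
    #include <cstdio>

    int main() {
        const double thr = 10.0 * std::log10(std::pow(2.0, 30));  // 90.308999...
        std::printf("%.6f\n", thr);
        return 0;
    }
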
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index c24e046..aa77cd3 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -209,6 +209,14 @@
MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_1_0),
MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_0),
MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_1),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_LC),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V1),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V2),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_CELT),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
+ MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
TERMINATOR
};
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index f45cc58..03eef48 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -160,7 +160,6 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx_utils",
- "libstagefright_opus_common",
"libRScpp",
"libhidlallocatorutils",
"libhidlbase",
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7df1a2d..c4015fb 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -85,7 +85,7 @@
static const int kTimestampDebugCount = 10;
static const int kItemIdBase = 10000;
static const char kExifHeader[] = {'E', 'x', 'i', 'f', '\0', '\0'};
-static const int32_t kTiffHeaderOffset = htonl(sizeof(kExifHeader));
+static const uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xff, 0xe1};
static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
kHevcNalUnitTypeVps,
@@ -125,7 +125,7 @@
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
- bool isExifData(const MediaBufferBase *buffer) const;
+ bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
void addChunkOffset(off64_t offset);
void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
void flushItemRefs();
@@ -364,7 +364,7 @@
Vector<uint16_t> mProperties;
ItemRefs mDimgRefs;
- ItemRefs mCdscRefs;
+ Vector<uint16_t> mExifList;
uint16_t mImageItemId;
int32_t mIsPrimary;
int32_t mWidth, mHeight;
@@ -1368,14 +1368,16 @@
}
off64_t MPEG4Writer::addSample_l(
- MediaBuffer *buffer, bool usePrefix, bool isExif, size_t *bytesWritten) {
+ MediaBuffer *buffer, bool usePrefix,
+ uint32_t tiffHdrOffset, size_t *bytesWritten) {
off64_t old_offset = mOffset;
if (usePrefix) {
addMultipleLengthPrefixedSamples_l(buffer);
} else {
- if (isExif) {
- ::write(mFd, &kTiffHeaderOffset, 4); // exif_tiff_header_offset field
+ if (tiffHdrOffset > 0) {
+ tiffHdrOffset = htonl(tiffHdrOffset);
+ ::write(mFd, &tiffHdrOffset, 4); // exif_tiff_header_offset field
mOffset += 4;
}
@@ -1803,7 +1805,6 @@
mStartTimestampUs(-1),
mRotation(0),
mDimgRefs("dimg"),
- mCdscRefs("cdsc"),
mImageItemId(0),
mIsPrimary(0),
mWidth(0),
@@ -1984,11 +1985,34 @@
return OK;
}
-bool MPEG4Writer::Track::isExifData(const MediaBufferBase *buffer) const {
- return mIsHeic
- && (buffer->range_length() > sizeof(kExifHeader))
- && !memcmp((uint8_t *)buffer->data() + buffer->range_offset(),
- kExifHeader, sizeof(kExifHeader));
+bool MPEG4Writer::Track::isExifData(
+ MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
+ if (!mIsHeic) {
+ return false;
+ }
+
+ // Exif block starting with 'Exif\0\0'
+ size_t length = buffer->range_length();
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kExifHeader))
+ && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
+ *tiffHdrOffset = sizeof(kExifHeader);
+ return true;
+ }
+
+ // Exif block starting with fourcc 'Exif' followed by APP1 marker
+ if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader))
+ && !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker))
+ && !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
+ // skip 'Exif' fourcc
+ buffer->set_range(4, buffer->range_length() - 4);
+
+ // 2-byte APP1 + 2-byte size followed by kExifHeader
+ *tiffHdrOffset = 2 + 2 + sizeof(kExifHeader);
+ return true;
+ }
+
+ return false;
}
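
isExifData() now accepts two framings of the muxer-supplied Exif block and reports where the TIFF header starts; a standalone restatement of that detection (illustrative, not the writer's actual helper):

    #include <cstdint>
    #include <cstring>
    #include <optional>

    static const char kHdr[] = {'E', 'x', 'i', 'f', '\0', '\0'};       // "Exif\0\0"
    static const uint8_t kApp1[] = {'E', 'x', 'i', 'f', 0xff, 0xe1};   // fourcc + APP1 marker

    // Returns the TIFF header offset within the Exif payload, or nullopt if the
    // block matches neither framing. For the second framing the offset is
    // relative to the data after the leading 'Exif' fourcc has been dropped,
    // matching the set_range() call above.
    std::optional<uint32_t> tiffHeaderOffset(const uint8_t *data, size_t length) {
        if (length > sizeof(kHdr) && !std::memcmp(data, kHdr, sizeof(kHdr))) {
            return uint32_t(sizeof(kHdr));                      // "Exif\0\0" then TIFF header
        }
        if (length > sizeof(kApp1) + 2 + sizeof(kHdr)
                && !std::memcmp(data, kApp1, sizeof(kApp1))
                && !std::memcmp(data + sizeof(kApp1) + 2, kHdr, sizeof(kHdr))) {
            return uint32_t(2 + 2 + sizeof(kHdr));              // APP1 marker + size + "Exif\0\0"
        }
        return std::nullopt;
    }
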
void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
@@ -2014,7 +2038,7 @@
}
if (isExif) {
- mCdscRefs.value.push_back(mOwner->addItem_l({
+ mExifList.push_back(mOwner->addItem_l({
.itemType = "Exif",
.isPrimary = false,
.isHidden = false,
@@ -2117,7 +2141,16 @@
if (mImageItemId > 0) {
mOwner->addRefs_l(mImageItemId, mDimgRefs);
- mOwner->addRefs_l(mImageItemId, mCdscRefs);
+
+ if (!mExifList.empty()) {
+ // The "cdsc" ref is from the metadata/exif item to the image item.
+ // So the refs all contain the image item.
+ ItemRefs cdscRefs("cdsc");
+ cdscRefs.value.push_back(mImageItemId);
+ for (uint16_t exifItem : mExifList) {
+ mOwner->addRefs_l(exifItem, cdscRefs);
+ }
+ }
}
}
@@ -2269,14 +2302,16 @@
while (!chunk->mSamples.empty()) {
List<MediaBuffer *>::iterator it = chunk->mSamples.begin();
- int32_t isExif;
- if (!(*it)->meta_data().findInt32(kKeyIsExif, &isExif)) {
- isExif = 0;
+ uint32_t tiffHdrOffset;
+ if (!(*it)->meta_data().findInt32(
+ kKeyExifTiffOffset, (int32_t*)&tiffHdrOffset)) {
+ tiffHdrOffset = 0;
}
+ bool isExif = (tiffHdrOffset > 0);
bool usePrefix = chunk->mTrack->usePrefix() && !isExif;
size_t bytesWritten;
- off64_t offset = addSample_l(*it, usePrefix, isExif, &bytesWritten);
+ off64_t offset = addSample_l(*it, usePrefix, tiffHdrOffset, &bytesWritten);
if (chunk->mTrack->isHeic()) {
chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten, isExif);
@@ -3002,10 +3037,11 @@
}
bool isExif = false;
+ uint32_t tiffHdrOffset = 0;
int32_t isMuxerData;
if (buffer->meta_data().findInt32(kKeyIsMuxerData, &isMuxerData) && isMuxerData) {
// We only support one type of muxer data, which is Exif data block.
- isExif = isExifData(buffer);
+ isExif = isExifData(buffer, &tiffHdrOffset);
if (!isExif) {
ALOGW("Ignoring bad Exif data block");
buffer->release();
@@ -3027,7 +3063,7 @@
buffer = NULL;
if (isExif) {
- copy->meta_data().setInt32(kKeyIsExif, 1);
+ copy->meta_data().setInt32(kKeyExifTiffOffset, tiffHdrOffset);
}
bool usePrefix = this->usePrefix() && !isExif;
@@ -3300,7 +3336,8 @@
}
if (!hasMultipleTracks) {
size_t bytesWritten;
- off64_t offset = mOwner->addSample_l(copy, usePrefix, isExif, &bytesWritten);
+ off64_t offset = mOwner->addSample_l(
+ copy, usePrefix, tiffHdrOffset, &bytesWritten);
if (mIsHeic) {
addItemOffsetAndSize(offset, bytesWritten, isExif);
diff --git a/media/libstagefright/OggWriter.cpp b/media/libstagefright/OggWriter.cpp
index ad55c56..5c13983 100644
--- a/media/libstagefright/OggWriter.cpp
+++ b/media/libstagefright/OggWriter.cpp
@@ -30,7 +30,7 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OggWriter.h>
#include <media/stagefright/foundation/ADebug.h>
-#include "OpusHeader.h"
+#include <media/stagefright/foundation/OpusHeader.h>
extern "C" {
#include <ogg/ogg.h>
@@ -114,30 +114,17 @@
}
mSampleRate = sampleRate;
-
- OpusHeader header;
- header.channels = nChannels;
- header.num_streams = nChannels;
- header.num_coupled = 0;
- header.channel_mapping = ((nChannels > 8) ? 255 : (nChannels > 2));
- header.gain_db = 0;
- header.skip_samples = 0;
-
- // headers are 21-bytes + something driven by channel count
- // expect numbers in the low 30's here. WriteOpusHeader() will tell us
- // if things are bad.
- unsigned char header_data[100];
- ogg_packet op;
- ogg_page og;
-
- const int packet_size = WriteOpusHeader(header, mSampleRate, (uint8_t*)header_data,
- sizeof(header_data));
-
- if (packet_size < 0) {
- ALOGE("opus header writing failed");
+ uint32_t type;
+ const void *header_data;
+ size_t packet_size;
+ if (!source->getFormat()->findData(kKeyOpusHeader, &type, &header_data, &packet_size)) {
+ ALOGE("opus header not found");
return UNKNOWN_ERROR;
}
- op.packet = header_data;
+
+ ogg_packet op;
+ ogg_page og;
+ op.packet = (unsigned char *)header_data;
op.bytes = packet_size;
op.b_o_s = 1;
op.e_o_s = 0;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 49e485a..2e7da01 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -37,6 +37,7 @@
#include <media/stagefright/foundation/ALookup.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/foundation/OpusHeader.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaDefs.h>
#include <media/AudioSystem.h>
@@ -1745,12 +1746,34 @@
} else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_AUDIO_OPUS) {
- meta->setData(kKeyOpusHeader, 0, csd0->data(), csd0->size());
+ size_t opusHeadSize = csd0->size();
+ size_t codecDelayBufSize = 0;
+ size_t seekPreRollBufSize = 0;
+ void *opusHeadBuf = csd0->data();
+ void *codecDelayBuf = NULL;
+ void *seekPreRollBuf = NULL;
if (msg->findBuffer("csd-1", &csd1)) {
- meta->setData(kKeyOpusCodecDelay, 0, csd1->data(), csd1->size());
+ codecDelayBufSize = csd1->size();
+ codecDelayBuf = csd1->data();
}
if (msg->findBuffer("csd-2", &csd2)) {
- meta->setData(kKeyOpusSeekPreRoll, 0, csd2->data(), csd2->size());
+ seekPreRollBufSize = csd2->size();
+ seekPreRollBuf = csd2->data();
+ }
+ /* Extract codec delay and seek pre-roll from csd-0,
+ * if csd-1 and csd-2 are not present */
+ if (!codecDelayBuf && !seekPreRollBuf) {
+ GetOpusHeaderBuffers(csd0->data(), csd0->size(), &opusHeadBuf,
+ &opusHeadSize, &codecDelayBuf,
+ &codecDelayBufSize, &seekPreRollBuf,
+ &seekPreRollBufSize);
+ }
+ meta->setData(kKeyOpusHeader, 0, opusHeadBuf, opusHeadSize);
+ if (codecDelayBuf) {
+ meta->setData(kKeyOpusCodecDelay, 0, codecDelayBuf, codecDelayBufSize);
+ }
+ if (seekPreRollBuf) {
+ meta->setData(kKeyOpusSeekPreRoll, 0, seekPreRollBuf, seekPreRollBufSize);
}
} else if (mime == MEDIA_MIMETYPE_AUDIO_VORBIS) {
meta->setData(kKeyVorbisInfo, 0, csd0->data(), csd0->size());
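
A minimal caller sketch (not part of the patch) of how a unified Opus csd-0 buffer is split with GetOpusHeaderBuffers(), mirroring the Utils.cpp change above; the buffer is assumed to be the codec's csd-0 and the helper name here is illustrative only.

    #include <cstddef>
    #include <cstdint>
    #include <media/stagefright/foundation/OpusHeader.h>

    // Splits a unified Opus csd-0 into OpusHead, codec delay, and seek pre-roll.
    static void splitUnifiedCsd(const uint8_t *csd0, size_t csd0Size) {
        void *opusHead = nullptr, *codecDelay = nullptr, *seekPreRoll = nullptr;
        size_t opusHeadSize = 0, codecDelaySize = 0, seekPreRollSize = 0;
        android::GetOpusHeaderBuffers(csd0, csd0Size,
                                      &opusHead, &opusHeadSize,
                                      &codecDelay, &codecDelaySize,
                                      &seekPreRoll, &seekPreRollSize);
        // codecDelay/seekPreRoll stay null unless the AOPUSDLY/AOPUSPRL markers
        // were found, in which case each points at an 8-byte sample count.
    }
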
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index 0b554a2..88cd08d 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -93,5 +93,12 @@
<Limit name="complexity" range="0-8" default="5" />
<Feature name="bitrate-modes" value="CQ" />
</MediaCodec>
+ <MediaCodec name="c2.android.opus.encoder" type="audio/opus">
+ <Limit name="channel-count" max="2" />
+ <Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
+ <Limit name="bitrate" range="500-512000" />
+ <Limit name="complexity" range="0-10" default="5" />
+ <Feature name="bitrate-modes" value="CQ" />
+ </MediaCodec>
</Encoders>
</Included>
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index adb45b3..c49789e 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -71,6 +71,15 @@
<Limit name="bitrate" range="1-40000000" />
<Feature name="adaptive-playback" />
</MediaCodec>
+ <MediaCodec name="c2.android.av1.decoder" type="video/av01">
+ <Limit name="size" min="96x96" max="1920x1080" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" min="24" max="2073600" />
+ <Limit name="bitrate" range="1-120000000" />
+ <Limit name="frame-rate" range="1-60" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
</Decoders>
<Encoders>
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index dd1d904..533cd72 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -72,6 +72,7 @@
"MediaKeys.cpp",
"MetaData.cpp",
"MetaDataBase.cpp",
+ "OpusHeader.cpp",
"avc_utils.cpp",
"base64.cpp",
"hexdump.cpp",
diff --git a/media/libstagefright/opus/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
similarity index 69%
rename from media/libstagefright/opus/OpusHeader.cpp
rename to media/libstagefright/foundation/OpusHeader.cpp
index e4a460c..9faede1 100644
--- a/media/libstagefright/opus/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -16,7 +16,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftOpus"
-
+#include <algorithm>
#include <cstring>
#include <stdint.h>
@@ -43,9 +43,6 @@
{0, 6, 1, 2, 3, 4, 5, 7},
};
-// Opus always has a 48kHz output rate. This is true for all Opus, not just this
-// implementation.
-constexpr int kRate = 48000;
// Size of the Opus header excluding optional mapping information.
constexpr size_t kOpusHeaderSize = 19;
// Offset to magic string that starts Opus header.
@@ -76,15 +73,12 @@
constexpr size_t kOpusHeaderNumCoupledStreamsOffset = 20;
// Offset to the stream to channel mapping in the Opus header.
constexpr size_t kOpusHeaderStreamMapOffset = 21;
-// Maximum packet size used in Xiph's opusdec.
-constexpr int kMaxOpusOutputPacketSizeSamples = 960 * 6;
// Default audio output channel layout. Used to initialize |stream_map| in
// OpusHeader, and passed to opus_multistream_decoder_create() when the header
// does not contain mapping information. The values are valid only for mono and
// stereo output: Opus streams with more than 2 channels require a stream map.
constexpr int kMaxChannelsWithDefaultLayout = 2;
-constexpr uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = {0, 1};
static uint16_t ReadLE16(const uint8_t* data, size_t data_size, uint32_t read_offset) {
// check whether the 2nd byte is within the buffer
@@ -182,4 +176,88 @@
}
}
+int WriteOpusHeaders(const OpusHeader &header, int inputSampleRate,
+ uint8_t* output, size_t outputSize, uint64_t codecDelay,
+ uint64_t seekPreRoll) {
+ if (outputSize < AOPUS_UNIFIED_CSD_MINSIZE) {
+ ALOGD("Buffer not large enough to hold unified OPUS CSD");
+ return -1;
+ }
+
+ int headerLen = WriteOpusHeader(header, inputSampleRate, output,
+ outputSize);
+ if (headerLen < 0) {
+ ALOGD("WriteOpusHeader failed");
+ return -1;
+ }
+ if (headerLen >= (outputSize - 2 * AOPUS_TOTAL_CSD_SIZE)) {
+ ALOGD("Buffer not large enough to hold codec delay and seek pre roll");
+ return -1;
+ }
+
+ uint64_t length = AOPUS_LENGTH;
+
+ /*
+ Following is the CSD syntax for signalling codec delay and
+ seek pre-roll which is to be appended after OpusHeader
+
+ Marker (8 bytes) | Length (8 bytes) | Samples (8 bytes)
+
+ Markers supported:
+ AOPUSDLY - Signals Codec Delay
+ AOPUSPRL - Signals seek pre roll
+
+ Length should be 8.
+ */
+
+ // Add codec delay
+ memcpy(output + headerLen, AOPUS_CSD_CODEC_DELAY_MARKER, AOPUS_MARKER_SIZE);
+ headerLen += AOPUS_MARKER_SIZE;
+ memcpy(output + headerLen, &length, AOPUS_LENGTH_SIZE);
+ headerLen += AOPUS_LENGTH_SIZE;
+ memcpy(output + headerLen, &codecDelay, AOPUS_CSD_SIZE);
+ headerLen += AOPUS_CSD_SIZE;
+
+ // Add seek pre roll

+ memcpy(output + headerLen, AOPUS_CSD_SEEK_PREROLL_MARKER, AOPUS_MARKER_SIZE);
+ headerLen += AOPUS_MARKER_SIZE;
+ memcpy(output + headerLen, &length, AOPUS_LENGTH_SIZE);
+ headerLen += AOPUS_LENGTH_SIZE;
+ memcpy(output + headerLen, &seekPreRoll, AOPUS_CSD_SIZE);
+ headerLen += AOPUS_CSD_SIZE;
+
+ return headerLen;
+}
+
+void GetOpusHeaderBuffers(const uint8_t *data, size_t data_size,
+ void **opusHeadBuf, size_t *opusHeadSize,
+ void **codecDelayBuf, size_t *codecDelaySize,
+ void **seekPreRollBuf, size_t *seekPreRollSize) {
+ *codecDelayBuf = NULL;
+ *codecDelaySize = 0;
+ *seekPreRollBuf = NULL;
+ *seekPreRollSize = 0;
+ *opusHeadBuf = (void *)data;
+ *opusHeadSize = data_size;
+ if (data_size >= AOPUS_UNIFIED_CSD_MINSIZE) {
+ size_t i = 0;
+ while (i < data_size - AOPUS_TOTAL_CSD_SIZE) {
+ uint8_t *csdBuf = (uint8_t *)data + i;
+ if (!memcmp(csdBuf, AOPUS_CSD_CODEC_DELAY_MARKER, AOPUS_MARKER_SIZE)) {
+ *opusHeadSize = std::min(*opusHeadSize, i);
+ *codecDelayBuf = csdBuf + AOPUS_MARKER_SIZE + AOPUS_LENGTH_SIZE;
+ *codecDelaySize = AOPUS_CSD_SIZE;
+ i += AOPUS_TOTAL_CSD_SIZE;
+ } else if (!memcmp(csdBuf, AOPUS_CSD_SEEK_PREROLL_MARKER, AOPUS_MARKER_SIZE)) {
+ *opusHeadSize = std::min(*opusHeadSize, i);
+ *seekPreRollBuf = csdBuf + AOPUS_MARKER_SIZE + AOPUS_LENGTH_SIZE;
+ *seekPreRollSize = AOPUS_CSD_SIZE;
+ i += AOPUS_TOTAL_CSD_SIZE;
+ } else {
+ i++;
+ }
+ }
+ }
+}
+
} // namespace android
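
For reference, a sketch (not part of the patch) of the 24-byte trailer block that WriteOpusHeaders() appends once for AOPUSDLY and once for AOPUSPRL; the literal sizes mirror the AOPUS_* macros declared in the new OpusHeader.h below, and the fields end up in native byte order because the code writes them with plain memcpy.

    #include <cstdint>
    #include <cstring>

    // marker (8 bytes) | length (8 bytes, value 8) | samples (8 bytes)
    static size_t appendCsdTrailer(uint8_t *out, const char *marker, uint64_t samples) {
        const uint64_t length = 8;                    // AOPUS_LENGTH
        memcpy(out, marker, 8);                       // AOPUS_MARKER_SIZE
        memcpy(out + 8, &length, sizeof(length));     // AOPUS_LENGTH_SIZE
        memcpy(out + 16, &samples, sizeof(samples));  // AOPUS_CSD_SIZE
        return 24;                                    // AOPUS_TOTAL_CSD_SIZE
    }
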
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/OpusHeader.h b/media/libstagefright/foundation/include/media/stagefright/foundation/OpusHeader.h
new file mode 100644
index 0000000..9bffccb
--- /dev/null
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/OpusHeader.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * The Opus specification is part of IETF RFC 6716:
+ * http://tools.ietf.org/html/rfc6716
+ */
+
+#ifndef OPUS_HEADER_H_
+#define OPUS_HEADER_H_
+
+namespace android {
+
+/* Constants used for delimiting Opus CSD */
+#define AOPUS_CSD_CODEC_DELAY_MARKER "AOPUSDLY"
+#define AOPUS_CSD_SEEK_PREROLL_MARKER "AOPUSPRL"
+#define AOPUS_CSD_SIZE 8
+#define AOPUS_LENGTH 8
+#define AOPUS_MARKER_SIZE 8
+#define AOPUS_LENGTH_SIZE 8
+#define AOPUS_TOTAL_CSD_SIZE \
+ ((AOPUS_MARKER_SIZE) + (AOPUS_LENGTH_SIZE) + (AOPUS_CSD_SIZE))
+#define AOPUS_CSD0_MINSIZE 19
+#define AOPUS_UNIFIED_CSD_MINSIZE \
+ ((AOPUS_CSD0_MINSIZE) + 2 * (AOPUS_TOTAL_CSD_SIZE))
+
+/* CSD0 at max can be 22 bytes + max number of channels (255) */
+#define AOPUS_CSD0_MAXSIZE 277
+#define AOPUS_UNIFIED_CSD_MAXSIZE \
+ ((AOPUS_CSD0_MAXSIZE) + 2 * (AOPUS_TOTAL_CSD_SIZE))
+
+struct OpusHeader {
+ int channels;
+ int channel_mapping;
+ int num_streams;
+ int num_coupled;
+ int16_t gain_db;
+ int skip_samples;
+ uint8_t stream_map[8];
+};
+
+bool ParseOpusHeader(const uint8_t* data, size_t data_size, OpusHeader* header);
+int WriteOpusHeader(const OpusHeader &header, int input_sample_rate, uint8_t* output, size_t output_size);
+void GetOpusHeaderBuffers(const uint8_t *data, size_t data_size,
+ void **opusHeadBuf, size_t *opusHeadSize,
+ void **codecDelayBuf, size_t *codecDelaySize,
+ void **seekPreRollBuf, size_t *seekPreRollSize);
+int WriteOpusHeaders(const OpusHeader &header, int inputSampleRate,
+ uint8_t* output, size_t outputSize, uint64_t codecDelay,
+ uint64_t seekPreRoll);
+} // namespace android
+
+#endif // OPUS_HEADER_H_
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 1abef8c..803155d 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -257,7 +257,9 @@
void initInternal(int fd, bool isFirstSession);
// Acquire lock before calling these methods
- off64_t addSample_l(MediaBuffer *buffer, bool usePrefix, bool isExif, size_t *bytesWritten);
+ off64_t addSample_l(
+ MediaBuffer *buffer, bool usePrefix,
+ uint32_t tiffHdrOffset, size_t *bytesWritten);
void addLengthPrefixedSample_l(MediaBuffer *buffer);
void addMultipleLengthPrefixedSamples_l(MediaBuffer *buffer);
uint16_t addProperty_l(const ItemProperty &);
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index b99c14c..2910bd3 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -221,7 +221,8 @@
kKeyFrameCount = 'nfrm', // int32_t, total number of frame in video track
kKeyExifOffset = 'exof', // int64_t, Exif data offset
kKeyExifSize = 'exsz', // int64_t, Exif data size
- kKeyIsExif = 'exif', // bool (int32_t) buffer contains exif data block
+ kKeyExifTiffOffset = 'thdr', // int32_t, if > 0, buffer contains exif data block with
+ // tiff hdr at specified offset
kKeyPcmBigEndian = 'pcmb', // bool (int32_t)
// Key for ALAC Magic Cookie
diff --git a/media/libstagefright/opus/Android.bp b/media/libstagefright/opus/Android.bp
deleted file mode 100644
index c5086ec..0000000
--- a/media/libstagefright/opus/Android.bp
+++ /dev/null
@@ -1,21 +0,0 @@
-cc_library_shared {
- name: "libstagefright_opus_common",
- vendor_available: true,
-
- export_include_dirs: ["include"],
-
- srcs: ["OpusHeader.cpp"],
-
- shared_libs: ["liblog"],
-
- cflags: ["-Werror"],
-
- sanitize: {
- integer_overflow: true,
- cfi: true,
- diag: {
- integer_overflow: true,
- cfi: true,
- },
- },
-}
\ No newline at end of file
diff --git a/media/libstagefright/opus/include/OpusHeader.h b/media/libstagefright/opus/include/OpusHeader.h
deleted file mode 100644
index f9f79cd..0000000
--- a/media/libstagefright/opus/include/OpusHeader.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * The Opus specification is part of IETF RFC 6716:
- * http://tools.ietf.org/html/rfc6716
- */
-
-#ifndef OPUS_HEADER_H_
-#define OPUS_HEADER_H_
-
-namespace android {
-
-struct OpusHeader {
- int channels;
- int channel_mapping;
- int num_streams;
- int num_coupled;
- int16_t gain_db;
- int skip_samples;
- uint8_t stream_map[8];
-};
-
-bool ParseOpusHeader(const uint8_t* data, size_t data_size, OpusHeader* header);
-int WriteOpusHeader(const OpusHeader &header, int input_sample_rate, uint8_t* output, size_t output_size);
-} // namespace android
-
-#endif // OPUS_HEADER_H_
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 1f840b7..64ecc2d 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -28,7 +28,6 @@
shared_libs: [
"libstagefright_foundation",
- "libstagefright_opus_common",
"libutils",
"liblog",
],
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 7b4b23a..b0a303e 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -24,7 +24,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
-#include <OpusHeader.h>
+#include <media/stagefright/foundation/OpusHeader.h>
#include <utils/Errors.h>
diff --git a/media/mediaserver/mediaserver.rc b/media/mediaserver/mediaserver.rc
index f6c325c..8cfcd79 100644
--- a/media/mediaserver/mediaserver.rc
+++ b/media/mediaserver/mediaserver.rc
@@ -2,5 +2,7 @@
class main
user media
group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
+ # TODO(b/123275379): Remove updatable when http://aosp/878198 has landed
+ updatable
ioprio rt 4
writepid /dev/cpuset/foreground/tasks /dev/stune/foreground/tasks
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 8296598..28e4f12 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -46,6 +46,18 @@
sp<ABuffer> mPsshBuf;
};
+sp<ABuffer> U32ArrayToSizeBuf(size_t numSubSamples, uint32_t *data) {
+ if (numSubSamples > SIZE_MAX / sizeof(size_t)) {
+ return NULL;
+ }
+ sp<ABuffer> sizebuf = new ABuffer(numSubSamples * sizeof(size_t));
+ size_t *sizes = (size_t *)sizebuf->data();
+ for (size_t i = 0; sizes != NULL && i < numSubSamples; i++) {
+ sizes[i] = data[i];
+ }
+ return sizebuf;
+}
+
extern "C" {
EXPORT
@@ -339,7 +351,7 @@
if (!meta->findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
return NULL;
}
- size_t numSubSamples = cryptedsize / sizeof(size_t);
+ size_t numSubSamples = cryptedsize / sizeof(uint32_t);
const void *cleardata;
size_t clearsize;
@@ -373,6 +385,16 @@
mode = CryptoPlugin::kMode_AES_CTR;
}
+ if (sizeof(uint32_t) != sizeof(size_t)) {
+ sp<ABuffer> clearbuf = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)cleardata);
+ sp<ABuffer> cryptedbuf = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)crypteddata);
+ cleardata = clearbuf == NULL ? NULL : clearbuf->data();
+ crypteddata = cryptedbuf == NULL ? NULL : cryptedbuf->data();
+ if (crypteddata == NULL || cleardata == NULL) {
+ return NULL;
+ }
+ }
+
return AMediaCodecCryptoInfo_new(
numSubSamples,
(uint8_t*) key,
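
The width fix above matters because the stored subsample sizes are 4-byte entries (hence numSubSamples = cryptedsize / sizeof(uint32_t)), while the crypto-info API consumes size_t arrays, which are 8 bytes on LP64 builds. An illustration of the widening that U32ArrayToSizeBuf() performs; this is a standalone sketch, not AOSP code.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Widens stored 32-bit subsample sizes to size_t (a no-op on 32-bit builds,
    // a zero-extension on LP64 where sizeof(size_t) == 8).
    static std::vector<size_t> widenSubSampleSizes(const uint32_t *data, size_t count) {
        std::vector<size_t> sizes(count);
        for (size_t i = 0; i < count; ++i) {
            sizes[i] = data[i];
        }
        return sizes;
    }
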
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index e6a62d9..2932296 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -23,9 +23,10 @@
#include <system/audio.h>
#include <utils/String8.h>
-namespace android {
+#include <DeviceDescriptor.h>
+#include <AudioOutputDescriptor.h>
-class SwAudioOutputDescriptor;
+namespace android {
/**
* custom mix entry in mPolicyMixes
@@ -79,6 +80,18 @@
const DeviceVector &availableDeviceTypes,
AudioMix **policyMix);
+ /**
+ * @brief tries to find a matching mix for a given output descriptor and returns the associated
+ * output device.
+ * @param output to be considered
+ * @param availableOutputDevices list of output devices currently reachable
+ * @param policyMix to be returned if any mix matches the output descriptor
+ * @return device selected from the mix attached to the output, null pointer otherwise
+ */
+ sp<DeviceDescriptor> getDeviceAndMixForOutput(const sp<SwAudioOutputDescriptor> &output,
+ const DeviceVector &availableOutputDevices,
+ AudioMix **policyMix = nullptr);
+
status_t getInputMixForAttr(audio_attributes_t attr, AudioMix **policyMix);
status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 799950c..3b9411a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -280,6 +280,27 @@
return BAD_VALUE;
}
+sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
+ const sp<SwAudioOutputDescriptor> &output,
+ const DeviceVector &availableOutputDevices,
+ AudioMix **policyMix)
+{
+ for (size_t i = 0; i < size(); i++) {
+ if (valueAt(i)->getOutput() == output) {
+ AudioMix *mix = valueAt(i)->getMix();
+ if (policyMix != nullptr)
+ *policyMix = mix;
+ // This descriptor is involved in a mix, which has the highest priority
+ audio_devices_t deviceType = mix->mDeviceType;
+ String8 address = mix->mDeviceAddress;
+ ALOGV("%s: device (0x%x, addr=%s) forced by mix",
+ __FUNCTION__, deviceType, address.c_str());
+ return availableOutputDevices.getDevice(deviceType, address, AUDIO_FORMAT_DEFAULT);
+ }
+ }
+ return nullptr;
+}
+
sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
audio_source_t inputSource, const DeviceVector &availDevices, AudioMix **policyMix)
{
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 2ed8455..1bc4ec8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -42,8 +42,12 @@
if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX || type == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ) {
mAddress = String8("0");
}
- /* FIXME: read from APM config file */
- if (type == AUDIO_DEVICE_OUT_HDMI) {
+ /* If the framework runs against a pre-5.0 Audio HAL, encoded formats are absent from the config.
+ * FIXME: APM should know the version of the HAL and not add the formats for V5.0.
+ * For now, the workaround to remove AC3 and IEC61937 support on HDMI is to declare
+ * something like 'encodedFormats="AUDIO_FORMAT_PCM_16_BIT"' on the HDMI devicePort.
+ */
+ if (type == AUDIO_DEVICE_OUT_HDMI && mEncodedFormats.isEmpty()) {
mEncodedFormats.add(AUDIO_FORMAT_AC3);
mEncodedFormats.add(AUDIO_FORMAT_IEC61937);
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp b/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
index 620f361..2625733 100644
--- a/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/VolumeCurve.cpp
@@ -26,16 +26,22 @@
{
ALOG_ASSERT(!mCurvePoints.isEmpty(), "Invalid volume curve");
- size_t nbCurvePoints = mCurvePoints.size();
- // the volume index in the UI is relative to the min and max volume indices for this stream
- int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
if (indexInUi < volIndexMin) {
+ // an index of 0 means mute request when volIndexMin > 0
+ if (indexInUi == 0) {
+ ALOGV("VOLUME forcing mute for index 0 with min index %d", volIndexMin);
+ return VOLUME_MIN_DB;
+ }
ALOGV("VOLUME remapping index from %d to min index %d", indexInUi, volIndexMin);
indexInUi = volIndexMin;
} else if (indexInUi > volIndexMax) {
ALOGV("VOLUME remapping index from %d to max index %d", indexInUi, volIndexMax);
indexInUi = volIndexMax;
}
+
+ size_t nbCurvePoints = mCurvePoints.size();
+ // the volume index in the UI is relative to the min and max volume indices for this stream
+ int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
int volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
// Where would this volume index been inserted in the curve point
diff --git a/services/audiopolicy/config/audio_policy_configuration_generic.xml b/services/audiopolicy/config/audio_policy_configuration_generic.xml
index 58768c3..40dcc22 100644
--- a/services/audiopolicy/config/audio_policy_configuration_generic.xml
+++ b/services/audiopolicy/config/audio_policy_configuration_generic.xml
@@ -37,4 +37,10 @@
<!-- End of Volume section -->
+ <!-- Surround Sound configuration -->
+
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+ <!-- End of Surround Sound configuration -->
+
</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/audio_policy_configuration_generic_tv.xml b/services/audiopolicy/config/audio_policy_configuration_generic_tv.xml
new file mode 100644
index 0000000..5f1ca31
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_generic_tv.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <!-- version section contains a "version" tag in the form "major.minor", e.g. version="1.0" -->
+
+ <!-- Global configuration Declaration -->
+ <globalConfiguration speaker_drc_enabled="false"/>
+
+ <modules>
+ <!-- Primary Audio HAL -->
+ <xi:include href="primary_audio_policy_configuration_tv.xml"/>
+
+ <!-- Usb Audio HAL -->
+ <xi:include href="usb_audio_policy_configuration.xml"/>
+
+ <!-- Remote Submix Audio HAL -->
+ <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+ </modules>
+ <!-- End of Modules section -->
+
+ <!-- Volume section -->
+
+ <xi:include href="audio_policy_volumes.xml"/>
+ <xi:include href="default_volume_tables.xml"/>
+
+ <!-- End of Volume section -->
+
+ <!-- Surround Sound configuration -->
+
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+ <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/audio_policy_configuration_stub.xml b/services/audiopolicy/config/audio_policy_configuration_stub.xml
index 26c381f..8350eb8 100644
--- a/services/audiopolicy/config/audio_policy_configuration_stub.xml
+++ b/services/audiopolicy/config/audio_policy_configuration_stub.xml
@@ -15,6 +15,9 @@
-->
<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <!-- Global configuration Declaration -->
+ <globalConfiguration speaker_drc_enabled="false"/>
+
<modules>
<!-- Stub Audio HAL -->
<xi:include href="stub_audio_policy_configuration.xml"/>
@@ -26,5 +29,6 @@
<xi:include href="audio_policy_volumes.xml"/>
<xi:include href="default_volume_tables.xml"/>
+ <xi:include href="surround_sound_configuration_5_0.xml"/>
</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/primary_audio_policy_configuration.xml b/services/audiopolicy/config/primary_audio_policy_configuration.xml
index 5b7ae7f..eedc96b 100644
--- a/services/audiopolicy/config/primary_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/primary_audio_policy_configuration.xml
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<!-- Default Primary Audio HAL Module Audio Policy Configuration include flie -->
+<!-- Default Primary Audio HAL Module Audio Policy Configuration include file -->
<module name="primary" halVersion="2.0">
<attachedDevices>
<item>Speaker</item>
diff --git a/services/audiopolicy/config/primary_audio_policy_configuration_tv.xml b/services/audiopolicy/config/primary_audio_policy_configuration_tv.xml
new file mode 100644
index 0000000..826015a
--- /dev/null
+++ b/services/audiopolicy/config/primary_audio_policy_configuration_tv.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Default Primary Audio HAL Module Audio Policy Configuration include file for TV -->
+<module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="direct" role="source" flags="AUDIO_OUTPUT_FLAG_DIRECT" />
+ <mixPort name="tunnel" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC" />
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink" />
+ <devicePort tagName="Out Aux Digital" type="AUDIO_DEVICE_OUT_AUX_DIGITAL" role="sink"
+ encodedFormats="AUDIO_FORMAT_AC3 AUDIO_FORMAT_IEC61937" />
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Speaker" sources="primary output"/>
+ <route type="mix" sink="Out Aux Digital" sources="primary output,direct,tunnel"/>
+ </routes>
+</module>
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
index eb11980..7c87c80 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_accessibility.pfw
@@ -28,6 +28,7 @@
TelephonyMode IsNot InCall
TelephonyMode IsNot InCommunication
AvailableOutputDevices Includes RemoteSubmix
+ AvailableOutputDevicesAddresses Includes 0
component: /Policy/policy/strategies/accessibility/selected_output_devices/mask
remote_submix = 1
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
index 883c741..c830c42 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_dtmf.pfw
@@ -20,6 +20,7 @@
TelephonyMode IsNot InCall
TelephonyMode IsNot InCommunication
AvailableOutputDevices Includes RemoteSubmix
+ AvailableOutputDevicesAddresses Includes 0
component: /Policy/policy/strategies/dtmf/selected_output_devices/mask
remote_submix = 1
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
index f504631..c641138 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_enforced_audible.pfw
@@ -61,6 +61,7 @@
domain: Device2
conf: RemoteSubmix
AvailableOutputDevices Includes RemoteSubmix
+ AvailableOutputDevicesAddresses Includes 0
component: /Policy/policy/strategies/enforced_audible/selected_output_devices/mask
remote_submix = 1
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
index bdb6ae0..f8bab3d 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_media.pfw
@@ -19,6 +19,7 @@
domain: Device2
conf: RemoteSubmix
AvailableOutputDevices Includes RemoteSubmix
+ AvailableOutputDevicesAddresses Includes 0
component: /Policy/policy/strategies/media/selected_output_devices/mask
speaker = 0
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
index 04e62f7..28a3629 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Settings/device_for_strategy_rerouting.pfw
@@ -24,6 +24,7 @@
domain: Device2
conf: RemoteSubmix
AvailableOutputDevices Includes RemoteSubmix
+ AvailableOutputDevicesAddresses Includes 0
component: /Policy/policy/strategies/rerouting/selected_output_devices/mask
remote_submix = 1
diff --git a/services/audiopolicy/engineconfigurable/wrapper/config/policy_criterion_types.xml.in b/services/audiopolicy/engineconfigurable/wrapper/config/policy_criterion_types.xml.in
index 6cb799f..fe17369 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/config/policy_criterion_types.xml.in
+++ b/services/audiopolicy/engineconfigurable/wrapper/config/policy_criterion_types.xml.in
@@ -16,7 +16,12 @@
<criterion_types>
<criterion_type name="OutputDevicesMaskType" type="inclusive"/>
<criterion_type name="InputDevicesMaskType" type="inclusive"/>
- <criterion_type name="OutputDevicesAddressesType" type="inclusive"/>
+ <criterion_type name="OutputDevicesAddressesType" type="inclusive">
+ <values>
+ <!-- legacy remote submix -->
+ <value literal="0" numerical="1"/>
+ </values>
+ </criterion_type>
<criterion_type name="InputDevicesAddressesType" type="inclusive"/>
<criterion_type name="AndroidModeType" type="exclusive"/>
<criterion_type name="BooleanType" type="exclusive">
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index cc151e7..cf9c298 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -486,6 +486,7 @@
uint32_t AudioPolicyManager::updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs)
{
bool createTxPatch = false;
+ bool createRxPatch = false;
uint32_t muteWaitMs = 0;
if(!hasPrimaryOutput() || mPrimaryOutput->devices().types() == AUDIO_DEVICE_OUT_STUB) {
@@ -494,9 +495,10 @@
ALOG_ASSERT(!rxDevices.isEmpty(), "updateCallRouting() no selected output device");
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
- auto txDevice = getDeviceAndMixForAttributes(attr);
+ auto txSourceDevice = getDeviceAndMixForAttributes(attr);
+ ALOG_ASSERT(txSourceDevice != 0, "updateCallRouting() input selected device not available");
ALOGV("updateCallRouting device rxDevice %s txDevice %s",
- rxDevices.toString().c_str(), txDevice->toString().c_str());
+ rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
// release existing RX patch if any
if (mCallRxPatch != 0) {
@@ -509,22 +511,54 @@
mCallTxPatch.clear();
}
- // If the RX device is on the primary HW module, then use legacy routing method for voice calls
- // via setOutputDevice() on primary output.
- // Otherwise, create two audio patches for TX and RX path.
- if (availablePrimaryOutputDevices().contains(rxDevices.itemAt(0))) {
- muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
+ auto telephonyRxModule =
+ mHwModules.getModuleForDeviceTypes(AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
+ auto telephonyTxModule =
+ mHwModules.getModuleForDeviceTypes(AUDIO_DEVICE_OUT_TELEPHONY_TX, AUDIO_FORMAT_DEFAULT);
+ // retrieve Rx Source and Tx Sink device descriptors
+ sp<DeviceDescriptor> rxSourceDevice =
+ mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
+ String8(),
+ AUDIO_FORMAT_DEFAULT);
+ sp<DeviceDescriptor> txSinkDevice =
+ mAvailableOutputDevices.getDevice(AUDIO_DEVICE_OUT_TELEPHONY_TX,
+ String8(),
+ AUDIO_FORMAT_DEFAULT);
+
+ // RX and TX Telephony device are declared by Primary Audio HAL
+ if (isPrimaryModule(telephonyRxModule) && isPrimaryModule(telephonyTxModule) &&
+ (telephonyRxModule->getHalVersionMajor() >= 3)) {
+ if (rxSourceDevice == 0 || txSinkDevice == 0) {
+ // RX / TX Telephony device(s) is(are) not currently available
+ ALOGE("updateCallRouting() no telephony Tx and/or RX device");
+ return muteWaitMs;
+ }
+ // do not create a patch (aka SW bridging) if the primary HW module has declared support for a
+ // route from telephony RX to the sink device and from the source device to telephony TX
+ const auto &primaryModule = telephonyRxModule;
+ createRxPatch = !primaryModule->supportsPatch(rxSourceDevice, rxDevices.itemAt(0));
+ createTxPatch = !primaryModule->supportsPatch(txSourceDevice, txSinkDevice);
+ } else {
+ // If the RX device is on the primary HW module, then use legacy routing method for
+ // voice calls via setOutputDevice() on primary output.
+ // Otherwise, create two audio patches for TX and RX path.
+ createRxPatch = !(availablePrimaryOutputDevices().contains(rxDevices.itemAt(0))) &&
+ (rxSourceDevice != 0);
// If the TX device is also on the primary HW module, setOutputDevice() will take care
// of it due to legacy implementation. If not, create a patch.
- if (!availablePrimaryModuleInputDevices().contains(txDevice)) {
- createTxPatch = true;
- }
+ createTxPatch = !(availablePrimaryModuleInputDevices().contains(txSourceDevice)) &&
+ (txSinkDevice != 0);
+ }
+ // Use legacy routing method for voice calls via setOutputDevice() on primary output.
+ // Otherwise, create two audio patches for TX and RX path.
+ if (!createRxPatch) {
+ muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
} else { // create RX path audio patch
mCallRxPatch = createTelephonyPatch(true /*isRx*/, rxDevices.itemAt(0), delayMs);
- createTxPatch = true;
+ ALOG_ASSERT(createTxPatch, "No Tx Patch will be created, nor legacy routing done");
}
if (createTxPatch) { // create TX path audio patch
- mCallTxPatch = createTelephonyPatch(false /*isRx*/, txDevice, delayMs);
+ mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
}
return muteWaitMs;
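
A condensed restatement of the patch-creation decision introduced above; this is a sketch only (availability checks on the telephony RX/TX device descriptors are omitted, and the helper and parameter names are placeholders, not AOSP APIs).

    struct CallRoutingDecision { bool createRxPatch; bool createTxPatch; };

    static CallRoutingDecision decideCallRouting(bool primaryHalDeclaresTelephonyDevices,
                                                 bool halRoutesRxDirectly,
                                                 bool halRoutesTxDirectly,
                                                 bool rxDeviceOnPrimaryOutput,
                                                 bool txDeviceOnPrimaryModule) {
        CallRoutingDecision d{};
        if (primaryHalDeclaresTelephonyDevices) {
            // Primary HAL >= 3.0 declares both telephony endpoints: only bridge in
            // software when the HAL cannot route that leg itself.
            d.createRxPatch = !halRoutesRxDirectly;
            d.createTxPatch = !halRoutesTxDirectly;
        } else {
            // Legacy path: setOutputDevice() on the primary output handles on-module devices.
            d.createRxPatch = !rxDeviceOnPrimaryOutput;
            d.createTxPatch = !txDeviceOnPrimaryModule;
        }
        return d;
    }
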
@@ -756,6 +790,9 @@
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (outputDesc != mPrimaryOutput)) {
+ // As done in setDeviceConnectionState, we could also fix the default device issue by
+ // preventing the forced re-routing in the case of a default device that distinguishes on address.
+ // However, let's give the full device choice decision back to the engine.
waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
}
if (forceVolumeReeval && !newDevices.isEmpty()) {
@@ -2290,6 +2327,10 @@
int indexMax)
{
ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
+ if (indexMin < 0 || indexMax < 0) {
+ ALOGE("%s for stream %d: invalid min %d or max %d", __func__, stream , indexMin, indexMax);
+ return;
+ }
mVolumeCurves->initStreamVolume(stream, indexMin, indexMax);
// initialize other private stream volumes which follow this one
@@ -2306,10 +2347,11 @@
audio_devices_t device)
{
- // VOICE_CALL stream has minVolumeIndex > 0 but can be muted directly by an
- // app that has MODIFY_PHONE_STATE permission.
+ // VOICE_CALL and BLUETOOTH_SCO streams have minVolumeIndex > 0 but
+ // can be muted directly by an app that has MODIFY_PHONE_STATE permission.
if (((index < mVolumeCurves->getVolumeIndexMin(stream)) &&
- !(stream == AUDIO_STREAM_VOICE_CALL && index == 0)) ||
+ !((stream == AUDIO_STREAM_VOICE_CALL || stream == AUDIO_STREAM_BLUETOOTH_SCO) &&
+ index == 0)) ||
(index > mVolumeCurves->getVolumeIndexMax(stream))) {
return BAD_VALUE;
}
@@ -2632,19 +2674,23 @@
}
} else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
String8 address = mix.mDeviceAddress;
- audio_devices_t device = mix.mDeviceType;
+ audio_devices_t type = mix.mDeviceType;
ALOGV(" registerPolicyMixes() mix %zu of %zu is RENDER, dev=0x%X addr=%s",
- i, mixes.size(), device, address.string());
+ i, mixes.size(), type, address.string());
+
+ sp<DeviceDescriptor> device = mHwModules.getDeviceDescriptor(
+ mix.mDeviceType, mix.mDeviceAddress,
+ String8(), AUDIO_FORMAT_DEFAULT);
+ if (device == nullptr) {
+ res = INVALID_OPERATION;
+ break;
+ }
bool foundOutput = false;
for (size_t j = 0 ; j < mOutputs.size() ; j++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(j);
- sp<AudioPatch> patch = mAudioPatches.valueFor(desc->getPatchHandle());
- if ((patch != 0) && (patch->mPatch.num_sinks != 0)
- && (patch->mPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE)
- && (patch->mPatch.sinks[0].ext.device.type == device)
- && (strncmp(patch->mPatch.sinks[0].ext.device.address, address.string(),
- AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0)) {
+
+ if (desc->supportedDevices().contains(device)) {
if (mPolicyMixes.registerMix(address, mix, desc) != NO_ERROR) {
res = INVALID_OPERATION;
} else {
@@ -2656,12 +2702,12 @@
if (res != NO_ERROR) {
ALOGE(" Error registering mix %zu for device 0x%X addr %s",
- i, device, address.string());
+ i, type, address.string());
res = INVALID_OPERATION;
break;
} else if (!foundOutput) {
ALOGE(" Output not found for mix %zu for device 0x%X addr %s",
- i, device, address.string());
+ i, type, address.string());
res = INVALID_OPERATION;
break;
}
@@ -4935,6 +4981,13 @@
return DeviceVector(device);
}
+ // The legacy engine cannot take care of bus devices and mixes, so we need to handle the
+ // conflict between setForceUse and the default bus device here
+ device = mPolicyMixes.getDeviceAndMixForOutput(outputDesc, mAvailableOutputDevices);
+ if (device != nullptr) {
+ return DeviceVector(device);
+ }
+
// check the following by order of priority to request a routing change if necessary:
// 1: the strategy enforced audible is active and enforced on the output:
// use device for strategy enforced audible
@@ -5319,7 +5372,7 @@
// AND force is not specified
// AND the output is connected by a valid audio patch.
// Doing this check here allows the caller to call setOutputDevices() without conditions
- if ((!filteredDevices.isEmpty() || filteredDevices == prevDevices) &&
+ if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) &&
!force && outputDesc->getPatchHandle() != 0) {
ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
@@ -5624,6 +5677,15 @@
float minDst = (float)mVolumeCurves->getVolumeIndexMin(dstStream);
float maxDst = (float)mVolumeCurves->getVolumeIndexMax(dstStream);
+ // preserve mute request or correct range
+ if (srcIndex < minSrc) {
+ if (srcIndex == 0) {
+ return 0;
+ }
+ srcIndex = minSrc;
+ } else if (srcIndex > maxSrc) {
+ srcIndex = maxSrc;
+ }
return (int)(minDst + ((srcIndex - minSrc) * (maxDst - minDst)) / (maxSrc - minSrc));
}
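
The rescaling above is a straight linear map from the source stream's index range onto the destination stream's, with an index of 0 passed through as a mute request. A compact integer sketch (the AOSP code works in float) with a worked value:

    // rescale(4, /*minSrc*/ 1, /*maxSrc*/ 7, /*minDst*/ 0, /*maxDst*/ 15) == 7
    static int rescale(int srcIndex, int minSrc, int maxSrc, int minDst, int maxDst) {
        if (srcIndex < minSrc) {
            if (srcIndex == 0) return 0;   // preserve mute request
            srcIndex = minSrc;
        } else if (srcIndex > maxSrc) {
            srcIndex = maxSrc;
        }
        return minDst + ((srcIndex - minSrc) * (maxDst - minDst)) / (maxSrc - minSrc);
    }
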
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index fb1f7cb..de6d489 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -517,6 +517,13 @@
return mAudioPatches.removeAudioPatch(handle);
}
+ bool isPrimaryModule(const sp<HwModule> &module) const
+ {
+ if (module == 0 || !hasPrimaryOutput()) {
+ return false;
+ }
+ return module->getHandle() == mPrimaryOutput->getModuleHandle();
+ }
DeviceVector availablePrimaryOutputDevices() const
{
if (!hasPrimaryOutput()) {
diff --git a/services/audiopolicy/tests/Android.mk b/services/audiopolicy/tests/Android.mk
index 2ccb542..e4fba0f 100644
--- a/services/audiopolicy/tests/Android.mk
+++ b/services/audiopolicy/tests/Android.mk
@@ -29,6 +29,8 @@
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
+LOCAL_COMPATIBILITY_SUITE := device-tests
+
include $(BUILD_NATIVE_TEST)
# system/audio.h utilities test
@@ -55,4 +57,6 @@
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
+LOCAL_COMPATIBILITY_SUITE := device-tests
+
include $(BUILD_NATIVE_TEST)
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 24326bb..e9f4657 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -117,9 +117,14 @@
explicit PatchCountCheck(AudioPolicyManagerTestClient *client)
: mClient{client},
mInitialCount{mClient->getActivePatchesCount()} {}
- void assertDelta(int delta) const {
- ASSERT_EQ(mInitialCount + delta, mClient->getActivePatchesCount()); }
- void assertNoChange() const { assertDelta(0); }
+ int deltaFromSnapshot() const {
+ size_t currentCount = mClient->getActivePatchesCount();
+ if (mInitialCount <= currentCount) {
+ return currentCount - mInitialCount;
+ } else {
+ return -(static_cast<int>(mInitialCount - currentCount));
+ }
+ }
private:
const AudioPolicyManagerTestClient *mClient;
const size_t mInitialCount;
@@ -139,7 +144,7 @@
int sampleRate,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_port_handle_t *portId = nullptr);
- PatchCountCheck snapPatchCount() { return PatchCountCheck(mClient.get()); }
+ PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
std::unique_ptr<AudioPolicyManagerTestClient> mClient;
std::unique_ptr<AudioPolicyTestManager> mManager;
@@ -225,7 +230,7 @@
TEST_F(AudioPolicyManagerTest, CreateAudioPatchFailure) {
audio_patch patch{};
audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(nullptr, &handle, 0));
ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, nullptr, 0));
ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
@@ -252,20 +257,20 @@
ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
// Verify that the handle is left unchanged.
ASSERT_EQ(AUDIO_PATCH_HANDLE_NONE, handle);
- patchCount.assertNoChange();
+ ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTest, CreateAudioPatchFromMix) {
audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
uid_t uid = 42;
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
ASSERT_FALSE(mManager->getConfig().getAvailableInputDevices().isEmpty());
PatchBuilder patchBuilder;
patchBuilder.addSource(mManager->getConfig().getAvailableInputDevices()[0]).
addSink(mManager->getConfig().getDefaultOutputDevice());
ASSERT_EQ(NO_ERROR, mManager->createAudioPatch(patchBuilder.patch(), &handle, uid));
ASSERT_NE(AUDIO_PATCH_HANDLE_NONE, handle);
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
// TODO: Add patch creation tests that involve already existing patch
@@ -350,84 +355,82 @@
}
TEST_F(AudioPolicyManagerTestMsd, PatchCreationOnSetForceUse) {
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertNoChange();
+ ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
// Switch between formats that are supported and not supported by MSD.
{
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId, portId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
&portId);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(1);
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
- patchCount.assertDelta(1); // compared to the state at the block entry
- // TODO: make PatchCountCheck asserts more obvious. It's easy to
- // miss the fact that it is immutable.
+ ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
{
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId, portId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
&portId);
ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertDelta(-1);
+ ASSERT_EQ(-1, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
- patchCount.assertNoChange();
+ ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
{
- const PatchCountCheck patchCount = snapPatchCount();
+ const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId;
getOutputForAttr(&selectedDeviceId,
AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
- patchCount.assertNoChange();
+ ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
}
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index a104ee5..6a71d7d 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -70,8 +70,11 @@
# seccomp is not required for coverage build.
ifneq ($(NATIVE_COVERAGE),true)
LOCAL_REQUIRED_MODULES_arm := crash_dump.policy mediaswcodec.policy
+LOCAL_REQUIRED_MODULES_arm64 := crash_dump.policy mediaswcodec.policy
LOCAL_REQUIRED_MODULES_x86 := crash_dump.policy mediaswcodec.policy
+LOCAL_REQUIRED_MODULES_x86_64 := crash_dump.policy mediaswcodec.policy
endif
+
LOCAL_SRC_FILES := \
main_swcodecservice.cpp \
MediaCodecUpdateService.cpp \
@@ -107,8 +110,12 @@
LOCAL_MODULE := mediaswcodec
LOCAL_INIT_RC := mediaswcodec.rc
-LOCAL_32_BIT_ONLY := true
LOCAL_SANITIZE := scudo
+ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), x86_64 arm64))
+ LOCAL_MULTILIB := both
+ LOCAL_MODULE_STEM_32 := $(LOCAL_MODULE)32
+ LOCAL_MODULE_STEM_64 := $(LOCAL_MODULE)
+endif
sanitizer_runtime_libraries :=
llndk_libraries :=
@@ -145,17 +152,7 @@
LOCAL_MODULE := mediaswcodec.policy
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/seccomp_policy
-# mediaswcodec runs in 32-bit combatibility mode. For 64 bit architectures,
-# use the 32 bit policy
-ifdef TARGET_2ND_ARCH
- ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
- LOCAL_SRC_FILES := seccomp_policy/mediaswcodec-$(TARGET_2ND_ARCH).policy
- else
- LOCAL_SRC_FILES := seccomp_policy/mediaswcodec-$(TARGET_ARCH).policy
- endif
-else
- LOCAL_SRC_FILES := seccomp_policy/mediaswcodec-$(TARGET_ARCH).policy
-endif
+LOCAL_SRC_FILES := seccomp_policy/mediaswcodec-$(TARGET_ARCH).policy
include $(BUILD_PREBUILT)
endif
diff --git a/services/mediacodec/main_swcodecservice.cpp b/services/mediacodec/main_swcodecservice.cpp
index 12397fb..05b5695 100644
--- a/services/mediacodec/main_swcodecservice.cpp
+++ b/services/mediacodec/main_swcodecservice.cpp
@@ -45,8 +45,11 @@
::android::hardware::configureRpcThreadpool(64, false);
- // codec libs are currently 32-bit only
+#ifdef __LP64__
+ loadFromApex("/apex/com.android.media.swcodec/lib64");
+#else
loadFromApex("/apex/com.android.media.swcodec/lib");
+#endif
::android::hardware::joinRpcThreadpool();
}
diff --git a/services/mediacodec/mediaswcodec.rc b/services/mediacodec/mediaswcodec.rc
index dfe3381..3549666 100644
--- a/services/mediacodec/mediaswcodec.rc
+++ b/services/mediacodec/mediaswcodec.rc
@@ -2,5 +2,6 @@
class main
user mediacodec
group camera drmrpc mediadrm
+ updatable
ioprio rt 4
writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 8c40ad1..80d3630 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -38,6 +38,7 @@
"libcodec2_soft_mp3dec",
"libcodec2_soft_vorbisdec",
"libcodec2_soft_opusdec",
+ "libcodec2_soft_opusenc",
"libcodec2_soft_vp8dec",
"libcodec2_soft_vp9dec",
"libcodec2_soft_av1dec",
@@ -49,7 +50,5 @@
"libcodec2_soft_gsmdec",
"libcodec2_soft_xaacdec",
],
-
- compile_multilib: "32",
}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
new file mode 100644
index 0000000..1bee1b5
--- /dev/null
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
@@ -0,0 +1,61 @@
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+futex: 1
+# ioctl calls are filtered via the selinux policy.
+ioctl: 1
+sched_yield: 1
+close: 1
+dup: 1
+ppoll: 1
+mprotect: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+mmap: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+getuid: 1
+getrlimit: 1
+fstat: 1
+newfstatat: 1
+fstatfs: 1
+
+# mremap: Ensure |flags| are (MREMAP_MAYMOVE | MREMAP_FIXED) TODO: Once minijail
+# parser support for '<' is in this needs to be modified to also prevent
+# |old_address| and |new_address| from touching the exception vector page, which
+# on ARM is statically loaded at 0xffff 0000. See
+# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
+# for more details.
+mremap: arg3 == 3
+munmap: 1
+prctl: 1
+writev: 1
+sigaltstack: 1
+clone: 1
+exit: 1
+lseek: 1
+rt_sigprocmask: 1
+openat: 1
+write: 1
+nanosleep: 1
+setpriority: 1
+set_tid_address: 1
+getdents64: 1
+readlinkat: 1
+read: 1
+pread64: 1
+gettimeofday: 1
+faccessat: 1
+exit_group: 1
+restart_syscall: 1
+rt_sigreturn: 1
+getrandom: 1
+madvise: 1
+
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy
new file mode 120000
index 0000000..ab2592a
--- /dev/null
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy
@@ -0,0 +1 @@
+mediacodec-x86.policy
\ No newline at end of file
diff --git a/services/mediaextractor/mediaextractor.rc b/services/mediaextractor/mediaextractor.rc
index 5fc2941..6b2d0a5 100644
--- a/services/mediaextractor/mediaextractor.rc
+++ b/services/mediaextractor/mediaextractor.rc
@@ -2,5 +2,7 @@
class main
user mediaex
group drmrpc mediadrm
+ # TODO(b/123275379): Remove updatable when http://aosp/878198 has landed
+ updatable
ioprio rt 4
writepid /dev/cpuset/foreground/tasks